New archi: domain driven development
Working but need to check out code
This commit is contained in:
29
.env.example
29
.env.example
@@ -1,16 +1,37 @@
|
||||
# DeepSeek LLM Configuration
|
||||
DEEPSEEK_API_KEY=your_deepseek_api_key_here
|
||||
# LLM Provider Selection
|
||||
# Options: "deepseek" or "ollama"
|
||||
LLM_PROVIDER=ollama
|
||||
|
||||
# DeepSeek LLM Configuration (if using DeepSeek)
|
||||
DEEPSEEK_API_KEY=your_deepseek_api_key
|
||||
DEEPSEEK_BASE_URL=https://api.deepseek.com
|
||||
DEEPSEEK_MODEL=deepseek-chat
|
||||
|
||||
# Ollama LLM Configuration (if using Ollama)
|
||||
OLLAMA_BASE_URL=http://localhost:11434
|
||||
OLLAMA_MODEL=llama3.2
|
||||
|
||||
# LLM Settings
|
||||
TEMPERATURE=0.2
|
||||
|
||||
# TMDB API Configuration
|
||||
TMDB_API_KEY=your_tmdb_api_key_here
|
||||
TMDB_API_KEY=your_tmdb_api_key
|
||||
TMDB_BASE_URL=https://api.themoviedb.org/3
|
||||
|
||||
# Storage Configuration
|
||||
MEMORY_FILE=memory.json
|
||||
|
||||
# qBittorrent Configuration
|
||||
QBIT_HOST=http://192.168.178.47:30024
|
||||
QBIT_USER=admin
|
||||
QBIT_PASS=adminadmin
|
||||
|
||||
# Security Configuration
|
||||
MAX_TOOL_ITERATIONS=5
|
||||
MAX_TOOL_ITERATIONS=10
|
||||
REQUEST_TIMEOUT=30
|
||||
|
||||
# Memory Configuration
|
||||
# Number of previous messages to include in context (default: 10)
|
||||
# Higher = more context but slower/more expensive
|
||||
# Lower = less context but faster
|
||||
MAX_HISTORY_MESSAGES=10
|
||||
|
||||
308
ARCHITECTURE_FINALE.md
Normal file
308
ARCHITECTURE_FINALE.md
Normal file
@@ -0,0 +1,308 @@
|
||||
# 🎯 Architecture Finale - 100% DDD
|
||||
|
||||
## ✅ Migration Complète Terminée
|
||||
|
||||
Toute la couche de compatibilité a été supprimée. L'architecture est maintenant **100% Domain-Driven Development**.
|
||||
|
||||
---
|
||||
|
||||
## 📁 Structure Finale
|
||||
|
||||
```
|
||||
agent_media/
|
||||
│
|
||||
├── domain/ # 🎯 LOGIQUE MÉTIER PURE
|
||||
│ ├── shared/
|
||||
│ │ ├── exceptions.py
|
||||
│ │ └── value_objects.py
|
||||
│ ├── movies/
|
||||
│ │ ├── entities.py
|
||||
│ │ ├── value_objects.py
|
||||
│ │ ├── repositories.py
|
||||
│ │ ├── services.py
|
||||
│ │ └── exceptions.py
|
||||
│ ├── tv_shows/
|
||||
│ │ ├── entities.py
|
||||
│ │ ├── value_objects.py
|
||||
│ │ ├── repositories.py
|
||||
│ │ ├── services.py
|
||||
│ │ └── exceptions.py
|
||||
│ └── subtitles/
|
||||
│ ├── entities.py
|
||||
│ ├── value_objects.py
|
||||
│ ├── repositories.py
|
||||
│ ├── services.py
|
||||
│ └── exceptions.py
|
||||
│
|
||||
├── infrastructure/ # 🔧 DÉTAILS TECHNIQUES
|
||||
│ ├── api/
|
||||
│ │ ├── tmdb/
|
||||
│ │ ├── knaben/
|
||||
│ │ └── qbittorrent/
|
||||
│ ├── persistence/
|
||||
│ │ ├── memory.py
|
||||
│ │ └── json/
|
||||
│ └── filesystem/
|
||||
│ ├── file_manager.py
|
||||
│ ├── organizer.py
|
||||
│ └── exceptions.py
|
||||
│
|
||||
├── application/ # 🎬 USE CASES
|
||||
│ ├── movies/
|
||||
│ │ ├── search_movie.py
|
||||
│ │ └── dto.py
|
||||
│ ├── torrents/
|
||||
│ │ ├── search_torrents.py
|
||||
│ │ ├── add_torrent.py
|
||||
│ │ └── dto.py
|
||||
│ └── filesystem/
|
||||
│ ├── set_folder_path.py
|
||||
│ ├── list_folder.py
|
||||
│ └── dto.py
|
||||
│
|
||||
├── agent/ # 🤖 INTERFACE LLM
|
||||
│ ├── llm/
|
||||
│ │ ├── __init__.py
|
||||
│ │ └── deepseek.py
|
||||
│ ├── tools/
|
||||
│ │ ├── __init__.py
|
||||
│ │ ├── api.py
|
||||
│ │ └── filesystem.py
|
||||
│ ├── agent.py
|
||||
│ ├── registry.py
|
||||
│ ├── prompts.py
|
||||
│ ├── parameters.py
|
||||
│ └── config.py
|
||||
│
|
||||
└── app.py # 🚀 FASTAPI
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 🔄 Imports Mis à Jour
|
||||
|
||||
### **app.py**
|
||||
```python
|
||||
# AVANT
|
||||
from agent.memory import Memory
|
||||
|
||||
# APRÈS
|
||||
from infrastructure.persistence.memory import Memory
|
||||
```
|
||||
|
||||
### **agent/agent.py**
|
||||
```python
|
||||
# AVANT
|
||||
from .memory import Memory
|
||||
|
||||
# APRÈS
|
||||
from infrastructure.persistence.memory import Memory
|
||||
```
|
||||
|
||||
### **agent/tools/api.py**
|
||||
```python
|
||||
# Utilise directement les use cases
|
||||
from application.movies import SearchMovieUseCase
|
||||
from infrastructure.api.tmdb import tmdb_client
|
||||
```
|
||||
|
||||
### **agent/tools/filesystem.py**
|
||||
```python
|
||||
# Utilise directement les use cases
|
||||
from application.filesystem import SetFolderPathUseCase
|
||||
from infrastructure.filesystem import FileManager
|
||||
from infrastructure.persistence.memory import Memory
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 🗑️ Fichiers Supprimés
|
||||
|
||||
### **Ancienne Architecture**
|
||||
```
|
||||
❌ agent/api/themoviedb.py
|
||||
❌ agent/api/knaben.py
|
||||
❌ agent/api/qbittorrent.py
|
||||
❌ agent/api/__init__.py
|
||||
❌ agent/models/tv_show.py
|
||||
❌ agent/models/__init__.py
|
||||
❌ agent/memory.py
|
||||
```
|
||||
|
||||
### **Dossiers Supprimés**
|
||||
```
|
||||
❌ agent/api/
|
||||
❌ agent/models/
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## ✅ Fichiers Conservés
|
||||
|
||||
### **Agent Core**
|
||||
```
|
||||
✅ agent/agent.py # Agent principal (imports mis à jour)
|
||||
✅ agent/registry.py # Registry des tools
|
||||
✅ agent/prompts.py # Construction des prompts
|
||||
✅ agent/parameters.py # Schéma des paramètres
|
||||
✅ agent/config.py # Configuration
|
||||
```
|
||||
|
||||
### **Agent LLM**
|
||||
```
|
||||
✅ agent/llm/__init__.py
|
||||
✅ agent/llm/deepseek.py # Client DeepSeek
|
||||
```
|
||||
|
||||
### **Agent Tools**
|
||||
```
|
||||
✅ agent/tools/__init__.py
|
||||
✅ agent/tools/api.py # Wrappers vers use cases
|
||||
✅ agent/tools/filesystem.py # Wrappers vers use cases
|
||||
```
|
||||
|
||||
### **Application**
|
||||
```
|
||||
✅ app.py # FastAPI (imports mis à jour)
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 🎯 Flux de Données
|
||||
|
||||
```
|
||||
USER
|
||||
↓
|
||||
LibreChat
|
||||
↓
|
||||
app.py (FastAPI)
|
||||
↓
|
||||
Agent (agent/agent.py)
|
||||
↓
|
||||
Tools (agent/tools/)
|
||||
↓
|
||||
Use Cases (application/)
|
||||
↓
|
||||
Domain Services (domain/)
|
||||
↓
|
||||
Infrastructure (infrastructure/)
|
||||
↓
|
||||
External APIs / Storage
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 🔑 Principes DDD Appliqués
|
||||
|
||||
### **1. Layered Architecture**
|
||||
✅ Séparation stricte : Domain → Application → Infrastructure → Interface
|
||||
|
||||
### **2. Dependency Inversion**
|
||||
✅ Domain ne dépend de rien
|
||||
✅ Infrastructure dépend de Domain
|
||||
✅ Application orchestre Domain et Infrastructure
|
||||
|
||||
### **3. Bounded Contexts**
|
||||
✅ Movies, TV Shows, Subtitles sont des domaines séparés
|
||||
|
||||
### **4. Ubiquitous Language**
|
||||
✅ Vocabulaire métier partagé (Movie, TVShow, Episode, etc.)
|
||||
|
||||
### **5. Entities & Value Objects**
|
||||
✅ Entities : Movie, TVShow, Episode, Subtitle
|
||||
✅ Value Objects : ImdbId, MovieTitle, SeasonNumber, etc.
|
||||
|
||||
### **6. Repositories**
|
||||
✅ Interfaces abstraites dans domain/
|
||||
✅ Implémentations concrètes dans infrastructure/
|
||||
|
||||
### **7. Domain Services**
|
||||
✅ MovieService, TVShowService, SubtitleService
|
||||
|
||||
### **8. Application Services (Use Cases)**
|
||||
✅ SearchMovieUseCase, SearchTorrentsUseCase, etc.
|
||||
|
||||
---
|
||||
|
||||
## 🚀 Commandes de Nettoyage
|
||||
|
||||
### **Script Automatique**
|
||||
```bash
|
||||
chmod +x FINAL_CLEANUP.sh
|
||||
./FINAL_CLEANUP.sh
|
||||
```
|
||||
|
||||
### **Manuel**
|
||||
```bash
|
||||
# Supprimer les dossiers
|
||||
rm -rf agent/api/
|
||||
rm -rf agent/models/
|
||||
|
||||
# Supprimer le fichier
|
||||
rm -f agent/memory.py
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 📊 Statistiques
|
||||
|
||||
### **Avant le Nettoyage**
|
||||
- Fichiers dans agent/ : ~15
|
||||
- Couches de compatibilité : 3 (api, models, memory)
|
||||
- Architecture : Hybride
|
||||
|
||||
### **Après le Nettoyage**
|
||||
- Fichiers dans agent/ : ~8
|
||||
- Couches de compatibilité : 0
|
||||
- Architecture : 100% DDD
|
||||
|
||||
---
|
||||
|
||||
## 🎉 Résultat
|
||||
|
||||
### **Architecture Propre** ✅
|
||||
Plus aucune couche de compatibilité
|
||||
|
||||
### **Imports Directs** ✅
|
||||
Tous les imports pointent vers la nouvelle architecture
|
||||
|
||||
### **DDD Pur** ✅
|
||||
Respect strict des principes Domain-Driven Development
|
||||
|
||||
### **Maintenable** ✅
|
||||
Code clair, organisé, facile à comprendre
|
||||
|
||||
### **Évolutif** ✅
|
||||
Facile d'ajouter de nouvelles fonctionnalités
|
||||
|
||||
---
|
||||
|
||||
## 📚 Documentation
|
||||
|
||||
- `DDD_PHASE1_COMPLETE.md` - Phase 1 (Domain + Infrastructure)
|
||||
- `DDD_PHASE2_COMPLETE.md` - Phase 2 (Application + Agent)
|
||||
- `DDD_MIGRATION_COMPLETE.md` - Récapitulatif complet
|
||||
- `ARCHITECTURE_FINALE.md` - Ce fichier (architecture finale)
|
||||
- `DELETED_FILES.md` - Liste des fichiers supprimés
|
||||
|
||||
---
|
||||
|
||||
## 🎯 Prochaines Étapes
|
||||
|
||||
1. **Tester l'application** : `uvicorn app:app --reload`
|
||||
2. **Vérifier que tout fonctionne**
|
||||
3. **Commencer à utiliser la nouvelle architecture**
|
||||
4. **Ajouter de nouveaux use cases si nécessaire**
|
||||
|
||||
---
|
||||
|
||||
## 🏆 Mission Accomplie
|
||||
|
||||
L'architecture est maintenant **100% Domain-Driven Development** !
|
||||
|
||||
✅ Aucune couche de compatibilité
|
||||
✅ Imports directs vers la nouvelle architecture
|
||||
✅ Code propre et maintenable
|
||||
✅ Prêt pour l'avenir
|
||||
|
||||
🎉 **Félicitations !** 🎉
|
||||
@@ -3,9 +3,10 @@ from typing import Any, Dict, List
|
||||
import json
|
||||
|
||||
from .llm import DeepSeekClient
|
||||
from .memory import Memory
|
||||
from infrastructure.persistence.memory import Memory
|
||||
from .registry import make_tools, Tool
|
||||
from .prompts import PromptBuilder
|
||||
from .config import settings
|
||||
|
||||
class Agent:
|
||||
def __init__(self, llm: DeepSeekClient, memory: Memory, max_tool_iterations: int = 5):
|
||||
@@ -69,18 +70,35 @@ class Agent:
|
||||
# Build system prompt using PromptBuilder
|
||||
system_prompt = self.prompt_builder.build_system_prompt(self.memory.data)
|
||||
|
||||
# Initialize conversation with user input
|
||||
# Initialize conversation with system prompt
|
||||
messages: List[Dict[str, Any]] = [
|
||||
{"role": "system", "content": system_prompt},
|
||||
{"role": "user", "content": user_input},
|
||||
]
|
||||
|
||||
# Add conversation history from memory (last N messages for context)
|
||||
# Only add user/assistant messages, NOT system messages
|
||||
history = self.memory.get("history", [])
|
||||
max_history = settings.max_history_messages
|
||||
if history and max_history > 0:
|
||||
# Filter to keep only user and assistant messages
|
||||
filtered_history = [
|
||||
msg for msg in history
|
||||
if msg.get("role") in ("user", "assistant")
|
||||
]
|
||||
recent_history = filtered_history[-max_history:]
|
||||
messages.extend(recent_history)
|
||||
print(f"Added {len(recent_history)} messages from history (filtered)")
|
||||
|
||||
# Add current user input
|
||||
messages.append({"role": "user", "content": user_input})
|
||||
|
||||
# Tool execution loop
|
||||
iteration = 0
|
||||
while iteration < self.max_tool_iterations:
|
||||
print(f"\n--- Iteration {iteration + 1} ---")
|
||||
|
||||
# Get LLM response
|
||||
print(messages)
|
||||
llm_response = self.llm.complete(messages)
|
||||
print("LLM response:", llm_response)
|
||||
|
||||
|
||||
@@ -1,57 +0,0 @@
|
||||
"""API clients module."""
|
||||
from .themoviedb import (
|
||||
TMDBClient,
|
||||
tmdb_client,
|
||||
TMDBError,
|
||||
TMDBConfigurationError,
|
||||
TMDBAPIError,
|
||||
TMDBNotFoundError,
|
||||
MediaResult
|
||||
)
|
||||
|
||||
from .knaben import (
|
||||
KnabenClient,
|
||||
knaben_client,
|
||||
KnabenError,
|
||||
KnabenConfigurationError,
|
||||
KnabenAPIError,
|
||||
KnabenNotFoundError,
|
||||
TorrentResult
|
||||
)
|
||||
|
||||
from .qbittorrent import (
|
||||
QBittorrentClient,
|
||||
qbittorrent_client,
|
||||
QBittorrentError,
|
||||
QBittorrentConfigurationError,
|
||||
QBittorrentAPIError,
|
||||
QBittorrentAuthError,
|
||||
TorrentInfo
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
# TMDB
|
||||
'TMDBClient',
|
||||
'tmdb_client',
|
||||
'TMDBError',
|
||||
'TMDBConfigurationError',
|
||||
'TMDBAPIError',
|
||||
'TMDBNotFoundError',
|
||||
'MediaResult',
|
||||
# Knaben
|
||||
'KnabenClient',
|
||||
'knaben_client',
|
||||
'KnabenError',
|
||||
'KnabenConfigurationError',
|
||||
'KnabenAPIError',
|
||||
'KnabenNotFoundError',
|
||||
'TorrentResult',
|
||||
# qBittorrent
|
||||
'QBittorrentClient',
|
||||
'qbittorrent_client',
|
||||
'QBittorrentError',
|
||||
'QBittorrentConfigurationError',
|
||||
'QBittorrentAPIError',
|
||||
'QBittorrentAuthError',
|
||||
'TorrentInfo'
|
||||
]
|
||||
@@ -34,6 +34,9 @@ class Settings:
|
||||
# Security Configuration
|
||||
max_tool_iterations: int = field(default_factory=lambda: int(os.getenv("MAX_TOOL_ITERATIONS", "5")))
|
||||
request_timeout: int = field(default_factory=lambda: int(os.getenv("REQUEST_TIMEOUT", "30")))
|
||||
|
||||
# Memory Configuration
|
||||
max_history_messages: int = field(default_factory=lambda: int(os.getenv("MAX_HISTORY_MESSAGES", "10")))
|
||||
|
||||
def __post_init__(self):
|
||||
"""Validate settings after initialization."""
|
||||
|
||||
@@ -1,2 +1,5 @@
|
||||
"""LLM client module."""
|
||||
from .deepseek import DeepSeekClient
|
||||
from .ollama import OllamaClient
|
||||
|
||||
__all__ = ['DeepSeekClient', 'OllamaClient']
|
||||
|
||||
193
agent/llm/ollama.py
Normal file
193
agent/llm/ollama.py
Normal file
@@ -0,0 +1,193 @@
|
||||
"""Ollama LLM client with robust error handling."""
|
||||
from typing import List, Dict, Any, Optional
|
||||
import logging
|
||||
import os
|
||||
import requests
|
||||
|
||||
from requests.exceptions import RequestException, Timeout, HTTPError
|
||||
|
||||
from ..config import settings
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class LLMError(Exception):
|
||||
"""Base exception for LLM-related errors."""
|
||||
pass
|
||||
|
||||
|
||||
class LLMConfigurationError(LLMError):
|
||||
"""Raised when LLM is not properly configured."""
|
||||
pass
|
||||
|
||||
|
||||
class LLMAPIError(LLMError):
|
||||
"""Raised when LLM API returns an error."""
|
||||
pass
|
||||
|
||||
|
||||
class OllamaClient:
|
||||
"""
|
||||
Client for interacting with Ollama API.
|
||||
|
||||
Ollama runs locally and provides an OpenAI-compatible API.
|
||||
|
||||
Example:
|
||||
>>> client = OllamaClient(model="llama3.2")
|
||||
>>> messages = [{"role": "user", "content": "Hello!"}]
|
||||
>>> response = client.complete(messages)
|
||||
>>> print(response)
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
base_url: Optional[str] = None,
|
||||
model: Optional[str] = None,
|
||||
timeout: Optional[int] = None,
|
||||
temperature: Optional[float] = None,
|
||||
):
|
||||
"""
|
||||
Initialize Ollama client.
|
||||
|
||||
Args:
|
||||
base_url: Ollama API base URL (defaults to http://localhost:11434)
|
||||
model: Model name to use (e.g., "llama3.2", "mistral", "codellama")
|
||||
timeout: Request timeout in seconds (defaults to settings)
|
||||
temperature: Temperature for generation (defaults to settings)
|
||||
|
||||
Raises:
|
||||
LLMConfigurationError: If configuration is invalid
|
||||
"""
|
||||
self.base_url = base_url or os.getenv("OLLAMA_BASE_URL", "http://localhost:11434")
|
||||
self.model = model or os.getenv("OLLAMA_MODEL", "llama3.2")
|
||||
self.timeout = timeout or settings.request_timeout
|
||||
self.temperature = temperature if temperature is not None else settings.temperature
|
||||
|
||||
if not self.base_url:
|
||||
raise LLMConfigurationError(
|
||||
"Ollama base URL is required. Set OLLAMA_BASE_URL environment variable."
|
||||
)
|
||||
|
||||
if not self.model:
|
||||
raise LLMConfigurationError(
|
||||
"Ollama model is required. Set OLLAMA_MODEL environment variable."
|
||||
)
|
||||
|
||||
logger.info(f"Ollama client initialized with model: {self.model}")
|
||||
|
||||
def complete(self, messages: List[Dict[str, Any]]) -> str:
|
||||
"""
|
||||
Generate a completion from the LLM.
|
||||
|
||||
Args:
|
||||
messages: List of message dicts with 'role' and 'content' keys
|
||||
|
||||
Returns:
|
||||
Generated text response
|
||||
|
||||
Raises:
|
||||
LLMAPIError: If API request fails
|
||||
ValueError: If messages format is invalid
|
||||
"""
|
||||
# Validate messages format
|
||||
if not messages:
|
||||
raise ValueError("Messages list cannot be empty")
|
||||
|
||||
for msg in messages:
|
||||
if not isinstance(msg, dict):
|
||||
raise ValueError(f"Each message must be a dict, got {type(msg)}")
|
||||
if "role" not in msg or "content" not in msg:
|
||||
raise ValueError(f"Each message must have 'role' and 'content' keys, got {msg.keys()}")
|
||||
if msg["role"] not in ("system", "user", "assistant"):
|
||||
raise ValueError(f"Invalid role: {msg['role']}")
|
||||
|
||||
url = f"{self.base_url}/api/chat"
|
||||
payload = {
|
||||
"model": self.model,
|
||||
"messages": messages,
|
||||
"stream": False,
|
||||
"options": {
|
||||
"temperature": self.temperature,
|
||||
}
|
||||
}
|
||||
|
||||
try:
|
||||
logger.debug(f"Sending request to {url} with {len(messages)} messages")
|
||||
response = requests.post(
|
||||
url,
|
||||
json=payload,
|
||||
timeout=self.timeout
|
||||
)
|
||||
response.raise_for_status()
|
||||
data = response.json()
|
||||
|
||||
# Validate response structure
|
||||
if "message" not in data:
|
||||
raise LLMAPIError("Invalid API response: missing 'message'")
|
||||
|
||||
if "content" not in data["message"]:
|
||||
raise LLMAPIError("Invalid API response: missing 'content' in message")
|
||||
|
||||
content = data["message"]["content"]
|
||||
logger.debug(f"Received response with {len(content)} characters")
|
||||
|
||||
return content
|
||||
|
||||
except Timeout as e:
|
||||
logger.error(f"Request timeout after {self.timeout}s: {e}")
|
||||
raise LLMAPIError(f"Request timeout after {self.timeout} seconds") from e
|
||||
|
||||
except HTTPError as e:
|
||||
logger.error(f"HTTP error from Ollama API: {e}")
|
||||
if e.response is not None:
|
||||
try:
|
||||
error_data = e.response.json()
|
||||
error_msg = error_data.get("error", str(e))
|
||||
except Exception:
|
||||
error_msg = str(e)
|
||||
raise LLMAPIError(f"Ollama API error: {error_msg}") from e
|
||||
raise LLMAPIError(f"HTTP error: {e}") from e
|
||||
|
||||
except RequestException as e:
|
||||
logger.error(f"Request failed: {e}")
|
||||
raise LLMAPIError(f"Failed to connect to Ollama API: {e}") from e
|
||||
|
||||
except (KeyError, IndexError, TypeError) as e:
|
||||
logger.error(f"Failed to parse API response: {e}")
|
||||
raise LLMAPIError(f"Invalid API response format: {e}") from e
|
||||
|
||||
def list_models(self) -> List[str]:
|
||||
"""
|
||||
List available models in Ollama.
|
||||
|
||||
Returns:
|
||||
List of model names
|
||||
"""
|
||||
url = f"{self.base_url}/api/tags"
|
||||
|
||||
try:
|
||||
response = requests.get(url, timeout=self.timeout)
|
||||
response.raise_for_status()
|
||||
data = response.json()
|
||||
|
||||
models = [model["name"] for model in data.get("models", [])]
|
||||
logger.info(f"Found {len(models)} models: {models}")
|
||||
return models
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to list models: {e}")
|
||||
return []
|
||||
|
||||
def is_available(self) -> bool:
|
||||
"""
|
||||
Check if Ollama is running and accessible.
|
||||
|
||||
Returns:
|
||||
True if Ollama is available, False otherwise
|
||||
"""
|
||||
try:
|
||||
url = f"{self.base_url}/api/tags"
|
||||
response = requests.get(url, timeout=5)
|
||||
return response.status_code == 200
|
||||
except Exception:
|
||||
return False
|
||||
@@ -1,2 +0,0 @@
|
||||
"""Models module."""
|
||||
from .tv_show import TVShow, ShowStatus, validate_tv_shows_structure
|
||||
@@ -1,58 +0,0 @@
|
||||
"""TV Show models and validation."""
|
||||
from dataclasses import dataclass
|
||||
from enum import Enum
|
||||
from typing import Any
|
||||
|
||||
|
||||
class ShowStatus(Enum):
|
||||
"""Status of a TV show - whether it's still airing or has ended."""
|
||||
ONGOING = "ongoing"
|
||||
ENDED = "ended"
|
||||
|
||||
|
||||
@dataclass
|
||||
class TVShow:
|
||||
"""Represents a TV show."""
|
||||
imdb_id: str
|
||||
title: str
|
||||
seasons_count: int
|
||||
status: ShowStatus # ongoing or ended
|
||||
|
||||
|
||||
def validate_tv_shows_structure(tv_shows: Any) -> bool:
|
||||
"""
|
||||
Validate the structure of the tv_shows parameter.
|
||||
|
||||
Expected structure: list of TV show objects
|
||||
[
|
||||
{
|
||||
"imdb_id": str,
|
||||
"title": str,
|
||||
"seasons_count": int,
|
||||
"status": str # "ongoing" or "ended"
|
||||
}
|
||||
]
|
||||
"""
|
||||
if not isinstance(tv_shows, list):
|
||||
return False
|
||||
|
||||
for show in tv_shows:
|
||||
if not isinstance(show, dict):
|
||||
return False
|
||||
|
||||
# Check required fields
|
||||
required_fields = {"imdb_id", "title", "seasons_count", "status"}
|
||||
if not all(field in show for field in required_fields):
|
||||
return False
|
||||
|
||||
# Validate field types
|
||||
if not isinstance(show["imdb_id"], str):
|
||||
return False
|
||||
if not isinstance(show["title"], str):
|
||||
return False
|
||||
if not isinstance(show["seasons_count"], int):
|
||||
return False
|
||||
if show["status"] not in ["ongoing", "ended"]:
|
||||
return False
|
||||
|
||||
return True
|
||||
@@ -3,7 +3,7 @@ from dataclasses import dataclass
|
||||
from typing import Callable, Any, Dict
|
||||
from functools import partial
|
||||
|
||||
from .memory import Memory
|
||||
from infrastructure.persistence.memory import Memory
|
||||
from .tools.filesystem import set_path_for_folder, list_folder
|
||||
from .tools.api import find_media_imdb_id, find_torrent, add_torrent_to_qbittorrent
|
||||
|
||||
|
||||
@@ -1,3 +1,11 @@
|
||||
"""Tools module - filesystem and API tools."""
|
||||
from .filesystem import FolderName, set_path_for_folder, list_folder
|
||||
from .api import find_media_imdb_id
|
||||
from .filesystem import set_path_for_folder, list_folder
|
||||
from .api import find_media_imdb_id, find_torrent, add_torrent_to_qbittorrent
|
||||
|
||||
__all__ = [
|
||||
'set_path_for_folder',
|
||||
'list_folder',
|
||||
'find_media_imdb_id',
|
||||
'find_torrent',
|
||||
'add_torrent_to_qbittorrent',
|
||||
]
|
||||
|
||||
@@ -1,224 +1,87 @@
|
||||
"""API tools for interacting with external services."""
|
||||
"""API tools for interacting with external services - Adapted for DDD architecture."""
|
||||
from typing import Dict, Any
|
||||
import logging
|
||||
|
||||
from ..api import tmdb_client, TMDBError, TMDBNotFoundError, TMDBAPIError, TMDBConfigurationError
|
||||
from ..api.knaben import knaben_client, KnabenError, KnabenNotFoundError, KnabenAPIError
|
||||
from ..api.qbittorrent import qbittorrent_client, QBittorrentError, QBittorrentAuthError, QBittorrentAPIError
|
||||
# Import use cases instead of direct API clients
|
||||
from application.movies import SearchMovieUseCase
|
||||
from application.torrents import SearchTorrentsUseCase, AddTorrentUseCase
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
# Import infrastructure clients
|
||||
from infrastructure.api.tmdb import tmdb_client
|
||||
from infrastructure.api.knaben import knaben_client
|
||||
from infrastructure.api.qbittorrent import qbittorrent_client
|
||||
|
||||
|
||||
def find_media_imdb_id(media_title: str) -> Dict[str, Any]:
|
||||
"""
|
||||
Find the IMDb ID for a given media title using TMDB API.
|
||||
|
||||
This is a wrapper around the TMDB client that returns a standardized
|
||||
dict format for compatibility with the agent's tool system.
|
||||
This is a wrapper that uses the SearchMovieUseCase.
|
||||
|
||||
Args:
|
||||
media_title: Title of the media to search for
|
||||
|
||||
Returns:
|
||||
Dict with IMDb ID or error information:
|
||||
- Success: {"status": "ok", "imdb_id": str, "title": str, ...}
|
||||
- Error: {"error": str, "message": str}
|
||||
Dict with IMDb ID or error information
|
||||
|
||||
Example:
|
||||
>>> result = find_media_imdb_id("Inception")
|
||||
>>> print(result)
|
||||
{'status': 'ok', 'imdb_id': 'tt1375666', 'title': 'Inception', ...}
|
||||
"""
|
||||
try:
|
||||
# Use the TMDB client to search for media
|
||||
result = tmdb_client.search_media(media_title)
|
||||
|
||||
# Check if IMDb ID was found
|
||||
if result.imdb_id:
|
||||
logger.info(f"IMDb ID found for '{media_title}': {result.imdb_id}")
|
||||
return {
|
||||
"status": "ok",
|
||||
"imdb_id": result.imdb_id,
|
||||
"title": result.title,
|
||||
"media_type": result.media_type,
|
||||
"tmdb_id": result.tmdb_id,
|
||||
"overview": result.overview,
|
||||
"release_date": result.release_date,
|
||||
"vote_average": result.vote_average
|
||||
}
|
||||
else:
|
||||
logger.warning(f"No IMDb ID available for '{media_title}'")
|
||||
return {
|
||||
"error": "no_imdb_id",
|
||||
"message": f"No IMDb ID available for '{result.title}'",
|
||||
"title": result.title,
|
||||
"media_type": result.media_type,
|
||||
"tmdb_id": result.tmdb_id
|
||||
}
|
||||
|
||||
except TMDBNotFoundError as e:
|
||||
logger.info(f"Media not found: {e}")
|
||||
return {
|
||||
"error": "not_found",
|
||||
"message": str(e)
|
||||
}
|
||||
|
||||
except TMDBConfigurationError as e:
|
||||
logger.error(f"TMDB configuration error: {e}")
|
||||
return {
|
||||
"error": "configuration_error",
|
||||
"message": str(e)
|
||||
}
|
||||
|
||||
except TMDBAPIError as e:
|
||||
logger.error(f"TMDB API error: {e}")
|
||||
return {
|
||||
"error": "api_error",
|
||||
"message": str(e)
|
||||
}
|
||||
|
||||
except ValueError as e:
|
||||
logger.error(f"Validation error: {e}")
|
||||
return {
|
||||
"error": "validation_failed",
|
||||
"message": str(e)
|
||||
}
|
||||
# Create use case with TMDB client
|
||||
use_case = SearchMovieUseCase(tmdb_client)
|
||||
|
||||
# Execute use case
|
||||
response = use_case.execute(media_title)
|
||||
|
||||
# Return as dict
|
||||
return response.to_dict()
|
||||
|
||||
|
||||
def find_torrent(media_title: str) -> Dict[str, Any]:
|
||||
"""
|
||||
Find torrents for a given media title using Knaben API.
|
||||
|
||||
This is a wrapper around the Knaben client that returns a standardized
|
||||
dict format for compatibility with the agent's tool system.
|
||||
This is a wrapper that uses the SearchTorrentsUseCase.
|
||||
|
||||
Args:
|
||||
media_title: Title of the media to search for
|
||||
|
||||
Returns:
|
||||
Dict with torrent information or error details:
|
||||
- Success: {"status": "ok", "torrents": List[Dict[str, Any]]}
|
||||
- Error: {"error": str, "message": str}
|
||||
Dict with torrent information or error details
|
||||
"""
|
||||
try:
|
||||
# Search for torrents
|
||||
results = knaben_client.search(media_title, limit=10)
|
||||
|
||||
if not results:
|
||||
logger.info(f"No torrents found for '{media_title}'")
|
||||
return {
|
||||
"error": "not_found",
|
||||
"message": f"No torrents found for '{media_title}'"
|
||||
}
|
||||
|
||||
# Convert to dict format
|
||||
torrents = []
|
||||
for torrent in results:
|
||||
torrents.append({
|
||||
"name": torrent.title,
|
||||
"size": torrent.size,
|
||||
"seeders": torrent.seeders,
|
||||
"leechers": torrent.leechers,
|
||||
"magnet": torrent.magnet,
|
||||
"info_hash": torrent.info_hash,
|
||||
"tracker": torrent.tracker,
|
||||
"upload_date": torrent.upload_date,
|
||||
"category": torrent.category
|
||||
})
|
||||
|
||||
logger.info(f"Found {len(torrents)} torrents for '{media_title}'")
|
||||
|
||||
return {
|
||||
"status": "ok",
|
||||
"torrents": torrents,
|
||||
"count": len(torrents)
|
||||
}
|
||||
|
||||
except KnabenNotFoundError as e:
|
||||
logger.info(f"Torrents not found: {e}")
|
||||
return {
|
||||
"error": "not_found",
|
||||
"message": str(e)
|
||||
}
|
||||
|
||||
except KnabenAPIError as e:
|
||||
logger.error(f"Knaben API error: {e}")
|
||||
return {
|
||||
"error": "api_error",
|
||||
"message": str(e)
|
||||
}
|
||||
|
||||
except ValueError as e:
|
||||
logger.error(f"Validation error: {e}")
|
||||
return {
|
||||
"error": "validation_failed",
|
||||
"message": str(e)
|
||||
}
|
||||
# Create use case with Knaben client
|
||||
use_case = SearchTorrentsUseCase(knaben_client)
|
||||
|
||||
# Execute use case
|
||||
response = use_case.execute(media_title, limit=10)
|
||||
|
||||
# Return as dict
|
||||
return response.to_dict()
|
||||
|
||||
|
||||
def add_torrent_to_qbittorrent(magnet_link: str) -> Dict[str, Any]:
|
||||
"""
|
||||
Add a torrent to qBittorrent using a magnet link.
|
||||
|
||||
This is a wrapper around the qBittorrent client that returns a standardized
|
||||
dict format for compatibility with the agent's tool system.
|
||||
This is a wrapper that uses the AddTorrentUseCase.
|
||||
|
||||
Args:
|
||||
magnet_link: Magnet link of the torrent to add
|
||||
|
||||
Returns:
|
||||
Dict with success or error information:
|
||||
- Success: {"status": "ok", "message": str}
|
||||
- Error: {"error": str, "message": str}
|
||||
Dict with success or error information
|
||||
|
||||
Example:
|
||||
>>> result = add_torrent_to_qbittorrent("magnet:?xt=urn:btih:...")
|
||||
>>> print(result)
|
||||
{'status': 'ok', 'message': 'Torrent added successfully'}
|
||||
"""
|
||||
try:
|
||||
# Validate magnet link
|
||||
if not magnet_link or not isinstance(magnet_link, str):
|
||||
raise ValueError("Magnet link must be a non-empty string")
|
||||
|
||||
if not magnet_link.startswith("magnet:"):
|
||||
raise ValueError("Invalid magnet link format")
|
||||
|
||||
logger.info("Adding torrent to qBittorrent")
|
||||
|
||||
# Add torrent to qBittorrent
|
||||
success = qbittorrent_client.add_torrent(magnet_link)
|
||||
|
||||
if success:
|
||||
logger.info("Torrent added successfully to qBittorrent")
|
||||
return {
|
||||
"status": "ok",
|
||||
"message": "Torrent added successfully to qBittorrent"
|
||||
}
|
||||
else:
|
||||
logger.warning("Failed to add torrent to qBittorrent")
|
||||
return {
|
||||
"error": "add_failed",
|
||||
"message": "Failed to add torrent to qBittorrent"
|
||||
}
|
||||
|
||||
except QBittorrentAuthError as e:
|
||||
logger.error(f"qBittorrent authentication error: {e}")
|
||||
return {
|
||||
"error": "authentication_failed",
|
||||
"message": "Failed to authenticate with qBittorrent"
|
||||
}
|
||||
|
||||
except QBittorrentAPIError as e:
|
||||
logger.error(f"qBittorrent API error: {e}")
|
||||
return {
|
||||
"error": "api_error",
|
||||
"message": str(e)
|
||||
}
|
||||
|
||||
except ValueError as e:
|
||||
logger.error(f"Validation error: {e}")
|
||||
return {
|
||||
"error": "validation_failed",
|
||||
"message": str(e)
|
||||
}
|
||||
# Create use case with qBittorrent client
|
||||
use_case = AddTorrentUseCase(qbittorrent_client)
|
||||
|
||||
# Execute use case
|
||||
response = use_case.execute(magnet_link)
|
||||
|
||||
# Return as dict
|
||||
return response.to_dict()
|
||||
|
||||
@@ -1,111 +1,17 @@
|
||||
"""Filesystem tools for managing folders and files with security."""
|
||||
"""Filesystem tools - Adapted for DDD architecture."""
|
||||
from typing import Dict, Any
|
||||
from enum import Enum
|
||||
from pathlib import Path
|
||||
import logging
|
||||
import os
|
||||
|
||||
from ..memory import Memory
|
||||
# Import use cases
|
||||
from application.filesystem import SetFolderPathUseCase, ListFolderUseCase
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class FolderName(Enum):
|
||||
"""Types of folders that can be managed."""
|
||||
DOWNLOAD = "download"
|
||||
TVSHOW = "tvshow"
|
||||
MOVIE = "movie"
|
||||
TORRENT = "torrent"
|
||||
|
||||
|
||||
class FilesystemError(Exception):
    """Root of the filesystem-tool exception hierarchy."""


class PathTraversalError(FilesystemError):
    """Signals that a supplied path tried to escape the allowed root."""
|
||||
|
||||
|
||||
def _validate_folder_name(folder_name: str) -> bool:
    """Check that *folder_name* is one of the :class:`FolderName` values.

    Args:
        folder_name: Name to validate

    Returns:
        True if valid

    Raises:
        ValueError: If folder name is invalid
    """
    allowed = [member.value for member in FolderName]
    if folder_name in allowed:
        return True
    raise ValueError(
        f"Invalid folder_name '{folder_name}'. Must be one of: {', '.join(allowed)}"
    )
|
||||
|
||||
|
||||
def _sanitize_path(path: str) -> str:
|
||||
"""
|
||||
Sanitize path to prevent path traversal attacks.
|
||||
|
||||
Args:
|
||||
path: Path to sanitize
|
||||
|
||||
Returns:
|
||||
Sanitized path
|
||||
|
||||
Raises:
|
||||
PathTraversalError: If path contains dangerous patterns
|
||||
"""
|
||||
# Normalize path
|
||||
normalized = os.path.normpath(path)
|
||||
|
||||
# Check for absolute paths
|
||||
if os.path.isabs(normalized):
|
||||
raise PathTraversalError("Absolute paths are not allowed")
|
||||
|
||||
# Check for parent directory references
|
||||
if normalized.startswith("..") or "/.." in normalized or "\\.." in normalized:
|
||||
raise PathTraversalError("Parent directory references are not allowed")
|
||||
|
||||
# Check for null bytes
|
||||
if "\x00" in normalized:
|
||||
raise PathTraversalError("Null bytes in path are not allowed")
|
||||
|
||||
return normalized
|
||||
|
||||
|
||||
def _is_safe_path(base_path: Path, target_path: Path) -> bool:
|
||||
"""
|
||||
Check if target path is within base path (prevents path traversal).
|
||||
|
||||
Args:
|
||||
base_path: Base directory path
|
||||
target_path: Target path to check
|
||||
|
||||
Returns:
|
||||
True if safe, False otherwise
|
||||
"""
|
||||
try:
|
||||
# Resolve both paths to absolute paths
|
||||
base_resolved = base_path.resolve()
|
||||
target_resolved = target_path.resolve()
|
||||
|
||||
# Check if target is relative to base
|
||||
target_resolved.relative_to(base_resolved)
|
||||
return True
|
||||
except (ValueError, OSError):
|
||||
return False
|
||||
# Import infrastructure
|
||||
from infrastructure.filesystem import FileManager
|
||||
from infrastructure.persistence.memory import Memory
|
||||
|
||||
|
||||
def set_path_for_folder(memory: Memory, folder_name: str, path_value: str) -> Dict[str, Any]:
    """
    Set a path in the config with validation.

    Validates that ``folder_name`` is a known :class:`FolderName` value and
    that ``path_value`` is an existing, readable directory, then stores the
    resolved path under the ``<folder_name>_folder`` key of the ``config``
    entry in *memory*.

    Args:
        memory: Memory instance to store the configuration
        folder_name: One of "download", "tvshow", "movie", "torrent"
        path_value: Filesystem path to associate with the folder

    Returns:
        Dict with status or error information:
        - Success: {"status": "ok", "folder_name": str, "path": str}
        - Error: {"error": str, "message": str}
    """
    try:
        # Validate folder name (raises ValueError on unknown names)
        _validate_folder_name(folder_name)

        # Resolve to an absolute Path so the stored value is unambiguous
        path_obj = Path(path_value).resolve()

        # Validate path exists and is a directory
        if not path_obj.exists():
            logger.warning(f"Path does not exist: {path_value}")
            return {
                "error": "invalid_path",
                "message": f"Path does not exist: {path_value}"
            }

        if not path_obj.is_dir():
            logger.warning(f"Path is not a directory: {path_value}")
            return {
                "error": "invalid_path",
                "message": f"Path is not a directory: {path_value}"
            }

        # Check if path is readable
        if not os.access(path_obj, os.R_OK):
            logger.warning(f"Path is not readable: {path_value}")
            return {
                "error": "permission_denied",
                "message": f"Path is not readable: {path_value}"
            }

        # Store in memory; read-modify-write preserves other config keys
        config = memory.get("config", {})
        config[f"{folder_name}_folder"] = str(path_obj)
        memory.set("config", config)

        logger.info(f"Set {folder_name}_folder to: {path_obj}")
        return {
            "status": "ok",
            "folder_name": folder_name,
            "path": str(path_obj)
        }

    except ValueError as e:
        # Raised by _validate_folder_name for unknown folder names
        logger.error(f"Validation error: {e}")
        return {"error": "validation_failed", "message": str(e)}

    except Exception as e:
        # Broad catch is deliberate: tool results are returned to the agent
        # as data, so unexpected failures are reported, not raised.
        logger.error(f"Unexpected error setting path: {e}", exc_info=True)
        return {"error": "internal_error", "message": "Failed to set path"}
|
||||
# Create file manager
|
||||
file_manager = FileManager(memory)
|
||||
|
||||
# Create use case
|
||||
use_case = SetFolderPathUseCase(file_manager)
|
||||
|
||||
# Execute use case
|
||||
response = use_case.execute(folder_name, path_value)
|
||||
|
||||
# Return as dict
|
||||
return response.to_dict()
|
||||
|
||||
|
||||
def list_folder(memory: Memory, folder_type: str, path: str = ".") -> Dict[str, Any]:
    """
    List contents of a folder with security checks.

    Resolves the configured root directory for *folder_type* from memory,
    joins the sanitized relative *path* under it, verifies the result stays
    inside the root, and returns the sorted entry names.

    Args:
        memory: Memory instance to retrieve the configuration
        folder_type: One of "download", "tvshow", "movie", "torrent"
        path: Relative path inside the configured root (default ".")

    Returns:
        Dict with folder contents or error information:
        - Success: {"status": "ok", "folder_type": str, "path": str,
          "entries": list[str], "count": int}
        - Error: {"error": str, "message": str}
    """
    try:
        # Validate folder type (raises ValueError on unknown types)
        _validate_folder_name(folder_type)

        # Sanitize the path (raises PathTraversalError on dangerous input)
        safe_path = _sanitize_path(path)

        # Get root folder from config
        folder_key = f"{folder_type}_folder"
        config = memory.get("config", {})

        if folder_key not in config or not config[folder_key]:
            logger.warning(f"Folder not configured: {folder_type}")
            return {
                "error": "folder_not_set",
                "message": f"{folder_type.capitalize()} folder not set in config."
            }

        root = Path(config[folder_key])
        target = root / safe_path

        # Security check: after resolution the target must still be inside
        # the configured root (catches symlink-based escapes too)
        if not _is_safe_path(root, target):
            logger.warning(f"Path traversal attempt detected: {path}")
            return {
                "error": "forbidden",
                "message": "Access denied: path outside allowed directory"
            }

        # Check if target exists
        if not target.exists():
            logger.warning(f"Path does not exist: {target}")
            return {
                "error": "not_found",
                "message": f"Path does not exist: {safe_path}"
            }

        # Check if target is a directory
        if not target.is_dir():
            logger.warning(f"Path is not a directory: {target}")
            return {
                "error": "not_a_directory",
                "message": f"Path is not a directory: {safe_path}"
            }

        # List directory contents; iterdir can raise PermissionError even
        # after the checks above, so it gets its own handler
        try:
            entries = [entry.name for entry in target.iterdir()]
            logger.debug(f"Listed {len(entries)} entries in {target}")
            return {
                "status": "ok",
                "folder_type": folder_type,
                "path": safe_path,
                "entries": sorted(entries),
                "count": len(entries)
            }
        except PermissionError:
            logger.warning(f"Permission denied accessing: {target}")
            return {
                "error": "permission_denied",
                "message": f"Permission denied accessing: {safe_path}"
            }

    except PathTraversalError as e:
        # Raised by _sanitize_path
        logger.warning(f"Path traversal attempt: {e}")
        return {
            "error": "forbidden",
            "message": str(e)
        }

    except ValueError as e:
        # Raised by _validate_folder_name
        logger.error(f"Validation error: {e}")
        return {"error": "validation_failed", "message": str(e)}

    except Exception as e:
        # Broad catch is deliberate: tool errors are reported as data
        logger.error(f"Unexpected error listing folder: {e}", exc_info=True)
        return {"error": "internal_error", "message": "Failed to list folder"}
|
||||
|
||||
def move_file(path: str, destination: str) -> Dict[str, Any]:
    """
    Move a file from one location to another with safety checks.

    This function is designed to safely move files from downloads to movies/series
    folders with comprehensive validation and error handling to prevent data loss.

    Args:
        path: Source file path (absolute or relative)
        destination: Destination file path (absolute or relative)

    Returns:
        Dict with status or error information:
        - Success: {"status": "ok", "source": str, "destination": str, "size": int}
        - Error: {"error": str, "message": str}

    Safety features:
        - Validates source file exists and is readable
        - Validates destination directory exists and is writable
        - Prevents overwriting existing files
        - Verifies file integrity after move (size check)
        - Uses shutil.move (atomic rename within one filesystem;
          copy-then-delete across filesystems)
        - Comprehensive logging

    Example:
        >>> result = move_file(
        ...     "/downloads/movie.mkv",
        ...     "/movies/Inception (2010)/movie.mkv"
        ... )
        >>> print(result)
        {'status': 'ok', 'source': '...', 'destination': '...', 'size': 1234567890}
    """
    import shutil

    try:
        # Convert to resolved Path objects so all later checks and log
        # messages refer to unambiguous absolute paths
        source_path = Path(path).resolve()
        dest_path = Path(destination).resolve()

        logger.info(f"Moving file from {source_path} to {dest_path}")

        # === VALIDATION: Source file ===

        # Check source exists
        if not source_path.exists():
            logger.error(f"Source file does not exist: {source_path}")
            return {
                "error": "source_not_found",
                "message": f"Source file does not exist: {path}"
            }

        # Check source is a file (not a directory)
        if not source_path.is_file():
            logger.error(f"Source is not a file: {source_path}")
            return {
                "error": "source_not_file",
                "message": f"Source is not a file: {path}"
            }

        # Check source is readable
        if not os.access(source_path, os.R_OK):
            logger.error(f"Source file is not readable: {source_path}")
            return {
                "error": "permission_denied",
                "message": f"Source file is not readable: {path}"
            }

        # Get source file size now; used after the move to verify integrity
        source_size = source_path.stat().st_size
        logger.debug(f"Source file size: {source_size} bytes")

        # === VALIDATION: Destination ===

        # Check destination parent directory exists
        dest_parent = dest_path.parent
        if not dest_parent.exists():
            logger.error(f"Destination directory does not exist: {dest_parent}")
            return {
                "error": "destination_dir_not_found",
                "message": f"Destination directory does not exist: {dest_parent}"
            }

        # Check destination parent is a directory
        if not dest_parent.is_dir():
            logger.error(f"Destination parent is not a directory: {dest_parent}")
            return {
                "error": "destination_not_dir",
                "message": f"Destination parent is not a directory: {dest_parent}"
            }

        # Check destination parent is writable
        if not os.access(dest_parent, os.W_OK):
            logger.error(f"Destination directory is not writable: {dest_parent}")
            return {
                "error": "permission_denied",
                "message": f"Destination directory is not writable: {dest_parent}"
            }

        # Check destination file doesn't already exist (never overwrite)
        if dest_path.exists():
            logger.warning(f"Destination file already exists: {dest_path}")
            return {
                "error": "destination_exists",
                "message": f"Destination file already exists: {destination}"
            }

        # === SAFETY CHECK: Prevent moving to same location ===

        if source_path == dest_path:
            logger.warning("Source and destination are the same")
            return {
                "error": "same_location",
                "message": "Source and destination are the same"
            }

        # === PERFORM MOVE ===

        logger.info(f"Moving file: {source_path.name} ({source_size} bytes)")

        try:
            # shutil.move handles cross-filesystem moves automatically
            # (copy2 followed by removal of the source)
            shutil.move(str(source_path), str(dest_path))
            logger.info(f"File moved successfully to {dest_path}")

        except Exception as e:
            logger.error(f"Failed to move file: {e}", exc_info=True)
            return {
                "error": "move_failed",
                "message": f"Failed to move file: {str(e)}"
            }

        # === VERIFICATION: Ensure file was moved correctly ===

        # Check destination file exists
        if not dest_path.exists():
            logger.error("Destination file does not exist after move!")
            # Distinguish "move silently failed" from "file lost entirely"
            if source_path.exists():
                logger.info("Source file still exists, move may have failed")
                return {
                    "error": "move_verification_failed",
                    "message": "File was not moved successfully (destination not found)"
                }
            else:
                logger.critical("Both source and destination missing after move!")
                return {
                    "error": "file_lost",
                    "message": "CRITICAL: File missing after move operation"
                }

        # Check destination file size matches the size recorded before the move
        dest_size = dest_path.stat().st_size
        if dest_size != source_size:
            logger.error(f"File size mismatch! Source: {source_size}, Dest: {dest_size}")
            return {
                "error": "size_mismatch",
                "message": f"File size mismatch after move (expected {source_size}, got {dest_size})"
            }

        # Check source file no longer exists
        if source_path.exists():
            logger.warning("Source file still exists after move (copy instead of move?)")
            # shutil.move removes the source even across filesystems, so a
            # surviving source is unexpected — logged but not treated as an
            # error since the destination copy is verified above

        # === SUCCESS ===

        logger.info(f"File successfully moved and verified: {dest_path.name}")
        return {
            "status": "ok",
            "source": str(source_path),
            "destination": str(dest_path),
            "filename": dest_path.name,
            "size": dest_size
        }

    except PermissionError as e:
        logger.error(f"Permission denied: {e}")
        return {
            "error": "permission_denied",
            "message": f"Permission denied: {str(e)}"
        }

    except OSError as e:
        logger.error(f"OS error during move: {e}", exc_info=True)
        return {
            "error": "os_error",
            "message": f"OS error: {str(e)}"
        }
|
||||
# Create file manager
|
||||
file_manager = FileManager(memory)
|
||||
|
||||
# Create use case
|
||||
use_case = ListFolderUseCase(file_manager)
|
||||
|
||||
# Execute use case
|
||||
response = use_case.execute(folder_type, path)
|
||||
|
||||
# Return as dict
|
||||
return response.to_dict()
|
||||
|
||||
15
app.py
15
app.py
@@ -8,15 +8,26 @@ from fastapi import FastAPI, Request
|
||||
from fastapi.responses import JSONResponse, StreamingResponse
|
||||
|
||||
from agent.llm.deepseek import DeepSeekClient
|
||||
from agent.memory import Memory
|
||||
from agent.llm.ollama import OllamaClient
|
||||
from infrastructure.persistence.memory import Memory
|
||||
from agent.agent import Agent
|
||||
import os
|
||||
|
||||
app = FastAPI(
|
||||
title="LibreChat Agent Backend",
|
||||
version="0.1.0",
|
||||
)
|
||||
|
||||
llm = DeepSeekClient()
|
||||
# Choose LLM based on environment variable
|
||||
llm_provider = os.getenv("LLM_PROVIDER", "deepseek").lower()
|
||||
|
||||
if llm_provider == "ollama":
|
||||
print("🦙 Using Ollama LLM")
|
||||
llm = OllamaClient()
|
||||
else:
|
||||
print("🤖 Using DeepSeek LLM")
|
||||
llm = DeepSeekClient()
|
||||
|
||||
memory = Memory()
|
||||
agent = Agent(llm=llm, memory=memory)
|
||||
|
||||
|
||||
1
application/__init__.py
Normal file
1
application/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
"""Application layer - Use cases and application services."""
|
||||
11
application/filesystem/__init__.py
Normal file
11
application/filesystem/__init__.py
Normal file
@@ -0,0 +1,11 @@
|
||||
"""Filesystem use cases."""
|
||||
from .set_folder_path import SetFolderPathUseCase
|
||||
from .list_folder import ListFolderUseCase
|
||||
from .dto import SetFolderPathResponse, ListFolderResponse
|
||||
|
||||
__all__ = [
|
||||
"SetFolderPathUseCase",
|
||||
"ListFolderUseCase",
|
||||
"SetFolderPathResponse",
|
||||
"ListFolderResponse",
|
||||
]
|
||||
59
application/filesystem/dto.py
Normal file
59
application/filesystem/dto.py
Normal file
@@ -0,0 +1,59 @@
|
||||
"""Filesystem application DTOs."""
|
||||
from dataclasses import dataclass
|
||||
from typing import Optional, List
|
||||
|
||||
|
||||
@dataclass
class SetFolderPathResponse:
    """Result DTO for the set-folder-path use case."""

    # "ok" on success; on failure `error`/`message` carry the details.
    status: str
    folder_name: Optional[str] = None
    path: Optional[str] = None
    error: Optional[str] = None
    message: Optional[str] = None

    def to_dict(self):
        """Serialise to the plain-dict shape the agent layer consumes."""
        payload = {"status": self.status}

        if self.error:
            payload["error"] = self.error
            payload["message"] = self.message
            return payload

        if self.folder_name:
            payload["folder_name"] = self.folder_name
        if self.path:
            payload["path"] = self.path
        return payload
|
||||
|
||||
|
||||
@dataclass
class ListFolderResponse:
    """Result DTO for the list-folder use case."""

    # "ok" on success; on failure `error`/`message` carry the details.
    status: str
    folder_type: Optional[str] = None
    path: Optional[str] = None
    entries: Optional[List[str]] = None
    count: Optional[int] = None
    error: Optional[str] = None
    message: Optional[str] = None

    def to_dict(self):
        """Serialise to the plain-dict shape the agent layer consumes."""
        payload = {"status": self.status}

        if self.error:
            payload["error"] = self.error
            payload["message"] = self.message
            return payload

        if self.folder_type:
            payload["folder_type"] = self.folder_type
        if self.path:
            payload["path"] = self.path
        # Explicit None checks so an empty listing ([] / 0) still appears.
        if self.entries is not None:
            payload["entries"] = self.entries
        if self.count is not None:
            payload["count"] = self.count
        return payload
|
||||
52
application/filesystem/list_folder.py
Normal file
52
application/filesystem/list_folder.py
Normal file
@@ -0,0 +1,52 @@
|
||||
"""List folder use case."""
|
||||
import logging
|
||||
|
||||
from infrastructure.filesystem import FileManager
|
||||
from .dto import ListFolderResponse
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ListFolderUseCase:
    """
    Use case for listing folder contents.

    Thin orchestration layer: delegates to the FileManager and maps its
    dict result onto a ListFolderResponse DTO.
    """

    def __init__(self, file_manager: FileManager):
        """
        Initialize use case.

        Args:
            file_manager: FileManager instance
        """
        self.file_manager = file_manager

    def execute(self, folder_type: str, path: str = ".") -> ListFolderResponse:
        """
        List contents of a folder.

        Args:
            folder_type: Type of folder to list (download, tvshow, movie, torrent)
            path: Relative path within the folder (default: ".")

        Returns:
            ListFolderResponse with folder contents or error information
        """
        outcome = self.file_manager.list_folder(folder_type, path)

        # Anything other than an "ok" status is mapped to an error DTO.
        if outcome.get("status") != "ok":
            return ListFolderResponse(
                status="error",
                error=outcome.get("error"),
                message=outcome.get("message"),
            )

        return ListFolderResponse(
            status="ok",
            folder_type=outcome.get("folder_type"),
            path=outcome.get("path"),
            entries=outcome.get("entries"),
            count=outcome.get("count"),
        )
|
||||
50
application/filesystem/set_folder_path.py
Normal file
50
application/filesystem/set_folder_path.py
Normal file
@@ -0,0 +1,50 @@
|
||||
"""Set folder path use case."""
|
||||
import logging
|
||||
|
||||
from infrastructure.filesystem import FileManager
|
||||
from .dto import SetFolderPathResponse
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class SetFolderPathUseCase:
    """
    Use case for setting a folder path in configuration.

    Thin orchestration layer: delegates to the FileManager and maps its
    dict result onto a SetFolderPathResponse DTO.
    """

    def __init__(self, file_manager: FileManager):
        """
        Initialize use case.

        Args:
            file_manager: FileManager instance
        """
        self.file_manager = file_manager

    def execute(self, folder_name: str, path_value: str) -> SetFolderPathResponse:
        """
        Set a folder path in configuration.

        Args:
            folder_name: Name of folder to set (download, tvshow, movie, torrent)
            path_value: Absolute path to the folder

        Returns:
            SetFolderPathResponse with success or error information
        """
        outcome = self.file_manager.set_folder_path(folder_name, path_value)

        # Anything other than an "ok" status is mapped to an error DTO.
        if outcome.get("status") != "ok":
            return SetFolderPathResponse(
                status="error",
                error=outcome.get("error"),
                message=outcome.get("message"),
            )

        return SetFolderPathResponse(
            status="ok",
            folder_name=outcome.get("folder_name"),
            path=outcome.get("path"),
        )
|
||||
8
application/movies/__init__.py
Normal file
8
application/movies/__init__.py
Normal file
@@ -0,0 +1,8 @@
|
||||
"""Movie use cases."""
|
||||
from .search_movie import SearchMovieUseCase
|
||||
from .dto import SearchMovieResponse
|
||||
|
||||
__all__ = [
|
||||
"SearchMovieUseCase",
|
||||
"SearchMovieResponse",
|
||||
]
|
||||
43
application/movies/dto.py
Normal file
43
application/movies/dto.py
Normal file
@@ -0,0 +1,43 @@
|
||||
"""Movie application DTOs."""
|
||||
from dataclasses import dataclass
|
||||
from typing import Optional
|
||||
|
||||
|
||||
@dataclass
class SearchMovieResponse:
    """Response from searching for a movie.

    Carries either the movie details (status "ok") or an error code plus a
    human-readable message; ``to_dict`` flattens it for the agent layer.
    """

    status: str
    imdb_id: Optional[str] = None
    title: Optional[str] = None
    media_type: Optional[str] = None
    tmdb_id: Optional[int] = None
    overview: Optional[str] = None
    release_date: Optional[str] = None
    vote_average: Optional[float] = None
    error: Optional[str] = None
    message: Optional[str] = None

    def to_dict(self):
        """Convert to dict for agent compatibility.

        Numeric fields (``tmdb_id``, ``vote_average``) are compared against
        ``None`` rather than truth-tested, so legitimate zero values
        (e.g. a 0.0 vote average) are no longer silently dropped from the
        payload — the previous truthiness checks lost them.

        NOTE(review): when ``error`` is set, populated media fields are
        omitted (the "no_imdb_id" case sets both) — confirm callers rely on
        this before changing it.
        """
        result = {"status": self.status}

        if self.error:
            result["error"] = self.error
            result["message"] = self.message
        else:
            if self.imdb_id:
                result["imdb_id"] = self.imdb_id
            if self.title:
                result["title"] = self.title
            if self.media_type:
                result["media_type"] = self.media_type
            # is-not-None: 0 / 0.0 are valid values and must be kept
            if self.tmdb_id is not None:
                result["tmdb_id"] = self.tmdb_id
            if self.overview:
                result["overview"] = self.overview
            if self.release_date:
                result["release_date"] = self.release_date
            if self.vote_average is not None:
                result["vote_average"] = self.vote_average

        return result
|
||||
95
application/movies/search_movie.py
Normal file
95
application/movies/search_movie.py
Normal file
@@ -0,0 +1,95 @@
|
||||
"""Search movie use case."""
|
||||
import logging
|
||||
from typing import Optional
|
||||
|
||||
from infrastructure.api.tmdb import TMDBClient, TMDBNotFoundError, TMDBAPIError, TMDBConfigurationError
|
||||
from .dto import SearchMovieResponse
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class SearchMovieUseCase:
    """
    Use case for searching a movie and retrieving its IMDb ID.

    Wraps the TMDB client and translates its results and exceptions into a
    SearchMovieResponse DTO.
    """

    def __init__(self, tmdb_client: TMDBClient):
        """
        Initialize use case.

        Args:
            tmdb_client: TMDB API client
        """
        self.tmdb_client = tmdb_client

    def execute(self, media_title: str) -> SearchMovieResponse:
        """
        Search for a movie by title.

        Args:
            media_title: Title of the movie to search for

        Returns:
            SearchMovieResponse with movie information or error
        """
        try:
            found = self.tmdb_client.search_media(media_title)

            # A hit without an IMDb ID is still "ok", but flagged so the
            # caller can see the partial result.
            if not found.imdb_id:
                logger.warning(f"No IMDb ID available for '{media_title}'")
                return SearchMovieResponse(
                    status="ok",
                    title=found.title,
                    media_type=found.media_type,
                    tmdb_id=found.tmdb_id,
                    error="no_imdb_id",
                    message=f"No IMDb ID available for '{found.title}'",
                )

            logger.info(f"IMDb ID found for '{media_title}': {found.imdb_id}")
            return SearchMovieResponse(
                status="ok",
                imdb_id=found.imdb_id,
                title=found.title,
                media_type=found.media_type,
                tmdb_id=found.tmdb_id,
                overview=found.overview,
                release_date=found.release_date,
                vote_average=found.vote_average,
            )

        except TMDBNotFoundError as exc:
            logger.info(f"Media not found: {exc}")
            return SearchMovieResponse(
                status="error",
                error="not_found",
                message=str(exc),
            )

        except TMDBConfigurationError as exc:
            logger.error(f"TMDB configuration error: {exc}")
            return SearchMovieResponse(
                status="error",
                error="configuration_error",
                message=str(exc),
            )

        except TMDBAPIError as exc:
            logger.error(f"TMDB API error: {exc}")
            return SearchMovieResponse(
                status="error",
                error="api_error",
                message=str(exc),
            )

        except ValueError as exc:
            logger.error(f"Validation error: {exc}")
            return SearchMovieResponse(
                status="error",
                error="validation_failed",
                message=str(exc),
            )
|
||||
11
application/torrents/__init__.py
Normal file
11
application/torrents/__init__.py
Normal file
@@ -0,0 +1,11 @@
|
||||
"""Torrent use cases."""
|
||||
from .search_torrents import SearchTorrentsUseCase
|
||||
from .add_torrent import AddTorrentUseCase
|
||||
from .dto import SearchTorrentsResponse, AddTorrentResponse
|
||||
|
||||
__all__ = [
|
||||
"SearchTorrentsUseCase",
|
||||
"AddTorrentUseCase",
|
||||
"SearchTorrentsResponse",
|
||||
"AddTorrentResponse",
|
||||
]
|
||||
85
application/torrents/add_torrent.py
Normal file
85
application/torrents/add_torrent.py
Normal file
@@ -0,0 +1,85 @@
|
||||
"""Add torrent use case."""
|
||||
import logging
|
||||
|
||||
from infrastructure.api.qbittorrent import QBittorrentClient, QBittorrentAuthError, QBittorrentAPIError
|
||||
from .dto import AddTorrentResponse
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class AddTorrentUseCase:
    """
    Use case for adding a torrent to qBittorrent.

    Validates the magnet link, delegates to the qBittorrent client, and maps
    the outcome (or any raised exception) onto an AddTorrentResponse DTO.
    """

    def __init__(self, qbittorrent_client: QBittorrentClient):
        """
        Initialize use case.

        Args:
            qbittorrent_client: qBittorrent API client
        """
        self.qbittorrent_client = qbittorrent_client

    def execute(self, magnet_link: str) -> AddTorrentResponse:
        """
        Add a torrent to qBittorrent using a magnet link.

        Args:
            magnet_link: Magnet link of the torrent to add

        Returns:
            AddTorrentResponse with success or error information
        """
        try:
            # Guard clauses: only a non-empty "magnet:" URI is accepted.
            if not magnet_link or not isinstance(magnet_link, str):
                raise ValueError("Magnet link must be a non-empty string")
            if not magnet_link.startswith("magnet:"):
                raise ValueError("Invalid magnet link format")

            logger.info("Adding torrent to qBittorrent")

            added = self.qbittorrent_client.add_torrent(magnet_link)
            if not added:
                logger.warning("Failed to add torrent to qBittorrent")
                return AddTorrentResponse(
                    status="error",
                    error="add_failed",
                    message="Failed to add torrent to qBittorrent",
                )

            logger.info("Torrent added successfully to qBittorrent")
            return AddTorrentResponse(
                status="ok",
                message="Torrent added successfully to qBittorrent",
            )

        except QBittorrentAuthError as exc:
            logger.error(f"qBittorrent authentication error: {exc}")
            return AddTorrentResponse(
                status="error",
                error="authentication_failed",
                message="Failed to authenticate with qBittorrent",
            )

        except QBittorrentAPIError as exc:
            logger.error(f"qBittorrent API error: {exc}")
            return AddTorrentResponse(
                status="error",
                error="api_error",
                message=str(exc),
            )

        except ValueError as exc:
            logger.error(f"Validation error: {exc}")
            return AddTorrentResponse(
                status="error",
                error="validation_failed",
                message=str(exc),
            )
|
||||
47
application/torrents/dto.py
Normal file
47
application/torrents/dto.py
Normal file
@@ -0,0 +1,47 @@
|
||||
"""Torrent application DTOs."""
|
||||
from dataclasses import dataclass
|
||||
from typing import Optional, List, Dict, Any
|
||||
|
||||
|
||||
@dataclass
class SearchTorrentsResponse:
    """Result DTO for the torrent-search use case."""

    # "ok" on success; on failure `error`/`message` carry the details.
    status: str
    torrents: Optional[List[Dict[str, Any]]] = None
    count: Optional[int] = None
    error: Optional[str] = None
    message: Optional[str] = None

    def to_dict(self):
        """Serialise to the plain-dict shape the agent layer consumes."""
        payload = {"status": self.status}

        if self.error:
            payload["error"] = self.error
            payload["message"] = self.message
            return payload

        # Explicit None checks keep empty result sets ([] / 0) present.
        if self.torrents is not None:
            payload["torrents"] = self.torrents
        if self.count is not None:
            payload["count"] = self.count
        return payload
|
||||
|
||||
|
||||
@dataclass
class AddTorrentResponse:
    """Result DTO for the add-torrent use case."""

    status: str
    message: Optional[str] = None
    error: Optional[str] = None

    def to_dict(self):
        """Serialise to the plain-dict shape the agent layer consumes.

        Unlike the search DTOs, `error` and `message` are added
        independently here: a success response may carry a message too.
        """
        payload = {"status": self.status}
        if self.error:
            payload["error"] = self.error
        if self.message:
            payload["message"] = self.message
        return payload
|
||||
94
application/torrents/search_torrents.py
Normal file
94
application/torrents/search_torrents.py
Normal file
@@ -0,0 +1,94 @@
|
||||
"""Search torrents use case."""
|
||||
import logging
|
||||
|
||||
from infrastructure.api.knaben import KnabenClient, KnabenNotFoundError, KnabenAPIError
|
||||
from .dto import SearchTorrentsResponse
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class SearchTorrentsUseCase:
    """
    Use case for searching torrents.

    Delegates to the Knaben API client and maps its results and exceptions
    onto a SearchTorrentsResponse DTO.
    """

    def __init__(self, knaben_client: KnabenClient):
        """
        Initialize use case.

        Args:
            knaben_client: Knaben API client
        """
        self.knaben_client = knaben_client

    def execute(self, media_title: str, limit: int = 10) -> SearchTorrentsResponse:
        """
        Search for torrents by media title.

        Args:
            media_title: Title of the media to search for
            limit: Maximum number of results

        Returns:
            SearchTorrentsResponse with torrent information or error
        """
        try:
            # Search for torrents
            results = self.knaben_client.search(media_title, limit=limit)

            if not results:
                logger.info(f"No torrents found for '{media_title}'")
                return SearchTorrentsResponse(
                    status="error",
                    error="not_found",
                    message=f"No torrents found for '{media_title}'"
                )

            # Flatten result objects into plain dicts for the agent layer
            # (comprehension instead of the manual append loop).
            torrents = [
                {
                    "name": torrent.title,
                    "size": torrent.size,
                    "seeders": torrent.seeders,
                    "leechers": torrent.leechers,
                    "magnet": torrent.magnet,
                    "info_hash": torrent.info_hash,
                    "tracker": torrent.tracker,
                    "upload_date": torrent.upload_date,
                    "category": torrent.category,
                }
                for torrent in results
            ]

            logger.info(f"Found {len(torrents)} torrents for '{media_title}'")

            return SearchTorrentsResponse(
                status="ok",
                torrents=torrents,
                count=len(torrents)
            )

        except KnabenNotFoundError as e:
            logger.info(f"Torrents not found: {e}")
            return SearchTorrentsResponse(
                status="error",
                error="not_found",
                message=str(e)
            )

        except KnabenAPIError as e:
            logger.error(f"Knaben API error: {e}")
            return SearchTorrentsResponse(
                status="error",
                error="api_error",
                message=str(e)
            )

        except ValueError as e:
            logger.error(f"Validation error: {e}")
            return SearchTorrentsResponse(
                status="error",
                error="validation_failed",
                message=str(e)
            )
|
||||
62
cleanup_old_files.sh
Normal file
62
cleanup_old_files.sh
Normal file
@@ -0,0 +1,62 @@
|
||||
#!/bin/bash
# Cleanup of files made obsolete by the DDD migration.

echo "🗑️ Nettoyage des fichiers obsolètes..."

# Legacy API clients now live under infrastructure/ — remove the old copies.
echo "Suppression des anciens clients API..."
for legacy_client in themoviedb knaben qbittorrent; do
    rm -f "agent/api/${legacy_client}.py"
done

echo "✅ Anciens clients API supprimés"

# Optionally remove the pre-migration documentation set.
read -p "Voulez-vous supprimer l'ancienne documentation ? (y/n) " -n 1 -r
echo
if [[ $REPLY =~ ^[Yy]$ ]]
then
    echo "Suppression de l'ancienne documentation..."
    old_docs=(
        CHANGELOG_QUALITY.md
        CLEANUP_FINAL.md
        CLEANUP_SUMMARY.md
        CODE_QUALITY.md
        COMMANDS_REMOVAL.md
        DEPENDENCY_INJECTION.md
        DOCUMENTATION_INDEX.md
        EXECUTIVE_SUMMARY.md
        FILES_CHANGED.md
        IMPROVEMENTS_SUMMARY.md
        KNABEN_CLIENT.md
        MIGRATION_GUIDE.md
        MULTI_TOOL_EXECUTION.md
        PARAMETERS.md
        PROJECT_STRUCTURE.md
        QUALITY_REVIEW_COMPLETE.md
        README_QUALITY.md
        REFACTORING_COMPLETE.md
        REFACTORING_FINAL.md
        REFACTORING_FOLDERS.md
        REFACTORING_SUMMARY.md
        SECURITY.md
        TMDB_CLIENT_ARCHITECTURE.md
        TMDB_CLIENT_SUMMARY.md
        TOOLS_REFACTORING.md
        TV_SHOWS.md
        VERIFICATION.md
    )
    for doc in "${old_docs[@]}"; do
        rm -f "$doc"
    done
    echo "✅ Ancienne documentation supprimée"
fi

echo ""
echo "🎉 Nettoyage terminé !"
echo ""
echo "📋 Fichiers conservés (nécessaires) :"
echo " - agent/api/__init__.py (re-exporte infrastructure)"
echo " - agent/models/__init__.py (re-exporte domain)"
echo " - agent/models/tv_show.py (compatibilité)"
echo " - agent/memory.py (re-exporte infrastructure)"
echo ""
echo "📚 Nouvelle documentation DDD :"
echo " - DDD_PHASE1_COMPLETE.md"
echo " - DDD_PHASE2_COMPLETE.md"
echo " - DDD_MIGRATION_COMPLETE.md"
|
||||
1
domain/__init__.py
Normal file
1
domain/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
"""Domain layer - Business logic and entities."""
|
||||
15
domain/movies/__init__.py
Normal file
15
domain/movies/__init__.py
Normal file
@@ -0,0 +1,15 @@
|
||||
"""Movies domain - Business logic for movie management."""
|
||||
from .entities import Movie
|
||||
from .value_objects import MovieTitle, ReleaseYear, Quality
|
||||
from .exceptions import MovieNotFound, InvalidMovieData
|
||||
from .services import MovieService
|
||||
|
||||
__all__ = [
|
||||
"Movie",
|
||||
"MovieTitle",
|
||||
"ReleaseYear",
|
||||
"Quality",
|
||||
"MovieNotFound",
|
||||
"InvalidMovieData",
|
||||
"MovieService",
|
||||
]
|
||||
86
domain/movies/entities.py
Normal file
86
domain/movies/entities.py
Normal file
@@ -0,0 +1,86 @@
|
||||
"""Movie domain entities."""
|
||||
from dataclasses import dataclass, field
|
||||
from typing import Optional
|
||||
from datetime import datetime
|
||||
|
||||
from ..shared.value_objects import ImdbId, FilePath, FileSize
|
||||
from .value_objects import MovieTitle, ReleaseYear, Quality
|
||||
|
||||
|
||||
@dataclass
class Movie:
    """
    Movie entity representing a movie in the media library.

    This is the main aggregate root for the movies domain.
    """
    # Identity of the aggregate; coerced from str in __post_init__.
    imdb_id: ImdbId
    # Display title; coerced from str in __post_init__.
    title: MovieTitle
    release_year: Optional[ReleaseYear] = None
    quality: Quality = Quality.UNKNOWN
    # Location and size of the downloaded file, if any.
    file_path: Optional[FilePath] = None
    file_size: Optional[FileSize] = None
    # Optional TMDB metadata.
    tmdb_id: Optional[int] = None
    overview: Optional[str] = None
    poster_path: Optional[str] = None
    vote_average: Optional[float] = None
    # Timestamp set when the entity is created (local time, naive datetime).
    added_at: datetime = field(default_factory=datetime.now)

    def __post_init__(self):
        """Validate movie entity.

        Accepts plain strings for ``imdb_id`` and ``title`` as a
        convenience and coerces them into their value-object types;
        any other type raises ValueError.
        """
        # Ensure ImdbId is actually an ImdbId instance
        if not isinstance(self.imdb_id, ImdbId):
            if isinstance(self.imdb_id, str):
                # object.__setattr__ works whether or not the dataclass is
                # frozen (this one is not), so coercion stays safe either way.
                object.__setattr__(self, 'imdb_id', ImdbId(self.imdb_id))
            else:
                raise ValueError(f"imdb_id must be ImdbId or str, got {type(self.imdb_id)}")

        # Ensure MovieTitle is actually a MovieTitle instance
        if not isinstance(self.title, MovieTitle):
            if isinstance(self.title, str):
                object.__setattr__(self, 'title', MovieTitle(self.title))
            else:
                raise ValueError(f"title must be MovieTitle or str, got {type(self.title)}")

    def has_file(self) -> bool:
        """Check if the movie has an associated file (path set AND exists on disk)."""
        return self.file_path is not None and self.file_path.exists()

    def is_downloaded(self) -> bool:
        """Check if the movie is downloaded (has a file)."""
        return self.has_file()

    def get_folder_name(self) -> str:
        """
        Get the folder name for this movie.

        Format: "Title (Year)"
        Example: "Inception (2010)"

        Falls back to the bare title when the release year is unknown.
        """
        if self.release_year:
            return f"{self.title.value} ({self.release_year.value})"
        return self.title.value

    def get_filename(self) -> str:
        """
        Get the suggested filename for this movie.

        Format: "Title.Year.Quality.ext"
        Example: "Inception.2010.1080p.mkv"

        Year and quality segments are omitted when unknown; the file
        extension is NOT included here (added from the actual file later).
        """
        parts = [self.title.normalized()]

        if self.release_year:
            parts.append(str(self.release_year.value))

        if self.quality != Quality.UNKNOWN:
            parts.append(self.quality.value)

        # Extension will be added based on actual file
        return ".".join(parts)

    def __str__(self) -> str:
        return f"{self.title.value} ({self.release_year.value if self.release_year else 'Unknown'})"

    def __repr__(self) -> str:
        return f"Movie(imdb_id={self.imdb_id}, title='{self.title.value}')"
|
||||
17
domain/movies/exceptions.py
Normal file
17
domain/movies/exceptions.py
Normal file
@@ -0,0 +1,17 @@
|
||||
"""Movie domain exceptions."""
|
||||
from ..shared.exceptions import DomainException, NotFoundError
|
||||
|
||||
|
||||
class MovieNotFound(NotFoundError):
    """Raised when a movie is not found in the repository."""
    pass
|
||||
|
||||
|
||||
class InvalidMovieData(DomainException):
    """Raised when movie data is invalid (failed entity/value-object validation)."""
    pass
|
||||
|
||||
|
||||
class MovieAlreadyExists(DomainException):
    """Raised when trying to add a movie whose IMDb ID is already stored."""
    pass
|
||||
73
domain/movies/repositories.py
Normal file
73
domain/movies/repositories.py
Normal file
@@ -0,0 +1,73 @@
|
||||
"""Movie repository interfaces (abstract)."""
|
||||
from abc import ABC, abstractmethod
|
||||
from typing import List, Optional
|
||||
|
||||
from ..shared.value_objects import ImdbId
|
||||
from .entities import Movie
|
||||
|
||||
|
||||
class MovieRepository(ABC):
    """
    Abstract repository for movie persistence.

    This defines the interface that infrastructure implementations must follow.
    The domain layer (MovieService) depends only on this abstraction; the
    concrete storage backend is an infrastructure concern.
    """

    @abstractmethod
    def save(self, movie: Movie) -> None:
        """
        Save a movie to the repository.

        NOTE: MovieService calls save() for both new movies (add_movie)
        and existing ones (update_movie), so implementations should
        overwrite an existing entry with the same IMDb ID.

        Args:
            movie: Movie entity to save
        """
        pass

    @abstractmethod
    def find_by_imdb_id(self, imdb_id: ImdbId) -> Optional[Movie]:
        """
        Find a movie by its IMDb ID.

        Args:
            imdb_id: IMDb ID to search for

        Returns:
            Movie if found, None otherwise
        """
        pass

    @abstractmethod
    def find_all(self) -> List[Movie]:
        """
        Get all movies in the repository.

        Returns:
            List of all movies
        """
        pass

    @abstractmethod
    def delete(self, imdb_id: ImdbId) -> bool:
        """
        Delete a movie from the repository.

        Args:
            imdb_id: IMDb ID of the movie to delete

        Returns:
            True if deleted, False if not found
        """
        pass

    @abstractmethod
    def exists(self, imdb_id: ImdbId) -> bool:
        """
        Check if a movie exists in the repository.

        Args:
            imdb_id: IMDb ID to check

        Returns:
            True if exists, False otherwise
        """
        pass
|
||||
188
domain/movies/services.py
Normal file
188
domain/movies/services.py
Normal file
@@ -0,0 +1,188 @@
|
||||
"""Movie domain services - Business logic."""
|
||||
import logging
|
||||
from typing import Optional, List
|
||||
import re
|
||||
|
||||
from ..shared.value_objects import ImdbId, FilePath
|
||||
from .entities import Movie
|
||||
from .value_objects import Quality
|
||||
from .repositories import MovieRepository
|
||||
from .exceptions import MovieNotFound, MovieAlreadyExists
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class MovieService:
    """
    Domain service for movie-related business logic.

    This service contains business rules that don't naturally fit
    within a single entity.
    """

    def __init__(self, repository: MovieRepository):
        """
        Initialize movie service.

        Args:
            repository: Movie repository for persistence
        """
        self.repository = repository

    def add_movie(self, movie: Movie) -> None:
        """
        Add a new movie to the library.

        Args:
            movie: Movie entity to add

        Raises:
            MovieAlreadyExists: If movie with same IMDb ID already exists
        """
        if self.repository.exists(movie.imdb_id):
            raise MovieAlreadyExists(f"Movie with IMDb ID {movie.imdb_id} already exists")

        self.repository.save(movie)
        # Lazy %-args: formatted only if the INFO level is enabled.
        logger.info("Added movie: %s (%s)", movie.title.value, movie.imdb_id)

    def get_movie(self, imdb_id: ImdbId) -> Movie:
        """
        Get a movie by IMDb ID.

        Args:
            imdb_id: IMDb ID of the movie

        Returns:
            Movie entity

        Raises:
            MovieNotFound: If movie not found
        """
        movie = self.repository.find_by_imdb_id(imdb_id)
        if not movie:
            raise MovieNotFound(f"Movie with IMDb ID {imdb_id} not found")
        return movie

    def get_all_movies(self) -> List[Movie]:
        """
        Get all movies in the library.

        Returns:
            List of all movies
        """
        return self.repository.find_all()

    def update_movie(self, movie: Movie) -> None:
        """
        Update an existing movie.

        Args:
            movie: Movie entity with updated data

        Raises:
            MovieNotFound: If movie doesn't exist
        """
        if not self.repository.exists(movie.imdb_id):
            raise MovieNotFound(f"Movie with IMDb ID {movie.imdb_id} not found")

        self.repository.save(movie)
        logger.info("Updated movie: %s (%s)", movie.title.value, movie.imdb_id)

    def remove_movie(self, imdb_id: ImdbId) -> None:
        """
        Remove a movie from the library.

        Args:
            imdb_id: IMDb ID of the movie to remove

        Raises:
            MovieNotFound: If movie not found
        """
        if not self.repository.delete(imdb_id):
            raise MovieNotFound(f"Movie with IMDb ID {imdb_id} not found")

        logger.info("Removed movie with IMDb ID: %s", imdb_id)

    def detect_quality_from_filename(self, filename: str) -> Quality:
        """
        Detect video quality from filename.

        Args:
            filename: Filename to analyze

        Returns:
            Detected quality or UNKNOWN
        """
        filename_lower = filename.lower()

        # Check for quality indicators, highest resolution first.
        if '2160p' in filename_lower or '4k' in filename_lower:
            return Quality.UHD_4K
        elif '1080p' in filename_lower:
            return Quality.FULL_HD
        elif '720p' in filename_lower:
            return Quality.HD
        elif '480p' in filename_lower:
            return Quality.SD

        return Quality.UNKNOWN

    def extract_year_from_filename(self, filename: str) -> Optional[int]:
        """
        Extract release year from filename.

        Args:
            filename: Filename to analyze

        Returns:
            Year if found, None otherwise
        """
        # Look for 4-digit year in parentheses, dot-separated, or standalone.
        # Examples: "Movie (2010)", "Movie.2010.1080p", "Movie 2010"
        # Bug fix: the old standalone pattern r'\s(\d{4})\s' required
        # whitespace on BOTH sides, so a year at the start or end of the
        # name (e.g. "Movie 2010") was never matched. The anchors now also
        # accept string boundaries.
        patterns = [
            r'\((\d{4})\)',              # (2010)
            r'\.(\d{4})\.',              # .2010.
            r'(?:^|\s)(\d{4})(?:\s|$)',  # 2010 delimited by spaces or edges
        ]

        for pattern in patterns:
            match = re.search(pattern, filename)
            if match:
                year = int(match.group(1))
                # Validate year is reasonable (cinema began around 1888).
                if 1888 <= year <= 2100:
                    return year

        return None

    def validate_movie_file(self, file_path: FilePath) -> bool:
        """
        Validate that a file is a valid movie file.

        Checks existence, that it is a regular file, a known video
        extension, and a minimum plausible size.

        Args:
            file_path: Path to the file

        Returns:
            True if valid movie file, False otherwise
        """
        if not file_path.exists():
            logger.warning("File does not exist: %s", file_path)
            return False

        if not file_path.is_file():
            logger.warning("Path is not a file: %s", file_path)
            return False

        # Check file extension
        valid_extensions = {'.mkv', '.mp4', '.avi', '.mov', '.wmv', '.flv', '.webm'}
        if file_path.value.suffix.lower() not in valid_extensions:
            logger.warning("Invalid file extension: %s", file_path.value.suffix)
            return False

        # Check file size (should be at least 100 MB for a movie).
        # stat() once — previously it was called a second time just for logging.
        min_size = 100 * 1024 * 1024  # 100 MB
        actual_size = file_path.value.stat().st_size
        if actual_size < min_size:
            logger.warning("File too small to be a movie: %s bytes", actual_size)
            return False

        return True
|
||||
99
domain/movies/value_objects.py
Normal file
99
domain/movies/value_objects.py
Normal file
@@ -0,0 +1,99 @@
|
||||
"""Movie domain value objects."""
|
||||
from dataclasses import dataclass
|
||||
from enum import Enum
|
||||
from typing import Optional
|
||||
|
||||
from ..shared.exceptions import ValidationError
|
||||
|
||||
|
||||
class Quality(Enum):
    """Video quality levels, keyed by their canonical resolution label."""
    SD = "480p"
    HD = "720p"
    FULL_HD = "1080p"
    UHD_4K = "2160p"
    UNKNOWN = "unknown"

    @classmethod
    def from_string(cls, quality_str: str) -> "Quality":
        """
        Parse quality from string.

        Args:
            quality_str: Quality string (e.g., "1080p", "720p")

        Returns:
            Matching Quality member, or UNKNOWN for unrecognized input.
        """
        # The enum values ARE the canonical strings, so a direct value
        # lookup replaces the explicit mapping; anything unrecognized
        # (including non-strings) falls back to UNKNOWN.
        try:
            return cls(quality_str)
        except ValueError:
            return cls.UNKNOWN
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class MovieTitle:
    """
    Value object representing a movie title.

    Ensures the title is a non-empty string of at most 500 characters.
    """
    value: str

    def __post_init__(self):
        """Validate movie title.

        Raises:
            ValidationError: If the title is not a string, is empty,
                or exceeds 500 characters.
        """
        # Bug fix: the type check must come first — previously a falsy
        # non-string (e.g. None or 0) was misreported as "empty" instead
        # of a type error.
        if not isinstance(self.value, str):
            raise ValidationError(f"Movie title must be a string, got {type(self.value)}")

        if not self.value:
            raise ValidationError("Movie title cannot be empty")

        if len(self.value) > 500:
            raise ValidationError(f"Movie title too long: {len(self.value)} characters (max 500)")

    def normalized(self) -> str:
        """
        Return normalized title for file system usage.

        Removes special characters and replaces spaces with dots.
        """
        import re
        # Remove special characters except word chars, spaces, dots, hyphens
        cleaned = re.sub(r'[^\w\s\.\-]', '', self.value)
        # Replace spaces with dots
        return cleaned.replace(' ', '.')

    def __str__(self) -> str:
        return self.value

    def __repr__(self) -> str:
        return f"MovieTitle('{self.value}')"
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class ReleaseYear:
    """
    Value object representing a movie release year.

    Validates that the year falls in a plausible range.
    """
    value: int

    def __post_init__(self):
        """Validate release year."""
        if not isinstance(self.value, int):
            raise ValidationError(f"Release year must be an integer, got {type(self.value)}")

        # Cinema began around 1888; reject implausible future years too.
        if not 1888 <= self.value <= 2100:
            raise ValidationError(f"Invalid release year: {self.value}")

    def __str__(self) -> str:
        return str(self.value)

    def __repr__(self) -> str:
        return f"ReleaseYear({self.value})"
|
||||
11
domain/shared/__init__.py
Normal file
11
domain/shared/__init__.py
Normal file
@@ -0,0 +1,11 @@
|
||||
"""Shared kernel - Common domain concepts used across subdomains."""
|
||||
from .exceptions import DomainException, ValidationError
|
||||
from .value_objects import ImdbId, FilePath, FileSize
|
||||
|
||||
__all__ = [
|
||||
"DomainException",
|
||||
"ValidationError",
|
||||
"ImdbId",
|
||||
"FilePath",
|
||||
"FileSize",
|
||||
]
|
||||
21
domain/shared/exceptions.py
Normal file
21
domain/shared/exceptions.py
Normal file
@@ -0,0 +1,21 @@
|
||||
"""Shared domain exceptions."""
|
||||
|
||||
|
||||
class DomainException(Exception):
    """Base exception for all domain-related errors (root of the hierarchy)."""
    pass
|
||||
|
||||
|
||||
class ValidationError(DomainException):
    """Raised when domain validation fails (e.g. in value-object __post_init__)."""
    pass
|
||||
|
||||
|
||||
class NotFoundError(DomainException):
    """Raised when a domain entity is not found; subclassed per subdomain."""
    pass
|
||||
|
||||
|
||||
class AlreadyExistsError(DomainException):
    """Raised when trying to create an entity that already exists."""
    pass
|
||||
128
domain/shared/value_objects.py
Normal file
128
domain/shared/value_objects.py
Normal file
@@ -0,0 +1,128 @@
|
||||
"""Shared value objects used across multiple domains."""
|
||||
from dataclasses import dataclass
|
||||
from pathlib import Path
|
||||
from typing import Union
|
||||
import re
|
||||
|
||||
from .exceptions import ValidationError
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class ImdbId:
    """
    Value object representing an IMDb ID.

    IMDb IDs follow the format: tt followed by 7-8 digits (e.g., tt1375666)
    """
    value: str

    def __post_init__(self):
        """Validate IMDb ID format; raises ValidationError on any mismatch."""
        if not self.value:
            raise ValidationError("IMDb ID cannot be empty")

        if not isinstance(self.value, str):
            raise ValidationError(f"IMDb ID must be a string, got {type(self.value)}")

        # Accepted shape: literal "tt" prefix followed by exactly 7 or 8 digits.
        imdb_pattern = r'^tt\d{7,8}$'
        if not re.match(imdb_pattern, self.value):
            raise ValidationError(
                f"Invalid IMDb ID format: {self.value}. "
                "Expected format: tt followed by 7-8 digits (e.g., tt1375666)"
            )

    def __str__(self) -> str:
        return self.value

    def __repr__(self) -> str:
        return f"ImdbId('{self.value}')"
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class FilePath:
    """
    Value object wrapping a filesystem path.

    Accepts either a string or a ``pathlib.Path`` and normalizes to Path.
    """
    value: Path

    def __init__(self, path: Union[str, Path]):
        """
        Initialize FilePath.

        Args:
            path: String or Path object representing the file path

        Raises:
            ValidationError: If ``path`` is neither str nor Path.
        """
        if isinstance(path, Path):
            resolved = path
        elif isinstance(path, str):
            resolved = Path(path)
        else:
            raise ValidationError(f"Path must be str or Path, got {type(path)}")

        # Frozen dataclass: bypass the blocked __setattr__ to assign the field.
        object.__setattr__(self, 'value', resolved)

    def exists(self) -> bool:
        """Check if the path exists."""
        return self.value.exists()

    def is_file(self) -> bool:
        """Check if the path is a file."""
        return self.value.is_file()

    def is_dir(self) -> bool:
        """Check if the path is a directory."""
        return self.value.is_dir()

    def __str__(self) -> str:
        return str(self.value)

    def __repr__(self) -> str:
        return f"FilePath('{self.value}')"
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class FileSize:
    """
    Value object representing a file size in bytes.

    Provides human-readable formatting.
    """
    # NOTE: field name shadows the builtin `bytes`; kept for interface
    # compatibility with existing callers.
    bytes: int

    def __post_init__(self):
        """Validate file size (non-negative integer)."""
        if not isinstance(self.bytes, int):
            raise ValidationError(f"File size must be an integer, got {type(self.bytes)}")

        if self.bytes < 0:
            raise ValidationError(f"File size cannot be negative: {self.bytes}")

    def to_human_readable(self) -> str:
        """
        Convert bytes to human-readable format.

        Returns:
            String like "1.50 GB", "500 MB", etc. Plain bytes are shown
            without decimals; larger units use two decimal places.
        """
        units = ('B', 'KB', 'MB', 'GB', 'TB')
        magnitude = float(self.bytes)
        idx = 0

        # Divide down until the value fits the unit, capped at TB.
        while magnitude >= 1024 and idx < len(units) - 1:
            magnitude /= 1024
            idx += 1

        if idx == 0:
            return f"{int(magnitude)} {units[idx]}"
        return f"{magnitude:.2f} {units[idx]}"

    def __str__(self) -> str:
        return self.to_human_readable()

    def __repr__(self) -> str:
        return f"FileSize({self.bytes})"
|
||||
13
domain/subtitles/__init__.py
Normal file
13
domain/subtitles/__init__.py
Normal file
@@ -0,0 +1,13 @@
|
||||
"""Subtitles domain - Business logic for subtitle management (shared across movies and TV shows)."""
|
||||
from .entities import Subtitle
|
||||
from .value_objects import Language, SubtitleFormat
|
||||
from .exceptions import SubtitleNotFound
|
||||
from .services import SubtitleService
|
||||
|
||||
__all__ = [
|
||||
"Subtitle",
|
||||
"Language",
|
||||
"SubtitleFormat",
|
||||
"SubtitleNotFound",
|
||||
"SubtitleService",
|
||||
]
|
||||
93
domain/subtitles/entities.py
Normal file
93
domain/subtitles/entities.py
Normal file
@@ -0,0 +1,93 @@
|
||||
"""Subtitle domain entities."""
|
||||
from dataclasses import dataclass
|
||||
from typing import Optional
|
||||
|
||||
from ..shared.value_objects import ImdbId, FilePath
|
||||
from .value_objects import Language, SubtitleFormat, TimingOffset
|
||||
|
||||
|
||||
@dataclass
class Subtitle:
    """
    Subtitle entity representing a subtitle file.

    Can be associated with either a movie or a TV show episode
    (episodes are identified by season_number + episode_number).
    """
    # IMDb ID of the movie or show this subtitle belongs to.
    media_imdb_id: ImdbId
    language: Language
    format: SubtitleFormat
    file_path: FilePath

    # Optional: for TV shows
    season_number: Optional[int] = None
    episode_number: Optional[int] = None

    # Subtitle metadata
    # NOTE(review): a single TimingOffset(0) instance is shared as the
    # default across all Subtitle instances — safe only if TimingOffset
    # is an immutable value object; confirm.
    timing_offset: TimingOffset = TimingOffset(0)
    hearing_impaired: bool = False
    forced: bool = False  # Forced subtitles (for foreign language parts)

    # Source information
    source: Optional[str] = None  # e.g., "OpenSubtitles", "Subscene"
    uploader: Optional[str] = None
    download_count: Optional[int] = None
    rating: Optional[float] = None

    def __post_init__(self):
        """Validate subtitle entity.

        Coerces str arguments into their value-object types. NOTE(review):
        unlike Movie.__post_init__, non-str/non-value-object inputs are
        silently left as-is here (no else-raise branch) — confirm intended.
        """
        # Ensure ImdbId is actually an ImdbId instance
        if not isinstance(self.media_imdb_id, ImdbId):
            if isinstance(self.media_imdb_id, str):
                object.__setattr__(self, 'media_imdb_id', ImdbId(self.media_imdb_id))

        # Ensure Language is actually a Language instance
        if not isinstance(self.language, Language):
            if isinstance(self.language, str):
                object.__setattr__(self, 'language', Language.from_code(self.language))

        # Ensure SubtitleFormat is actually a SubtitleFormat instance
        if not isinstance(self.format, SubtitleFormat):
            if isinstance(self.format, str):
                object.__setattr__(self, 'format', SubtitleFormat.from_extension(self.format))

        # Ensure FilePath is actually a FilePath instance
        if not isinstance(self.file_path, FilePath):
            object.__setattr__(self, 'file_path', FilePath(self.file_path))

    def is_for_movie(self) -> bool:
        """Check if this subtitle is for a movie (no season/episode set)."""
        return self.season_number is None and self.episode_number is None

    def is_for_episode(self) -> bool:
        """Check if this subtitle is for a TV show episode (both season and episode set)."""
        return self.season_number is not None and self.episode_number is not None

    def get_filename(self) -> str:
        """
        Get the suggested filename for this subtitle.

        Format for movies: "Movie.Title.{lang}.{format}"
        Format for episodes: "S01E05.{lang}.{format}"

        Optional ".hi" / ".forced" markers are appended before the extension.
        """
        if self.is_for_episode():
            base = f"S{self.season_number:02d}E{self.episode_number:02d}"
        else:
            # For movies, use the file path stem
            base = self.file_path.value.stem

        parts = [base, self.language.value]

        if self.hearing_impaired:
            parts.append("hi")
        if self.forced:
            parts.append("forced")

        return f"{'.'.join(parts)}.{self.format.value}"

    def __str__(self) -> str:
        if self.is_for_episode():
            return f"Subtitle S{self.season_number:02d}E{self.episode_number:02d} ({self.language.value})"
        return f"Subtitle ({self.language.value})"

    def __repr__(self) -> str:
        return f"Subtitle(media={self.media_imdb_id}, lang={self.language.value})"
|
||||
12
domain/subtitles/exceptions.py
Normal file
12
domain/subtitles/exceptions.py
Normal file
@@ -0,0 +1,12 @@
|
||||
"""Subtitle domain exceptions."""
|
||||
from ..shared.exceptions import DomainException, NotFoundError
|
||||
|
||||
|
||||
class SubtitleNotFound(NotFoundError):
    """Raised when a subtitle is not found in the repository."""
    pass
|
||||
|
||||
|
||||
class InvalidSubtitleFormat(DomainException):
    """Raised when a subtitle format is invalid or unrecognized."""
    pass
|
||||
60
domain/subtitles/repositories.py
Normal file
60
domain/subtitles/repositories.py
Normal file
@@ -0,0 +1,60 @@
|
||||
"""Subtitle repository interfaces (abstract)."""
|
||||
from abc import ABC, abstractmethod
|
||||
from typing import List, Optional
|
||||
|
||||
from ..shared.value_objects import ImdbId
|
||||
from .entities import Subtitle
|
||||
from .value_objects import Language
|
||||
|
||||
|
||||
class SubtitleRepository(ABC):
    """
    Abstract repository for subtitle persistence.

    This defines the interface that infrastructure implementations must follow.
    Shared by the movies and TV-shows domains via SubtitleService.
    """

    @abstractmethod
    def save(self, subtitle: Subtitle) -> None:
        """
        Save a subtitle to the repository.

        Args:
            subtitle: Subtitle entity to save
        """
        pass

    @abstractmethod
    def find_by_media(
        self,
        media_imdb_id: ImdbId,
        language: Optional[Language] = None,
        season: Optional[int] = None,
        episode: Optional[int] = None
    ) -> List[Subtitle]:
        """
        Find subtitles for a media item.

        All filters are optional; omitting season/episode matches movie
        subtitles, supplying both matches a specific episode.

        Args:
            media_imdb_id: IMDb ID of the media
            language: Optional language filter
            season: Optional season number (for TV shows)
            episode: Optional episode number (for TV shows)

        Returns:
            List of matching subtitles
        """
        pass

    @abstractmethod
    def delete(self, subtitle: Subtitle) -> bool:
        """
        Delete a subtitle from the repository.

        Args:
            subtitle: Subtitle to delete

        Returns:
            True if deleted, False if not found
        """
        pass
|
||||
154
domain/subtitles/services.py
Normal file
154
domain/subtitles/services.py
Normal file
@@ -0,0 +1,154 @@
|
||||
"""Subtitle domain services - Business logic."""
|
||||
import logging
|
||||
from typing import List, Optional
|
||||
|
||||
from ..shared.value_objects import ImdbId, FilePath
|
||||
from .entities import Subtitle
|
||||
from .value_objects import Language, SubtitleFormat
|
||||
from .repositories import SubtitleRepository
|
||||
from .exceptions import SubtitleNotFound
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class SubtitleService:
    """
    Domain service grouping subtitle business logic.

    Deliberately shared: both the movies and the TV-shows domains rely
    on this service to manage their subtitle collections.
    """

    def __init__(self, repository: SubtitleRepository):
        """
        Args:
            repository: Persistence backend used for all subtitle storage.
        """
        self.repository = repository

    def add_subtitle(self, subtitle: Subtitle) -> None:
        """
        Store a subtitle in the library.

        Args:
            subtitle: Subtitle entity to persist.
        """
        self.repository.save(subtitle)
        logger.info(f"Added subtitle: {subtitle.language.value} for {subtitle.media_imdb_id}")

    def find_subtitles_for_movie(
        self,
        imdb_id: ImdbId,
        languages: Optional[List[Language]] = None
    ) -> List[Subtitle]:
        """
        Look up subtitles attached to a movie.

        Args:
            imdb_id: IMDb ID of the movie.
            languages: When given, restrict the result to these languages.

        Returns:
            Matching subtitles, possibly empty.
        """
        if not languages:
            return self.repository.find_by_media(imdb_id)
        # One repository query per requested language, concatenated in order.
        return [
            sub
            for lang in languages
            for sub in self.repository.find_by_media(imdb_id, language=lang)
        ]

    def find_subtitles_for_episode(
        self,
        imdb_id: ImdbId,
        season: int,
        episode: int,
        languages: Optional[List[Language]] = None
    ) -> List[Subtitle]:
        """
        Look up subtitles attached to one TV show episode.

        Args:
            imdb_id: IMDb ID of the TV show.
            season: Season number.
            episode: Episode number.
            languages: When given, restrict the result to these languages.

        Returns:
            Matching subtitles, possibly empty.
        """
        if not languages:
            return self.repository.find_by_media(imdb_id, season=season, episode=episode)
        return [
            sub
            for lang in languages
            for sub in self.repository.find_by_media(
                imdb_id, language=lang, season=season, episode=episode
            )
        ]

    def remove_subtitle(self, subtitle: Subtitle) -> None:
        """
        Delete a subtitle from the library.

        Args:
            subtitle: Subtitle to delete.

        Raises:
            SubtitleNotFound: When the repository holds no such subtitle.
        """
        deleted = self.repository.delete(subtitle)
        if not deleted:
            raise SubtitleNotFound(f"Subtitle not found: {subtitle}")
        logger.info(f"Removed subtitle: {subtitle}")

    def detect_format_from_file(self, file_path: FilePath) -> SubtitleFormat:
        """
        Infer the subtitle format from a file's extension.

        Args:
            file_path: Path to the subtitle file.

        Returns:
            The detected SubtitleFormat.
        """
        return SubtitleFormat.from_extension(file_path.value.suffix)

    def validate_subtitle_file(self, file_path: FilePath) -> bool:
        """
        Check that a path points at an existing, supported subtitle file.

        Args:
            file_path: Candidate file path.

        Returns:
            True for an existing regular file with a supported extension.
        """
        if not file_path.exists():
            logger.warning(f"File does not exist: {file_path}")
            return False
        if not file_path.is_file():
            logger.warning(f"Path is not a file: {file_path}")
            return False
        # Supported formats are exactly those detect_format_from_file accepts.
        try:
            self.detect_format_from_file(file_path)
        except Exception as e:
            logger.warning(f"Invalid subtitle format: {e}")
            return False
        return True
|
||||
94
domain/subtitles/value_objects.py
Normal file
94
domain/subtitles/value_objects.py
Normal file
@@ -0,0 +1,94 @@
|
||||
"""Subtitle domain value objects."""
|
||||
from dataclasses import dataclass
|
||||
from enum import Enum
|
||||
|
||||
from ..shared.exceptions import ValidationError
|
||||
|
||||
|
||||
class Language(Enum):
    """Subtitle languages the system supports, keyed by ISO 639-1 code."""
    ENGLISH = "en"
    FRENCH = "fr"
    SPANISH = "es"
    GERMAN = "de"
    ITALIAN = "it"
    PORTUGUESE = "pt"
    RUSSIAN = "ru"
    JAPANESE = "ja"
    KOREAN = "ko"
    CHINESE = "zh"
    ARABIC = "ar"

    @classmethod
    def from_code(cls, code: str) -> "Language":
        """
        Resolve a two-letter ISO 639-1 code to a Language member.

        Args:
            code: Language code, matched case-insensitively.

        Returns:
            The matching Language member.

        Raises:
            ValidationError: When the code is not supported.
        """
        wanted = code.lower()
        found = next((lang for lang in cls if lang.value == wanted), None)
        if found is None:
            raise ValidationError(f"Unsupported language code: {code}")
        return found
|
||||
|
||||
|
||||
class SubtitleFormat(Enum):
    """Subtitle container formats the library can handle."""
    SRT = "srt"  # SubRip
    ASS = "ass"  # Advanced SubStation Alpha
    SSA = "ssa"  # SubStation Alpha
    VTT = "vtt"  # WebVTT
    SUB = "sub"  # MicroDVD

    @classmethod
    def from_extension(cls, extension: str) -> "SubtitleFormat":
        """
        Resolve a file extension to a SubtitleFormat member.

        Args:
            extension: File extension, with or without the leading dot.

        Returns:
            The matching SubtitleFormat member.

        Raises:
            ValidationError: When the extension is not supported.
        """
        normalized = extension.lower().lstrip('.')
        found = next((fmt for fmt in cls if fmt.value == normalized), None)
        if found is None:
            raise ValidationError(f"Unsupported subtitle format: {extension}")
        return found
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class TimingOffset:
    """
    Subtitle timing offset, expressed in milliseconds.

    Used when synchronizing a subtitle track with the video.
    """
    milliseconds: int

    def __post_init__(self):
        """Reject any non-integer offset."""
        if isinstance(self.milliseconds, int):
            return
        raise ValidationError(f"Timing offset must be an integer, got {type(self.milliseconds)}")

    def to_seconds(self) -> float:
        """Express the offset in (fractional) seconds."""
        return self.milliseconds / 1000

    def __str__(self) -> str:
        return f"{self.milliseconds}ms"

    def __repr__(self) -> str:
        return f"TimingOffset({self.milliseconds})"
|
||||
18
domain/tv_shows/__init__.py
Normal file
18
domain/tv_shows/__init__.py
Normal file
@@ -0,0 +1,18 @@
|
||||
"""TV Shows domain - Business logic for TV show management."""
|
||||
from .entities import TVShow, Season, Episode
|
||||
from .value_objects import ShowStatus, SeasonNumber, EpisodeNumber
|
||||
from .exceptions import TVShowNotFound, InvalidEpisode, SeasonNotFound
|
||||
from .services import TVShowService
|
||||
|
||||
__all__ = [
|
||||
"TVShow",
|
||||
"Season",
|
||||
"Episode",
|
||||
"ShowStatus",
|
||||
"SeasonNumber",
|
||||
"EpisodeNumber",
|
||||
"TVShowNotFound",
|
||||
"InvalidEpisode",
|
||||
"SeasonNotFound",
|
||||
"TVShowService",
|
||||
]
|
||||
191
domain/tv_shows/entities.py
Normal file
191
domain/tv_shows/entities.py
Normal file
@@ -0,0 +1,191 @@
|
||||
"""TV Show domain entities."""
|
||||
from dataclasses import dataclass, field
|
||||
from typing import Optional, List
|
||||
from datetime import datetime
|
||||
|
||||
from ..shared.value_objects import ImdbId, FilePath, FileSize
|
||||
from .value_objects import ShowStatus, SeasonNumber, EpisodeNumber
|
||||
|
||||
|
||||
@dataclass
class TVShow:
    """
    Aggregate root of the TV-shows domain: one show in the media library.

    Migrated from agent/models/tv_show.py.
    """
    imdb_id: ImdbId
    title: str
    seasons_count: int
    status: ShowStatus
    tmdb_id: Optional[int] = None
    overview: Optional[str] = None
    poster_path: Optional[str] = None
    first_air_date: Optional[str] = None
    vote_average: Optional[float] = None
    added_at: datetime = field(default_factory=datetime.now)

    def __post_init__(self):
        """Coerce raw constructor values into value objects and validate invariants."""
        # Accept a bare string for imdb_id and wrap it into the value object.
        if not isinstance(self.imdb_id, ImdbId):
            if not isinstance(self.imdb_id, str):
                raise ValueError(f"imdb_id must be ImdbId or str, got {type(self.imdb_id)}")
            object.__setattr__(self, 'imdb_id', ImdbId(self.imdb_id))

        # Accept a bare string for status and parse it.
        if not isinstance(self.status, ShowStatus):
            if not isinstance(self.status, str):
                raise ValueError(f"status must be ShowStatus or str, got {type(self.status)}")
            object.__setattr__(self, 'status', ShowStatus.from_string(self.status))

        if not isinstance(self.seasons_count, int) or self.seasons_count < 0:
            raise ValueError(f"seasons_count must be a non-negative integer, got {self.seasons_count}")

    def is_ongoing(self) -> bool:
        """True while the show is still airing."""
        return self.status == ShowStatus.ONGOING

    def is_ended(self) -> bool:
        """True once the show has finished airing."""
        return self.status == ShowStatus.ENDED

    def get_folder_name(self) -> str:
        """
        Library folder name for this show, e.g. "Breaking.Bad".

        Drops characters outside word/space/dot/dash, then dots the spaces.
        """
        import re
        sanitized = re.sub(r'[^\w\s\.\-]', '', self.title)
        return sanitized.replace(' ', '.')

    def __str__(self) -> str:
        return f"{self.title} ({self.status.value}, {self.seasons_count} seasons)"

    def __repr__(self) -> str:
        return f"TVShow(imdb_id={self.imdb_id}, title='{self.title}')"
|
||||
|
||||
|
||||
@dataclass
class Season:
    """One season of a tracked TV show."""
    show_imdb_id: ImdbId
    season_number: SeasonNumber
    episode_count: int
    name: Optional[str] = None
    overview: Optional[str] = None
    air_date: Optional[str] = None
    poster_path: Optional[str] = None

    def __post_init__(self):
        """Coerce raw constructor values into value objects and validate counts."""
        # A plain string becomes an ImdbId value object.
        if not isinstance(self.show_imdb_id, ImdbId) and isinstance(self.show_imdb_id, str):
            object.__setattr__(self, 'show_imdb_id', ImdbId(self.show_imdb_id))

        # A plain int becomes a SeasonNumber value object.
        if not isinstance(self.season_number, SeasonNumber) and isinstance(self.season_number, int):
            object.__setattr__(self, 'season_number', SeasonNumber(self.season_number))

        if not isinstance(self.episode_count, int) or self.episode_count < 0:
            raise ValueError(f"episode_count must be a non-negative integer, got {self.episode_count}")

    def is_special(self) -> bool:
        """True when this is the specials season (season 0)."""
        return self.season_number.is_special()

    def get_folder_name(self) -> str:
        """
        Library folder name: "Specials" for season 0, else "Season NN".
        """
        return "Specials" if self.is_special() else f"Season {self.season_number.value:02d}"

    def __str__(self) -> str:
        base = f"Season {self.season_number.value}"
        return f"{base}: {self.name}" if self.name else base

    def __repr__(self) -> str:
        return f"Season(show={self.show_imdb_id}, number={self.season_number.value})"
|
||||
|
||||
|
||||
@dataclass
class Episode:
    """
    Episode entity representing an episode of a TV show.
    """
    # Identity: parent show plus (season, episode) coordinates.
    show_imdb_id: ImdbId
    season_number: SeasonNumber
    episode_number: EpisodeNumber
    title: str
    # Local file info — set only once the episode exists on disk.
    file_path: Optional[FilePath] = None
    file_size: Optional[FileSize] = None
    # Optional metadata fields.
    overview: Optional[str] = None
    air_date: Optional[str] = None
    still_path: Optional[str] = None
    vote_average: Optional[float] = None
    runtime: Optional[int] = None  # in minutes

    def __post_init__(self):
        """Validate episode entity, coercing raw str/int values into value objects."""
        # Ensure ImdbId is actually an ImdbId instance
        if not isinstance(self.show_imdb_id, ImdbId):
            if isinstance(self.show_imdb_id, str):
                object.__setattr__(self, 'show_imdb_id', ImdbId(self.show_imdb_id))

        # Ensure SeasonNumber is actually a SeasonNumber instance
        if not isinstance(self.season_number, SeasonNumber):
            if isinstance(self.season_number, int):
                object.__setattr__(self, 'season_number', SeasonNumber(self.season_number))

        # Ensure EpisodeNumber is actually an EpisodeNumber instance
        if not isinstance(self.episode_number, EpisodeNumber):
            if isinstance(self.episode_number, int):
                object.__setattr__(self, 'episode_number', EpisodeNumber(self.episode_number))

    def has_file(self) -> bool:
        """Check if the episode has an associated file that exists on disk."""
        return self.file_path is not None and self.file_path.exists()

    def is_downloaded(self) -> bool:
        """Check if the episode is downloaded (alias of has_file)."""
        return self.has_file()

    def get_filename(self) -> str:
        """
        Get the suggested filename stem for this episode (no extension).

        Format: "SxxEyy.Cleaned.Title" — e.g. "S01E05.Pilot".
        """
        season_str = f"S{self.season_number.value:02d}"
        episode_str = f"E{self.episode_number.value:02d}"

        # Clean title for filename: strip punctuation, dot the spaces.
        import re
        clean_title = re.sub(r'[^\w\s\-]', '', self.title)
        clean_title = clean_title.replace(' ', '.')

        return f"{season_str}{episode_str}.{clean_title}"

    def __str__(self) -> str:
        return f"S{self.season_number.value:02d}E{self.episode_number.value:02d} - {self.title}"

    def __repr__(self) -> str:
        return f"Episode(show={self.show_imdb_id}, S{self.season_number.value:02d}E{self.episode_number.value:02d})"
|
||||
27
domain/tv_shows/exceptions.py
Normal file
27
domain/tv_shows/exceptions.py
Normal file
@@ -0,0 +1,27 @@
|
||||
"""TV Show domain exceptions."""
|
||||
from ..shared.exceptions import DomainException, NotFoundError
|
||||
|
||||
|
||||
class TVShowNotFound(NotFoundError):
    """No tracked TV show matches the requested identifier."""
|
||||
|
||||
|
||||
class SeasonNotFound(NotFoundError):
    """The requested season does not exist for the show."""
|
||||
|
||||
|
||||
class EpisodeNotFound(NotFoundError):
    """The requested episode does not exist for the show."""
|
||||
|
||||
|
||||
class InvalidEpisode(DomainException):
    """Episode data failed domain validation."""
|
||||
|
||||
|
||||
class TVShowAlreadyExists(DomainException):
    """Attempted to track a TV show that is already tracked."""
|
||||
130
domain/tv_shows/repositories.py
Normal file
130
domain/tv_shows/repositories.py
Normal file
@@ -0,0 +1,130 @@
|
||||
"""TV Show repository interfaces (abstract)."""
|
||||
from abc import ABC, abstractmethod
|
||||
from typing import List, Optional
|
||||
|
||||
from ..shared.value_objects import ImdbId
|
||||
from .entities import TVShow, Season, Episode
|
||||
from .value_objects import SeasonNumber, EpisodeNumber
|
||||
|
||||
|
||||
class TVShowRepository(ABC):
    """
    Abstract persistence port for TV shows.

    Infrastructure adapters implement this interface; the domain layer
    depends only on this abstraction.
    """

    @abstractmethod
    def save(self, show: TVShow) -> None:
        """Persist the given TV show entity."""
        ...

    @abstractmethod
    def find_by_imdb_id(self, imdb_id: ImdbId) -> Optional[TVShow]:
        """Return the show with this IMDb ID, or None when absent."""
        ...

    @abstractmethod
    def find_all(self) -> List[TVShow]:
        """Return every stored TV show."""
        ...

    @abstractmethod
    def delete(self, imdb_id: ImdbId) -> bool:
        """Remove the show with this IMDb ID; True when something was removed."""
        ...

    @abstractmethod
    def exists(self, imdb_id: ImdbId) -> bool:
        """Tell whether a show with this IMDb ID is stored."""
        ...
|
||||
|
||||
|
||||
class SeasonRepository(ABC):
    """Abstract persistence port for seasons."""

    @abstractmethod
    def save(self, season: Season) -> None:
        """Persist the given season."""
        ...

    @abstractmethod
    def find_by_show_and_number(
        self,
        show_imdb_id: ImdbId,
        season_number: SeasonNumber
    ) -> Optional[Season]:
        """Return the season identified by show and number, or None."""
        ...

    @abstractmethod
    def find_all_by_show(self, show_imdb_id: ImdbId) -> List[Season]:
        """Return every stored season of the given show."""
        ...
|
||||
|
||||
|
||||
class EpisodeRepository(ABC):
    """Abstract persistence port for episodes."""

    @abstractmethod
    def save(self, episode: Episode) -> None:
        """Persist the given episode."""
        ...

    @abstractmethod
    def find_by_show_season_episode(
        self,
        show_imdb_id: ImdbId,
        season_number: SeasonNumber,
        episode_number: EpisodeNumber
    ) -> Optional[Episode]:
        """Return the episode at (show, season, episode), or None."""
        ...

    @abstractmethod
    def find_all_by_season(
        self,
        show_imdb_id: ImdbId,
        season_number: SeasonNumber
    ) -> List[Episode]:
        """Return every stored episode of the given season."""
        ...

    @abstractmethod
    def find_all_by_show(self, show_imdb_id: ImdbId) -> List[Episode]:
        """Return every stored episode of the given show."""
        ...
|
||||
228
domain/tv_shows/services.py
Normal file
228
domain/tv_shows/services.py
Normal file
@@ -0,0 +1,228 @@
|
||||
"""TV Show domain services - Business logic."""
|
||||
import logging
|
||||
from typing import Optional, List
|
||||
import re
|
||||
|
||||
from ..shared.value_objects import ImdbId
|
||||
from .entities import TVShow, Season, Episode
|
||||
from .value_objects import SeasonNumber, EpisodeNumber
|
||||
from .repositories import TVShowRepository, SeasonRepository, EpisodeRepository
|
||||
from .exceptions import TVShowNotFound, TVShowAlreadyExists, SeasonNotFound, EpisodeNotFound
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class TVShowService:
    """
    Domain service for TV show-related business logic.

    Holds business rules that do not naturally belong to a single entity:
    tracking lifecycle, filename parsing, and next-episode computation.
    """

    def __init__(
        self,
        show_repository: TVShowRepository,
        season_repository: Optional[SeasonRepository] = None,
        episode_repository: Optional[EpisodeRepository] = None
    ):
        """
        Initialize TV show service.

        Args:
            show_repository: TV show repository for persistence
            season_repository: Optional season repository
            episode_repository: Optional episode repository
        """
        self.show_repository = show_repository
        self.season_repository = season_repository
        self.episode_repository = episode_repository

    def track_show(self, show: TVShow) -> None:
        """
        Start tracking a TV show.

        Args:
            show: TVShow entity to track

        Raises:
            TVShowAlreadyExists: If show is already being tracked
        """
        if self.show_repository.exists(show.imdb_id):
            raise TVShowAlreadyExists(f"TV show with IMDb ID {show.imdb_id} is already tracked")

        self.show_repository.save(show)
        logger.info(f"Started tracking TV show: {show.title} ({show.imdb_id})")

    def get_show(self, imdb_id: ImdbId) -> TVShow:
        """
        Get a TV show by IMDb ID.

        Args:
            imdb_id: IMDb ID of the show

        Returns:
            TVShow entity

        Raises:
            TVShowNotFound: If show not found
        """
        show = self.show_repository.find_by_imdb_id(imdb_id)
        if not show:
            raise TVShowNotFound(f"TV show with IMDb ID {imdb_id} not found")
        return show

    def get_all_shows(self) -> List[TVShow]:
        """Return every tracked TV show."""
        return self.show_repository.find_all()

    def get_ongoing_shows(self) -> List[TVShow]:
        """Return tracked shows that are still airing."""
        return [show for show in self.show_repository.find_all() if show.is_ongoing()]

    def get_ended_shows(self) -> List[TVShow]:
        """Return tracked shows that have finished airing."""
        return [show for show in self.show_repository.find_all() if show.is_ended()]

    def update_show(self, show: TVShow) -> None:
        """
        Update an existing TV show.

        Args:
            show: TVShow entity with updated data

        Raises:
            TVShowNotFound: If show doesn't exist
        """
        if not self.show_repository.exists(show.imdb_id):
            raise TVShowNotFound(f"TV show with IMDb ID {show.imdb_id} not found")

        self.show_repository.save(show)
        logger.info(f"Updated TV show: {show.title} ({show.imdb_id})")

    def untrack_show(self, imdb_id: ImdbId) -> None:
        """
        Stop tracking a TV show.

        Args:
            imdb_id: IMDb ID of the show to untrack

        Raises:
            TVShowNotFound: If show not found
        """
        if not self.show_repository.delete(imdb_id):
            raise TVShowNotFound(f"TV show with IMDb ID {imdb_id} not found")

        logger.info(f"Stopped tracking TV show with IMDb ID: {imdb_id}")

    def parse_episode_from_filename(self, filename: str) -> Optional[tuple[int, int]]:
        """
        Parse season and episode numbers from a filename.

        Supported formats (checked in order): "S01E05", "1x05",
        "Season 1 Episode 5".

        Args:
            filename: Filename to parse

        Returns:
            Tuple of (season, episode) if found, None otherwise
        """
        filename_lower = filename.lower()

        # Most specific pattern first. NOTE(review): the bare "NxM" pattern
        # can also match video resolutions such as "1920x1080"; it is only
        # consulted when no SxxEyy marker is present — confirm acceptable.
        patterns = (
            r's(\d{1,2})e(\d{1,2})',
            r'(\d{1,2})x(\d{1,2})',
            r'season\s*(\d{1,2})\s*episode\s*(\d{1,2})',
        )
        for pattern in patterns:
            match = re.search(pattern, filename_lower)
            if match:
                return (int(match.group(1)), int(match.group(2)))

        return None

    def validate_episode_file(self, filename: str) -> bool:
        """
        Validate that a filename looks like a video episode file.

        Args:
            filename: Filename to validate

        Returns:
            True when the extension is a known video format and
            season/episode information can be parsed from the name.
        """
        valid_extensions = {'.mkv', '.mp4', '.avi', '.mov', '.wmv', '.flv', '.webm'}
        extension = filename[filename.rfind('.'):].lower() if '.' in filename else ''

        if extension not in valid_extensions:
            logger.warning(f"Invalid file extension: {extension}")
            return False

        episode_info = self.parse_episode_from_filename(filename)
        if not episode_info:
            # BUG FIX: the warning previously lost its {filename} placeholder,
            # so the log carried no hint of which file failed to parse.
            logger.warning(f"Could not parse episode info from filename: {filename}")
            return False

        return True

    def find_next_episode(self, show: TVShow, last_season: int, last_episode: int) -> Optional[tuple[int, int]]:
        """
        Find the next episode to download for a show.

        Args:
            show: TVShow entity
            last_season: Last downloaded season number
            last_episode: Last downloaded episode number

        Returns:
            Tuple of (season, episode) for next episode, or None if show is complete
        """
        # An ended show with every known season consumed has nothing left.
        if show.is_ended() and last_season >= show.seasons_count:
            return None

        next_season, next_episode = last_season, last_episode + 1

        # Heuristic rollover: assume at most 50 episodes per season.
        # TODO: use real per-season episode counts when available.
        if next_episode > 50:
            next_season += 1
            next_episode = 1

        # Never advance beyond the known season count.
        if next_season > show.seasons_count:
            return None

        return (next_season, next_episode)
|
||||
96
domain/tv_shows/value_objects.py
Normal file
96
domain/tv_shows/value_objects.py
Normal file
@@ -0,0 +1,96 @@
|
||||
"""TV Show domain value objects."""
|
||||
from dataclasses import dataclass
|
||||
from enum import Enum
|
||||
|
||||
from ..shared.exceptions import ValidationError
|
||||
|
||||
|
||||
class ShowStatus(Enum):
    """Airing status of a TV show."""
    ONGOING = "ongoing"
    ENDED = "ended"
    UNKNOWN = "unknown"

    @classmethod
    def from_string(cls, status_str: str) -> "ShowStatus":
        """
        Parse a status string into a member, case-insensitively.

        Args:
            status_str: Status string (e.g., "ongoing", "ended")

        Returns:
            Matching member; any unrecognized string maps to UNKNOWN.
        """
        normalized = status_str.lower()
        if normalized == "ongoing":
            return cls.ONGOING
        if normalized == "ended":
            return cls.ENDED
        return cls.UNKNOWN
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class SeasonNumber:
    """
    Validated season number.

    Must be an int in [0, 100]; season 0 is reserved for specials.
    """
    value: int

    def __post_init__(self):
        """Reject non-ints, negative values, and implausibly large numbers."""
        if not isinstance(self.value, int):
            raise ValidationError(f"Season number must be an integer, got {type(self.value)}")
        if self.value < 0:
            raise ValidationError(f"Season number cannot be negative: {self.value}")
        if self.value > 100:
            raise ValidationError(f"Season number too high: {self.value}")

    def is_special(self) -> bool:
        """Season 0 is the conventional slot for specials."""
        return self.value == 0

    def __int__(self) -> int:
        return self.value

    def __str__(self) -> str:
        return str(self.value)

    def __repr__(self) -> str:
        return f"SeasonNumber({self.value})"
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class EpisodeNumber:
    """
    Validated episode number.

    Must be an int in [1, 1000].
    """
    value: int

    def __post_init__(self):
        """Reject non-ints, values below 1, and implausibly large numbers."""
        if not isinstance(self.value, int):
            raise ValidationError(f"Episode number must be an integer, got {type(self.value)}")
        if self.value < 1:
            raise ValidationError(f"Episode number must be >= 1, got {self.value}")
        if self.value > 1000:
            raise ValidationError(f"Episode number too high: {self.value}")

    def __int__(self) -> int:
        return self.value

    def __str__(self) -> str:
        return str(self.value)

    def __repr__(self) -> str:
        return f"EpisodeNumber({self.value})"
|
||||
1
infrastructure/__init__.py
Normal file
1
infrastructure/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
"""Infrastructure layer - External services, persistence, and technical concerns."""
|
||||
1
infrastructure/api/__init__.py
Normal file
1
infrastructure/api/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
"""API clients for external services."""
|
||||
22
infrastructure/api/knaben/__init__.py
Normal file
22
infrastructure/api/knaben/__init__.py
Normal file
@@ -0,0 +1,22 @@
|
||||
"""Knaben API client."""
|
||||
from .client import KnabenClient
|
||||
from .dto import TorrentResult
|
||||
from .exceptions import (
|
||||
KnabenError,
|
||||
KnabenConfigurationError,
|
||||
KnabenAPIError,
|
||||
KnabenNotFoundError,
|
||||
)
|
||||
|
||||
# Global Knaben client instance (singleton)
|
||||
knaben_client = KnabenClient()
|
||||
|
||||
__all__ = [
|
||||
"KnabenClient",
|
||||
"TorrentResult",
|
||||
"KnabenError",
|
||||
"KnabenConfigurationError",
|
||||
"KnabenAPIError",
|
||||
"KnabenNotFoundError",
|
||||
"knaben_client",
|
||||
]
|
||||
@@ -1,49 +1,16 @@
|
||||
"""Knaben torrent search API client."""
|
||||
from typing import Dict, Any, Optional, List
|
||||
from dataclasses import dataclass
|
||||
import logging
|
||||
import requests
|
||||
from requests.exceptions import RequestException, Timeout, HTTPError
|
||||
|
||||
from ..config import Settings, settings
|
||||
from agent.config import Settings, settings
|
||||
from .dto import TorrentResult
|
||||
from .exceptions import KnabenError, KnabenAPIError, KnabenNotFoundError
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class KnabenError(Exception):
|
||||
"""Base exception for Knaben-related errors."""
|
||||
pass
|
||||
|
||||
|
||||
class KnabenConfigurationError(KnabenError):
|
||||
"""Raised when Knaben API is not properly configured."""
|
||||
pass
|
||||
|
||||
|
||||
class KnabenAPIError(KnabenError):
|
||||
"""Raised when Knaben API returns an error."""
|
||||
pass
|
||||
|
||||
|
||||
class KnabenNotFoundError(KnabenError):
|
||||
"""Raised when no torrents are found."""
|
||||
pass
|
||||
|
||||
|
||||
@dataclass
|
||||
class TorrentResult:
|
||||
"""Represents a torrent search result from Knaben."""
|
||||
title: str
|
||||
size: str
|
||||
seeders: int
|
||||
leechers: int
|
||||
magnet: str
|
||||
info_hash: Optional[str] = None
|
||||
tracker: Optional[str] = None
|
||||
upload_date: Optional[str] = None
|
||||
category: Optional[str] = None
|
||||
|
||||
|
||||
class KnabenClient:
|
||||
"""
|
||||
Client for interacting with Knaben torrent search API.
|
||||
@@ -89,7 +56,6 @@ class KnabenClient:
|
||||
Make a request to Knaben API.
|
||||
|
||||
Args:
|
||||
endpoint: API endpoint (e.g., '/search')
|
||||
params: Query parameters
|
||||
|
||||
Returns:
|
||||
@@ -134,14 +100,13 @@ class KnabenClient:
|
||||
|
||||
Args:
|
||||
query: Search query (e.g., "Inception 1080p")
|
||||
limit: Maximum number of results (default: 50)
|
||||
limit: Maximum number of results (default: 10)
|
||||
|
||||
Returns:
|
||||
List of TorrentResult objects
|
||||
|
||||
Raises:
|
||||
KnabenAPIError: If request fails
|
||||
KnabenNotFoundError: If no results found
|
||||
ValueError: If query is invalid
|
||||
"""
|
||||
if not query or not isinstance(query, str):
|
||||
@@ -164,8 +129,7 @@ class KnabenClient:
|
||||
|
||||
try:
|
||||
data = self._make_request(params)
|
||||
except KnabenNotFoundError as e:
|
||||
# No results found
|
||||
except KnabenNotFoundError:
|
||||
logger.info(f"No torrents found for '{query}'")
|
||||
return []
|
||||
except Exception as e:
|
||||
@@ -225,6 +189,3 @@ class KnabenClient:
|
||||
upload_date=upload_date,
|
||||
category=category
|
||||
)
|
||||
|
||||
# Global Knaben client instance (singleton)
|
||||
knaben_client = KnabenClient()
|
||||
17
infrastructure/api/knaben/dto.py
Normal file
17
infrastructure/api/knaben/dto.py
Normal file
@@ -0,0 +1,17 @@
|
||||
"""Knaben Data Transfer Objects."""
|
||||
from dataclasses import dataclass
|
||||
from typing import Optional
|
||||
|
||||
|
||||
@dataclass
|
||||
class TorrentResult:
|
||||
"""Represents a torrent search result from Knaben."""
|
||||
title: str
|
||||
size: str
|
||||
seeders: int
|
||||
leechers: int
|
||||
magnet: str
|
||||
info_hash: Optional[str] = None
|
||||
tracker: Optional[str] = None
|
||||
upload_date: Optional[str] = None
|
||||
category: Optional[str] = None
|
||||
21
infrastructure/api/knaben/exceptions.py
Normal file
21
infrastructure/api/knaben/exceptions.py
Normal file
@@ -0,0 +1,21 @@
|
||||
"""Knaben API exceptions."""
|
||||
|
||||
|
||||
class KnabenError(Exception):
|
||||
"""Base exception for Knaben-related errors."""
|
||||
pass
|
||||
|
||||
|
||||
class KnabenConfigurationError(KnabenError):
|
||||
"""Raised when Knaben API is not properly configured."""
|
||||
pass
|
||||
|
||||
|
||||
class KnabenAPIError(KnabenError):
|
||||
"""Raised when Knaben API returns an error."""
|
||||
pass
|
||||
|
||||
|
||||
class KnabenNotFoundError(KnabenError):
|
||||
"""Raised when no torrents are found."""
|
||||
pass
|
||||
22
infrastructure/api/qbittorrent/__init__.py
Normal file
22
infrastructure/api/qbittorrent/__init__.py
Normal file
@@ -0,0 +1,22 @@
|
||||
"""qBittorrent API client."""
|
||||
from .client import QBittorrentClient
|
||||
from .dto import TorrentInfo
|
||||
from .exceptions import (
|
||||
QBittorrentError,
|
||||
QBittorrentConfigurationError,
|
||||
QBittorrentAPIError,
|
||||
QBittorrentAuthError,
|
||||
)
|
||||
|
||||
# Global qBittorrent client instance (singleton)
|
||||
qbittorrent_client = QBittorrentClient()
|
||||
|
||||
__all__ = [
|
||||
"QBittorrentClient",
|
||||
"TorrentInfo",
|
||||
"QBittorrentError",
|
||||
"QBittorrentConfigurationError",
|
||||
"QBittorrentAPIError",
|
||||
"QBittorrentAuthError",
|
||||
"qbittorrent_client",
|
||||
]
|
||||
@@ -1,53 +1,16 @@
|
||||
"""qBittorrent Web API client."""
|
||||
from typing import Dict, Any, Optional, List
|
||||
from dataclasses import dataclass
|
||||
import logging
|
||||
import requests
|
||||
from requests.exceptions import RequestException, Timeout, HTTPError
|
||||
|
||||
from ..config import Settings, settings
|
||||
from agent.config import Settings, settings
|
||||
from .dto import TorrentInfo
|
||||
from .exceptions import QBittorrentError, QBittorrentAPIError, QBittorrentAuthError
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class QBittorrentError(Exception):
|
||||
"""Base exception for qBittorrent-related errors."""
|
||||
pass
|
||||
|
||||
|
||||
class QBittorrentConfigurationError(QBittorrentError):
|
||||
"""Raised when qBittorrent is not properly configured."""
|
||||
pass
|
||||
|
||||
|
||||
class QBittorrentAPIError(QBittorrentError):
|
||||
"""Raised when qBittorrent API returns an error."""
|
||||
pass
|
||||
|
||||
|
||||
class QBittorrentAuthError(QBittorrentError):
|
||||
"""Raised when authentication fails."""
|
||||
pass
|
||||
|
||||
|
||||
@dataclass
|
||||
class TorrentInfo:
|
||||
"""Represents a torrent in qBittorrent."""
|
||||
hash: str
|
||||
name: str
|
||||
size: int
|
||||
progress: float
|
||||
state: str
|
||||
download_speed: int
|
||||
upload_speed: int
|
||||
eta: int
|
||||
num_seeds: int
|
||||
num_leechs: int
|
||||
ratio: float
|
||||
category: Optional[str] = None
|
||||
save_path: Optional[str] = None
|
||||
|
||||
|
||||
class QBittorrentClient:
|
||||
"""
|
||||
Client for interacting with qBittorrent Web API.
|
||||
@@ -423,7 +386,3 @@ class QBittorrentClient:
|
||||
category=torrent.get("category"),
|
||||
save_path=torrent.get("save_path")
|
||||
)
|
||||
|
||||
|
||||
# Global qBittorrent client instance (singleton)
|
||||
qbittorrent_client = QBittorrentClient()
|
||||
21
infrastructure/api/qbittorrent/dto.py
Normal file
21
infrastructure/api/qbittorrent/dto.py
Normal file
@@ -0,0 +1,21 @@
|
||||
"""qBittorrent Data Transfer Objects."""
|
||||
from dataclasses import dataclass
|
||||
from typing import Optional
|
||||
|
||||
|
||||
@dataclass
|
||||
class TorrentInfo:
|
||||
"""Represents a torrent in qBittorrent."""
|
||||
hash: str
|
||||
name: str
|
||||
size: int
|
||||
progress: float
|
||||
state: str
|
||||
download_speed: int
|
||||
upload_speed: int
|
||||
eta: int
|
||||
num_seeds: int
|
||||
num_leechs: int
|
||||
ratio: float
|
||||
category: Optional[str] = None
|
||||
save_path: Optional[str] = None
|
||||
21
infrastructure/api/qbittorrent/exceptions.py
Normal file
21
infrastructure/api/qbittorrent/exceptions.py
Normal file
@@ -0,0 +1,21 @@
|
||||
"""qBittorrent API exceptions."""
|
||||
|
||||
|
||||
class QBittorrentError(Exception):
|
||||
"""Base exception for qBittorrent-related errors."""
|
||||
pass
|
||||
|
||||
|
||||
class QBittorrentConfigurationError(QBittorrentError):
|
||||
"""Raised when qBittorrent is not properly configured."""
|
||||
pass
|
||||
|
||||
|
||||
class QBittorrentAPIError(QBittorrentError):
|
||||
"""Raised when qBittorrent API returns an error."""
|
||||
pass
|
||||
|
||||
|
||||
class QBittorrentAuthError(QBittorrentError):
|
||||
"""Raised when authentication fails."""
|
||||
pass
|
||||
23
infrastructure/api/tmdb/__init__.py
Normal file
23
infrastructure/api/tmdb/__init__.py
Normal file
@@ -0,0 +1,23 @@
|
||||
"""TMDB API client."""
|
||||
from .client import TMDBClient
|
||||
from .dto import MediaResult, ExternalIds
|
||||
from .exceptions import (
|
||||
TMDBError,
|
||||
TMDBConfigurationError,
|
||||
TMDBAPIError,
|
||||
TMDBNotFoundError,
|
||||
)
|
||||
|
||||
# Global TMDB client instance (singleton)
|
||||
tmdb_client = TMDBClient()
|
||||
|
||||
__all__ = [
|
||||
"TMDBClient",
|
||||
"MediaResult",
|
||||
"ExternalIds",
|
||||
"TMDBError",
|
||||
"TMDBConfigurationError",
|
||||
"TMDBAPIError",
|
||||
"TMDBNotFoundError",
|
||||
"tmdb_client",
|
||||
]
|
||||
@@ -1,48 +1,16 @@
|
||||
"""TMDB (The Movie Database) API client."""
|
||||
from typing import Dict, Any, Optional, List
|
||||
from dataclasses import dataclass
|
||||
import logging
|
||||
import requests
|
||||
from requests.exceptions import RequestException, Timeout, HTTPError
|
||||
|
||||
from ..config import Settings, settings
|
||||
from agent.config import Settings, settings
|
||||
from .dto import MediaResult
|
||||
from .exceptions import TMDBError, TMDBConfigurationError, TMDBAPIError, TMDBNotFoundError
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class TMDBError(Exception):
|
||||
"""Base exception for TMDB-related errors."""
|
||||
pass
|
||||
|
||||
|
||||
class TMDBConfigurationError(TMDBError):
|
||||
"""Raised when TMDB API is not properly configured."""
|
||||
pass
|
||||
|
||||
|
||||
class TMDBAPIError(TMDBError):
|
||||
"""Raised when TMDB API returns an error."""
|
||||
pass
|
||||
|
||||
|
||||
class TMDBNotFoundError(TMDBError):
|
||||
"""Raised when media is not found."""
|
||||
pass
|
||||
|
||||
|
||||
@dataclass
|
||||
class MediaResult:
|
||||
"""Represents a media search result from TMDB."""
|
||||
tmdb_id: int
|
||||
title: str
|
||||
media_type: str # 'movie' or 'tv'
|
||||
imdb_id: Optional[str] = None
|
||||
overview: Optional[str] = None
|
||||
release_date: Optional[str] = None
|
||||
poster_path: Optional[str] = None
|
||||
vote_average: Optional[float] = None
|
||||
|
||||
|
||||
class TMDBClient:
|
||||
"""
|
||||
Client for interacting with The Movie Database (TMDB) API.
|
||||
@@ -311,7 +279,3 @@ class TMDBClient:
|
||||
True if configured, False otherwise
|
||||
"""
|
||||
return bool(self.api_key and self.base_url)
|
||||
|
||||
|
||||
# Global TMDB client instance (singleton)
|
||||
tmdb_client = TMDBClient()
|
||||
26
infrastructure/api/tmdb/dto.py
Normal file
26
infrastructure/api/tmdb/dto.py
Normal file
@@ -0,0 +1,26 @@
|
||||
"""TMDB Data Transfer Objects."""
|
||||
from dataclasses import dataclass
|
||||
from typing import Optional
|
||||
|
||||
|
||||
@dataclass
|
||||
class MediaResult:
|
||||
"""Represents a media search result from TMDB."""
|
||||
tmdb_id: int
|
||||
title: str
|
||||
media_type: str # 'movie' or 'tv'
|
||||
imdb_id: Optional[str] = None
|
||||
overview: Optional[str] = None
|
||||
release_date: Optional[str] = None
|
||||
poster_path: Optional[str] = None
|
||||
vote_average: Optional[float] = None
|
||||
|
||||
|
||||
@dataclass
|
||||
class ExternalIds:
|
||||
"""External IDs for a media item."""
|
||||
imdb_id: Optional[str] = None
|
||||
tvdb_id: Optional[int] = None
|
||||
facebook_id: Optional[str] = None
|
||||
instagram_id: Optional[str] = None
|
||||
twitter_id: Optional[str] = None
|
||||
21
infrastructure/api/tmdb/exceptions.py
Normal file
21
infrastructure/api/tmdb/exceptions.py
Normal file
@@ -0,0 +1,21 @@
|
||||
"""TMDB API exceptions."""
|
||||
|
||||
|
||||
class TMDBError(Exception):
|
||||
"""Base exception for TMDB-related errors."""
|
||||
pass
|
||||
|
||||
|
||||
class TMDBConfigurationError(TMDBError):
|
||||
"""Raised when TMDB API is not properly configured."""
|
||||
pass
|
||||
|
||||
|
||||
class TMDBAPIError(TMDBError):
|
||||
"""Raised when TMDB API returns an error."""
|
||||
pass
|
||||
|
||||
|
||||
class TMDBNotFoundError(TMDBError):
|
||||
"""Raised when media is not found."""
|
||||
pass
|
||||
11
infrastructure/filesystem/__init__.py
Normal file
11
infrastructure/filesystem/__init__.py
Normal file
@@ -0,0 +1,11 @@
|
||||
"""Filesystem operations."""
|
||||
from .file_manager import FileManager
|
||||
from .organizer import MediaOrganizer
|
||||
from .exceptions import FilesystemError, PathTraversalError
|
||||
|
||||
__all__ = [
|
||||
"FileManager",
|
||||
"MediaOrganizer",
|
||||
"FilesystemError",
|
||||
"PathTraversalError",
|
||||
]
|
||||
21
infrastructure/filesystem/exceptions.py
Normal file
21
infrastructure/filesystem/exceptions.py
Normal file
@@ -0,0 +1,21 @@
|
||||
"""Filesystem exceptions."""
|
||||
|
||||
|
||||
class FilesystemError(Exception):
|
||||
"""Base exception for filesystem operations."""
|
||||
pass
|
||||
|
||||
|
||||
class PathTraversalError(FilesystemError):
|
||||
"""Raised when path traversal attack is detected."""
|
||||
pass
|
||||
|
||||
|
||||
class FileNotFoundError(FilesystemError):
|
||||
"""Raised when a file is not found."""
|
||||
pass
|
||||
|
||||
|
||||
class PermissionDeniedError(FilesystemError):
|
||||
"""Raised when permission is denied."""
|
||||
pass
|
||||
309
infrastructure/filesystem/file_manager.py
Normal file
309
infrastructure/filesystem/file_manager.py
Normal file
@@ -0,0 +1,309 @@
|
||||
"""File manager - Migrated from agent/tools/filesystem.py with domain logic extracted."""
|
||||
from typing import Dict, Any, List
|
||||
from enum import Enum
|
||||
from pathlib import Path
|
||||
import logging
|
||||
import os
|
||||
import shutil
|
||||
|
||||
from .exceptions import FilesystemError, PathTraversalError
|
||||
from infrastructure.persistence.memory import Memory
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class FolderName(Enum):
    """Configurable folder categories recognised by the file manager."""
    DOWNLOAD = "download"  # Incoming downloads land here
    TVSHOW = "tvshow"      # Organised TV-show library root
    MOVIE = "movie"        # Organised movie library root
    TORRENT = "torrent"    # Folder watched for .torrent files
|
||||
|
||||
class FileManager:
    """
    File manager for filesystem operations.

    Handles folder configuration, listing, and file operations with security
    checks (path sanitization and containment) to prevent path traversal.
    Folder paths are persisted in the shared Memory store under the "config"
    key as "<folder_name>_folder" entries. All public methods return result
    dicts ({"status": "ok", ...} or {"error": ..., "message": ...}) instead
    of raising, so they are safe to expose to tool-calling code.
    """

    def __init__(self, memory: Memory):
        """
        Initialize file manager.

        Args:
            memory: Memory instance for folder configuration
        """
        self.memory = memory

    def set_folder_path(self, folder_name: str, path_value: str) -> Dict[str, Any]:
        """
        Set a folder path in the configuration with validation.

        The path must exist, be a directory, and be readable; on success it
        is stored (resolved to an absolute path) under "<folder_name>_folder".

        Args:
            folder_name: Name of folder to set (download, tvshow, movie, torrent)
            path_value: Absolute path to the folder

        Returns:
            Dict with status or error information
        """
        try:
            # Validate folder name against the FolderName enum
            self._validate_folder_name(folder_name)

            # Resolve to an absolute, symlink-free path for consistent storage
            path_obj = Path(path_value).resolve()

            # Validate path exists and is a directory
            if not path_obj.exists():
                logger.warning(f"Path does not exist: {path_value}")
                return {
                    "error": "invalid_path",
                    "message": f"Path does not exist: {path_value}"
                }

            if not path_obj.is_dir():
                logger.warning(f"Path is not a directory: {path_value}")
                return {
                    "error": "invalid_path",
                    "message": f"Path is not a directory: {path_value}"
                }

            # Check readability up front so later listings don't fail late
            if not os.access(path_obj, os.R_OK):
                logger.warning(f"Path is not readable: {path_value}")
                return {
                    "error": "permission_denied",
                    "message": f"Path is not readable: {path_value}"
                }

            # Store in memory under config["<folder_name>_folder"]
            config = self.memory.get("config", {})
            config[f"{folder_name}_folder"] = str(path_obj)
            self.memory.set("config", config)

            logger.info(f"Set {folder_name}_folder to: {path_obj}")
            return {
                "status": "ok",
                "folder_name": folder_name,
                "path": str(path_obj)
            }

        except ValueError as e:
            logger.error(f"Validation error: {e}")
            return {"error": "validation_failed", "message": str(e)}

        except Exception as e:
            logger.error(f"Unexpected error setting path: {e}", exc_info=True)
            return {"error": "internal_error", "message": "Failed to set path"}

    def list_folder(self, folder_type: str, path: str = ".") -> Dict[str, Any]:
        """
        List contents of a folder with security checks.

        The relative *path* is sanitized and the resolved target is verified
        to stay inside the configured root before listing.

        Args:
            folder_type: Type of folder to list (download, tvshow, movie, torrent)
            path: Relative path within the folder (default: ".")

        Returns:
            Dict with folder contents or error information
        """
        try:
            # Validate folder type
            self._validate_folder_name(folder_type)

            # Sanitize the path (raises PathTraversalError on abuse)
            safe_path = self._sanitize_path(path)

            # Get root folder from config
            folder_key = f"{folder_type}_folder"
            config = self.memory.get("config", {})

            if folder_key not in config or not config[folder_key]:
                logger.warning(f"Folder not configured: {folder_type}")
                return {
                    "error": "folder_not_set",
                    "message": f"{folder_type.capitalize()} folder not set in config."
                }

            root = Path(config[folder_key])
            target = root / safe_path

            # Security check: ensure target is within root even after resolution
            if not self._is_safe_path(root, target):
                logger.warning(f"Path traversal attempt detected: {path}")
                return {
                    "error": "forbidden",
                    "message": "Access denied: path outside allowed directory"
                }

            # Check if target exists
            if not target.exists():
                logger.warning(f"Path does not exist: {target}")
                return {
                    "error": "not_found",
                    "message": f"Path does not exist: {safe_path}"
                }

            # Check if target is a directory
            if not target.is_dir():
                logger.warning(f"Path is not a directory: {target}")
                return {
                    "error": "not_a_directory",
                    "message": f"Path is not a directory: {safe_path}"
                }

            # List directory contents; PermissionError can still occur here
            try:
                entries = [entry.name for entry in target.iterdir()]
                logger.debug(f"Listed {len(entries)} entries in {target}")
                return {
                    "status": "ok",
                    "folder_type": folder_type,
                    "path": safe_path,
                    "entries": sorted(entries),
                    "count": len(entries)
                }
            except PermissionError:
                logger.warning(f"Permission denied accessing: {target}")
                return {
                    "error": "permission_denied",
                    "message": f"Permission denied accessing: {safe_path}"
                }

        except PathTraversalError as e:
            logger.warning(f"Path traversal attempt: {e}")
            return {
                "error": "forbidden",
                "message": str(e)
            }

        except ValueError as e:
            logger.error(f"Validation error: {e}")
            return {"error": "validation_failed", "message": str(e)}

        except Exception as e:
            logger.error(f"Unexpected error listing folder: {e}", exc_info=True)
            return {"error": "internal_error", "message": "Failed to list folder"}

    def move_file(self, source: str, destination: str) -> Dict[str, Any]:
        """
        Move a file from one location to another with safety checks.

        Refuses to overwrite an existing destination, and verifies after the
        move that the destination exists with the same size as the source.

        Args:
            source: Source file path
            destination: Destination file path

        Returns:
            Dict with status or error information
        """
        try:
            # Resolve both endpoints to absolute paths
            source_path = Path(source).resolve()
            dest_path = Path(destination).resolve()

            logger.info(f"Moving file from {source_path} to {dest_path}")

            # Validate source
            if not source_path.exists():
                return {
                    "error": "source_not_found",
                    "message": f"Source file does not exist: {source}"
                }

            if not source_path.is_file():
                return {
                    "error": "source_not_file",
                    "message": f"Source is not a file: {source}"
                }

            # Record source size before the move for post-move verification
            source_size = source_path.stat().st_size

            # Validate destination
            dest_parent = dest_path.parent
            if not dest_parent.exists():
                return {
                    "error": "destination_dir_not_found",
                    "message": f"Destination directory does not exist: {dest_parent}"
                }

            if dest_path.exists():
                return {
                    "error": "destination_exists",
                    "message": f"Destination file already exists: {destination}"
                }

            # Perform move (shutil.move handles cross-device moves by copying)
            shutil.move(str(source_path), str(dest_path))

            # Verify the file arrived
            if not dest_path.exists():
                return {
                    "error": "move_verification_failed",
                    "message": "File was not moved successfully"
                }

            dest_size = dest_path.stat().st_size
            if dest_size != source_size:
                # FIX: was a placeholder-less f-string; include the sizes so
                # the failure is diagnosable.
                return {
                    "error": "size_mismatch",
                    "message": f"File size mismatch after move: expected {source_size} bytes, got {dest_size}"
                }

            logger.info(f"File successfully moved: {dest_path.name}")
            return {
                "status": "ok",
                "source": str(source_path),
                "destination": str(dest_path),
                "filename": dest_path.name,
                "size": dest_size
            }

        except Exception as e:
            logger.error(f"Error moving file: {e}", exc_info=True)
            return {
                "error": "move_failed",
                "message": str(e)
            }

    def _validate_folder_name(self, folder_name: str) -> bool:
        """Validate folder name against the FolderName enum; raises ValueError otherwise."""
        valid_names = [fn.value for fn in FolderName]
        if folder_name not in valid_names:
            raise ValueError(
                f"Invalid folder_name '{folder_name}'. Must be one of: {', '.join(valid_names)}"
            )
        return True

    def _sanitize_path(self, path: str) -> str:
        """Sanitize a relative path; raises PathTraversalError on traversal attempts.

        Rejects absolute paths, parent-directory references, and null bytes.
        """
        # Normalize path (collapses redundant separators and "." segments)
        normalized = os.path.normpath(path)

        # Check for absolute paths
        if os.path.isabs(normalized):
            raise PathTraversalError("Absolute paths are not allowed")

        # Check for parent directory references (both separator styles)
        if normalized.startswith("..") or "/.." in normalized or "\\.." in normalized:
            raise PathTraversalError("Parent directory references are not allowed")

        # Check for null bytes (can truncate paths in some C-level APIs)
        if "\x00" in normalized:
            raise PathTraversalError("Null bytes in path are not allowed")

        return normalized

    def _is_safe_path(self, base_path: Path, target_path: Path) -> bool:
        """Check if target path is within base path (prevents path traversal)."""
        try:
            # Resolve both paths (follows symlinks) before containment check
            base_resolved = base_path.resolve()
            target_resolved = target_path.resolve()

            # relative_to raises ValueError when target escapes base
            target_resolved.relative_to(base_resolved)
            return True
        except (ValueError, OSError):
            return False
|
||||
150
infrastructure/filesystem/organizer.py
Normal file
150
infrastructure/filesystem/organizer.py
Normal file
@@ -0,0 +1,150 @@
|
||||
"""Media organizer - Organizes movies and TV shows into proper folder structures."""
|
||||
from pathlib import Path
|
||||
import logging
|
||||
from typing import Optional
|
||||
|
||||
from domain.movies.entities import Movie
|
||||
from domain.tv_shows.entities import TVShow, Episode
|
||||
from domain.shared.value_objects import FilePath
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class MediaOrganizer:
    """
    Organizes media files into proper folder structures.

    Knows the folder layout used by common media servers (Plex, Jellyfin,
    etc.) and computes destination paths for movies and TV episodes,
    delegating the actual folder/file naming to the domain entities.
    """

    def __init__(self, movie_folder: Path, tvshow_folder: Path):
        """
        Initialize media organizer.

        Args:
            movie_folder: Root folder for movies
            tvshow_folder: Root folder for TV shows
        """
        self.movie_folder = movie_folder
        self.tvshow_folder = tvshow_folder

    def get_movie_destination(self, movie: Movie, filename: str) -> Path:
        """
        Get the destination path for a movie file.

        Structure: /movies/Movie Title (Year)/Movie.Title.Year.Quality.ext

        Args:
            movie: Movie entity
            filename: Original filename (to extract extension)

        Returns:
            Full destination path
        """
        target_dir = self.movie_folder / movie.get_folder_name()
        # Keep the original container format: reuse the source file extension.
        return target_dir / (movie.get_filename() + Path(filename).suffix)

    def get_episode_destination(
        self,
        show: TVShow,
        episode: Episode,
        filename: str
    ) -> Path:
        """
        Get the destination path for a TV show episode file.

        Structure: /tvshows/Show.Name/Season 01/S01E05.Episode.Title.ext

        Args:
            show: TVShow entity
            episode: Episode entity
            filename: Original filename (to extract extension)

        Returns:
            Full destination path
        """
        from domain.tv_shows.entities import Season

        # Season entity is used only to derive the season folder name,
        # so the episode count is irrelevant here.
        season_stub = Season(
            show_imdb_id=show.imdb_id,
            season_number=episode.season_number,
            episode_count=0
        )
        season_dir = (
            self.tvshow_folder
            / show.get_folder_name()
            / season_stub.get_folder_name()
        )
        return season_dir / (episode.get_filename() + Path(filename).suffix)

    def create_movie_directory(self, movie: Movie) -> bool:
        """
        Create the directory structure for a movie.

        Args:
            movie: Movie entity

        Returns:
            True if successful
        """
        target = self.movie_folder / movie.get_folder_name()
        try:
            target.mkdir(parents=True, exist_ok=True)
            logger.info(f"Created movie directory: {target}")
            return True
        except Exception as e:
            logger.error(f"Failed to create movie directory: {e}")
            return False

    def create_episode_directory(self, show: TVShow, season_number: int) -> bool:
        """
        Create the directory structure for a TV show season.

        Args:
            show: TVShow entity
            season_number: Season number

        Returns:
            True if successful
        """
        from domain.tv_shows.entities import Season
        from domain.tv_shows.value_objects import SeasonNumber

        # As above: the Season stub only supplies the folder name.
        season_stub = Season(
            show_imdb_id=show.imdb_id,
            season_number=SeasonNumber(season_number),
            episode_count=0
        )
        target = (
            self.tvshow_folder
            / show.get_folder_name()
            / season_stub.get_folder_name()
        )
        try:
            target.mkdir(parents=True, exist_ok=True)
            logger.info(f"Created season directory: {target}")
            return True
        except Exception as e:
            logger.error(f"Failed to create season directory: {e}")
            return False
|
||||
1
infrastructure/persistence/__init__.py
Normal file
1
infrastructure/persistence/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
"""Persistence layer - Data storage implementations."""
|
||||
10
infrastructure/persistence/json/__init__.py
Normal file
10
infrastructure/persistence/json/__init__.py
Normal file
@@ -0,0 +1,10 @@
|
||||
"""JSON-based repository implementations."""
|
||||
from .movie_repository import JsonMovieRepository
|
||||
from .tvshow_repository import JsonTVShowRepository
|
||||
from .subtitle_repository import JsonSubtitleRepository
|
||||
|
||||
__all__ = [
|
||||
"JsonMovieRepository",
|
||||
"JsonTVShowRepository",
|
||||
"JsonSubtitleRepository",
|
||||
]
|
||||
115
infrastructure/persistence/json/movie_repository.py
Normal file
115
infrastructure/persistence/json/movie_repository.py
Normal file
@@ -0,0 +1,115 @@
|
||||
"""JSON-based movie repository implementation."""
|
||||
from typing import List, Optional, Dict, Any
|
||||
import logging
|
||||
|
||||
from domain.movies.repositories import MovieRepository
|
||||
from domain.movies.entities import Movie
|
||||
from domain.shared.value_objects import ImdbId
|
||||
from ..memory import Memory
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class JsonMovieRepository(MovieRepository):
    """
    JSON-based implementation of MovieRepository.

    Persists movies as a list of plain dicts under the 'movies' key of
    the memory.json store; the IMDb ID acts as the primary key.
    """

    def __init__(self, memory: Memory):
        """
        Initialize repository.

        Args:
            memory: Memory instance for persistence
        """
        self.memory = memory

    def save(self, movie: Movie) -> None:
        """Insert or replace the movie identified by its IMDb ID."""
        key = str(movie.imdb_id)
        # Upsert semantics: drop any stored record with the same IMDb ID first.
        records = [m for m in self._load_all() if m.get('imdb_id') != key]
        records.append(self._to_dict(movie))
        self.memory.set('movies', records)
        logger.debug(f"Saved movie: {movie.imdb_id}")

    def find_by_imdb_id(self, imdb_id: ImdbId) -> Optional[Movie]:
        """Return the stored movie with this IMDb ID, or None when absent."""
        key = str(imdb_id)
        return next(
            (self._from_dict(m) for m in self._load_all() if m.get('imdb_id') == key),
            None,
        )

    def find_all(self) -> List[Movie]:
        """Return every movie currently stored."""
        return [self._from_dict(record) for record in self._load_all()]

    def delete(self, imdb_id: ImdbId) -> bool:
        """Remove the movie with this IMDb ID; True when something was removed."""
        key = str(imdb_id)
        records = self._load_all()
        remaining = [m for m in records if m.get('imdb_id') != key]
        if len(remaining) == len(records):
            return False
        self.memory.set('movies', remaining)
        logger.debug(f"Deleted movie: {imdb_id}")
        return True

    def exists(self, imdb_id: ImdbId) -> bool:
        """True when a movie with this IMDb ID is stored."""
        return self.find_by_imdb_id(imdb_id) is not None

    def _load_all(self) -> List[Dict[str, Any]]:
        """Raw list of movie dicts from memory (empty list when unset)."""
        return self.memory.get('movies', [])

    def _to_dict(self, movie: Movie) -> Dict[str, Any]:
        """Convert a Movie entity to a plain dict for JSON storage."""
        return {
            'imdb_id': str(movie.imdb_id),
            'title': movie.title.value,
            'release_year': movie.release_year.value if movie.release_year else None,
            'quality': movie.quality.value,
            'file_path': str(movie.file_path) if movie.file_path else None,
            'file_size': movie.file_size.bytes if movie.file_size else None,
            'tmdb_id': movie.tmdb_id,
            'overview': movie.overview,
            'poster_path': movie.poster_path,
            'vote_average': movie.vote_average,
            'added_at': movie.added_at.isoformat(),
        }

    def _from_dict(self, data: Dict[str, Any]) -> Movie:
        """Rebuild a Movie entity from its stored dict representation."""
        from domain.movies.value_objects import MovieTitle, ReleaseYear, Quality
        from domain.shared.value_objects import FilePath, FileSize
        from datetime import datetime

        year = data.get('release_year')
        path = data.get('file_path')
        size = data.get('file_size')
        added = data.get('added_at')
        return Movie(
            imdb_id=ImdbId(data['imdb_id']),
            title=MovieTitle(data['title']),
            release_year=ReleaseYear(year) if year else None,
            quality=Quality(data.get('quality', 'unknown')),
            file_path=FilePath(path) if path else None,
            file_size=FileSize(size) if size else None,
            tmdb_id=data.get('tmdb_id'),
            overview=data.get('overview'),
            poster_path=data.get('poster_path'),
            vote_average=data.get('vote_average'),
            added_at=datetime.fromisoformat(added) if added else datetime.now(),
        )
|
||||
127
infrastructure/persistence/json/subtitle_repository.py
Normal file
127
infrastructure/persistence/json/subtitle_repository.py
Normal file
@@ -0,0 +1,127 @@
|
||||
"""JSON-based subtitle repository implementation."""
|
||||
from typing import List, Optional, Dict, Any
|
||||
import logging
|
||||
|
||||
from domain.subtitles.repositories import SubtitleRepository
|
||||
from domain.subtitles.entities import Subtitle
|
||||
from domain.subtitles.value_objects import Language, SubtitleFormat, TimingOffset
|
||||
from domain.shared.value_objects import ImdbId, FilePath
|
||||
from ..memory import Memory
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class JsonSubtitleRepository(SubtitleRepository):
    """
    JSON-based implementation of SubtitleRepository.

    Stores subtitles as a list of dicts under the 'subtitles' key of the
    memory.json file. Multiple subtitles may exist for the same media item
    (different languages, seasons, episodes), but a given subtitle file
    path appears at most once.
    """

    def __init__(self, memory: Memory):
        """
        Initialize repository.

        Args:
            memory: Memory instance for persistence
        """
        self.memory = memory

    def save(self, subtitle: Subtitle) -> None:
        """
        Save a subtitle to the repository.

        Saving is idempotent per file path: any existing entry with the
        same file_path is replaced rather than duplicated. (Previously
        every save appended unconditionally, so repeated saves of the same
        subtitle accumulated duplicate entries.) Multiple subtitles for
        the same media are still allowed as long as their paths differ.
        """
        subtitles = self._load_all()

        # Upsert: drop any stale entry for this exact subtitle file first.
        path = str(subtitle.file_path)
        subtitles = [s for s in subtitles if s.get('file_path') != path]
        subtitles.append(self._to_dict(subtitle))

        # Save to memory
        self.memory.set('subtitles', subtitles)
        logger.debug(f"Saved subtitle for: {subtitle.media_imdb_id}")

    def find_by_media(
        self,
        media_imdb_id: ImdbId,
        language: Optional[Language] = None,
        season: Optional[int] = None,
        episode: Optional[int] = None
    ) -> List[Subtitle]:
        """
        Find subtitles for a media item.

        Args:
            media_imdb_id: IMDb ID of the media to look up (required).
            language: If given, only subtitles in this language.
            season: If given, only subtitles for this season number.
            episode: If given, only subtitles for this episode number.

        Returns:
            List of matching Subtitle entities (possibly empty).
        """
        results: List[Subtitle] = []

        for sub_dict in self._load_all():
            # Filter by IMDb ID
            if sub_dict.get('media_imdb_id') != str(media_imdb_id):
                continue

            # Filter by language if specified
            if language and sub_dict.get('language') != language.value:
                continue

            # Filter by season/episode if specified
            if season is not None and sub_dict.get('season_number') != season:
                continue
            if episode is not None and sub_dict.get('episode_number') != episode:
                continue

            results.append(self._from_dict(sub_dict))

        return results

    def delete(self, subtitle: Subtitle) -> bool:
        """
        Delete a subtitle from the repository.

        Subtitles are matched by file path (the natural unique key here).

        Returns:
            True if at least one entry was removed, False otherwise.
        """
        subtitles = self._load_all()
        initial_count = len(subtitles)

        # Filter out the subtitle (match by file path)
        subtitles = [
            s for s in subtitles
            if s.get('file_path') != str(subtitle.file_path)
        ]

        if len(subtitles) < initial_count:
            self.memory.set('subtitles', subtitles)
            logger.debug(f"Deleted subtitle: {subtitle.file_path}")
            return True

        return False

    def _load_all(self) -> List[Dict[str, Any]]:
        """Load the raw list of subtitle dicts from memory."""
        return self.memory.get('subtitles', [])

    def _to_dict(self, subtitle: Subtitle) -> Dict[str, Any]:
        """Convert a Subtitle entity to a plain dict for JSON storage."""
        return {
            'media_imdb_id': str(subtitle.media_imdb_id),
            'language': subtitle.language.value,
            'format': subtitle.format.value,
            'file_path': str(subtitle.file_path),
            'season_number': subtitle.season_number,
            'episode_number': subtitle.episode_number,
            'timing_offset': subtitle.timing_offset.milliseconds,
            'hearing_impaired': subtitle.hearing_impaired,
            'forced': subtitle.forced,
            'source': subtitle.source,
            'uploader': subtitle.uploader,
            'download_count': subtitle.download_count,
            'rating': subtitle.rating,
        }

    def _from_dict(self, data: Dict[str, Any]) -> Subtitle:
        """Convert a stored dict back into a Subtitle entity."""
        return Subtitle(
            media_imdb_id=ImdbId(data['media_imdb_id']),
            language=Language.from_code(data['language']),
            format=SubtitleFormat.from_extension(data['format']),
            file_path=FilePath(data['file_path']),
            season_number=data.get('season_number'),
            episode_number=data.get('episode_number'),
            timing_offset=TimingOffset(data.get('timing_offset', 0)),
            hearing_impaired=data.get('hearing_impaired', False),
            forced=data.get('forced', False),
            source=data.get('source'),
            uploader=data.get('uploader'),
            download_count=data.get('download_count'),
            rating=data.get('rating'),
        )
|
||||
112
infrastructure/persistence/json/tvshow_repository.py
Normal file
112
infrastructure/persistence/json/tvshow_repository.py
Normal file
@@ -0,0 +1,112 @@
|
||||
"""JSON-based TV show repository implementation."""
|
||||
from typing import List, Optional, Dict, Any
|
||||
import logging
|
||||
|
||||
from domain.tv_shows.repositories import TVShowRepository
|
||||
from domain.tv_shows.entities import TVShow
|
||||
from domain.tv_shows.value_objects import ShowStatus
|
||||
from domain.shared.value_objects import ImdbId
|
||||
from ..memory import Memory
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class JsonTVShowRepository(TVShowRepository):
    """
    JSON-based implementation of TVShowRepository.

    Persists TV shows under the 'tv_shows' key of the memory.json file,
    keeping the layout compatible with the pre-existing tv_shows structure.
    One entry per IMDb ID.
    """

    def __init__(self, memory: Memory):
        """
        Initialize repository.

        Args:
            memory: Memory instance for persistence
        """
        self.memory = memory

    def save(self, show: TVShow) -> None:
        """Persist a TV show, replacing any existing entry with the same IMDb ID."""
        key = str(show.imdb_id)
        records = [r for r in self._load_all() if r.get('imdb_id') != key]
        records.append(self._to_dict(show))
        self.memory.set('tv_shows', records)
        logger.debug(f"Saved TV show: {show.imdb_id}")

    def find_by_imdb_id(self, imdb_id: ImdbId) -> Optional[TVShow]:
        """Look up a single TV show by IMDb ID; None if absent."""
        target = str(imdb_id)
        match = next(
            (r for r in self._load_all() if r.get('imdb_id') == target),
            None,
        )
        return self._from_dict(match) if match is not None else None

    def find_all(self) -> List[TVShow]:
        """Return every TV show currently stored."""
        return [self._from_dict(r) for r in self._load_all()]

    def delete(self, imdb_id: ImdbId) -> bool:
        """Remove a TV show by IMDb ID; True if an entry was removed."""
        target = str(imdb_id)
        records = self._load_all()
        remaining = [r for r in records if r.get('imdb_id') != target]

        # Nothing matched: leave storage untouched.
        if len(remaining) == len(records):
            return False

        self.memory.set('tv_shows', remaining)
        logger.debug(f"Deleted TV show: {imdb_id}")
        return True

    def exists(self, imdb_id: ImdbId) -> bool:
        """Return True if a show with this IMDb ID is stored."""
        return self.find_by_imdb_id(imdb_id) is not None

    def _load_all(self) -> List[Dict[str, Any]]:
        """Fetch the raw list of show dicts from memory."""
        return self.memory.get('tv_shows', [])

    def _to_dict(self, show: TVShow) -> Dict[str, Any]:
        """Flatten a TVShow entity into a JSON-storable dict."""
        return {
            'imdb_id': str(show.imdb_id),
            'title': show.title,
            'seasons_count': show.seasons_count,
            'status': show.status.value,
            'tmdb_id': show.tmdb_id,
            'overview': show.overview,
            'poster_path': show.poster_path,
            'first_air_date': show.first_air_date,
            'vote_average': show.vote_average,
            'added_at': show.added_at.isoformat(),
        }

    def _from_dict(self, payload: Dict[str, Any]) -> TVShow:
        """Rebuild a TVShow entity from its stored dict form."""
        from datetime import datetime

        stamp = payload.get('added_at')
        return TVShow(
            imdb_id=ImdbId(payload['imdb_id']),
            title=payload['title'],
            seasons_count=payload['seasons_count'],
            status=ShowStatus.from_string(payload['status']),
            tmdb_id=payload.get('tmdb_id'),
            overview=payload.get('overview'),
            poster_path=payload.get('poster_path'),
            first_air_date=payload.get('first_air_date'),
            vote_average=payload.get('vote_average'),
            added_at=datetime.fromisoformat(stamp) if stamp else datetime.now(),
        )
|
||||
@@ -1,10 +1,10 @@
|
||||
# agent/memory.py
|
||||
"""Memory storage - Migrated from agent/memory.py"""
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict
|
||||
import json
|
||||
|
||||
from .config import settings
|
||||
from .parameters import validate_parameter, get_parameter_schema
|
||||
from agent.config import settings
|
||||
from agent.parameters import validate_parameter, get_parameter_schema
|
||||
|
||||
|
||||
class Memory:
|
||||
Reference in New Issue
Block a user