Coverage for src/ollamapy/model_manager.py: 26%
38 statements
« prev ^ index » next coverage.py v7.10.6, created at 2025-09-01 12:29 -0400
"""Model management utilities for Ollama operations."""
from typing import List, Optional, Tuple

from .ollama_client import OllamaClient
class ModelManager:
    """Handles model availability checking, pulling, and validation."""

    def __init__(self, client: "OllamaClient"):
        """Initialize the model manager.

        Args:
            client: The OllamaClient instance to use for server queries
                and model pulls.
        """
        self.client = client

    def is_server_available(self) -> bool:
        """Check if the Ollama server is running and accessible."""
        return self.client.is_available()

    def list_available_models(self) -> List[str]:
        """Get the list of locally available model names."""
        return self.client.list_models()

    def pull_model_if_needed(self, model: str) -> bool:
        """Pull a model if it's not available locally.

        A model counts as locally available when a local model name matches
        the requested name exactly or extends it with a tag (e.g. requesting
        'llama3' is satisfied by 'llama3:8b').  A bare substring test is
        deliberately avoided so that e.g. 'llama' does not falsely match
        'codellama:7b' and skip a needed pull.

        Args:
            model: The model name to pull.

        Returns:
            True if the model is available or was successfully pulled,
            False otherwise.
        """
        model_available = any(
            available == model or available.startswith(f"{model}:")
            for available in self.list_available_models()
        )

        if not model_available:
            print(f"📥 Model '{model}' not found locally. Pulling...")
            if not self.client.pull_model(model):
                print(f"❌ Failed to pull model '{model}'")
                return False

        return True

    def ensure_models_available(
        self, main_model: str, analysis_model: Optional[str] = None
    ) -> Tuple[bool, str, str]:
        """Ensure required models are available, pulling them if necessary.

        Args:
            main_model: The main chat model to ensure is available.
            analysis_model: Optional separate analysis model (defaults to
                main_model).

        Returns:
            Tuple of (success, main_model_status, analysis_model_status).
        """
        # Use the main model for analysis if no separate model is specified.
        if analysis_model is None:
            analysis_model = main_model

        # Nothing can be checked or pulled if the server is unreachable.
        if not self.is_server_available():
            return False, "Server not available", "Server not available"

        print("✅ Connected to Ollama server")

        # Check/pull the main chat model first; the analysis model is not
        # examined when this fails.
        if not self.pull_model_if_needed(main_model):
            return False, "Failed to pull", "Not checked"

        # Check/pull the analysis model only when it differs from the main one.
        if analysis_model != main_model:
            if not self.pull_model_if_needed(analysis_model):
                return False, "Available", "Failed to pull"

        return True, "Available", "Available"

    def display_model_status(
        self, main_model: str, analysis_model: Optional[str] = None
    ) -> None:
        """Display the current model configuration.

        Args:
            main_model: The main chat model.
            analysis_model: Optional separate analysis model.
        """
        print(f"🎯 Using chat model: {main_model}")
        if analysis_model and analysis_model != main_model:
            print(f"🔍 Using analysis model: {analysis_model}")
        else:
            # Plain string: no interpolation is needed here.
            print("🔍 Using same model for analysis and chat")

        available_models = self.list_available_models()
        if available_models:
            # Show at most three names, with an ellipsis when more exist.
            preview = ", ".join(available_models[:3])
            suffix = "..." if len(available_models) > 3 else ""
            print(f"📚 Available models: {preview}{suffix}")