Coverage for src/chuck_data/config.py: 0% (153 statements), coverage.py v7.8.0, created at 2025-06-05 22:56 -0700
1"""
2Configuration management for Chuck TUI using Pydantic for schema validation.
3"""
5import json
6import os
7import logging
8import tempfile
9from typing import Optional
10from pydantic import BaseModel, Field
11from .databricks.url_utils import validate_workspace_url


class ChuckConfig(BaseModel):
    """Pydantic model for Chuck configuration"""

    workspace_url: Optional[str] = Field(
        default=None, description="Databricks workspace URL"
    )
    active_model: Optional[str] = Field(
        default=None, description="Currently active model name"
    )
    warehouse_id: Optional[str] = Field(
        default=None, description="SQL warehouse ID for table operations"
    )
    active_catalog: Optional[str] = Field(
        default=None, description="Currently active Unity Catalog"
    )
    active_schema: Optional[str] = Field(
        default=None, description="Currently active schema"
    )
    amperity_token: Optional[str] = Field(
        default=None, description="Amperity authentication token"
    )
    databricks_token: Optional[str] = Field(
        default=None, description="Databricks API token for authentication"
    )
    usage_tracking_consent: Optional[bool] = Field(
        default=False, description="User consent for usage tracking"
    )

    # No validator - use defaults instead of failing
    model_config = {
        # Allow extra fields for backward compatibility
        "extra": "allow"
    }
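

# Example (hedged sketch): because ``model_config`` sets ``extra`` to "allow",
# config files written by older or newer Chuck versions still load; unknown keys
# are kept on the instance instead of raising a validation error. The key name
# ``legacy_field`` below is purely illustrative:
#
#     cfg = ChuckConfig(
#         workspace_url="https://example.cloud.databricks.com",
#         legacy_field="kept",                # unknown key, does not raise
#     )
#     cfg.model_dump()["legacy_field"]        # -> "kept"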


class ConfigManager:
    """Configuration manager for Chuck"""

    # Class variable for singleton pattern
    _instance = None
    # Track instances by config path to support testing with different paths
    _instances_by_path = {}

    def __new__(cls, config_path: Optional[str] = None):
        """Singleton pattern that also respects different config paths for testing"""
        if config_path is None:
            config_path = os.path.join(os.path.expanduser("~"), ".chuck_config.json")

        # For testing, allow different instances with different paths
        if config_path in cls._instances_by_path:
            return cls._instances_by_path[config_path]

        if cls._instance is None or config_path not in cls._instances_by_path:
            instance = super(ConfigManager, cls).__new__(cls)

            # Only set as main instance if we don't have one yet
            if cls._instance is None:
                cls._instance = instance

            # Track by path for testing support
            cls._instances_by_path[config_path] = instance
            instance._initialized = False
            return instance

        return cls._instance

    def __init__(self, config_path: Optional[str] = None):
        """Initialize config manager with optional custom path"""
        if getattr(self, "_initialized", False):
            return

        if config_path:
            self.config_path = config_path
        else:
            self.config_path = os.path.join(
                os.path.expanduser("~"), ".chuck_config.json"
            )

        self._config: Optional[ChuckConfig] = None
        self._initialized = True

    def load(self) -> ChuckConfig:
        """Load configuration from file or create default"""
        # Don't cache in tests (always reload)
        if not self.config_path.startswith(tempfile.gettempdir()):
            if self._config:
                return self._config

        config_data = {}
        if os.path.exists(self.config_path):
            try:
                with open(self.config_path, "r") as f:
                    config_data = json.load(f)
                logging.debug(f"Loaded configuration from {self.config_path}")
            except json.JSONDecodeError:
                logging.error("Config file is corrupted. Using default config.")
            except Exception as e:
                logging.error(f"Error loading config: {e}")

        # Override with environment variables if available
        env_mappings = {
            "workspace_url": ["CHUCK_WORKSPACE_URL"],
            "active_model": ["CHUCK_ACTIVE_MODEL"],
            "warehouse_id": ["CHUCK_WAREHOUSE_ID"],
            "active_catalog": ["CHUCK_ACTIVE_CATALOG"],
            "active_schema": ["CHUCK_ACTIVE_SCHEMA"],
            "amperity_token": ["CHUCK_AMPERITY_TOKEN"],
            "databricks_token": ["CHUCK_DATABRICKS_TOKEN"],
            "usage_tracking_consent": ["CHUCK_USAGE_TRACKING_CONSENT"],
        }

        for field, env_vars in env_mappings.items():
            for env_var in env_vars:
                value = os.getenv(env_var)
                if value is not None:
                    # Handle boolean conversion for usage_tracking_consent
                    if field == "usage_tracking_consent":
                        config_data[field] = value.lower() in ("true", "1", "yes", "on")
                    else:
                        config_data[field] = value
                    logging.debug(f"Using {field} from environment variable {env_var}")
                    break

        # Create Pydantic model instance
        self._config = ChuckConfig(**config_data)
        return self._config
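
    # Example (hedged sketch): environment variables take precedence over the JSON
    # file, and the consent flag accepts common truthy spellings. The variable names
    # come from ``env_mappings`` above; the values are illustrative:
    #
    #     os.environ["CHUCK_WAREHOUSE_ID"] = "abc123"
    #     os.environ["CHUCK_USAGE_TRACKING_CONSENT"] = "yes"
    #     cfg = ConfigManager().load()
    #     cfg.warehouse_id            # -> "abc123", even if the file says otherwise
    #     cfg.usage_tracking_consent  # -> True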

    def save(self) -> bool:
        """Save configuration to file"""
        if not self._config:
            return False

        try:
            # Ensure directory exists
            directory = os.path.dirname(self.config_path)
            if directory:  # Check if directory is not empty
                os.makedirs(directory, exist_ok=True)

            # Write config
            with open(self.config_path, "w") as f:
                json.dump(self._config.model_dump(), f, indent=2)
            logging.debug(f"Saved configuration to {self.config_path}")
            return True
        except Exception as e:
            logging.error(f"Error saving config: {e}")
            return False

    def get_config(self) -> ChuckConfig:
        """Get configuration object"""
        return self.load()

    def needs_setup(self) -> bool:
        """Check if first-time setup is needed based on missing critical configuration"""
        config = self.load()
        critical_configs = [
            config.amperity_token,
            config.databricks_token,
            config.workspace_url,
            config.active_model,
        ]
        return any(item is None or item == "" for item in critical_configs)

    def update(self, **kwargs) -> bool:
        """Update configuration values"""
        config = self.load()

        # Handle workspace_url validation using the new utility
        if "workspace_url" in kwargs:
            is_valid, _ = validate_workspace_url(kwargs["workspace_url"])
            if not is_valid:
                logging.warning("Invalid workspace_url provided, using default instead")
                # Remove invalid workspace_url so default is maintained
                kwargs.pop("workspace_url")

        # Set values
        for key, value in kwargs.items():
            if hasattr(config, key):
                setattr(config, key, value)

        return self.save()
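

# Example (hedged sketch): ConfigManager is a per-path singleton, so repeated
# construction with the same path returns the same object, while tests can get an
# isolated instance by pointing at a temporary file (the path below is hypothetical):
#
#     default_mgr = ConfigManager()
#     assert ConfigManager() is default_mgr
#
#     test_mgr = ConfigManager("/tmp/chuck_test_config.json")
#     assert test_mgr is not default_mgr
#     test_mgr.update(active_catalog="main")  # applies the change and saves in one call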


# Global config manager instance
_config_manager = ConfigManager()


# API functions for backward compatibility
def get_workspace_url():
    return _config_manager.get_config().workspace_url


def set_workspace_url(workspace_url):
    """Set the workspace URL in config after validation and normalization.

    Args:
        workspace_url: URL of the Databricks workspace

    Returns:
        True if successful, False otherwise
    """
    is_valid, error_message = validate_workspace_url(workspace_url)
    if not is_valid:
        logging.error(f"Invalid workspace URL: {error_message}")
        return False

    # Normalize the URL before saving
    return _config_manager.update(workspace_url=workspace_url)


def get_amperity_token():
    """Get the Amperity token from config or environment."""
    token = _config_manager.get_config().amperity_token

    # Fall back to environment variable
    if not token:
        token = os.getenv("CHUCK_AMPERITY_TOKEN")
        if token:
            logging.debug("Using Amperity token from environment variable")

    return token


def set_amperity_token(token):
    """Set the Amperity token in config."""
    return _config_manager.update(amperity_token=token)


def get_active_model():
    """Get the active model from config."""
    return _config_manager.get_config().active_model


def set_active_model(model_name):
    """Set the active model in config and clear agent history when changed."""
    current_model = get_active_model()
    result = _config_manager.update(active_model=model_name)
    if current_model != model_name:
        clear_agent_history()
    return result


def get_warehouse_id():
    """Get the warehouse ID from config."""
    return _config_manager.get_config().warehouse_id


def set_warehouse_id(warehouse_id):
    """Set the warehouse ID in config."""
    return _config_manager.update(warehouse_id=warehouse_id)


def get_active_catalog():
    """Get the active catalog from config."""
    return _config_manager.get_config().active_catalog


def set_active_catalog(catalog_name):
    """Set the active catalog in config."""
    return _config_manager.update(active_catalog=catalog_name)


def get_active_schema():
    """Get the active schema from config."""
    return _config_manager.get_config().active_schema


def set_active_schema(schema_name):
    """Set the active schema in config."""
    return _config_manager.update(active_schema=schema_name)


def get_databricks_token():
    """Get the Databricks token from config."""
    return _config_manager.get_config().databricks_token


def set_databricks_token(token):
    """Set the Databricks token in config."""
    return _config_manager.update(databricks_token=token)


# For direct access to config manager
def get_config_manager():
    """Get the global config manager instance"""
    return _config_manager


# ---- Agent conversation history management ----
_agent_history = []


def get_agent_history():
    """Get current agent conversation history."""
    return _agent_history.copy()


def set_agent_history(history):
    """Set the agent conversation history."""
    global _agent_history
    _agent_history = history


def clear_agent_history():
    """Clear the agent conversation history."""
    global _agent_history
    _agent_history = []


# ---- Usage tracking consent management ----
def get_usage_tracking_consent():
    """Get the usage tracking consent status."""
    return _config_manager.get_config().usage_tracking_consent


def set_usage_tracking_consent(consent: bool):
    """Set the usage tracking consent status.

    Args:
        consent: Boolean indicating whether user consents to usage tracking

    Returns:
        True if successful, False otherwise
    """
    return _config_manager.update(usage_tracking_consent=consent)
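

# Example (hedged sketch): typical use of the backward-compatible module-level API.
# The import path assumes the package layout src/chuck_data/config.py; all values
# are illustrative, not real defaults or credentials:
#
#     from chuck_data import config
#
#     if config.get_config_manager().needs_setup():
#         config.set_workspace_url("https://example.cloud.databricks.com")
#         config.set_databricks_token("dapi-example-token")   # hypothetical token
#         config.set_active_model("example-model")            # hypothetical model name
#
#     config.set_active_model("another-model")  # changing models clears agent history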