| """ | |
| Configuration for DS-STAR system. | |
| Centralizes LLM setup and system parameters. | |
| """ | |
| import os | |
| from typing import Optional | |
| from dotenv import load_dotenv | |
| load_dotenv() | |
| def get_llm( | |
| provider: str = "google", | |
| model: Optional[str] = None, | |
| api_key: Optional[str] = None, | |
| temperature: float = 0, | |
| base_url: Optional[str] = None, | |
| ): | |
| """ | |
| Get configured LLM instance. | |
| Args: | |
| provider: LLM provider ("google", "openai", "anthropic") | |
| model: Model name (uses default if not specified) | |
| api_key: API key (uses environment variable if not specified) | |
| temperature: Temperature for generation | |
| base_url: Custom base URL for OpenAI-compatible APIs | |
| Returns: | |
| Configured LLM instance | |
| """ | |
| if provider == "google": | |
| from langchain_google_genai import ChatGoogleGenerativeAI | |
| default_model = "gemini-flash-latest" | |
| api_key = api_key or os.getenv("GOOGLE_API_KEY", "") | |
| return ChatGoogleGenerativeAI( | |
| model=model or default_model, | |
| temperature=temperature, | |
| google_api_key=api_key, | |
| ) | |
| elif provider == "openai": | |
| from langchain_openai import ChatOpenAI | |
| default_model = "gpt-4" | |
| api_key = api_key or os.getenv("OPENAI_API_KEY") | |
| # Use provided base_url, then env var, then default | |
| effective_base_url = base_url or os.getenv( | |
| "LLM_BASE_URL", | |
| "https://api.openai.com/v1", | |
| ) | |
| return ChatOpenAI( | |
| model=model or default_model, | |
| temperature=temperature, | |
| api_key=api_key, | |
| base_url=effective_base_url, | |
| ) | |
| elif provider == "anthropic": | |
| from langchain_anthropic import ChatAnthropic | |
| default_model = "claude-3-5-sonnet-20241022" | |
| api_key = api_key or os.getenv("ANTHROPIC_API_KEY") | |
| return ChatAnthropic( | |
| model=model or default_model, | |
| temperature=temperature, | |
| api_key=api_key, | |
| ) | |
| else: | |
| raise ValueError( | |
| f"Unknown provider: {provider}. Choose from: google, openai, anthropic" | |
| ) | |
# Default configuration
DEFAULT_CONFIG = {
    "max_iterations": 20,
    "provider": "openai",
    "model": "google/gemini-2.5-flash",
    "temperature": 0,
    "data_dir": "data/",
}
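
# Smoke-test sketch (an added example, not required by DS-STAR): wire
# DEFAULT_CONFIG into get_llm() and send one prompt. Assumes the relevant
# API key (here OPENAI_API_KEY, since the default provider is "openai")
# and, if needed, LLM_BASE_URL are set in the environment or in .env.
if __name__ == "__main__":
    llm = get_llm(
        provider=DEFAULT_CONFIG["provider"],
        model=DEFAULT_CONFIG["model"],
        temperature=DEFAULT_CONFIG["temperature"],
    )
    # LangChain chat models expose .invoke(); the reply is a message object.
    print(llm.invoke("Reply with the single word: ready").content)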