"""

Configuration for DS-STAR system.

Centralizes LLM setup and system parameters.

"""

import os
from typing import Optional

from dotenv import load_dotenv

load_dotenv()


def get_llm(
    provider: str = "google",
    model: Optional[str] = None,
    api_key: Optional[str] = None,
    temperature: float = 0,
    base_url: Optional[str] = None,
):
    """Build and return a configured chat-model client for *provider*.

    Provider SDKs are imported lazily inside each branch so only the
    selected backend's package needs to be installed.

    Args:
        provider: LLM provider ("google", "openai", "anthropic")
        model: Model name (uses the provider's default if not specified)
        api_key: API key (falls back to the provider's environment
            variable if not specified)
        temperature: Temperature for generation
        base_url: Custom base URL for OpenAI-compatible APIs

    Returns:
        Configured LLM instance

    Raises:
        ValueError: If ``provider`` is not one of the supported names.
    """
    if provider == "google":
        from langchain_google_genai import ChatGoogleGenerativeAI

        chosen_model = model or "gemini-flash-latest"
        resolved_key = api_key or os.getenv("GOOGLE_API_KEY", "")

        return ChatGoogleGenerativeAI(
            model=chosen_model,
            temperature=temperature,
            google_api_key=resolved_key,
        )

    if provider == "openai":
        from langchain_openai import ChatOpenAI

        chosen_model = model or "gpt-4"
        resolved_key = api_key or os.getenv("OPENAI_API_KEY")

        # Precedence: explicit argument, then env var, then the public endpoint.
        resolved_base_url = base_url or os.getenv(
            "LLM_BASE_URL",
            "https://api.openai.com/v1",
        )

        return ChatOpenAI(
            model=chosen_model,
            temperature=temperature,
            api_key=resolved_key,
            base_url=resolved_base_url,
        )

    if provider == "anthropic":
        from langchain_anthropic import ChatAnthropic

        chosen_model = model or "claude-3-5-sonnet-20241022"
        resolved_key = api_key or os.getenv("ANTHROPIC_API_KEY")

        return ChatAnthropic(
            model=chosen_model,
            temperature=temperature,
            api_key=resolved_key,
        )

    raise ValueError(
        f"Unknown provider: {provider}. Choose from: google, openai, anthropic"
    )


# Default configuration
# NOTE(review): key semantics below are inferred from names/values where the
# consumer is not visible in this file — confirm against the code that reads
# DEFAULT_CONFIG.
DEFAULT_CONFIG = {
    "max_iterations": 20,  # presumably caps the main DS-STAR loop — confirm
    "provider": "openai",  # matches a provider name accepted by get_llm()
    # Model id in "vendor/name" form — looks like an OpenAI-compatible
    # router/proxy id (used with provider "openai"); verify against LLM_BASE_URL.
    "model": "google/gemini-2.5-flash",
    "temperature": 0,  # deterministic generation
    "data_dir": "data/",  # presumably the input-dataset directory — confirm
}