ivanoctaviogaitansantos committed
Commit d4fff65 · verified · 1 Parent(s): abf3a92

Update api_agent.py

Files changed (1)
  1. api_agent.py +25 -25
api_agent.py CHANGED
@@ -17,7 +17,7 @@ class APIAgent:

     def call_openai(self, prompt: str, is_code: bool = False) -> Optional[str]:
         """Intenta llamar a OpenAI API"""
-        api_key = self.config.get("openai_api_key")
+        api_key = self.config.get('openai_api_key')

         if not api_key:
             return None
@@ -25,57 +25,57 @@ class APIAgent:
         try:
             openai.api_key = api_key

-            model = "gpt-3.5-turbo"
-            max_tokens = self.config.get("max_tokens", 400) # Reducido para HF
-            temperature = self.config.get("temperature", 0.7)
+            model = 'gpt-3.5-turbo'
+            max_tokens = self.config.get('max_tokens', 400)
+            temperature = self.config.get('temperature', 0.7)

             response = openai.ChatCompletion.create(
                 model=model,
-                messages=[{"role": "user", "content": prompt}],
+                messages=[{'role': 'user', 'content': prompt}],
                 max_tokens=max_tokens,
                 temperature=temperature,
-                timeout=15 # Timeout reducido
+                timeout=15
             )

             return response.choices[0].message.content.strip()

         except Exception as e:
-            logger.error(f"❌ Error llamando a OpenAI: {e}")
+            logger.error(f'Error llamando a OpenAI: {e}')
             return None

     def call_deepseek(self, prompt: str, is_code: bool = False) -> Optional[str]:
         """Intenta llamar a DeepSeek API"""
-        api_key = self.config.get("deepseek_api_key")
+        api_key = self.config.get('deepseek_api_key')

         if not api_key:
             return None

         try:
-            url = "https://api.deepseek.com/v1/chat/completions"
+            url = 'https://api.deepseek.com/v1/chat/completions'
             headers = {
-                "Content-Type": "application/json",
-                "Authorization": f"Bearer {api_key}"
+                'Content-Type': 'application/json',
+                'Authorization': f'Bearer {api_key}'
             }

-            max_tokens = self.config.get("max_tokens", 400)
-            temperature = self.config.get("temperature", 0.7)
+            max_tokens = self.config.get('max_tokens', 400)
+            temperature = self.config.get('temperature', 0.7)

             data = {
-                "model": "deepseek-chat",
-                "messages": [{"role": "user", "content": prompt}],
-                "max_tokens": max_tokens,
-                "temperature": temperature,
-                "stream": False
+                'model': 'deepseek-chat',
+                'messages': [{'role': 'user', 'content': prompt}],
+                'max_tokens': max_tokens,
+                'temperature': temperature,
+                'stream': False
             }

             response = requests.post(url, json=data, headers=headers, timeout=20)
             response.raise_for_status()

             result = response.json()
-            return result["choices"][0]["message"]["content"].strip()
+            return result['choices'][0]['message']['content'].strip()

         except Exception as e:
-            logger.error(f"❌ Error llamando a DeepSeek: {e}")
+            logger.error(f'Error llamando a DeepSeek: {e}')
             return None

     def generate_response(self, prompt: str, is_code: bool = False) -> Dict[str, Any]:
@@ -84,18 +84,18 @@ class APIAgent:
         Returns: dict con response y source
         """
         # Verificar si hay claves configuradas
-        has_deepseek = bool(self.config.get("deepseek_api_key"))
-        has_openai = bool(self.config.get("openai_api_key"))
+        has_deepseek = bool(self.config.get('deepseek_api_key'))
+        has_openai = bool(self.config.get('openai_api_key'))

         if has_deepseek:
             deepseek_response = self.call_deepseek(prompt, is_code)
             if deepseek_response:
-                return {"response": deepseek_response, "source": "deepseek"}
+                return {'response': deepseek_response, 'source': 'deepseek'}

         if has_openai:
             openai_response = self.call_openai(prompt, is_code)
             if openai_response:
-                return {"response": openai_response, "source": "openai"}
+                return {'response': openai_response, 'source': 'openai'}

         # Si no hay APIs disponibles o fallan, retornar None para usar modelo local
-        return {"response": None, "source": "none"}
+        return {'response': None, 'source': 'none'}
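
For context, a minimal sketch of how this agent might be driven after the change. It is not part of the commit: the APIAgent(config) constructor and the api_agent import path are assumptions, and only the config keys the diff actually reads (deepseek_api_key, openai_api_key, max_tokens, temperature) are taken from the code above.

# Hypothetical usage sketch; the APIAgent(config) constructor is assumed,
# since this diff only shows call_openai, call_deepseek and generate_response.
from api_agent import APIAgent

config = {
    'deepseek_api_key': 'sk-...',   # tried first when set
    'openai_api_key': 'sk-...',     # fallback if DeepSeek is unset or fails
    'max_tokens': 400,
    'temperature': 0.7,
}

agent = APIAgent(config)
result = agent.generate_response('Summarize this module in one sentence')

if result['response'] is None:
    # source == 'none': neither API answered, so the caller should fall back
    # to the local model, as the final comment in the diff indicates.
    print('Falling back to local model')
else:
    print(f"[{result['source']}] {result['response']}")

The ordering mirrors generate_response: DeepSeek is preferred, OpenAI is the fallback, and a None response signals the caller to use the local model.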