"""BATUTO/ANDROIDE_90 Pro — Gradio chat UI routing requests to SambaNova models.

Picks a model per message with a cheap local classifier (vision / code /
creative / precise), supports one optional image per turn, and exposes a
restricted Python "tool" the model may call, plus a standalone code runner tab.
"""

import base64
import builtins as _builtins
import io
import json
import os
import re
from contextlib import redirect_stdout

import gradio as gr
from PIL import Image  # noqa: F401 — gr.Image(type="pil") hands us PIL images
from sambanova import SambaNova

# =========================
# CONFIGURATION
# =========================
API_KEY = os.getenv("SAMBANOVA_API_KEY")
if not API_KEY:
    raise ValueError("❌ Error: Configura la variable SAMBANOVA_API_KEY")

client = SambaNova(
    api_key=API_KEY,
    base_url="https://api.sambanova.ai/v1",
)

# =========================
# MODELS — task type -> SambaNova model id
# =========================
MODELS = {
    "vision": "Llama-4-Maverick-17B-128E-Instruct",
    "code": "DeepSeek-R1-Distill-Llama-70B",
    "general_precise": "gpt-oss-120b",
    "general_creative": "Qwen3-32B",
}

# =========================
# LOCAL CLASSIFICATION
# =========================
# Routing patterns compiled once at import time (hoisted out of the per-call path).
_VISION_RE = re.compile(r'\b(imagen|foto|describe|ver|colores|visual|ocr|objeto)\b')
_CODE_RE = re.compile(r'\b(código|python|js|java|debug|función|error|clase|algoritmo)\b')
_CREATIVE_RE = re.compile(r'\b(historia|cuento|poema|escribe|creativo|inventa|relato|personaje)\b')


def classify_task_local(message, has_image):
    """Map a user message to a MODELS key without any API call.

    An attached image always wins; otherwise simple Spanish keyword regexes
    route to vision / code / creative, defaulting to the precise model.
    """
    if has_image:
        return "vision"
    msg = message.lower().strip()
    if _VISION_RE.search(msg):
        return "vision"
    if _CODE_RE.search(msg):
        return "code"
    if _CREATIVE_RE.search(msg):
        return "general_creative"
    return "general_precise"


# =========================
# TOOLS
# =========================
TOOLS = [
    {
        "type": "function",
        "function": {
            "name": "execute_python",
            "description": "Ejecuta código Python en sandbox seguro.",
            "parameters": {
                "type": "object",
                "properties": {"code": {"type": "string"}},
                "required": ["code"],
            },
        },
    }
]

# Minimal builtins whitelist for the sandbox.  The original passed
# {"__builtins__": {}} which made even print() raise NameError — including
# the default example shipped in the "Ejecutor" tab.
# NOTE(review): exec() of model/user code is never truly safe; this only
# trims the obvious attack surface (no import, open, eval, exec, ...).
_SAFE_BUILTINS = {
    name: getattr(_builtins, name)
    for name in (
        "print", "len", "range", "enumerate", "zip", "sorted", "reversed",
        "sum", "min", "max", "abs", "round", "divmod", "pow",
        "str", "int", "float", "bool", "list", "dict", "set", "tuple",
        "map", "filter", "any", "all", "repr", "isinstance", "type",
    )
}


def _run_sandboxed(code):
    """Execute *code* in the restricted sandbox, capturing stdout.

    Returns (ok, text): on success text is the stripped captured output
    (possibly empty); on failure text is the exception message.
    Shared by the model tool and the standalone executor tab.
    """
    output = io.StringIO()
    try:
        with redirect_stdout(output):
            exec(code, {"__builtins__": _SAFE_BUILTINS}, {})
    except Exception as e:  # surface any sandbox error as text, never crash the UI
        return False, str(e)
    return True, output.getvalue().strip()


def execute_tool(tool_call):
    """Dispatch one model-issued tool call; always returns a human-readable string."""
    name = tool_call.function.name
    try:
        args = json.loads(tool_call.function.arguments)
    except json.JSONDecodeError:
        return "❌ Argumentos inválidos."
    if name == "execute_python":
        code = args.get("code", "")
        if not code.strip():
            return "❌ Código vacío."
        ok, text = _run_sandboxed(code)
        if not ok:
            return f"❌ Error: {text}"
        return f"✅ {text}" if text else "✅ Ejecutado sin salida."
    return f"❌ Tool no implementada: {name}"


# =========================
# MODEL VISUAL BADGE
# =========================
def model_badge(model_name):
    """Return a small HTML badge for the model family.

    Fix: the original computed `style` per family but then discarded it,
    returning the bare model name from both branches (the span markup had
    evidently been stripped).  The style is now actually applied.
    """
    colors = {
        "gpt": "background-color:#3182ce;color:white;",
        "Qwen3": "background-color:#38a169;color:white;",
        "DeepSeek": "background-color:#e53e3e;color:white;",
        "Llama": "background-color:#805ad5;color:white;",
    }
    style = "background-color:#718096;color:white;"  # neutral fallback
    for key, css in colors.items():
        if key.lower() in model_name.lower():
            style = css
            break
    return (
        f'<span style="{style}padding:2px 8px;border-radius:10px;'
        f'font-size:0.85em;">{model_name}</span>'
    )


# =========================
# MAIN CHAT
# =========================
def chat_with_batuto(system_prompt, message, image, history):
    """Handle one chat turn; returns (history, cleared_msg, cleared_img).

    Guard also covers message=None, which Gradio can deliver (the original
    called .strip() unconditionally and would crash).
    """
    if not message or not message.strip():
        return history, "", None

    has_image = image is not None
    task_type = classify_task_local(message, has_image)
    selected_model = MODELS[task_type]
    model_name = selected_model.split('-')[0]  # short family name for error labels

    # Build the API message list from the system prompt plus prior turns.
    messages = [{
        "role": "system",
        "content": system_prompt or "Eres BATUTO/ANDROIDE_90. Responde natural en español.",
    }]

    # Convert Gradio tuple-format history to API role messages.
    for entry in history:
        if isinstance(entry, (list, tuple)) and len(entry) == 2:
            user_msg, bot_msg = entry
            messages.append({"role": "user", "content": str(user_msg)})
            messages.append({"role": "assistant", "content": str(bot_msg)})

    # Current turn: multimodal payload only for the vision model with an image.
    if selected_model == MODELS["vision"] and has_image:
        buffered = io.BytesIO()
        image.save(buffered, format="PNG", optimize=True)
        b64_img = base64.b64encode(buffered.getvalue()).decode()
        messages.append({
            "role": "user",
            "content": [
                {"type": "text", "text": message},
                {"type": "image_url",
                 "image_url": {"url": f"data:image/png;base64,{b64_img}"}},
            ],
        })
    else:
        messages.append({"role": "user", "content": message})

    try:
        # Only send tool parameters when tools are in play — the original sent
        # explicit tools=None / tool_choice=None, which some endpoints reject.
        kwargs = {
            "model": selected_model,
            "messages": messages,
            "temperature": 0.15,
            "top_p": 0.1,
            "max_tokens": 1024,
        }
        if task_type != "vision":
            kwargs["tools"] = TOOLS
            kwargs["tool_choice"] = "auto"

        api_call = client.chat.completions.create(**kwargs)
        msg_out = api_call.choices[0].message
        badge = model_badge(selected_model)

        # Prefer plain content; otherwise run any requested tool calls locally.
        if msg_out.content:
            reply = f"{badge} {msg_out.content}"
        elif getattr(msg_out, "tool_calls", None):
            tool_results = [
                f"🔧 {tool_call.function.name}: {execute_tool(tool_call)}"
                for tool_call in msg_out.tool_calls
            ]
            reply = f"{badge} " + "\n".join(tool_results)
        else:
            reply = f"{badge} Respuesta vacía."

        # Append the turn in Gradio tuple format and clear the inputs.
        history.append((message, reply))
        return history, "", None

    except Exception as e:
        import traceback
        error = f"❌ [{model_name}] {str(e)}"
        print(f"Error completo: {traceback.format_exc()}")
        history.append((message, error))
        return history, "", None


# =========================
# GRADIO INTERFACE
# =========================
def clear_inputs():
    """Reset chatbot history, message box and image input."""
    return [], "", None


with gr.Blocks(
    title="🤖 BATUTO/ANDROIDE_90 Pro",
    theme=gr.themes.Soft(primary_hue="blue"),
    css="""
    .gradio-container {max-width: 1000px !important; margin: auto;}
    .header {text-align: center; padding: 15px; background: linear-gradient(135deg,#667eea 0%,#764ba2 100%); color: white; border-radius: 8px;}
    .chatbot {min-height: 480px;}
    """,
) as demo:
    with gr.Column(elem_classes="header"):
        gr.Markdown("""
        # 🤖 BATUTO/ANDROIDE_90 Pro
        **Modelos SambaNova optimizados con selección automática y visualización**
        """)

    with gr.Tabs():
        with gr.TabItem("💬 Chat"):
            system_prompt = gr.Textbox(
                lines=3,
                value="Eres BATUTO/ANDROIDE_90. Responde de manera natural y precisa en español.",
                label="Prompt del sistema",
            )
            # Default (tuple) chatbot type — chat_with_batuto appends (user, bot) pairs.
            chatbot = gr.Chatbot(
                height=480,
                show_copy_button=True,
                elem_classes="chatbot",
            )
            msg = gr.Textbox(placeholder="Escribe tu mensaje...", label="Mensaje")
            img = gr.Image(type="pil", label="Imagen opcional")
            send = gr.Button("🚀 Enviar", variant="primary")
            clear = gr.Button("🧹 Limpiar")

            send.click(
                chat_with_batuto,
                [system_prompt, msg, img, chatbot],
                [chatbot, msg, img],
            )
            msg.submit(
                chat_with_batuto,
                [system_prompt, msg, img, chatbot],
                [chatbot, msg, img],
            )
            clear.click(clear_inputs, None, [chatbot, msg, img])

        with gr.TabItem("⚙️ Ejecutor de Código"):
            gr.Markdown("### Ejecutor Independiente de Python")
            code_input = gr.Code(
                language="python",
                lines=8,
                value='print("¡Hola desde BATUTO!")\nresultado = 2 + 2\nprint(f"2 + 2 = {resultado}")',
                label="Código Python",
            )
            exec_output = gr.Textbox(
                lines=8,
                label="Resultado de la ejecución",
                interactive=False,
            )

            def execute_independent(code):
                """Run the editor's code in the shared sandbox and format the result."""
                if not code.strip():
                    return "❌ Código vacío."
                ok, text = _run_sandboxed(code)
                if not ok:
                    return f"❌ Error:\n{text}"
                return (
                    f"✅ Ejecutado correctamente:\n{text}"
                    if text else "✅ Código ejecutado sin salida."
                )

            exec_button = gr.Button("▶️ Ejecutar Código", variant="primary")
            exec_button.click(execute_independent, code_input, exec_output)

    gr.Markdown("**Estado:** ✅ Modelos SambaNova activos | Visualización multimodal habilitada")

# Launch the app
if __name__ == "__main__":
    demo.launch(share=True, show_error=True)