Spaces: Runtime error
import logging
import threading

import gradio as gr
import requests
from transformers import pipeline

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

class SimpleChatbot:
    def __init__(self):
        self.conversation_history = []
        self.models_loaded = False
        self.chat_model = None

    def load_models(self):
        try:
            logger.info('Loading DialoGPT model...')
            self.chat_model = pipeline(
                "text-generation",
                model="microsoft/DialoGPT-small",
                device="cpu"
            )
            self.models_loaded = True
            logger.info('Model loaded successfully')
            return True
        except Exception as e:
            logger.error(f'Error loading model: {e}')
            return False
    def chat_response(self, history):
        # The user message is read from the history because the textbox is
        # cleared by handle_submit before this generator runs.
        if not history or not history[-1][0] or not history[-1][0].strip():
            yield history
            return
        message = history[-1][0]
        history[-1][1] = "Processing..."
        yield history
        try:
            if not self.models_loaded:
                self.load_models()
            # Generate a response with the local model
            result = self.chat_model(
                message,
                max_length=150,
                num_return_sequences=1,
                temperature=0.7,
                do_sample=True
            )
            response = result[0]['generated_text']
            # Strip the echoed prompt from the generated text
            if response.startswith(message):
                response = response[len(message):].strip()
            full_response = response + "\n\n---\nSource: local model"
            self.conversation_history.append({
                "user": message,
                "bot": response
            })
            history[-1][1] = full_response
            yield history
        except Exception as e:
            history[-1][1] = f"Error: {str(e)}"
            yield history

# Create a single chatbot instance
chatbot = SimpleChatbot()

# Load the model in the background so the UI starts immediately
def load_models_async():
    chatbot.load_models()

model_loader = threading.Thread(target=load_models_async, daemon=True)
model_loader.start()

# Simple interface
with gr.Blocks(title="BATUTO Chatbot") as demo:
    gr.Markdown("# BATUTO Chatbot - Educational Assistant")
    with gr.Row():
        with gr.Column(scale=2):
            chatbot_interface = gr.Chatbot(label="Conversation", height=400)
            msg = gr.Textbox(
                label="Type your message",
                placeholder="Ask about programming...",
                lines=2
            )
            with gr.Row():
                submit_btn = gr.Button("Send", variant="primary")
                clear_btn = gr.Button("Clear", variant="secondary")
        with gr.Column(scale=1):
            gr.Markdown("### Information")
            gr.Markdown("""
            **Examples:**
            - Explain what Python is
            - Show a function that sorts a list
            - Fix a piece of Python code
            """)
    # Event handlers
    def handle_submit(message, history):
        # Clear the textbox and append the user turn with an empty bot slot
        if not message.strip():
            return "", history
        return "", history + [[message, None]]

    submit_btn.click(
        handle_submit,
        inputs=[msg, chatbot_interface],
        outputs=[msg, chatbot_interface]
    ).then(
        chatbot.chat_response,
        inputs=[chatbot_interface],
        outputs=[chatbot_interface]
    )

    msg.submit(
        handle_submit,
        inputs=[msg, chatbot_interface],
        outputs=[msg, chatbot_interface]
    ).then(
        chatbot.chat_response,
        inputs=[chatbot_interface],
        outputs=[chatbot_interface]
    )

    clear_btn.click(
        lambda: ("", []),
        outputs=[msg, chatbot_interface]
    )
if __name__ == "__main__":
    demo.launch()
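To narrow down the runtime error before redeploying the Space, it can help to confirm that the model pipeline itself runs on CPU, independently of the Gradio UI. The snippet below is a minimal local sketch of that check, not part of the Space; the file name quick_check.py and the sample prompt are made up for illustration.

# quick_check.py - hypothetical standalone sanity check, run locally
from transformers import pipeline

# Same model and generation settings as the Space above
chat = pipeline("text-generation", model="microsoft/DialoGPT-small", device="cpu")
out = chat("What is Python?", max_length=60, do_sample=True, temperature=0.7)
print(out[0]["generated_text"])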