from transformers import pipeline
import gradio as gr

# Load the text-generation pipeline with the model carlosdimare/qclase
pipe = pipeline("text-generation", model="carlosdimare/qclase")

def responder(prompt):
    # Generate up to 200 new tokens with sampling and return the generated text
    respuesta = pipe(prompt, max_new_tokens=200, do_sample=True)[0]["generated_text"]
    return respuesta

# Simple Gradio interface: text in, text out
gr.Interface(fn=responder, inputs="text", outputs="text").launch()