HusseinBashir committed on
Commit 746d13e · verified · 1 Parent(s): 987b549

Update app.py

Files changed (1)
  1. app.py +66 -22
app.py CHANGED
@@ -1,23 +1,67 @@
- import openai
  import gradio as gr
- import os
-
- openai.api_key = os.getenv("OPENAI_API_KEY")
-
- def chat(user_input, history=[]):
-     messages = [{"role": "system", "content": "Ka jawaab su’aalaha af Soomaali"}]
-     for q, a in history:
-         messages.append({"role": "user", "content": q})
-         messages.append({"role": "assistant", "content": a})
-     messages.append({"role": "user", "content": user_input})
-
-     response = openai.ChatCompletion.create(
-         model="gpt-4o",
-         messages=messages,
-         temperature=0.7
-     )
-     reply = response.choices[0].message.content
-     history.append((user_input, reply))
-     return history, history
-
- gr.ChatInterface(chat, title="Chatbot Af Soomaali").launch()
+ # Somali Chatbot with Translation + English Chatbot using Gradio
+
  import gradio as gr
+ from transformers import AutoTokenizer, AutoModelForSeq2SeqLM, pipeline, AutoModelForCausalLM
+ import torch
+
+ # Load Translation Model: facebook/nllb-200-3.3B
+ trans_model = "facebook/nllb-200-3.3B"
+ trans_tokenizer = AutoTokenizer.from_pretrained(trans_model)
+ trans_model_loaded = AutoModelForSeq2SeqLM.from_pretrained(trans_model)
+
+ def translate(text, src_lang, tgt_lang):
+     translator = pipeline("translation",
+                           model=trans_model_loaded,
+                           tokenizer=trans_tokenizer,
+                           src_lang=src_lang,
+                           tgt_lang=tgt_lang,
+                           max_length=512)
+     return translator(text)[0]['translation_text']
+
+ # Load Chatbot Model: meta-llama/Meta-Llama-3-8B-Instruct
+ chatbot_model_name = "meta-llama/Meta-Llama-3-8B-Instruct"
+ chatbot_tokenizer = AutoTokenizer.from_pretrained(chatbot_model_name)
+ chatbot_model = AutoModelForCausalLM.from_pretrained(
+     chatbot_model_name, torch_dtype=torch.float16, device_map="auto")
+
+ def ask_chatbot(prompt):
+     input_ids = chatbot_tokenizer.encode(prompt, return_tensors="pt").to(chatbot_model.device)
+     with torch.no_grad():
+         output = chatbot_model.generate(input_ids, max_new_tokens=256, do_sample=True, temperature=0.7)
+     return chatbot_tokenizer.decode(output[0], skip_special_tokens=True)
+
+ # Full pipeline function
+ def somali_chatbot_pipeline(somali_input):
+     # Step 1: Somali -> English
+     english_input = translate(somali_input, src_lang="som_Latn", tgt_lang="eng_Latn")
+
+     # Step 2: Ask English chatbot
+     chatbot_response = ask_chatbot(english_input)
+
+     # Step 3: English -> Somali
+     somali_output = translate(chatbot_response, src_lang="eng_Latn", tgt_lang="som_Latn")
+
+     return somali_output
+
+ # Gradio Interface (with submit button)
+ def build_interface():
+     with gr.Blocks() as demo:
+         gr.Markdown("""
+         # 🤖 Chatbot Soomaaliyeed
+         Qor su'aal Af Soomaali ah. Nidaamku wuxuu si qarsoodi ah ugu rogaa luqadda Ingiriisiga, ka dib wuxuu weydiiyaa chatbot English ah, kadibna jawaabta wuxuu kuu soo celinayaa Af Soomaali.
+         """)
+
+         with gr.Row():
+             somali_input = gr.Textbox(label="Su’aashaada Af Soomaali", placeholder="Maxaa keena macaanka?", lines=3)
+         with gr.Row():
+             submit_btn = gr.Button("Dir Su’aasha")
+         with gr.Row():
+             somali_output = gr.Textbox(label="Jawaabta Chatbot-ka (Af Soomaali)", lines=5)
+
+         submit_btn.click(fn=somali_chatbot_pipeline, inputs=somali_input, outputs=somali_output)
+
+     return demo
+
+ if __name__ == "__main__":
+     iface = build_interface()
+     iface.launch()
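
The new app.py chains two models: NLLB-200 translates the Somali question into English, Meta-Llama-3-8B-Instruct answers in English, and NLLB-200 translates the answer back into Somali. A minimal sketch for exercising that chain outside the Gradio UI is shown below; it assumes app.py is importable from the working directory, that access to the gated meta-llama/Meta-Llama-3-8B-Instruct checkpoint has been granted, and that enough GPU memory is available to hold both models (importing app.py loads them at module level).

# Hypothetical smoke test for the translate -> chat -> translate chain in app.py.
# Importing app runs its module-level code, i.e. both models are downloaded and loaded.
from app import translate, ask_chatbot, somali_chatbot_pipeline

question = "Maxaa keena macaanka?"  # sample question reused from the UI placeholder

# Step by step, mirroring somali_chatbot_pipeline:
english_question = translate(question, src_lang="som_Latn", tgt_lang="eng_Latn")
english_answer = ask_chatbot(english_question)
somali_answer = translate(english_answer, src_lang="eng_Latn", tgt_lang="som_Latn")
print(somali_answer)

# Or equivalently, the single wrapper call wired to the Gradio button:
print(somali_chatbot_pipeline(question))

Note that device_map="auto" relies on the accelerate package, and the NLLB tokenizer will likely need sentencepiece, so both should be listed in the Space's requirements.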