Update app.py
--- a/app.py
+++ b/app.py
@@ -70,7 +70,7 @@ with st.sidebar:
 
     # Select FM
     option = st.selectbox(
-        "Which foundational model would you like?", ("GPT4", "LLAMA3")
+        "Which foundational model would you like?", ("GPT4", "LLAMA3", "LLAMA2")
     )
 
     # Clear button
@@ -139,16 +139,17 @@ elif uploaded_files:
 
     # Call FM
     content = " ".join(list(result.sentences))
+    custom_prompt = f"""
+    Answer the question: {prompt}
+
+    Use the following information: {content}
+    """
     if option == "GPT4":
         response = call_gpt(prompt, content)
+    elif option == "LLAMA2":
+        response = call_llama2(custom_prompt)
     else:
-        response = call_llama(
-            f"""
-            Answer the question: {prompt}
-
-            Use the following information: {content}
-            """
-        )
+        response = call_llama(custom_prompt)
 
     # Display assistant response in chat message container
     with st.chat_message("assistant"):
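For context: the commit routes the new LLAMA2 option through a call_llama2 helper that is defined elsewhere in app.py and does not appear in this diff. Purely as an illustration of what a helper with that shape could look like, the sketch below wraps the Hugging Face Inference API via huggingface_hub; the model id, token handling, and generation parameters are assumptions, not taken from this repository.

# Illustrative sketch only: the real call_llama2 in app.py is not shown in this diff.
# Assumes the Space calls a hosted Llama-2 model through huggingface_hub's InferenceClient;
# the model id, token source, and generation settings are guesses.
import os

from huggingface_hub import InferenceClient

_LLAMA2_MODEL = "meta-llama/Llama-2-7b-chat-hf"  # assumed checkpoint
_client = InferenceClient(model=_LLAMA2_MODEL, token=os.environ.get("HF_TOKEN"))

def call_llama2(custom_prompt: str) -> str:
    """Send the pre-built prompt to the Llama-2 endpoint and return the generated text."""
    return _client.text_generation(
        custom_prompt,
        max_new_tokens=512,  # assumed generation budget
        temperature=0.2,     # assumed sampling setting
    )

With a helper of this shape, the new elif option == "LLAMA2" branch simply forwards custom_prompt, mirroring how the existing call_llama branch is used after this change.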