import gradio as gr
import tempfile
from datetime import datetime
# ---------------------------------------------------------------------------
# Mode Handlers (replace with your real intelligence pipelines)
# ---------------------------------------------------------------------------
def handle_threat_intel(message, files):
    """Placeholder threat-intel pipeline: echoes the request and any attached files."""
    out = f"[Threat Intel] Processed: {message}"
    if files:
        # Gradio may pass file objects or plain paths depending on version; handle both.
        out += f" | Files: {[getattr(f, 'name', f) for f in files]}"
    return out

def handle_translation(message, files):
    """Placeholder translation pipeline."""
    out = f"[Translation] Interpreted: {message}"
    if files:
        out += " | (Files attached for context)"
    return out

def handle_marketplace_watch(message, files):
    """Placeholder marketplace-monitoring pipeline (files are accepted but unused)."""
    return f"[Marketplace Watch] Monitoring request: {message}"

def handle_analyst_tools(message, files):
    """Placeholder analyst-tools pipeline (files are accepted but unused)."""
    return f"[Analyst Tools] Action: {message}"
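# A production handler would call the real pipeline instead of echoing. Rough sketch with
# hypothetical names (intel_client, extract_iocs, and format_report are not defined here):
#
#   def handle_threat_intel(message, files):
#       indicators = extract_iocs(message)
#       report = intel_client.lookup(indicators=indicators)
#       return format_report(report)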
# Router for mode → handler mapping
MODE_ROUTER = {
    "Threat Intel": handle_threat_intel,
    "Translation": handle_translation,
    "Marketplace Watch": handle_marketplace_watch,
    "Analyst Tools": handle_analyst_tools,
}
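# Adding a mode: define a handler with the same (message, files) signature, register it
# here, and list the same name in the Radio choices below, e.g. (hypothetical):
#   MODE_ROUTER["Dark Web Watch"] = handle_dark_web_watch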
# ---------------------------------------------------------------------------
# Core message processing
# ---------------------------------------------------------------------------
def process_message(message, files, history, mode):
    """Route the request to the handler for the selected mode and append the exchange."""
    if not message and not files:
        return history, history
    handler = MODE_ROUTER.get(mode, handle_threat_intel)
    response = handler(message or "(no text, files only)", files)
    user_label = f"{mode}: {message or '[files only]'}"
    history = history + [(user_label, response)]
    return history, history
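# History is kept as a list of (user, assistant) tuples, the classic gr.Chatbot tuple
# format; returning the same list twice updates both the visible Chatbot and the
# gr.State copy wired up below.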
def clear_chat():
    return [], []
def download_chat(history):
    """Write the chat transcript to a temp file and return its path."""
    if not history:
        content = "No conversation yet.\n"
    else:
        lines = [f"Transcript generated: {datetime.now():%Y-%m-%d %H:%M:%S}", ""]
        for user_msg, bot_msg in history:
            lines.append(f"[User]: {user_msg}")
            lines.append(f"[System]: {bot_msg}")
            lines.append("")
        content = "\n".join(lines)
    # delete=False so the file survives for Gradio to serve after this handler returns.
    with tempfile.NamedTemporaryFile(
        mode="w", encoding="utf-8", suffix=".txt", delete=False
    ) as tmp:
        tmp.write(content)
        return tmp.name
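# The gr.File output component below accepts a plain filepath string, so returning the
# temp file's path is enough for Gradio to expose the transcript as a download.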
# ---------------------------------------------------------------------------
# Mobile-first UI
# ---------------------------------------------------------------------------
with gr.Blocks(title="Threat Landscape Intelligence Console") as demo:
    gr.Markdown("## 🛰 APJ Threat Intelligence Console")
    chat_state = gr.State([])

    # Mode selector
    mode = gr.Radio(
        ["Threat Intel", "Translation", "Marketplace Watch", "Analyst Tools"],
        value="Threat Intel",
        label="Mode",
        interactive=True,
    )

    # Chat window (mobile-friendly)
    chat = gr.Chatbot(
        label="Dialogue",
        height=430,
    )

    # Collapsible utility section
    with gr.Accordion("Attachments & Utilities", open=False):
        file_input = gr.File(label="Upload files", file_count="multiple")
        with gr.Row(variant="compact"):
            clear_btn = gr.Button("Clear Chat", variant="secondary")
            download_btn = gr.Button("Download Transcript", variant="secondary")
        download_file = gr.File(label="Transcript File", interactive=False)

    # Input bar
    with gr.Row(variant="compact"):
        user_input = gr.Textbox(
            placeholder="Type your message…",
            label="",
            scale=5,
        )
        send_btn = gr.Button("Send", variant="primary", scale=1)
    # Event wiring
    send_btn.click(
        fn=process_message,
        inputs=[user_input, file_input, chat_state, mode],
        outputs=[chat, chat_state],
    ).then(lambda: "", None, user_input)  # clear the textbox after sending
    user_input.submit(
        fn=process_message,
        inputs=[user_input, file_input, chat_state, mode],
        outputs=[chat, chat_state],
    ).then(lambda: "", None, user_input)
    clear_btn.click(fn=clear_chat, outputs=[chat, chat_state])
    download_btn.click(fn=download_chat, inputs=chat_state, outputs=download_file)
# Launch for local dev or Spaces
if __name__ == "__main__":
    demo.launch()
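    # On Hugging Face Spaces this plain launch() is sufficient; for local development,
    # options such as share=True (temporary public link) can be passed if needed.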