# gui.py
import gradio as gr
from rag_engine import ask_question, build_index, stream_answer
from config import PORT
import time
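# Expected interfaces (inferred from how they are used below; the real definitions
# live in rag_engine.py / config.py):
#   build_index(path: str) -> str        # build a vector index from a local file, return a status message
#   stream_answer(question: str)         # generator yielding answer tokens for streaming display
#   ask_question(question: str)          # non-streaming variant (imported but not used in this file)
#   PORT                                 # Gradio server port, e.g. PORT = 7860 (example value is an assumption)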
chat_history = []  # currently unused; conversation state is held by the Chatbot component
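# Build (or rebuild) the document index from the uploaded file.
# Returns (status_message, ""); the empty second value clears the question textbox.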
def handle_upload(file):
    if file is None:
        return "❌ Please upload a file", ""
    try:
        result = build_index(file.name)  # file.name is the local path of the uploaded file
        return result, ""
    except Exception as e:
        return f"❌ Failed to build the index: {str(e)}", ""
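# Streaming chat handler: appends the question to the history, then yields
# ("", history) after every token so the Chatbot updates incrementally.
# It is a generator, which is why demo.queue() is enabled at launch below.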
def handle_chat(message, history):
    history = history or []
    if not message.strip():
        yield "", history
        return
    try:
        # Stream the response token by token
        history.append((message, ""))
        full_response = ""
        # Get the streaming answer generator
        answer_generator = stream_answer(message)
        # Append each token to the latest chat entry
        for token in answer_generator:
            full_response += token
            history[-1] = (message, full_response)
            yield "", history
            time.sleep(0.02)  # small delay to smooth the streamed output
        # Brief pause once the stream has finished
        time.sleep(0.1)
        yield "", history
    except Exception as e:
        history.append((message, f"⚠️ Error: {str(e)}"))
        yield "", history
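# Gradio UI: file upload and index building on the left, chat on the right.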
with gr.Blocks(title="RAG Document Q&A System") as demo:
    gr.Markdown("## 🤖 AXERA RAG Document Q&A\nUpload a PDF or TXT file, then ask a question")
    with gr.Row():
        with gr.Column(scale=1):
            file_input = gr.File(label="📄 Upload file", file_types=[".pdf", ".txt"])
            upload_btn = gr.Button("📥 Upload and build index")
            upload_status = gr.Textbox(label="", interactive=False)
        with gr.Column(scale=2):
            chatbot = gr.Chatbot(height=400, label="🧠 Q&A chat")
            with gr.Row():
                message = gr.Textbox(placeholder="Type your question, press Shift + Enter to send", show_label=False, lines=2)
                send_btn = gr.Button("🚀 Send")
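    # Wire up events: uploading builds the index and clears the textbox; both the
    # Send button and the textbox's submit event route to the streaming chat handler.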
    upload_btn.click(fn=handle_upload, inputs=[file_input], outputs=[upload_status, message])
    send_btn.click(fn=handle_chat, inputs=[message, chatbot], outputs=[message, chatbot])
    message.submit(fn=handle_chat, inputs=[message, chatbot], outputs=[message, chatbot])
# Enable the queue (required for generator-based streaming handlers) and launch
demo.queue().launch(server_port=PORT)
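# Typical launch (assuming rag_engine.py and config.py sit next to this file):
#   python gui.py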