@@ -78,11 +78,11 @@ async def h5_chat_stream(request: Request, inp: ChatIn):
         # For multi-scenic comparison, extract the names of all scenic spots mentioned
         scenics = await extract_multi_scenic(all_messages)

-        # if spot:
-        #     # Use a thread pool to run the synchronous function asynchronously
-        #     loop = asyncio.get_event_loop()
-        #     with ThreadPoolExecutor() as executor:
-        #         knowledge_task = loop.run_in_executor(executor, fetch_and_parse_markdown, user_id, inp.message)
+        if spot:
+            # Use a thread pool to run the synchronous knowledge-base fetch without blocking the event loop
+            loop = asyncio.get_event_loop()
+            with ThreadPoolExecutor() as executor:
+                knowledge_task = loop.run_in_executor(executor, fetch_and_parse_markdown, user_id, inp.message)

         # Get the first four enabled questions (title and content)
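
The lines enabled above follow a common asyncio pattern: a blocking call is handed to a thread pool via loop.run_in_executor so the coroutine can keep running and await the result later. Below is a minimal, self-contained sketch of that pattern; fetch_knowledge and its arguments are illustrative stand-ins (the real handler uses fetch_and_parse_markdown with user_id and inp.message), and in this sketch the future is awaited before the executor shuts down.

import asyncio
from concurrent.futures import ThreadPoolExecutor

def fetch_knowledge(user_id: str, message: str) -> str:
    # Stand-in for a blocking knowledge-base lookup
    return f"knowledge for {user_id}: {message}"

async def handle(user_id: str, message: str) -> str:
    loop = asyncio.get_event_loop()
    with ThreadPoolExecutor() as executor:
        # Schedule the blocking call on a worker thread; run_in_executor returns an awaitable future
        knowledge_task = loop.run_in_executor(executor, fetch_knowledge, user_id, message)
        # ... other async work (e.g. the main model call) can run here ...
        knowledge = await knowledge_task
    return knowledge

if __name__ == "__main__":
    print(asyncio.run(handle("u1", "西湖有哪些景点")))
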
@@ -121,8 +121,8 @@ async def h5_chat_stream(request: Request, inp: ChatIn):
         else:
             data = await query_flow(request, spot)
             # Wait for the knowledge-base lookup result
-            # if spot:
-            #     knowledge = await knowledge_task
+            if spot:
+                knowledge = await knowledge_task
                 # Append the knowledge only when the result does not contain "无法" (i.e. the lookup did not fail)
                 if knowledge and "无法" not in knowledge:
                     data += "\n\n知识库查询到的景区内容:" + knowledge

@@ -139,8 +139,8 @@ async def h5_chat_stream(request: Request, inp: ChatIn):
             # If only one scenic spot was extracted, handle it as a single-scenic query
             data = await query_flow(request, scenics[0])
             # Wait for the knowledge-base lookup result
-            # if scenics[0]:
-            #     knowledge = await knowledge_task
+            if scenics[0]:
+                knowledge = await knowledge_task
                 # Append the knowledge only when the result does not contain "无法" (i.e. the lookup did not fail)
                 if knowledge and "无法" not in knowledge:
                     data += "\n\n知识库查询到的景区内容:" + knowledge

@@ -153,8 +153,8 @@ async def h5_chat_stream(request: Request, inp: ChatIn):
                 yield chunk
         else:
             # Wait for the knowledge-base lookup result
-            # if spot:
-            #     knowledge = await knowledge_task
+            if spot:
+                knowledge = await knowledge_task
                 if knowledge and "无法" not in knowledge:
                     inp.message += "\n\n知识库查询到的景区内容:" + knowledge
             async for chunk in ai_chat_stream(inp, conversation_history):
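
Each branch above guards the concatenation the same way: the knowledge-base text is appended only when the lookup result does not contain the failure marker "无法". A small sketch of that guard, with merge_knowledge as an illustrative helper name rather than anything in the project:

from typing import Optional

FAILURE_MARKER = "无法"

def merge_knowledge(message: str, knowledge: Optional[str]) -> str:
    # Append the knowledge-base content only when a non-empty, non-failure result came back
    if knowledge and FAILURE_MARKER not in knowledge:
        return message + "\n\n知识库查询到的景区内容:" + knowledge
    return message

assert merge_knowledge("介绍一下西湖", "无法找到相关内容") == "介绍一下西湖"
assert merge_knowledge("介绍一下西湖", None) == "介绍一下西湖"
assert "知识库查询到的景区内容" in merge_knowledge("介绍一下西湖", "西湖全天开放")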