Skip to content
Projects
Groups
Snippets
Help
Loading...
Help
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
B
bme-mcp
Project
Project
Details
Activity
Releases
Cycle Analytics
Repository
Repository
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Charts
Issues
0
Issues
0
List
Board
Labels
Milestones
Merge Requests
0
Merge Requests
0
CI / CD
CI / CD
Pipelines
Jobs
Schedules
Charts
Wiki
Wiki
Snippets
Snippets
Members
Members
Collapse sidebar
Close sidebar
Activity
Graph
Charts
Create a new issue
Jobs
Commits
Issue Boards
Open sidebar
何家明
bme-mcp
Commits
a5715884
Commit
a5715884
authored
May 15, 2025
by
何家明
Browse files
Options
Browse Files
Download
Email Patches
Plain Diff
添加默认问题回答
parent
7951de2c
Changes
2
Show whitespace changes
Inline
Side-by-side
Showing
2 changed files
with
87 additions
and
71 deletions
+87
-71
api.py
api.py
+0
-17
client.py
client/client.py
+87
-54
No files found.
api.py
View file @
a5715884
from
collections
import
Counter
from
collections
import
Counter
from
contextlib
import
asynccontextmanager
from
contextlib
import
asynccontextmanager
from
datetime
import
datetime
import
uvicorn
import
uvicorn
from
fastapi
import
FastAPI
,
Request
,
APIRouter
from
fastapi
import
FastAPI
,
Request
,
APIRouter
...
@@ -13,7 +12,6 @@ from config.system import config
...
@@ -13,7 +12,6 @@ from config.system import config
from
model.entity.AiChatPermissionEntity
import
AiChatPermissionEntity
from
model.entity.AiChatPermissionEntity
import
AiChatPermissionEntity
from
model.entity.AiChatRecommendQuestionEntity
import
AiChatRecommendQuestionEntity
from
model.entity.AiChatRecommendQuestionEntity
import
AiChatRecommendQuestionEntity
from
model.entity.AiChatRecordEntity
import
AiChatRecordEntity
from
model.entity.AiChatRecordEntity
import
AiChatRecordEntity
from
model.param.AiChatRecordParam
import
AiChatRecordParam
from
model.vo.ResultVo
import
ResultVo
from
model.vo.ResultVo
import
ResultVo
...
@@ -96,21 +94,6 @@ async def chat(request: Request, message: str):
...
@@ -96,21 +94,6 @@ async def chat(request: Request, message: str):
media_type
=
"text/event-stream"
,
headers
=
{
"Cache-Control"
:
"no-cache"
})
media_type
=
"text/event-stream"
,
headers
=
{
"Cache-Control"
:
"no-cache"
})
@router.post("/chat/record", description="记录AI回答")
async def record_chat(chat_record_param: AiChatRecordParam):
    """Persist a completed AI answer onto its chat-record row.

    Locates the AiChatRecordEntity row by the id carried in the request
    body and writes the answer text together with the current local
    timestamp, then returns an empty success envelope.
    """
    record_filter = {"id": chat_record_param.id}
    updated_fields = {
        "answer": chat_record_param.answer,
        "answer_time": datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
    }
    await db_util.update(AiChatRecordEntity, record_filter, updated_fields)
    return ResultVo()
if
__name__
==
"__main__"
:
if
__name__
==
"__main__"
:
cors
=
config
.
get
(
"cors"
,
{})
cors
=
config
.
get
(
"cors"
,
{})
api
.
add_middleware
(
CORSMiddleware
,
allow_origins
=
cors
.
get
(
"allow_origins"
,
[
"*"
]),
api
.
add_middleware
(
CORSMiddleware
,
allow_origins
=
cors
.
get
(
"allow_origins"
,
[
"*"
]),
...
...
client/client.py
View file @
a5715884
...
@@ -15,9 +15,24 @@ from pydantic import AnyUrl
...
@@ -15,9 +15,24 @@ from pydantic import AnyUrl
from
config.database
import
db_util
from
config.database
import
db_util
from
config.logger
import
logger
from
config.logger
import
logger
from
config.system
import
config
from
config.system
import
config
from
model.entity.AiChatRecommendQuestionEntity
import
AiChatRecommendQuestionEntity
from
model.entity.AiChatRecordEntity
import
AiChatRecordEntity
from
model.entity.AiChatRecordEntity
import
AiChatRecordEntity
async def record_answer(_id, _completion_answer):
    """Asynchronously store the finished AI answer on its chat record.

    Invoked after streaming completes; stamps the row identified by
    *_id* with the full answer text and the current local time.
    """
    answered_at = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    await db_util.update(
        AiChatRecordEntity,
        {"id": _id},
        {"answer": _completion_answer, "answer_time": answered_at},
    )
class
McpClient
:
class
McpClient
:
def
__init__
(
self
):
def
__init__
(
self
):
self
.
customer_resource
:
[]
=
None
# 客户资源
self
.
customer_resource
:
[]
=
None
# 客户资源
...
@@ -132,6 +147,20 @@ class McpClient:
...
@@ -132,6 +147,20 @@ class McpClient:
chat_record_entity
=
await
db_util
.
add
(
chat_record_entity
)
chat_record_entity
=
await
db_util
.
add
(
chat_record_entity
)
logger
.
info
(
f
"--> user origin query, message: {message}, customer_id: {customer_id}"
)
logger
.
info
(
f
"--> user origin query, message: {message}, customer_id: {customer_id}"
)
db_question
=
await
db_util
.
get_by_filter
(
AiChatRecommendQuestionEntity
,
AiChatRecommendQuestionEntity
.
question
.
__eq__
(
message
),
)
if
db_question
and
len
(
db_question
)
>
0
and
(
default_answer
:
=
db_question
[
0
]
.
default_answer
):
# 固定问题,模拟AI回答
step
=
2
# 两个字符两个字符的输出
for
i
in
range
(
0
,
len
(
default_answer
),
step
):
await
asyncio
.
sleep
(
0.05
)
# 50毫秒延迟
yield
json
.
dumps
({
"content"
:
default_answer
[
i
:
i
+
step
]})
yield
"[DONE]"
asyncio
.
create_task
(
record_answer
(
chat_record_entity
.
id
,
default_answer
))
else
:
messages
=
[
messages
=
[
{
"role"
:
"system"
,
"content"
:
self
.
default_system_prompt
},
{
"role"
:
"system"
,
"content"
:
self
.
default_system_prompt
},
{
"role"
:
"system"
,
"content"
:
self
.
deal_customer_permission
(
customer_id
)},
{
"role"
:
"system"
,
"content"
:
self
.
deal_customer_permission
(
customer_id
)},
...
@@ -182,11 +211,15 @@ class McpClient:
...
@@ -182,11 +211,15 @@ class McpClient:
tool_choice
=
"auto"
,
tool_choice
=
"auto"
,
stream
=
True
stream
=
True
)
)
completion_answer
=
""
async
for
chunk
in
ai_stream_response
:
async
for
chunk
in
ai_stream_response
:
if
chunk
.
choices
[
0
]
.
finish_reason
==
"stop"
:
if
chunk
.
choices
[
0
]
.
finish_reason
==
"stop"
:
yield
f
"[DONE]-{chat_record_entity.id}"
asyncio
.
create_task
(
record_answer
(
chat_record_entity
.
id
,
completion_answer
))
yield
"[DONE]"
else
:
else
:
yield
json
.
dumps
({
"content"
:
chunk
.
choices
[
0
]
.
delta
.
content
})
content
=
chunk
.
choices
[
0
]
.
delta
.
content
completion_answer
+=
content
yield
json
.
dumps
({
"content"
:
content
})
mcp_client_instance
=
McpClient
()
mcp_client_instance
=
McpClient
()
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment