feat: support Q&A even when no model is configured
parent 5c226a6ba8
commit 947cdab2a6
@@ -122,6 +122,7 @@ class ChatMessageSerializer(serializers.Serializer):
        vector.delete_by_paragraph_id(_value.get('paragraph_id'))

        title, content = (None, None) if paragraph is None else (paragraph.title, paragraph.content)
        _id = str(uuid.uuid1())

        embedding_id, dataset_id, document_id, paragraph_id, source_type, source_id = (_value.get(
            'id'), _value.get(
@@ -130,6 +131,26 @@ class ChatMessageSerializer(serializers.Serializer):
            'paragraph_id'), _value.get(
            'source_type'), _value.get(
            'source_id')) if _value is not None else (None, None, None, None, None, None)

        if chat_model is None:
            def event_block_content(c: str):
                yield 'data: ' + json.dumps({'chat_id': chat_id, 'id': _id, 'operate': paragraph is not None,
                                             'content': c if c is not None else '抱歉,根据已知信息无法回答这个问题,请重新描述您的问题或提供更多信息~'}) + "\n\n"
                chat_info.append_chat_message(
                    ChatMessage(_id, message, title, content, embedding_id, dataset_id, document_id,
                                paragraph_id,
                                source_type,
                                source_id, c, 0,
                                0))
                # reset the cache
                chat_cache.set(chat_id,
                               chat_info, timeout=60 * 30)

            r = StreamingHttpResponse(streaming_content=event_block_content(content),
                                      content_type='text/event-stream;charset=utf-8')

            r['Cache-Control'] = 'no-cache'
            return r
        # get the conversation context
        history_message = chat_info.get_context_message()
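The added branch above is the core of this commit: when no chat model is configured, the answer is taken straight from the matched paragraph (or a canned apology) and streamed back as server-sent events. A rough standalone sketch of the same Django SSE pattern follows; the function and variable names here are illustrative and not taken from the repository:

import json
import uuid

from django.http import StreamingHttpResponse

NO_ANSWER_TEXT = '抱歉,根据已知信息无法回答这个问题,请重新描述您的问题或提供更多信息~'


def sse_fallback_response(chat_id, content):
    """Stream a single SSE frame when the application has no model bound."""
    record_id = str(uuid.uuid1())

    def event_stream():
        payload = {'chat_id': chat_id,
                   'id': record_id,
                   # fall back to the canned apology when nothing was matched
                   'content': content if content is not None else NO_ANSWER_TEXT}
        yield 'data: ' + json.dumps(payload) + "\n\n"

    r = StreamingHttpResponse(streaming_content=event_stream(),
                              content_type='text/event-stream;charset=utf-8')
    r['Cache-Control'] = 'no-cache'
    return r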
@@ -138,8 +159,6 @@ class ChatMessageSerializer(serializers.Serializer):
        # run the conversation against the model
        result_data = chat_model.stream(chat_message)

        _id = str(uuid.uuid1())

        def event_content(response):
            all_text = ''
            try:
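For comparison, the pre-existing model path wraps chat_model.stream(chat_message) in a generator that accumulates the chunks while forwarding each one to the client. A minimal sketch of that accumulation pattern follows; save_answer is a hypothetical callback standing in for the chat-history bookkeeping the serializer does:

import json


def event_content(response, chat_id, record_id, save_answer):
    """Forward streamed model chunks as SSE frames and keep the full text."""
    all_text = ''
    try:
        for chunk in response:
            # LangChain-style chunks expose their text on `.content`
            text = getattr(chunk, 'content', str(chunk))
            all_text += text
            yield 'data: ' + json.dumps({'chat_id': chat_id, 'id': record_id,
                                         'content': text}) + "\n\n"
    finally:
        # persist the accumulated answer even if the stream is interrupted
        save_answer(all_text)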
@@ -93,10 +93,12 @@ class ChatSerializers(serializers.Serializer):
        self.is_valid(raise_exception=True)
        application_id = self.data.get('application_id')
        application = QuerySet(Application).get(id=application_id)
        model = application.model
        model = QuerySet(Model).filter(id=application.model_id).first()
        dataset_id_list = [str(row.dataset_id) for row in
                           QuerySet(ApplicationDatasetMapping).filter(
                               application_id=application_id)]
        chat_model = None
        if model is not None:
            chat_model = ModelProvideConstants[model.provider].value.get_model(model.model_type, model.model_name,
                                                                               json.loads(
                                                                                   decrypt(model.credential)),
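The serializer change above is what makes the model optional: .filter(...).first() returns None instead of raising when the application has no model bound, and get_model is only called when a model row exists. Condensed, the resolution logic looks roughly like this (Model, decrypt and ModelProvideConstants are the repository's own helpers and are assumed to be importable; the function name and the trimmed argument list are illustrative):

import json

from django.db.models import QuerySet


def resolve_chat_model(application):
    """Return a configured chat model, or None when the application has none."""
    # .first() yields None for an unbound model instead of raising
    model = QuerySet(Model).filter(id=application.model_id).first()
    if model is None:
        return None
    credential = json.loads(decrypt(model.credential))
    return ModelProvideConstants[model.provider].value.get_model(
        model.model_type, model.model_name, credential)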
@@ -19,7 +19,6 @@ diskcache = "^5.6.3"
pillow = "9.5.0"
filetype = "^1.2.0"
chardet = "^5.2.0"
torch = "^2.1.0"
sentence-transformers = "^2.2.2"
blinker = "^1.6.3"
openai = "^0.28.1"