Commit e1760911 authored by 于飞

换行问题恢复调试

parent eb02c241
......@@ -533,10 +533,7 @@ async def stream_generator(chat, incremental: bool, model_name: str):
if chunk:
msg = chunk.replace("\ufffd", "")
if incremental:
incremental_output = msg[len(previous_response):].replace("\n", "\\n")
# 处理连续换行符
incremental_output = incremental_output.replace("\n\n", "\\n\\n")
incremental_output = msg[len(previous_response):]
choice_data = ChatCompletionResponseStreamChoice(
index=0,
delta=DeltaMessage(role="assistant", content=incremental_output),
......@@ -544,7 +541,9 @@ async def stream_generator(chat, incremental: bool, model_name: str):
chunk = ChatCompletionStreamResponse(
id=chat.chat_session_id, choices=[choice_data], model=model_name
)
json_chunk = model_to_json(chunk, exclude_unset=True, ensure_ascii=False)
json_chunk = model_to_json(
chunk, exclude_unset=True, ensure_ascii=False
)
yield f"data: {json_chunk}\n\n"
else:
# TODO generate an openai-compatible streaming responses
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment