diff --git a/request_llms/oai_std_model_template.py b/request_llms/oai_std_model_template.py
index a5da92ea6..9b6a1c4ce 100644
--- a/request_llms/oai_std_model_template.py
+++ b/request_llms/oai_std_model_template.py
@@ -252,9 +252,9 @@ def predict_no_ui_long_connection(
                 logger.error(error_msg)
                 raise RuntimeError("Json解析不合常规")
         if reasoning:
-            return f'''<div style="padding: 1em; line-height: 1.5; text-wrap: wrap; opacity: 0.8">
-{''.join([f'<p>{line}</p>' for line in reasoning_buffer.split('\n')])}
-</div>\n\n''' + result
+            style = 'padding: 1em; line-height: 1.5; text-wrap: wrap; opacity: 0.8'
+            paragraphs = ''.join([f'<p>{line}</p>' for line in reasoning_buffer.split('\n')])
+            return f'''<div style="{style}">{paragraphs}</div>\n\n''' + result
         return result

     def predict(
@@ -390,9 +390,9 @@ def predict(
                 if reasoning:
                     gpt_replying_buffer += response_text
                     gpt_reasoning_buffer += reasoning_content
-                    history[-1] = f'''<div style="padding: 1em; line-height: 1.5; text-wrap: wrap; opacity: 0.8">
-{''.join([f'<p>{line}</p>' for line in gpt_reasoning_buffer.split('\n')])}
-</div>\n\n''' + gpt_replying_buffer
+                    style = 'padding: 1em; line-height: 1.5; text-wrap: wrap; opacity: 0.8'
+                    paragraphs = ''.join([f'<p>{line}</p>' for line in gpt_reasoning_buffer.split('\n')])
+                    history[-1] = f'<div style="{style}">{paragraphs}</div>\n\n' + gpt_replying_buffer
                 else:
                     gpt_replying_buffer += response_text
                 # 如果这里抛出异常,一般是文本过长,详情见get_full_error的输出
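
Both hunks apply the same refactor: the inline triple-quoted f-string that wrapped the model's reasoning in a styled block is split into a `style` string and a `paragraphs` join before the final return/assignment. The sketch below reproduces that rendering logic as a standalone function, assuming a `<p>` wrapper per reasoning line; the helper name `render_reasoning` is hypothetical and does not exist in the repository, where the same expressions appear inline in `predict()` and `predict_no_ui_long_connection()`.

```python
# Hypothetical standalone sketch of the rendering logic shown in the diff.
def render_reasoning(reasoning_buffer: str, result: str) -> str:
    # Dimmed, line-wrapped container for the chain-of-thought text.
    style = 'padding: 1em; line-height: 1.5; text-wrap: wrap; opacity: 0.8'
    # Wrap every reasoning line in its own <p> so line breaks survive HTML rendering.
    paragraphs = ''.join([f'<p>{line}</p>' for line in reasoning_buffer.split('\n')])
    # Reasoning block first, then a blank line, then the actual answer.
    return f'<div style="{style}">{paragraphs}</div>\n\n' + result


if __name__ == '__main__':
    print(render_reasoning('Step 1: parse the prompt.\nStep 2: draft the answer.', 'Final answer.'))
```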