
Commit

added debuggin logs for CI
nalbion committed Oct 4, 2023
1 parent 4abed31 commit 8914bb4
Showing 1 changed file with 5 additions and 0 deletions.
pilot/utils/llm_connection.py  (5 additions, 0 deletions)
@@ -310,6 +310,7 @@ def return_result(result_data, lines_printed):
         # Ignore keep-alive new lines
         if line and line != b': OPENROUTER PROCESSING':
             line = line.decode("utf-8")  # decode the bytes to string
+            logger.info(f'##### 1, line: {line}')

             if line.startswith('data: '):
                 line = line[6:]  # remove the 'data: ' prefix
@@ -353,6 +354,8 @@ def return_result(result_data, lines_printed):
             if 'content' in json_line:
                 content = json_line.get('content')
                 if content:
+                    logger.info(f'##### 2, content: {content}')
+                    logger.info(f'##### 3, buffer: {buffer}')
                     buffer += content  # accumulate the data

                     # If you detect a natural breakpoint (e.g., line break or end of a response object), print & count:
@@ -364,6 +367,7 @@ def return_result(result_data, lines_printed):
                         lines_printed += count_lines_based_on_width(buffer, terminal_width)
                         buffer = ""  # reset the buffer

+                    logger.info(f'##### 4, gpt_response: {gpt_response}')
                     gpt_response += content
                     print(content, type='stream', end='', flush=True)

@@ -375,6 +379,7 @@ def return_result(result_data, lines_printed):
     # return return_result({'function_calls': function_calls}, lines_printed)
     logger.info(f'< Response message: {gpt_response}')

+    logger.info(f'##### 5, expecting_json: {expecting_json}')
     if expecting_json:
         gpt_response = clean_json_response(gpt_response)
         assert_json_schema(gpt_response, expecting_json)
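
For context, the sketch below is a minimal, self-contained approximation of the streaming loop that these five numbered debug logs instrument. It is inferred only from the names visible in the diff (buffer, gpt_response, expecting_json, count_lines_based_on_width, clean_json_response, assert_json_schema, and the custom print(type='stream')); the helpers are replaced with simplified stand-ins, and the function and sample data names (stream_response_sketch, fake_stream) are hypothetical, so this is not the actual pilot/utils/llm_connection.py implementation.

import json
import logging

logger = logging.getLogger(__name__)


def stream_response_sketch(raw_lines, expecting_json=False, terminal_width=80):
    """Simplified stand-in for the streaming loop shown in the diff above."""
    gpt_response = ""
    buffer = ""
    lines_printed = 0

    for line in raw_lines:
        # Ignore keep-alive new lines (mirrors the OPENROUTER check in the diff)
        if line and line != b': OPENROUTER PROCESSING':
            line = line.decode("utf-8")
            logger.info(f'##### 1, line: {line}')  # every decoded stream line

            if line.startswith('data: '):
                line = line[6:]  # remove the 'data: ' prefix
            if line == '[DONE]':
                break

            json_line = json.loads(line)
            content = json_line.get('content')
            if content:
                logger.info(f'##### 2, content: {content}')  # delta carried by this chunk
                logger.info(f'##### 3, buffer: {buffer}')     # text buffered since the last flush
                buffer += content  # accumulate the data

                # Natural breakpoint: flush the buffer and count printed lines
                if buffer.endswith('\n'):
                    lines_printed += max(1, len(buffer) // terminal_width)  # crude count_lines_based_on_width stand-in
                    buffer = ""

                logger.info(f'##### 4, gpt_response: {gpt_response}')  # response so far, before this delta
                gpt_response += content
                print(content, end='', flush=True)  # gpt-pilot uses its own print(type='stream') here

    logger.info(f'##### 5, expecting_json: {expecting_json}')
    if expecting_json:
        gpt_response = gpt_response.strip()  # stand-in for clean_json_response()
        json.loads(gpt_response)             # stand-in for assert_json_schema()
    return gpt_response


if __name__ == '__main__':
    logging.basicConfig(level=logging.INFO)
    fake_stream = [b'data: {"content": "{\\"ok\\": true}"}', b'data: [DONE]']
    stream_response_sketch(fake_stream, expecting_json=True)

Run against a few fake 'data: ' chunks like this, the sketch shows the ordering the CI output should contain: log 1 fires once per raw stream line, logs 2-4 once per content delta, and log 5 once after the stream ends.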
