🐛 Bug: Fix the bug that prevents the OpenAI API key from being used normally in zed.
yym68686 committed Sep 5, 2024
1 parent dac9c70 commit 60e7a94
Showing 1 changed file with 5 additions and 5 deletions.
response.py: 10 changes (5 additions & 5 deletions)
@@ -32,7 +32,7 @@ async def generate_sse_response(timestamp, model, content=None, tools_id=None, f
json_data = json.dumps(sample_data, ensure_ascii=False)

# Build the SSE response
- sse_response = f"data: {json_data}\n\r"
+ sse_response = f"data: {json_data}\n\r\n"

return sse_response

@@ -90,7 +90,7 @@ async def fetch_gemini_response_stream(client, url, headers, payload, model):
function_full_response = json.dumps(function_call["functionCall"]["args"])
sse_string = await generate_sse_response(timestamp, model, content=None, tools_id="chatcmpl-9inWv0yEtgn873CxMBzHeCeiHctTV", function_call_name=None, function_call_content=function_full_response)
yield sse_string
yield "data: [DONE]\n\r"
yield "data: [DONE]\n\r\n"

async def fetch_vertex_claude_response_stream(client, url, headers, payload, model):
timestamp = datetime.timestamp(datetime.now())
@@ -137,7 +137,7 @@ async def fetch_vertex_claude_response_stream(client, url, headers, payload, mod
function_full_response = json.dumps(function_call["input"])
sse_string = await generate_sse_response(timestamp, model, content=None, tools_id=function_call_id, function_call_name=None, function_call_content=function_full_response)
yield sse_string
yield "data: [DONE]\n\r"
yield "data: [DONE]\n\r\n"

async def fetch_gpt_response_stream(client, url, headers, payload, max_redirects=5):
redirect_count = 0
@@ -174,7 +174,7 @@ async def fetch_gpt_response_stream(client, url, headers, payload, max_redirects
line, buffer = buffer.split("\n", 1)
# logger.info("line: %s", repr(line))
if line and line != "data: " and line != "data:" and not line.startswith(": "):
- yield line + "\n\r"
+ yield line.strip() + "\n\r\n"
except httpx.RemoteProtocolError as e:
yield {"error": f"fetch_gpt_response_stream RemoteProtocolError {e.__class__.__name__}", "details": str(e)}
return
@@ -236,7 +236,7 @@ async def fetch_claude_response_stream(client, url, headers, payload, model):
function_call_content = delta["partial_json"]
sse_string = await generate_sse_response(timestamp, model, None, None, None, function_call_content)
yield sse_string
yield "data: [DONE]\n\r"
yield "data: [DONE]\n\r\n"

async def fetch_response(client, url, headers, payload):
response = await client.post(url, headers=headers, json=payload)
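Why the extra "\n" matters: SSE clients generally treat a blank line as the end of an event. The sketch below is not code from this repository; it assumes a line-based parser (similar in spirit to what an OpenAI-compatible client such as Zed might use) that splits the stream on "\n" and strips a trailing "\r". Under that assumption, a chunk ending in "\n\r" never completes an event, while "\n\r\n" does.

# Minimal sketch (assumption, not code from this commit) of a line-based SSE
# consumer. It splits incoming text on "\n" and strips a trailing "\r", so a
# blank line is what marks the end of an event.
def parse_sse_events(chunks):
    buffer = ""
    data_lines = []
    for chunk in chunks:
        buffer += chunk
        while "\n" in buffer:
            line, buffer = buffer.split("\n", 1)
            line = line.rstrip("\r")
            if line == "":
                # Blank line: dispatch the accumulated event, if any.
                if data_lines:
                    yield "\n".join(data_lines)
                    data_lines = []
            elif line.startswith("data:"):
                data_lines.append(line[len("data:"):].lstrip())

# Old terminator: the lone "\r" never ends a line, so no event is dispatched.
print(list(parse_sse_events(['data: {"id": 1}\n\r'])))    # []
# Fixed terminator: "\r\n" reads as a blank line and the event is dispatched.
print(list(parse_sse_events(['data: {"id": 1}\n\r\n'])))  # ['{"id": 1}']

Under that assumption, every "\n\r" terminator in response.py would leave such a client waiting for a blank line that never arrives, which would match the symptom described in the commit message.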
