支持ghcproxy
This commit is contained in:
@@ -166,7 +166,7 @@ async def _raw_stream_from_llm(messages: List[ChatMessage], settings: Settings,
Yields raw byte chunks as received.
"""
headers = { "Authorization": f"Bearer {settings.REAL_LLM_API_KEY}", "Content-Type": "application/json" }
payload = { "model": "default-model", "messages": [msg.model_dump() for msg in messages], "stream": True }
payload = { "model": "gpt-4.1", "messages": [msg.model_dump() for msg in messages], "stream": True }
# Log the request payload to the database
update_request_log(log_id, llm_request=payload)
Reference in New Issue
Block a user