author    H Lohaus <hlohaus@users.noreply.github.com>  2024-03-28 17:17:59 +0100
committer GitHub <noreply@github.com>                  2024-03-28 17:17:59 +0100
commit    64e07b7fbf810176d66506786a946a3122ea7fc4 (patch)
tree      1cf10ab4f117583fdb4a98712c18052e5a42cdf2 /g4f/Provider/needs_auth
parent    Merge pull request #1758 from Zero6992/main (diff)
parent    Fix history support for OpenaiChat (diff)
Diffstat (limited to 'g4f/Provider/needs_auth')
-rw-r--r--  g4f/Provider/needs_auth/OpenaiChat.py  11
1 file changed, 5 insertions(+), 6 deletions(-)
diff --git a/g4f/Provider/needs_auth/OpenaiChat.py b/g4f/Provider/needs_auth/OpenaiChat.py
index 72f9f224..396d73dd 100644
--- a/g4f/Provider/needs_auth/OpenaiChat.py
+++ b/g4f/Provider/needs_auth/OpenaiChat.py
@@ -389,19 +389,17 @@ class OpenaiChat(AsyncGeneratorProvider, ProviderModelMixin):
                     print(f"{e.__class__.__name__}: {e}")

             model = cls.get_model(model).replace("gpt-3.5-turbo", "text-davinci-002-render-sha")
-            fields = Conversation() if conversation is None else copy(conversation)
+            fields = Conversation(conversation_id, parent_id) if conversation is None else copy(conversation)
             fields.finish_reason = None
             while fields.finish_reason is None:
-                conversation_id = conversation_id if fields.conversation_id is None else fields.conversation_id
-                parent_id = parent_id if fields.message_id is None else fields.message_id
                 websocket_request_id = str(uuid.uuid4())
                 data = {
                     "action": action,
                     "conversation_mode": {"kind": "primary_assistant"},
                     "force_paragen": False,
                     "force_rate_limit": False,
-                    "conversation_id": conversation_id,
-                    "parent_message_id": parent_id,
+                    "conversation_id": fields.conversation_id,
+                    "parent_message_id": fields.message_id,
                     "model": model,
                     "history_and_training_disabled": history_disabled and not auto_continue and not return_conversation,
                     "websocket_request_id": websocket_request_id
@@ -425,6 +423,7 @@ class OpenaiChat(AsyncGeneratorProvider, ProviderModelMixin):
                     await raise_for_status(response)
                     async for chunk in cls.iter_messages_chunk(response.iter_lines(), session, fields):
                         if return_conversation:
+                            history_disabled = False
                             return_conversation = False
                             yield fields
                         yield chunk
@@ -432,7 +431,7 @@ class OpenaiChat(AsyncGeneratorProvider, ProviderModelMixin):
                     break
                 action = "continue"
                 await asyncio.sleep(5)
-            if history_disabled and auto_continue and not return_conversation:
+            if history_disabled and auto_continue:
                 await cls.delete_conversation(session, cls._headers, fields.conversation_id)

     @staticmethod
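
The first hunk stops tracking conversation_id/parent_id in local variables and instead seeds and reads them on the Conversation object (fields), so the values survive across "continue" rounds of the while loop and are returned to the caller. Below is a minimal, hypothetical sketch of that state holder, inferred only from the constructor call and the attributes used in this diff; the real Conversation class is defined elsewhere in OpenaiChat.py and may carry more state.

# Hypothetical sketch of the conversation state holder, inferred from this diff only.
# The real Conversation class in g4f/Provider/needs_auth/OpenaiChat.py may differ.
class Conversation:
    def __init__(self, conversation_id: str = None, message_id: str = None):
        self.conversation_id = conversation_id  # sent as "conversation_id" in the request payload
        self.message_id = message_id            # sent as "parent_message_id" in the request payload
        self.finish_reason = None               # the while loop runs until a finish reason is set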
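The second hunk switches history_disabled off once the caller has asked for the conversation back (return_conversation), and the last hunk only deletes the server-side conversation when history is disabled and auto_continue was used, so a returned conversation can actually be reused for a follow-up turn. The sketch below is a hypothetical caller; it assumes the hunks live in the provider's create_async_generator and that return_conversation / conversation are accepted as keyword arguments, as the variable names above suggest. It is not a documented public API, and the provider still needs valid ChatGPT credentials (needs_auth).

import asyncio
from g4f.Provider.needs_auth.OpenaiChat import OpenaiChat

async def main():
    conversation = None
    # First turn: ask the provider to yield its Conversation object before the text chunks.
    async for chunk in OpenaiChat.create_async_generator(
        model="gpt-3.5-turbo",
        messages=[{"role": "user", "content": "Hello"}],
        return_conversation=True,
    ):
        if conversation is None and not isinstance(chunk, str):
            conversation = chunk  # the yielded Conversation carries conversation_id / message_id
        else:
            print(chunk, end="")

    # Second turn: pass the Conversation back so the same server-side history is continued.
    async for chunk in OpenaiChat.create_async_generator(
        model="gpt-3.5-turbo",
        messages=[{"role": "user", "content": "Summarize what I just said."}],
        conversation=conversation,
    ):
        print(chunk, end="")

asyncio.run(main())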