summary refs log tree commit diff stats
path: root/g4f
diff options
context:
space:
mode:
Diffstat (limited to 'g4f')
-rw-r--r--  g4f/Provider/needs_auth/OpenaiChat.py  14
-rw-r--r--  g4f/Provider/openai/har_file.py        28
2 files changed, 22 insertions(+), 20 deletions(-)
diff --git a/g4f/Provider/needs_auth/OpenaiChat.py b/g4f/Provider/needs_auth/OpenaiChat.py
index 29e3d4b4..38b97022 100644
--- a/g4f/Provider/needs_auth/OpenaiChat.py
+++ b/g4f/Provider/needs_auth/OpenaiChat.py
@@ -412,7 +412,7 @@ class OpenaiChat(AsyncGeneratorProvider, ProviderModelMixin):
if "proofofwork" in chat_requirements:
proofofwork = generate_proof_token(
**chat_requirements["proofofwork"],
- user_agent=cls._headers["user-agent"],
+ user_agent=cls._headers.get("user-agent"),
proof_token=RequestConfig.proof_token
)
[debug.log(text) for text in (
@@ -439,15 +439,15 @@ class OpenaiChat(AsyncGeneratorProvider, ProviderModelMixin):
messages = messages if conversation_id is None else [messages[-1]]
data["messages"] = cls.create_messages(messages, image_request)
headers = {
- "Accept": "text/event-stream",
- "Content-Type": "application/json",
- "Openai-Sentinel-Chat-Requirements-Token": chat_token,
- **cls._headers
+ **cls._headers,
+ "accept": "text/event-stream",
+ "content-type": "application/json",
+ "openai-sentinel-chat-requirements-token": chat_token,
}
if RequestConfig.arkose_token:
- headers["Openai-Sentinel-Arkose-Token"] = RequestConfig.arkose_token
+ headers["openai-sentinel-arkose-token"] = RequestConfig.arkose_token
if proofofwork is not None:
- headers["Openai-Sentinel-Proof-Token"] = proofofwork
+ headers["openai-sentinel-proof-token"] = proofofwork
if need_turnstile and RequestConfig.turnstile_token is not None:
headers['openai-sentinel-turnstile-token'] = RequestConfig.turnstile_token
async with session.post(
diff --git a/g4f/Provider/openai/har_file.py b/g4f/Provider/openai/har_file.py
index d731bf73..2c7c1604 100644
--- a/g4f/Provider/openai/har_file.py
+++ b/g4f/Provider/openai/har_file.py
@@ -63,26 +63,28 @@ def readHAR():
continue
for v in harFile['log']['entries']:
v_headers = get_headers(v)
- try:
- if "openai-sentinel-proof-token" in v_headers:
- RequestConfig.proof_token = json.loads(base64.b64decode(
- v_headers["openai-sentinel-proof-token"].split("gAAAAAB", 1)[-1].encode()
- ).decode())
- if "openai-sentinel-turnstile-token" in v_headers:
- RequestConfig.turnstile_token = v_headers["openai-sentinel-turnstile-token"]
- except Exception as e:
- debug.log(f"Read proof token: {e}")
if arkose_url == v['request']['url']:
RequestConfig.arkose_request = parseHAREntry(v)
- elif v['request']['url'] == start_url or v['request']['url'].startswith(conversation_url):
+ elif v['request']['url'].startswith(start_url):
try:
match = re.search(r'"accessToken":"(.*?)"', v["response"]["content"]["text"])
if match:
RequestConfig.access_token = match.group(1)
except KeyError:
- continue
- RequestConfig.cookies = {c['name']: c['value'] for c in v['request']['cookies']}
- RequestConfig.headers = v_headers
+ pass
+ try:
+ if "openai-sentinel-proof-token" in v_headers:
+ RequestConfig.headers = v_headers
+ RequestConfig.proof_token = json.loads(base64.b64decode(
+ v_headers["openai-sentinel-proof-token"].split("gAAAAAB", 1)[-1].encode()
+ ).decode())
+ if "openai-sentinel-turnstile-token" in v_headers:
+ RequestConfig.turnstile_token = v_headers["openai-sentinel-turnstile-token"]
+ if "authorization" in v_headers:
+ RequestConfig.access_token = v_headers["authorization"].split(" ")[1]
+ RequestConfig.cookies = {c['name']: c['value'] for c in v['request']['cookies']}
+ except Exception as e:
+ debug.log(f"Error on read headers: {e}")
if RequestConfig.proof_token is None:
raise NoValidHarFileError("No proof_token found in .har files")