Diffstat
-rw-r--r--   g4f/Provider/You.py               | 25
-rw-r--r--   g4f/Provider/openai/har_file.py   |  7
-rw-r--r--   g4f/Provider/you/har_file.py      |  2
3 files changed, 13 insertions, 21 deletions
diff --git a/g4f/Provider/You.py b/g4f/Provider/You.py
index 231f953f..6256cda9 100644
--- a/g4f/Provider/You.py
+++ b/g4f/Provider/You.py
@@ -4,18 +4,12 @@
 import re
 import json
 import base64
 import uuid
-try:
-    from ..requests.curl_cffi import FormData
-    has_curl_cffi = True
-except ImportError:
-    has_curl_cffi = False
 
 from ..typing import AsyncResult, Messages, ImageType, Cookies
 from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
 from .helper import format_prompt
 from ..image import to_bytes, ImageResponse
-from ..requests import StreamSession, raise_for_status
-from ..errors import MissingRequirementsError
+from ..requests import StreamSession, FormData, raise_for_status
 from .you.har_file import get_dfp_telemetry_id
 
@@ -34,7 +28,8 @@ class You(AsyncGeneratorProvider, ProviderModelMixin):
         "claude-3-opus",
         "claude-3-sonnet",
         "gemini-pro",
-        "zephyr"
+        "zephyr",
+        "dall-e",
     ]
     model_aliases = {
         "claude-v2": "claude-2"
@@ -54,8 +49,6 @@ class You(AsyncGeneratorProvider, ProviderModelMixin):
         chat_mode: str = "default",
         **kwargs,
     ) -> AsyncResult:
-        if not has_curl_cffi:
-            raise MissingRequirementsError('Install "curl_cffi" package')
         if image is not None:
             chat_mode = "agent"
         elif not model or model == cls.default_model:
@@ -66,18 +59,11 @@ class You(AsyncGeneratorProvider, ProviderModelMixin):
             chat_mode = "custom"
             model = cls.get_model(model)
         async with StreamSession(
-            proxy=proxy,
+            proxies={"all": proxy},
             impersonate="chrome"
         ) as session:
             cookies = await cls.get_cookies(session) if chat_mode != "default" else None
             upload = json.dumps([await cls.upload_file(session, cookies, to_bytes(image), image_name)]) if image else ""
-            #questions = [message["content"] for message in messages if message["role"] == "user"]
-            # chat = [
-            #     {"question": questions[idx-1], "answer": message["content"]}
-            #     for idx, message in enumerate(messages)
-            #     if message["role"] == "assistant"
-            #     and idx < len(questions)
-            # ]
             headers = {
                 "Accept": "text/event-stream",
                 "Referer": f"{cls.url}/search?fromSearchBar=true&tbm=youchat",
@@ -183,6 +169,7 @@ class You(AsyncGeneratorProvider, ProviderModelMixin):
                 "dfp_telemetry_id": await get_dfp_telemetry_id(),
                 "email": f"{user_uuid}@gmail.com",
                 "password": f"{user_uuid}#{user_uuid}",
+                "dfp_telemetry_id": f"{uuid.uuid4()}",
                 "session_duration_minutes": 129600
             }
         ) as response:
@@ -193,4 +180,4 @@ class You(AsyncGeneratorProvider, ProviderModelMixin):
             'stytch_session_jwt': session["session_jwt"],
             'ydc_stytch_session': session["session_token"],
             'ydc_stytch_session_jwt': session["session_jwt"],
-        }
\ No newline at end of file
+        }
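With this change the provider stops probing curl_cffi itself: FormData is taken from the shared g4f.requests module, and the proxy is handed to StreamSession as a proxies mapping. A minimal sketch of the resulting setup, assuming g4f.requests keeps re-exporting the curl_cffi FormData; the helper name below is illustrative only:

    from typing import Optional

    from ..requests import StreamSession, FormData, raise_for_status

    def open_you_session(proxy: Optional[str]) -> StreamSession:
        # The proxies mapping replaces the former proxy keyword argument;
        # curl_cffi availability is assumed to be checked inside g4f.requests,
        # so the provider no longer raises MissingRequirementsError itself.
        return StreamSession(proxies={"all": proxy}, impersonate="chrome")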
diff --git a/g4f/Provider/openai/har_file.py b/g4f/Provider/openai/har_file.py
index 8936c131..52cd8260 100644
--- a/g4f/Provider/openai/har_file.py
+++ b/g4f/Provider/openai/har_file.py
@@ -54,7 +54,10 @@ def readHAR():
                 if arkPreURL in v['request']['url']:
                     chatArks.append(parseHAREntry(v))
                 elif v['request']['url'] == sessionUrl:
-                    accessToken = json.loads(v["response"]["content"]["text"]).get("accessToken")
+                    try:
+                        accessToken = json.loads(v["response"]["content"]["text"]).get("accessToken")
+                    except KeyError:
+                        continue
                     cookies = {c['name']: c['value'] for c in v['request']['cookies']}
     if not accessToken:
         raise NoValidHarFileError("No accessToken found in .har files")
@@ -127,4 +130,4 @@ async def getArkoseAndAccessToken(proxy: str):
     if chatArk is None:
         return None, accessToken, cookies
     newReq = genArkReq(chatArk)
-    return await sendRequest(newReq, proxy), accessToken, cookies
\ No newline at end of file
+    return await sendRequest(newReq, proxy), accessToken, cookies
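The readHAR() hunk above guards against HAR entries that were recorded without a response body. A self-contained sketch of the same guarded lookup, assuming an entry shaped like the HAR 1.2 format:

    import json
    from typing import Optional

    def extract_access_token(entry: dict) -> Optional[str]:
        # Mirrors the try/except added in readHAR(): if "response"/"content"/
        # "text" is missing from the captured entry, skip it instead of
        # letting the KeyError escape.
        try:
            return json.loads(entry["response"]["content"]["text"]).get("accessToken")
        except KeyError:
            return None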
diff --git a/g4f/Provider/you/har_file.py b/g4f/Provider/you/har_file.py
index 59e1ff94..281f37e2 100644
--- a/g4f/Provider/you/har_file.py
+++ b/g4f/Provider/you/har_file.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import json
 import os
 import random
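The only change to g4f/Provider/you/har_file.py is the future import, which postpones evaluation of annotations. A minimal illustration of what that enables, assuming the module relies on newer annotation syntax while still supporting older Python versions:

    from __future__ import annotations

    # With postponed evaluation, PEP 604/585 style hints stay as strings at
    # runtime, so they parse even on interpreters that predate the syntax.
    def first_entry(entries: list[dict] | None) -> dict | None:
        return entries[0] if entries else None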