author      H Lohaus <hlohaus@users.noreply.github.com>    2024-02-25 21:34:21 +0100
committer   GitHub <noreply@github.com>                    2024-02-25 21:34:21 +0100
commit      35c1da538265371505364136cae1f34219b4a4d3
tree        7049778e69e65ee951ce22c65ff860f7897f71bd  /g4f/Provider
parent      Merge pull request #1625 from hlohaus/flow
parent      Improve mobile css styles
Diffstat
-rw-r--r--  g4f/Provider/GeminiPro.py | 28
-rw-r--r--  g4f/Provider/Liaobots.py  |  2
2 files changed, 19 insertions, 11 deletions
diff --git a/g4f/Provider/GeminiPro.py b/g4f/Provider/GeminiPro.py
index e1738dc8..87ded3ac 100644
--- a/g4f/Provider/GeminiPro.py
+++ b/g4f/Provider/GeminiPro.py
@@ -13,6 +13,7 @@ class GeminiPro(AsyncGeneratorProvider, ProviderModelMixin):
     url = "https://ai.google.dev"
     working = True
     supports_message_history = True
+    needs_auth = True
     default_model = "gemini-pro"
     models = ["gemini-pro", "gemini-pro-vision"]
 
@@ -24,19 +25,27 @@ class GeminiPro(AsyncGeneratorProvider, ProviderModelMixin):
         stream: bool = False,
         proxy: str = None,
         api_key: str = None,
+        api_base: str = None,
         image: ImageType = None,
         **kwargs
     ) -> AsyncResult:
         model = "gemini-pro-vision" if not model and image else model
         model = cls.get_model(model)
+
         if not api_key:
-            raise MissingAuthError('Missing "api_key" for auth')
-        headers = {
-            "Content-Type": "application/json",
-        }
+            raise MissingAuthError('Missing "api_key"')
+        if not api_base:
+            api_base = f"https://generativelanguage.googleapis.com/v1beta"
+
+        method = "streamGenerateContent" if stream else "generateContent"
+        url = f"{api_base.rstrip('/')}/models/{model}:{method}"
+        headers = None
+        if api_base:
+            headers = {f"Authorization": "Bearer {api_key}"}
+        else:
+            url += f"?key={api_key}"
+
         async with ClientSession(headers=headers) as session:
-            method = "streamGenerateContent" if stream else "generateContent"
-            url = f"https://generativelanguage.googleapis.com/v1beta/models/{model}:{method}"
             contents = [
                 {
                     "role": "model" if message["role"] == "assistant" else message["role"],
@@ -62,7 +71,7 @@ class GeminiPro(AsyncGeneratorProvider, ProviderModelMixin):
                     "topK": kwargs.get("top_k"),
                 }
             }
-            async with session.post(url, params={"key": api_key}, json=data, proxy=proxy) as response:
+            async with session.post(url, json=data, proxy=proxy) as response:
                 if not response.ok:
                     data = await response.json()
                     raise RuntimeError(data[0]["error"]["message"])
@@ -73,12 +82,11 @@ class GeminiPro(AsyncGeneratorProvider, ProviderModelMixin):
                             lines = [b"{\n"]
                         elif chunk == b",\r\n" or chunk == b"]":
                             try:
-                                data = b"".join(lines)
-                                data = json.loads(data)
+                                data = json.loads(b"".join(lines))
                                 yield data["candidates"][0]["content"]["parts"][0]["text"]
                             except:
                                 data = data.decode() if isinstance(data, bytes) else data
-                                raise RuntimeError(f"Read text failed. data: {data}")
+                                raise RuntimeError(f"Read chunk failed: {data}")
                             lines = []
                         else:
                             lines.append(chunk)
diff --git a/g4f/Provider/Liaobots.py b/g4f/Provider/Liaobots.py
index e93642ba..54bf7f2e 100644
--- a/g4f/Provider/Liaobots.py
+++ b/g4f/Provider/Liaobots.py
@@ -78,7 +78,7 @@ class Liaobots(AsyncGeneratorProvider, ProviderModelMixin):
     supports_gpt_35_turbo = True
     supports_gpt_4 = True
     default_model = "gpt-3.5-turbo"
-    models = [m for m in models]
+    models = list(models)
     model_aliases = {
         "claude-v2": "claude-2"
     }
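
For context, a minimal usage sketch of the changed GeminiPro provider follows. It is an illustration, not part of the commit: it assumes the AsyncGeneratorProvider interface at this revision, where create_async_generator accepts model and messages plus the keyword arguments visible in the diff (stream, proxy, api_key, and the new api_base); the placeholder key and proxy URL are invented for the example.

# Hedged usage sketch (not part of the commit) exercising the api_key and
# api_base keywords that this diff adds to GeminiPro.
import asyncio

from g4f.Provider import GeminiPro


async def main() -> None:
    messages = [{"role": "user", "content": "Say hello."}]

    # api_key is required (the provider now declares needs_auth = True and
    # raises MissingAuthError without it); api_base is optional and defaults
    # to https://generativelanguage.googleapis.com/v1beta.
    async for chunk in GeminiPro.create_async_generator(
        model="gemini-pro",
        messages=messages,
        stream=True,
        api_key="YOUR_API_KEY",  # placeholder
        # api_base="https://my-gemini-proxy.example/v1beta",  # hypothetical override
    ):
        print(chunk, end="", flush=True)


asyncio.run(main())

As the diff shows, the request URL is now built from api_base, with the key sent either as an Authorization header or as a ?key= query parameter.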