author | kqlio67 <kqlio67@users.noreply.github.com> | 2024-11-06 20:53:18 +0100 |
---|---|---|
committer | kqlio67 <kqlio67@users.noreply.github.com> | 2024-11-06 20:53:18 +0100 |
commit | 18b309257c56b73f680debfd8eec1b12231c2698 (patch) | |
tree | f44c02b56916547e55f5ab5ea0f61bba27d44b55 /g4f/Provider/nexra/NexraGeminiPro.py | |
parent | Update (g4f/Provider/Allyfy.py) (diff) | |
Diffstat
-rw-r--r-- | g4f/Provider/nexra/NexraGeminiPro.py | 86 |
1 file changed, 0 insertions, 86 deletions
diff --git a/g4f/Provider/nexra/NexraGeminiPro.py b/g4f/Provider/nexra/NexraGeminiPro.py
deleted file mode 100644
index e4e6a8ec..00000000
--- a/g4f/Provider/nexra/NexraGeminiPro.py
+++ /dev/null
@@ -1,86 +0,0 @@
-from __future__ import annotations
-
-import json
-import requests
-
-from ...typing import CreateResult, Messages
-from ..base_provider import ProviderModelMixin, AbstractProvider
-from ..helper import format_prompt
-
-class NexraGeminiPro(AbstractProvider, ProviderModelMixin):
-    label = "Nexra Gemini PRO"
-    url = "https://nexra.aryahcr.cc/documentation/gemini-pro/en"
-    api_endpoint = "https://nexra.aryahcr.cc/api/chat/complements"
-    working = True
-    supports_stream = True
-
-    default_model = 'gemini-pro'
-    models = [default_model]
-
-    @classmethod
-    def get_model(cls, model: str) -> str:
-        return cls.default_model
-
-    @classmethod
-    def create_completion(
-        cls,
-        model: str,
-        messages: Messages,
-        stream: bool,
-        proxy: str = None,
-        markdown: bool = False,
-        **kwargs
-    ) -> CreateResult:
-        model = cls.get_model(model)
-
-        headers = {
-            'Content-Type': 'application/json'
-        }
-
-        data = {
-            "messages": [
-                {
-                    "role": "user",
-                    "content": format_prompt(messages)
-                }
-            ],
-            "stream": stream,
-            "markdown": markdown,
-            "model": model
-        }
-
-        response = requests.post(cls.api_endpoint, headers=headers, json=data, stream=stream)
-
-        if stream:
-            return cls.process_streaming_response(response)
-        else:
-            return cls.process_non_streaming_response(response)
-
-    @classmethod
-    def process_non_streaming_response(cls, response):
-        if response.status_code == 200:
-            try:
-                content = response.text.lstrip('')
-                data = json.loads(content)
-                return data.get('message', '')
-            except json.JSONDecodeError:
-                return "Error: Unable to decode JSON response"
-        else:
-            return f"Error: {response.status_code}"
-
-    @classmethod
-    def process_streaming_response(cls, response):
-        full_message = ""
-        for line in response.iter_lines(decode_unicode=True):
-            if line:
-                try:
-                    line = line.lstrip('')
-                    data = json.loads(line)
-                    if data.get('finish'):
-                        break
-                    message = data.get('message', '')
-                    if message:
-                        yield message[len(full_message):]
-                        full_message = message
-                except json.JSONDecodeError:
-                    pass
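For context on what the deleted `process_streaming_response` did: the Nexra endpoint reports the *cumulative* message on each streamed line, so the handler tracked the full message seen so far and yielded only the newly appended suffix. The following is a minimal standalone sketch of that delta logic, not code from the repository; the helper name `iter_deltas` and the sample payloads are hypothetical, for illustration only.

```python
import json

def iter_deltas(lines):
    """Yield only the text added since the previous cumulative message.

    Sketch of the delta logic from the removed process_streaming_response
    (assumes each line is a JSON object with 'message' / 'finish' fields).
    """
    full_message = ""
    for raw in lines:
        if not raw:
            continue
        try:
            data = json.loads(raw)
        except json.JSONDecodeError:
            continue  # skip malformed lines, as the original handler did
        if data.get("finish"):
            break
        message = data.get("message", "")
        if message:
            # 'message' is cumulative, so emit only the new suffix
            yield message[len(full_message):]
            full_message = message

# Hypothetical stream payloads, for illustration only
sample = ['{"message": "Hel"}', '{"message": "Hello"}', '{"finish": true}']
print(list(iter_deltas(sample)))  # ['Hel', 'lo']
```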