From 8ec942040ceeb1dd9c7aab3cc52cd8223066a9ee Mon Sep 17 00:00:00 2001
From: abc <98614666+xtekky@users.noreply.github.com>
Date: Tue, 16 Apr 2024 11:28:54 +0100
Subject: add cohere provider.

---
 g4f/Provider/Cohere.py   | 106 +++++++++++++++++++++++++++++++++++++++++++++++
 g4f/Provider/__init__.py |   1 +
 g4f/models.py            |   3 +-
 3 files changed, 109 insertions(+), 1 deletion(-)
 create mode 100644 g4f/Provider/Cohere.py

diff --git a/g4f/Provider/Cohere.py b/g4f/Provider/Cohere.py
new file mode 100644
index 00000000..4f9fd30a
--- /dev/null
+++ b/g4f/Provider/Cohere.py
@@ -0,0 +1,106 @@
+from __future__ import annotations
+
+import json, random, requests, threading
+from aiohttp import ClientSession
+
+from ..typing import CreateResult, Messages
+from .base_provider import AbstractProvider
+from .helper import format_prompt
+
+class Cohere(AbstractProvider):
+    url = "https://cohereforai-c4ai-command-r-plus.hf.space"
+    working = True
+    supports_gpt_35_turbo = False
+    supports_gpt_4 = False
+    supports_stream = True
+
+    @staticmethod
+    def create_completion(
+        model: str,
+        messages: Messages,
+        stream: bool,
+        proxy: str = None,
+        max_retries: int = 6,
+        **kwargs
+    ) -> CreateResult:
+
+        prompt = format_prompt(messages)
+
+        headers = {
+            'accept': 'text/event-stream',
+            'accept-language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
+            'cache-control': 'no-cache',
+            'pragma': 'no-cache',
+            'referer': 'https://cohereforai-c4ai-command-r-plus.hf.space/?__theme=light',
+            'sec-ch-ua': '"Google Chrome";v="123", "Not:A-Brand";v="8", "Chromium";v="123"',
+            'sec-ch-ua-mobile': '?0',
+            'sec-ch-ua-platform': '"macOS"',
+            'sec-fetch-dest': 'empty',
+            'sec-fetch-mode': 'cors',
+            'sec-fetch-site': 'same-origin',
+            'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/123.0.0.0 Safari/537.36',
+        }
+
+        session_hash = ''.join(random.choices("abcdefghijklmnopqrstuvwxyz0123456789", k=11))
+
+        params = {
+            'fn_index': '1',
+            'session_hash': session_hash,
+        }
+
+        response = requests.get(
+            'https://cohereforai-c4ai-command-r-plus.hf.space/queue/join',
+            params=params,
+            headers=headers,
+            stream=True
+        )
+
+        completion = ''
+
+        for line in response.iter_lines():
+            if line:
+                json_data = json.loads(line[6:])
+
+                if b"send_data" in (line):
+                    event_id = json_data["event_id"]
+
+                    threading.Thread(target=send_data, args=[session_hash, event_id, prompt]).start()
+
+                if b"process_generating" in line or b"process_completed" in line:
+                    token = (json_data['output']['data'][0][0][1])
+
+                    yield (token.replace(completion, ""))
+                    completion = token
+
+def send_data(session_hash, event_id, prompt):
+    headers = {
+        'accept': '*/*',
+        'accept-language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
+        'cache-control': 'no-cache',
+        'content-type': 'application/json',
+        'origin': 'https://cohereforai-c4ai-command-r-plus.hf.space',
+        'pragma': 'no-cache',
+        'referer': 'https://cohereforai-c4ai-command-r-plus.hf.space/?__theme=light',
+        'sec-ch-ua': '"Google Chrome";v="123", "Not:A-Brand";v="8", "Chromium";v="123"',
+        'sec-ch-ua-mobile': '?0',
+        'sec-ch-ua-platform': '"macOS"',
+        'sec-fetch-dest': 'empty',
+        'sec-fetch-mode': 'cors',
+        'sec-fetch-site': 'same-origin',
+        'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/123.0.0.0 Safari/537.36',
+    }
+
+    json_data = {
+        'data': [
+            prompt,
+            '',
+            [],
+        ],
+        'event_data': None,
+        'fn_index': 1,
+        'session_hash': session_hash,
+        'event_id': event_id
+    }
+
+    requests.post('https://cohereforai-c4ai-command-r-plus.hf.space/queue/data',
+        json = json_data, headers=headers)
\ No newline at end of file
diff --git a/g4f/Provider/__init__.py b/g4f/Provider/__init__.py
index b818a752..ea64f80a 100644
--- a/g4f/Provider/__init__.py
+++ b/g4f/Provider/__init__.py
@@ -46,6 +46,7 @@ from .ReplicateImage import ReplicateImage
 from .Vercel import Vercel
 from .WhiteRabbitNeo import WhiteRabbitNeo
 from .You import You
+from .Cohere import Cohere
 
 import sys
 
diff --git a/g4f/models.py b/g4f/models.py
index ff8b3a9c..fe99958c 100644
--- a/g4f/models.py
+++ b/g4f/models.py
@@ -20,6 +20,7 @@ from .Provider import (
     Vercel,
     Gemini,
     Koala,
+    Cohere,
     Bing,
     You,
     Pi,
@@ -275,7 +276,7 @@ dbrx_instruct = Model(
 command_r_plus = Model(
     name = 'CohereForAI/c4ai-command-r-plus',
     base_provider = 'mistral',
-    best_provider = HuggingChat
+    best_provider = RetryProvider([HuggingChat, Cohere])
 )
 
 class ModelUtils:
-- 
cgit v1.2.3
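
Note (not part of the commit): a minimal usage sketch of the provider added by this patch, going through g4f's ChatCompletion.create entry point. The model alias "command-r+" is an assumption; because the provider is forced explicitly, Cohere.create_completion builds its prompt from the messages and effectively ignores the model string. Without an explicit provider, the RetryProvider registered for command_r_plus in g4f/models.py would fall back between HuggingChat and Cohere.

    import g4f
    from g4f.Provider import Cohere  # exported via the new line in g4f/Provider/__init__.py

    # Stream the reply chunk by chunk; the provider sets supports_stream = True
    # and yields incremental tokens from the Gradio queue of the HF Space.
    for chunk in g4f.ChatCompletion.create(
        model="command-r+",   # assumed alias, ignored when a provider is forced
        provider=Cohere,      # force the provider introduced by this patch
        messages=[{"role": "user", "content": "Hello, who are you?"}],
        stream=True,
    ):
        print(chunk, end="", flush=True)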