author    abc <98614666+xtekky@users.noreply.github.com>  2024-04-25 01:06:11 +0200
committer abc <98614666+xtekky@users.noreply.github.com>  2024-04-25 01:06:11 +0200
commit    2b271013fb0d8e0bd15575b1d0a3aedee5a46f33 (patch)
tree      14ecf451a69336e24b56824ad9fc34aa4633ecd3
parent    Merge pull request #1882 from hlohaus/css (diff)
Diffstat:
 -rw-r--r--  README.md                        |   7 +-
 -rw-r--r--  etc/examples/image_chat_reka.py  |  27 +
 -rw-r--r--  g4f/Provider/Reka.py             | 148 +
 -rw-r--r--  g4f/Provider/__init__.py         |   1 +
 -rw-r--r--  g4f/models.py                    |  16 +
 5 files changed, 197 insertions(+), 2 deletions(-)
diff --git a/README.md b/README.md
index 770abe9b..12792eaf 100644
--- a/README.md
+++ b/README.md
@@ -304,8 +304,11 @@ While we wait for gpt-5, here is a list of new models that are at least better t
| ------ | ------- | ------ | ------ |
| [mixtral-8x22b](https://huggingface.co/mistral-community/Mixtral-8x22B-v0.1) | `g4f.Provider.DeepInfra` | 176B / 44b active | gpt-3.5-turbo |
| [dbrx-instruct](https://www.databricks.com/blog/introducing-dbrx-new-state-art-open-llm) | `g4f.Provider.DeepInfra` | 132B / 36B active| gpt-3.5-turbo |
-| [command-r+](https://txt.cohere.com/command-r-plus-microsoft-azure/) | `g4f.Provider.HuggingChat` | 104B | gpt-4-0613 |
-
+| [command-r+](https://txt.cohere.com/command-r-plus-microsoft-azure/) | `g4f.Provider.HuggingChat` | 104B | gpt-4-0314 |
+| [reka-core](https://chat.reka.ai/) | `g4f.Provider.Reka` | 104B | gpt-4-vision |
+| [claude-3-opus](https://anthropic.com/) | `g4f.Provider.You` | ?B | gpt-4-0125-preview |
+| [claude-3-sonnet](https://anthropic.com/) | `g4f.Provider.You` | ?B | gpt-4-0314 |
+| [llama-3-70b](https://meta.ai/) | `g4f.Provider.Llama` or `DeepInfra` | ?B | gpt-4-0314 |
### GPT-3.5
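
The models listed above can be requested by name through the g4f client. A minimal sketch, assuming the identifiers and provider routing shown in the table (availability of these free backends changes frequently):

```python
# Minimal sketch: pick one of the models from the table above by name.
# "claude-3-opus" and its routing via g4f.Provider.You are taken from the table.
from g4f.client import Client

client = Client()
response = client.chat.completions.create(
    model="claude-3-opus",
    messages=[{"role": "user", "content": "Explain mixture-of-experts in one sentence."}],
)
print(response.choices[0].message.content)
```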
diff --git a/etc/examples/image_chat_reka.py b/etc/examples/image_chat_reka.py
new file mode 100644
index 00000000..954960db
--- /dev/null
+++ b/etc/examples/image_chat_reka.py
@@ -0,0 +1,27 @@
+# Image Chat with Reka
+# NOTE: you must be logged in to chat.reka.ai (valid browser cookies are required)
+# Download an image and save it as test.png in the same folder as this script
+
+from g4f.client import Client
+from g4f.Provider import Reka
+
+client = Client(
+    provider = Reka # optional: "reka-core" already maps to this provider
+)
+
+completion = client.chat.completions.create(
+    model = "reka-core",
+    messages = [
+        {
+            "role": "user",
+            "content": "What can you see in the image?"
+        }
+    ],
+    stream = True,
+    image = open("test.png", "rb") # pass an open file object ("rb"); do not call .read() on it
+)
+
+for message in completion:
+    print(message.choices[0].delta.content or "")
+
+    # >>> In the image there is ...
\ No newline at end of file
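
A variant of the example above that joins the streamed chunks into one string; it makes the same assumptions (valid chat.reka.ai cookies and a local test.png):

```python
# Same request as the script above, but the streamed deltas are collected into one reply.
from g4f.client import Client
from g4f.Provider import Reka

client = Client(provider=Reka)
chunks = client.chat.completions.create(
    model="reka-core",
    messages=[{"role": "user", "content": "Describe this image in two sentences."}],
    stream=True,
    image=open("test.png", "rb"),  # file object, as the provider expects
)
print("".join(chunk.choices[0].delta.content or "" for chunk in chunks))
```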
diff --git a/g4f/Provider/Reka.py b/g4f/Provider/Reka.py
new file mode 100644
index 00000000..9a6e479d
--- /dev/null
+++ b/g4f/Provider/Reka.py
@@ -0,0 +1,148 @@
+from __future__ import annotations
+
+import os, requests, time, json
+from ..typing import CreateResult, Messages, ImageType
+from .base_provider import AbstractProvider
+from ..cookies import get_cookies
+
+class Reka(AbstractProvider):
+    url = "https://chat.reka.ai/"
+    working = True
+    supports_stream = True
+    cookies = {}
+
+    @classmethod
+    def create_completion(
+        cls,
+        model: str,
+        messages: Messages,
+        stream: bool,
+        proxy: str = None,
+        timeout: int = 180,
+        bearer_auth: str = None,
+        image: ImageType = None, **kwargs) -> CreateResult:
+
+        cls.proxies = {"http": proxy, "https": proxy} if proxy else None # requests expects a scheme -> URL mapping
+
+        if not bearer_auth:
+            cls.cookies = get_cookies("chat.reka.ai")
+
+            if not cls.cookies:
+                raise ValueError("No cookies found for chat.reka.ai")
+
+            elif "appSession" not in cls.cookies:
+                raise ValueError("No appSession found in cookies for chat.reka.ai, log in or provide bearer_auth")
+
+            bearer_auth = cls.get_access_token(cls)
+
+        conversation = []
+        for message in messages:
+            conversation.append({
+                "type": "human",
+                "text": message["content"],
+            })
+
+        if image:
+            image_url = cls.upload_image(cls, bearer_auth, image)
+            conversation[-1]["image_url"] = image_url
+            conversation[-1]["media_type"] = "image"
+
+        headers = {
+            'accept': '*/*',
+            'accept-language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
+            'authorization': f'Bearer {bearer_auth}',
+            'cache-control': 'no-cache',
+            'content-type': 'application/json',
+            'origin': 'https://chat.reka.ai',
+            'pragma': 'no-cache',
+            'priority': 'u=1, i',
+            'sec-ch-ua': '"Chromium";v="124", "Google Chrome";v="124", "Not-A.Brand";v="99"',
+            'sec-ch-ua-mobile': '?0',
+            'sec-ch-ua-platform': '"macOS"',
+            'sec-fetch-dest': 'empty',
+            'sec-fetch-mode': 'cors',
+            'sec-fetch-site': 'same-origin',
+            'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/124.0.0.0 Safari/537.36',
+        }
+
+        json_data = {
+            'conversation_history': conversation,
+            'stream': True,
+            'use_search_engine': False,
+            'use_code_interpreter': False,
+            'model_name': 'reka-core',
+            'random_seed': int(time.time() * 1000),
+        }
+
+        tokens = ''
+
+        response = requests.post('https://chat.reka.ai/api/chat',
+            cookies=cls.cookies, headers=headers, json=json_data, proxies=cls.proxies, stream=True)
+
+        for completion in response.iter_lines():
+            if b'data' in completion:
+                token_data = json.loads(completion.decode('utf-8')[5:])['text']
+
+                yield token_data[len(tokens):] # the stream resends the accumulated text; emit only the new part
+
+                tokens = token_data
+
+    def upload_image(cls, access_token, image: ImageType) -> str:
+        boundary_token = os.urandom(8).hex()
+
+        headers = {
+            'accept': '*/*',
+            'accept-language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
+            'cache-control': 'no-cache',
+            'authorization': f'Bearer {access_token}',
+            'content-type': f'multipart/form-data; boundary=----WebKitFormBoundary{boundary_token}',
+            'origin': 'https://chat.reka.ai',
+            'pragma': 'no-cache',
+            'priority': 'u=1, i',
+            'referer': 'https://chat.reka.ai/chat/hPReZExtDOPvUfF8vCPC',
+            'sec-ch-ua': '"Chromium";v="124", "Google Chrome";v="124", "Not-A.Brand";v="99"',
+            'sec-ch-ua-mobile': '?0',
+            'sec-ch-ua-platform': '"macOS"',
+            'sec-fetch-dest': 'empty',
+            'sec-fetch-mode': 'cors',
+            'sec-fetch-site': 'same-origin',
+            'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/124.0.0.0 Safari/537.36',
+        }
+
+        image_data = image.read()
+
+        boundary = f'----WebKitFormBoundary{boundary_token}'
+        data = f'--{boundary}\r\nContent-Disposition: form-data; name="image"; filename="image.png"\r\nContent-Type: image/png\r\n\r\n'
+        data += image_data.decode('latin-1')
+        data += f'\r\n--{boundary}--\r\n'
+
+        response = requests.post('https://chat.reka.ai/api/upload-image',
+            cookies=Reka.cookies, headers=headers, proxies=cls.proxies, data=data.encode('latin-1'))
+
+        return response.json()['media_url']
+
+    def get_access_token(cls):
+        headers = {
+            'accept': '*/*',
+            'accept-language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
+            'cache-control': 'no-cache',
+            'pragma': 'no-cache',
+            'priority': 'u=1, i',
+            'referer': 'https://chat.reka.ai/chat',
+            'sec-ch-ua': '"Chromium";v="124", "Google Chrome";v="124", "Not-A.Brand";v="99"',
+            'sec-ch-ua-mobile': '?0',
+            'sec-ch-ua-platform': '"macOS"',
+            'sec-fetch-dest': 'empty',
+            'sec-fetch-mode': 'cors',
+            'sec-fetch-site': 'same-origin',
+            'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/124.0.0.0 Safari/537.36',
+        }
+
+        try:
+            response = requests.get('https://chat.reka.ai/bff/auth/access_token',
+                cookies=cls.cookies, headers=headers)
+
+            return response.json()['accessToken']
+
+        except Exception as e:
+            raise ValueError(f"Failed to get access token: {e}. Refresh your cookies or log in to chat.reka.ai")
\ No newline at end of file
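
The provider can also be driven directly through its `create_completion` classmethod, passing a pre-fetched token via `bearer_auth` instead of relying on browser cookies. A sketch under that assumption (the token value is a placeholder, and whether the endpoint accepts a bearer token without the appSession cookie has to be verified against a live session):

```python
# Hypothetical direct use of the provider class; "YOUR_ACCESS_TOKEN" is a placeholder.
from g4f.Provider import Reka

stream = Reka.create_completion(
    model="reka-core",
    messages=[{"role": "user", "content": "Hello, Reka!"}],
    stream=True,
    bearer_auth="YOUR_ACCESS_TOKEN",  # placeholder; obtain a real token from a logged-in session
)
for token in stream:
    print(token, end="")
```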
diff --git a/g4f/Provider/__init__.py b/g4f/Provider/__init__.py
index d2d9bfda..4c0e3f32 100644
--- a/g4f/Provider/__init__.py
+++ b/g4f/Provider/__init__.py
@@ -50,6 +50,7 @@ from .ReplicateImage import ReplicateImage
from .Vercel import Vercel
from .WhiteRabbitNeo import WhiteRabbitNeo
from .You import You
+from .Reka import Reka
import sys
diff --git a/g4f/models.py b/g4f/models.py
index acd61846..e3da0363 100644
--- a/g4f/models.py
+++ b/g4f/models.py
@@ -29,6 +29,7 @@ from .Provider import (
    Pi,
    Vercel,
    You,
+    Reka
)
@@ -306,6 +307,12 @@ blackbox = Model(
    best_provider = Blackbox
)
+reka_core = Model(
+    name = 'reka-core',
+    base_provider = 'Reka AI',
+    best_provider = Reka
+)
+
class ModelUtils:
    """
    Utility class for mapping string identifiers to Model instances.
@@ -333,8 +340,12 @@ class ModelUtils:
        'llama2-7b' : llama2_7b,
        'llama2-13b': llama2_13b,
        'llama2-70b': llama2_70b,
+
+        'llama3-8b' : llama3_8b_instruct, # alias
+        'llama3-70b': llama3_70b_instruct, # alias
        'llama3-8b-instruct' : llama3_8b_instruct,
        'llama3-70b-instruct': llama3_70b_instruct,
+
        'codellama-34b-instruct': codellama_34b_instruct,
        'codellama-70b-instruct': codellama_70b_instruct,
@@ -359,6 +370,11 @@
        'claude-3-opus': claude_3_opus,
        'claude-3-sonnet': claude_3_sonnet,
+        # reka core
+        'reka-core': reka_core,
+        'reka': reka_core,
+        'Reka Core': reka_core,
+
        # other
        'blackbox': blackbox,
        'command-r+': command_r_plus,
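
A quick way to check the new aliases is to resolve them through the registry, assuming the string-to-Model mapping shown in the hunk is `ModelUtils.convert`:

```python
# Resolve the aliases added above and print where each one routes.
from g4f.models import ModelUtils

for name in ("reka-core", "reka", "Reka Core", "llama3-70b"):
    model = ModelUtils.convert[name]
    print(f"{name!r} -> {model.name} (base provider: {model.base_provider})")
```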