author | xtekky <98614666+xtekky@users.noreply.github.com> | 2023-08-14 00:36:43 +0200 |
---|---|---|
committer | GitHub <noreply@github.com> | 2023-08-14 00:36:43 +0200 |
commit | fd86811ef706907abd5dfb30b9c496b0e8755a92 (patch) | |
tree | 0bcc7891e27e3a2e77825e98983aeee349e30d34 /g4f/Provider | |
parent | Merge pull request #793 from Leechist/main (diff) | |
parent | Add Raycast Provider (diff) | |
download | gpt4free-fd86811ef706907abd5dfb30b9c496b0e8755a92.tar.gz (also available as .tar, .tar.bz2, .tar.lz, .tar.xz, .tar.zst, .zip) |
Diffstat (limited to 'g4f/Provider')
-rw-r--r-- | g4f/Provider/Providers/Raycast.py | 50 |
-rw-r--r-- | g4f/Provider/__init__.py | 1 |
2 files changed, 51 insertions, 0 deletions
diff --git a/g4f/Provider/Providers/Raycast.py b/g4f/Provider/Providers/Raycast.py
new file mode 100644
index 00000000..1704b4e8
--- /dev/null
+++ b/g4f/Provider/Providers/Raycast.py
@@ -0,0 +1,50 @@
+import json
+import os
+
+import requests
+from g4f.typing import get_type_hints
+
+url = "https://backend.raycast.com/api/v1/ai/chat_completions"
+model = ['gpt-3.5-turbo', 'gpt-4']
+supports_stream = True
+needs_auth = True
+working = True
+
+
+def _create_completion(model: str, messages: list, stream: bool, **kwargs):
+    auth = kwargs.get('auth')
+    headers = {
+        'Accept': 'application/json',
+        'Accept-Language': 'en-US,en;q=0.9',
+        'Authorization': f'Bearer {auth}',
+        'Content-Type': 'application/json',
+        'User-Agent': 'Raycast/0 CFNetwork/1410.0.3 Darwin/22.6.0',
+    }
+    parsed_messages = []
+    for message in messages:
+        parsed_messages.append({
+            'author': message['role'],
+            'content': {'text': message['content']}
+        })
+    data = {
+        "debug": False,
+        "locale": "en-CN",
+        "messages": parsed_messages,
+        "model": model,
+        "provider": "openai",
+        "source": "ai_chat",
+        "system_instruction": "markdown",
+        "temperature": 0.5
+    }
+    response = requests.post(url, headers=headers, json=data, stream=True)
+    for token in response.iter_lines():
+        if b'data: ' not in token:
+            continue
+        completion_chunk = json.loads(token.decode().replace('data: ', ''))
+        token = completion_chunk['text']
+        if token != None:
+            yield token
+
+
+params = f'g4f.Providers.{os.path.basename(__file__)[:-3]} supports: ' + \
+    '(%s)' % ', '.join([f"{name}: {get_type_hints(_create_completion)[name].__name__}" for name in _create_completion.__code__.co_varnames[:_create_completion.__code__.co_argcount]])
diff --git a/g4f/Provider/__init__.py b/g4f/Provider/__init__.py
index ee434400..9d0442d0
--- a/g4f/Provider/__init__.py
+++ b/g4f/Provider/__init__.py
@@ -24,6 +24,7 @@ from .Providers import (
     Wewordle,
     ChatgptAi,
     opchatgpts,
+    Raycast,
 )
 
 Palm = Bard
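For context, a minimal usage sketch for the new provider follows. It is an illustration only, not part of the commit: it assumes that `g4f.ChatCompletion.create` is the entry point in this version of the library and that it forwards extra keyword arguments (here `auth`, since the provider declares `needs_auth = True`) through to `_create_completion`, and that `RAYCAST_TOKEN` is an environment variable you set yourself with a valid Raycast bearer token.

```python
# Hypothetical usage sketch for the Raycast provider added in this commit.
# Assumes g4f.ChatCompletion.create(...) forwards extra kwargs (auth) to the
# provider's _create_completion, and that RAYCAST_TOKEN holds a valid
# Raycast bearer token (the provider declares needs_auth = True).
import os

import g4f
from g4f.Provider import Raycast  # exposed by the __init__.py change above

response = g4f.ChatCompletion.create(
    model='gpt-3.5-turbo',            # or 'gpt-4', per the provider's model list
    provider=Raycast,
    messages=[{'role': 'user', 'content': 'Say hello in one sentence.'}],
    stream=True,                      # the provider yields tokens as they arrive
    auth=os.environ['RAYCAST_TOKEN'], # ends up in the Authorization header
)

for token in response:
    print(token, end='', flush=True)
```

Streaming works because `_create_completion` is a generator: it parses the `data:`-prefixed server-sent-event lines from Raycast's backend and yields the `text` field of each chunk.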