From 932d3a3ca092756cc3025e427fabf9ab674350fc Mon Sep 17 00:00:00 2001 From: Heiner Lohaus Date: Sat, 20 Apr 2024 10:43:53 +0200 Subject: Add MetaAI Provider and some small improvements --- g4f/Provider/BingCreateImages.py | 1 + g4f/Provider/Ecosia.py | 8 +- g4f/Provider/MetaAI.py | 199 +++++++++++++++++++++++++++++++ g4f/Provider/__init__.py | 1 + g4f/Provider/base_provider.py | 2 +- g4f/Provider/needs_auth/Groq.py | 2 +- g4f/Provider/needs_auth/OpenaiAccount.py | 1 - g4f/client/service.py | 1 + g4f/gui/client/static/css/style.css | 2 +- g4f/gui/client/static/js/chat.v1.js | 5 +- g4f/gui/server/api.py | 2 +- g4f/providers/types.py | 9 +- 12 files changed, 221 insertions(+), 12 deletions(-) create mode 100644 g4f/Provider/MetaAI.py diff --git a/g4f/Provider/BingCreateImages.py b/g4f/Provider/BingCreateImages.py index a7bd54e4..f29126b6 100644 --- a/g4f/Provider/BingCreateImages.py +++ b/g4f/Provider/BingCreateImages.py @@ -15,6 +15,7 @@ class BingCreateImages(AsyncGeneratorProvider, ProviderModelMixin): label = "Microsoft Designer" url = "https://www.bing.com/images/create" working = True + needs_auth = True def __init__(self, cookies: Cookies = None, proxy: str = None) -> None: self.cookies: Cookies = cookies diff --git a/g4f/Provider/Ecosia.py b/g4f/Provider/Ecosia.py index 1cae3560..231412aa 100644 --- a/g4f/Provider/Ecosia.py +++ b/g4f/Provider/Ecosia.py @@ -15,7 +15,8 @@ class Ecosia(AsyncGeneratorProvider, ProviderModelMixin): working = True supports_gpt_35_turbo = True default_model = "gpt-3.5-turbo-0125" - model_aliases = {"gpt-3.5-turbo": "gpt-3.5-turbo-0125"} + models = [default_model, "green"] + model_aliases = {"gpt-3.5-turbo": default_model} @classmethod async def create_async_generator( @@ -23,11 +24,10 @@ class Ecosia(AsyncGeneratorProvider, ProviderModelMixin): model: str, messages: Messages, connector: BaseConnector = None, - green: bool = False, proxy: str = None, **kwargs ) -> AsyncResult: - cls.get_model(model) + model = cls.get_model(model) 
headers = { "authority": "api.ecosia.org", "accept": "*/*", @@ -39,7 +39,7 @@ class Ecosia(AsyncGeneratorProvider, ProviderModelMixin): data = { "messages": base64.b64encode(json.dumps(messages).encode()).decode() } - api_url = f"https://api.ecosia.org/v2/chat/?sp={'eco' if green else 'productivity'}" + api_url = f"https://api.ecosia.org/v2/chat/?sp={'eco' if model == 'green' else 'productivity'}" async with session.post(api_url, json=data) as response: await raise_for_status(response) async for chunk in response.content.iter_any(): diff --git a/g4f/Provider/MetaAI.py b/g4f/Provider/MetaAI.py new file mode 100644 index 00000000..1831de63 --- /dev/null +++ b/g4f/Provider/MetaAI.py @@ -0,0 +1,199 @@ +import json +import uuid +import random +import time +import uuid +from typing import Dict, List + +from aiohttp import ClientSession, BaseConnector + +from ..typing import AsyncResult, Messages, Cookies +from ..requests import raise_for_status, DEFAULT_HEADERS +from ..image import ImageResponse +from .base_provider import AsyncGeneratorProvider +from .helper import format_prompt, get_connector, get_cookies + +class MetaAI(AsyncGeneratorProvider): + url = "https://www.meta.ai" + working = True + + def __init__(self, proxy: str = None, connector: BaseConnector = None): + self.session = ClientSession(connector=get_connector(connector, proxy), headers=DEFAULT_HEADERS) + + @classmethod + async def create_async_generator( + cls, + model: str, + messages: Messages, + proxy: str = None, + **kwargs + ) -> AsyncResult: + #cookies = get_cookies(".meta.ai", False, True) + async for chunk in cls(proxy).prompt(format_prompt(messages)): + yield chunk + + async def get_access_token(self, cookies: Cookies, birthday: str = "1999-01-01") -> str: + url = "https://www.meta.ai/api/graphql/" + + payload = { + "lsd": cookies["lsd"], + "fb_api_caller_class": "RelayModern", + "fb_api_req_friendly_name": "useAbraAcceptTOSForTempUserMutation", + "variables": json.dumps({ + "dob": birthday, + 
"icebreaker_type": "TEXT", + "__relay_internal__pv__WebPixelRatiorelayprovider": 1, + }), + "doc_id": "7604648749596940", + } + headers = { + "x-fb-friendly-name": "useAbraAcceptTOSForTempUserMutation", + "x-fb-lsd": cookies["lsd"], + "x-asbd-id": "129477", + "alt-used": "www.meta.ai", + "sec-fetch-site": "same-origin" + } + async with self.session.post(url, headers=headers, cookies=cookies, data=payload) as response: + await raise_for_status(response, "Fetch access_token failed") + auth_json = await response.json(content_type=None) + access_token = auth_json["data"]["xab_abra_accept_terms_of_service"]["new_temp_user_auth"]["access_token"] + return access_token + + async def prompt(self, message: str, cookies: Cookies = None) -> AsyncResult: + access_token = None + if cookies is None: + cookies = await self.get_cookies() + access_token = await self.get_access_token(cookies) + else: + cookies = await self.get_cookies(cookies) + + url = "https://graph.meta.ai/graphql?locale=user" + #url = "https://www.meta.ai/api/graphql/" + payload = { + "access_token": access_token, + #"lsd": cookies["lsd"], + "fb_api_caller_class": "RelayModern", + "fb_api_req_friendly_name": "useAbraSendMessageMutation", + "variables": json.dumps({ + "message": {"sensitive_string_value": message}, + "externalConversationId": str(uuid.uuid4()), + "offlineThreadingId": generate_offline_threading_id(), + "suggestedPromptIndex": None, + "flashVideoRecapInput": {"images": []}, + "flashPreviewInput": None, + "promptPrefix": None, + "entrypoint": "ABRA__CHAT__TEXT", + "icebreaker_type": "TEXT", + "__relay_internal__pv__AbraDebugDevOnlyrelayprovider": False, + "__relay_internal__pv__WebPixelRatiorelayprovider": 1, + }), + "server_timestamps": "true", + "doc_id": "7783822248314888", + } + headers = { + "x-asbd-id": "129477", + "x-fb-friendly-name": "useAbraSendMessageMutation", + #"x-fb-lsd": cookies["lsd"], + } + async with self.session.post(url, headers=headers, cookies=cookies, data=payload) as 
response: + await raise_for_status(response, "Fetch response failed") + last_snippet_len = 0 + fetch_id = None + async for line in response.content: + try: + json_line = json.loads(line) + except json.JSONDecodeError: + continue + bot_response_message = json_line.get("data", {}).get("node", {}).get("bot_response_message", {}) + streaming_state = bot_response_message.get("streaming_state") + fetch_id = bot_response_message.get("fetch_id") + if streaming_state in ("STREAMING", "OVERALL_DONE"): + #imagine_card = bot_response_message["imagine_card"] + snippet = bot_response_message["snippet"] + yield snippet[last_snippet_len:] + last_snippet_len = len(snippet) + elif streaming_state == "OVERALL_DONE": + break + #if last_streamed_response is None: + # if attempts > 3: + # raise Exception("MetaAI is having issues and was not able to respond (Server Error)") + # access_token = await self.get_access_token() + # return await self.prompt(message=message, attempts=attempts + 1) + if fetch_id is not None: + sources = await self.fetch_sources(fetch_id, cookies, access_token) + if sources is not None: + yield sources + + async def get_cookies(self, cookies: Cookies = None) -> dict: + async with self.session.get("https://www.meta.ai/", cookies=cookies) as response: + await raise_for_status(response, "Fetch home failed") + text = await response.text() + if cookies is None: + cookies = { + "_js_datr": self.extract_value(text, "_js_datr"), + "abra_csrf": self.extract_value(text, "abra_csrf"), + "datr": self.extract_value(text, "datr"), + } + cookies["lsd"] = self.extract_value(text, start_str='"LSD",[],{"token":"', end_str='"}') + return cookies + + async def fetch_sources(self, fetch_id: str, cookies: Cookies, access_token: str) -> List[Dict]: + url = "https://graph.meta.ai/graphql?locale=user" + payload = { + "access_token": access_token, + "fb_api_caller_class": "RelayModern", + "fb_api_req_friendly_name": "AbraSearchPluginDialogQuery", + "variables": 
json.dumps({"abraMessageFetchID": fetch_id}), + "server_timestamps": "true", + "doc_id": "6946734308765963", + } + headers = { + "authority": "graph.meta.ai", + "x-fb-friendly-name": "AbraSearchPluginDialogQuery", + } + async with self.session.post(url, headers=headers, cookies=cookies, data=payload) as response: + await raise_for_status(response) + response_json = await response.json() + try: + message = response_json["data"]["message"] + if message is not None: + searchResults = message["searchResults"] + if searchResults is not None: + return Sources(searchResults["references"]) + except (KeyError, TypeError): + raise RuntimeError(f"Response: {response_json}") + + @staticmethod + def extract_value(text: str, key: str = None, start_str = None, end_str = '",') -> str: + if start_str is None: + start_str = f'{key}":{{"value":"' + start = text.find(start_str) + if start >= 0: + start+= len(start_str) + end = text.find(end_str, start) + return text[start:end] + +def generate_offline_threading_id() -> str: + """ + Generates an offline threading ID. + + Returns: + str: The generated offline threading ID. 
+ """ + # Generate a random 64-bit integer + random_value = random.getrandbits(64) + + # Get the current timestamp in milliseconds + timestamp = int(time.time() * 1000) + + # Combine timestamp and random value + threading_id = (timestamp << 22) | (random_value & ((1 << 22) - 1)) + + return str(threading_id) + +class Sources(): + def __init__(self, list: List[Dict[str, str]]) -> None: + self.list = list + + def __str__(self) -> str: + return "\n\n" + ("\n".join([f"[{link['title']}]({link['link']})" for link in self.list])) \ No newline at end of file diff --git a/g4f/Provider/__init__.py b/g4f/Provider/__init__.py index f761df5b..10249aa2 100644 --- a/g4f/Provider/__init__.py +++ b/g4f/Provider/__init__.py @@ -42,6 +42,7 @@ from .Koala import Koala from .Liaobots import Liaobots from .Llama import Llama from .Local import Local +from .MetaAI import MetaAI from .PerplexityLabs import PerplexityLabs from .Pi import Pi from .ReplicateImage import ReplicateImage diff --git a/g4f/Provider/base_provider.py b/g4f/Provider/base_provider.py index 4c0157f3..8f368747 100644 --- a/g4f/Provider/base_provider.py +++ b/g4f/Provider/base_provider.py @@ -1,3 +1,3 @@ from ..providers.base_provider import * -from ..providers.types import FinishReason +from ..providers.types import FinishReason, Streaming from .helper import get_cookies, format_prompt \ No newline at end of file diff --git a/g4f/Provider/needs_auth/Groq.py b/g4f/Provider/needs_auth/Groq.py index 922b2dd2..d11f6a82 100644 --- a/g4f/Provider/needs_auth/Groq.py +++ b/g4f/Provider/needs_auth/Groq.py @@ -4,7 +4,7 @@ from .Openai import Openai from ...typing import AsyncResult, Messages class Groq(Openai): - lebel = "Groq" + label = "Groq" url = "https://console.groq.com/playground" working = True default_model = "mixtral-8x7b-32768" diff --git a/g4f/Provider/needs_auth/OpenaiAccount.py b/g4f/Provider/needs_auth/OpenaiAccount.py index 5c90b1de..7be60c86 100644 --- a/g4f/Provider/needs_auth/OpenaiAccount.py +++ 
b/g4f/Provider/needs_auth/OpenaiAccount.py @@ -3,5 +3,4 @@ from __future__ import annotations from .OpenaiChat import OpenaiChat class OpenaiAccount(OpenaiChat): - label = "OpenAI ChatGPT with Account" needs_auth = True \ No newline at end of file diff --git a/g4f/client/service.py b/g4f/client/service.py index d25c923d..dd6bf4b6 100644 --- a/g4f/client/service.py +++ b/g4f/client/service.py @@ -111,5 +111,6 @@ def get_last_provider(as_dict: bool = False) -> Union[ProviderType, dict[str, st "name": last.__name__, "url": last.url, "model": debug.last_model, + "label": last.label if hasattr(last, "label") else None } return last \ No newline at end of file diff --git a/g4f/gui/client/static/css/style.css b/g4f/gui/client/static/css/style.css index c0279bc2..a28c9cd6 100644 --- a/g4f/gui/client/static/css/style.css +++ b/g4f/gui/client/static/css/style.css @@ -890,7 +890,7 @@ a:-webkit-any-link { resize: vertical; max-height: 200px; - min-height: 80px; + min-height: 100px; } /* style for hljs copy */ diff --git a/g4f/gui/client/static/js/chat.v1.js b/g4f/gui/client/static/js/chat.v1.js index 39027260..a043cb25 100644 --- a/g4f/gui/client/static/js/chat.v1.js +++ b/g4f/gui/client/static/js/chat.v1.js @@ -302,7 +302,7 @@ async function add_message_chunk(message) { window.provider_result = message.provider; content.querySelector('.provider').innerHTML = ` - ${message.provider.name} + ${message.provider.label ? message.provider.label : message.provider.name} ${message.provider.model ? ' with ' + message.provider.model : ''} ` @@ -545,7 +545,8 @@ const load_conversation = async (conversation_id, scroll=true) => { last_model = item.provider?.model; let next_i = parseInt(i) + 1; let next_provider = item.provider ? item.provider : (messages.length > next_i ? messages[next_i].provider : null); - let provider_link = item.provider?.name ? `${item.provider.name}` : ""; + let provider_label = item.provider?.label ? 
item.provider?.label : item.provider?.name; + let provider_link = item.provider?.name ? `${provider_label}` : ""; let provider = provider_link ? `
${provider_link} diff --git a/g4f/gui/server/api.py b/g4f/gui/server/api.py index e3244c84..211d40c6 100644 --- a/g4f/gui/server/api.py +++ b/g4f/gui/server/api.py @@ -99,7 +99,7 @@ class Api(): if api_key is not None: kwargs["api_key"] = api_key if json_data.get('web_search'): - if provider == "Bing": + if provider in ("Bing", "HuggingChat"): kwargs['web_search'] = True else: from .internet import get_search_message diff --git a/g4f/providers/types.py b/g4f/providers/types.py index f33ea969..50c14431 100644 --- a/g4f/providers/types.py +++ b/g4f/providers/types.py @@ -102,4 +102,11 @@ ProviderType = Union[Type[BaseProvider], BaseRetryProvider] class FinishReason(): def __init__(self, reason: str): - self.reason = reason \ No newline at end of file + self.reason = reason + +class Streaming(): + def __init__(self, data: str) -> None: + self.data = data + + def __str__(self) -> str: + return self.data \ No newline at end of file -- cgit v1.2.3