From 74a289ae649fc66f35d7b9a632ab753acd2910cf Mon Sep 17 00:00:00 2001 From: Heiner Lohaus Date: Thu, 2 Jan 2025 02:33:09 +0100 Subject: Add Anthropic provider --- g4f/Provider/PollinationsAI.py | 3 +- g4f/Provider/needs_auth/Anthropic.py | 194 +++++++++++++++++++++++++++++++++ g4f/Provider/needs_auth/HuggingFace.py | 6 +- g4f/Provider/needs_auth/OpenaiAPI.py | 16 +-- g4f/Provider/needs_auth/Replicate.py | 4 +- g4f/Provider/needs_auth/__init__.py | 1 + 6 files changed, 204 insertions(+), 20 deletions(-) create mode 100644 g4f/Provider/needs_auth/Anthropic.py (limited to 'g4f/Provider') diff --git a/g4f/Provider/PollinationsAI.py b/g4f/Provider/PollinationsAI.py index f1ad0031..3df3fafd 100644 --- a/g4f/Provider/PollinationsAI.py +++ b/g4f/Provider/PollinationsAI.py @@ -3,6 +3,7 @@ from __future__ import annotations import json import random import requests +from urllib.parse import quote from typing import Optional from aiohttp import ClientSession @@ -170,7 +171,7 @@ class PollinationsAI(OpenaiAPI): params = {k: v for k, v in params.items() if v is not None} async with ClientSession(headers=headers) as session: - prompt = quote(messages[-1]["content"]) + prompt = quote(messages[-1]["content"] if prompt is None else prompt) param_string = "&".join(f"{k}={v}" for k, v in params.items()) url = f"{cls.image_api_endpoint}/prompt/{prompt}?{param_string}" diff --git a/g4f/Provider/needs_auth/Anthropic.py b/g4f/Provider/needs_auth/Anthropic.py new file mode 100644 index 00000000..1e7a7efe --- /dev/null +++ b/g4f/Provider/needs_auth/Anthropic.py @@ -0,0 +1,194 @@ +from __future__ import annotations + +import requests +import json +import base64 +from typing import Optional + +from ..helper import filter_none +from ...typing import AsyncResult, Messages, ImagesType +from ...requests import StreamSession, raise_for_status +from ...providers.response import FinishReason, ToolCalls, Usage +from ...errors import MissingAuthError +from ...image import to_bytes, 
is_accepted_format +from .OpenaiAPI import OpenaiAPI + +class Anthropic(OpenaiAPI): + label = "Anthropic API" + url = "https://console.anthropic.com" + working = True + api_base = "https://api.anthropic.com/v1" + needs_auth = True + supports_stream = True + supports_system_message = True + supports_message_history = True + default_model = "claude-3-5-sonnet-latest" + models = [ + default_model, + "claude-3-5-sonnet-20241022", + "claude-3-5-haiku-latest", + "claude-3-5-haiku-20241022", + "claude-3-opus-latest", + "claude-3-opus-20240229", + "claude-3-sonnet-20240229", + "claude-3-haiku-20240307" + ] + model_aliases = { + "claude-3.5-sonnet": default_model, + "claude-3-opus": "claude-3-opus-latest", + "claude-3-sonnet": "claude-3-sonnet-20240229", + "claude-3-haiku": "claude-3-haiku-20240307", + } + + @classmethod + def get_models(cls, api_key: str = None, **kwargs): + if not cls.models: + url = f"https://api.anthropic.com/v1/models" + response = requests.get(url, headers={ + "Content-Type": "application/json", + "x-api-key": api_key, + "anthropic-version": "2023-06-01" + }) + raise_for_status(response) + models = response.json() + cls.models = [model["id"] for model in models["data"]] + return cls.models + + @classmethod + async def create_async_generator( + cls, + model: str, + messages: Messages, + proxy: str = None, + timeout: int = 120, + images: ImagesType = None, + api_key: str = None, + temperature: float = None, + max_tokens: int = 4096, + top_k: int = None, + top_p: float = None, + stop: list[str] = None, + stream: bool = False, + headers: dict = None, + impersonate: str = None, + tools: Optional[list] = None, + extra_data: dict = {}, + **kwargs + ) -> AsyncResult: + if api_key is None: + raise MissingAuthError('Add a "api_key"') + + if images is not None: + insert_images = [] + for image, _ in images: + data = to_bytes(image) + insert_images.append({ + "type": "image", + "source": { + "type": "base64", + "media_type": is_accepted_format(data), + "data": 
base64.b64encode(data).decode(), + } + }) + messages[-1]["content"] = [ + *insert_images, + { + "type": "text", + "text": messages[-1]["content"] + } + ] + system = "\n".join([message["content"] for message in messages if message.get("role") == "system"]) + if system: + messages = [message for message in messages if message.get("role") != "system"] + else: + system = None + + async with StreamSession( + proxy=proxy, + headers=cls.get_headers(stream, api_key, headers), + timeout=timeout, + impersonate=impersonate, + ) as session: + data = filter_none( + messages=messages, + model=cls.get_model(model, api_key=api_key), + temperature=temperature, + max_tokens=max_tokens, + top_k=top_k, + top_p=top_p, + stop_sequences=stop, + system=system, + stream=stream, + tools=tools, + **extra_data + ) + async with session.post(f"{cls.api_base}/messages", json=data) as response: + await raise_for_status(response) + tool_calls = [] + if not stream: + data = await response.json() + cls.raise_error(data) + if "type" in data and data["type"] == "message": + for content in data["content"]: + if content["type"] == "text": + yield content["text"] + elif content["type"] == "tool_use": + tool_calls.append({ + "id": content["id"], + "type": "function", + "function": { "name": content["name"], "arguments": content["input"] } + }) + if data["stop_reason"] == "end_turn": + yield FinishReason("stop") + elif data["stop_reason"] == "max_tokens": + yield FinishReason("length") + yield Usage(**data["usage"]) + if tool_calls: + yield ToolCalls(tool_calls) + else: + content_block = None + partial_json = [] + async for line in response.iter_lines(): + if line.startswith(b"data: "): + chunk = line[6:] + if chunk == b"[DONE]": + break + data = json.loads(chunk) + cls.raise_error(data) + if "type" in data: + if data["type"] == "content_block_start": + content_block = data["content_block"] + if content_block is None: + pass # Message start + elif data["type"] == "content_block_delta": + if content_block["type"] == "text": + yield data["delta"]["text"] + elif 
content_block["type"] == "tool_use": + partial_json.append(data["delta"]["partial_json"]) + elif data["type"] == "message_delta": + if data["delta"]["stop_reason"] == "end_turn": + yield FinishReason("stop") + elif data["delta"]["stop_reason"] == "max_tokens": + yield FinishReason("length") + yield Usage(**data["usage"]) + elif data["type"] == "content_block_stop": + if content_block["type"] == "tool_use": + tool_calls.append({ + "id": content_block["id"], + "type": "function", + "function": { "name": content_block["name"], "arguments": "".join(partial_json) } + }) + partial_json = [] + if tool_calls: + yield ToolCalls(tool_calls) + + @classmethod + def get_headers(cls, stream: bool, api_key: str = None, headers: dict = None) -> dict: + return { + "Accept": "text/event-stream" if stream else "application/json", + "Content-Type": "application/json", + **( + {"x-api-key": api_key} + if api_key is not None else {} + ), + "anthropic-version": "2023-06-01", + **({} if headers is None else headers) + } \ No newline at end of file diff --git a/g4f/Provider/needs_auth/HuggingFace.py b/g4f/Provider/needs_auth/HuggingFace.py index 05e69072..fd6c74f0 100644 --- a/g4f/Provider/needs_auth/HuggingFace.py +++ b/g4f/Provider/needs_auth/HuggingFace.py @@ -47,8 +47,8 @@ class HuggingFace(AsyncGeneratorProvider, ProviderModelMixin): proxy: str = None, api_base: str = "https://api-inference.huggingface.co", api_key: str = None, - max_new_tokens: int = 1024, - temperature: float = 0.7, + max_tokens: int = 1024, + temperature: float = None, prompt: str = None, action: str = None, extra_data: dict = {}, @@ -84,7 +84,7 @@ class HuggingFace(AsyncGeneratorProvider, ProviderModelMixin): else: params = { "return_full_text": False, - "max_new_tokens": max_new_tokens, + "max_new_tokens": max_tokens, "temperature": temperature, **extra_data } diff --git a/g4f/Provider/needs_auth/OpenaiAPI.py b/g4f/Provider/needs_auth/OpenaiAPI.py index 6471895e..67b55cee 100644 --- 
a/g4f/Provider/needs_auth/OpenaiAPI.py +++ b/g4f/Provider/needs_auth/OpenaiAPI.py @@ -4,7 +4,7 @@ import json import requests from ..helper import filter_none -from ..base_provider import AsyncGeneratorProvider, ProviderModelMixin +from ..base_provider import AsyncGeneratorProvider, ProviderModelMixin, RaiseErrorMixin from ...typing import Union, Optional, AsyncResult, Messages, ImagesType from ...requests import StreamSession, raise_for_status from ...providers.response import FinishReason, ToolCalls, Usage @@ -12,7 +12,7 @@ from ...errors import MissingAuthError, ResponseError from ...image import to_data_uri from ... import debug -class OpenaiAPI(AsyncGeneratorProvider, ProviderModelMixin): +class OpenaiAPI(AsyncGeneratorProvider, ProviderModelMixin, RaiseErrorMixin): label = "OpenAI API" url = "https://platform.openai.com" api_base = "https://api.openai.com/v1" @@ -141,18 +141,6 @@ class OpenaiAPI(AsyncGeneratorProvider, ProviderModelMixin): if "finish_reason" in choice and choice["finish_reason"] is not None: return FinishReason(choice["finish_reason"]) - @staticmethod - def raise_error(data: dict): - if "error_message" in data: - raise ResponseError(data["error_message"]) - elif "error" in data: - if "code" in data["error"]: - raise ResponseError(f'Error {data["error"]["code"]}: {data["error"]["message"]}') - elif "message" in data["error"]: - raise ResponseError(data["error"]["message"]) - else: - raise ResponseError(data["error"]) - @classmethod def get_headers(cls, stream: bool, api_key: str = None, headers: dict = None) -> dict: return { diff --git a/g4f/Provider/needs_auth/Replicate.py b/g4f/Provider/needs_auth/Replicate.py index ec993aa4..51fdf593 100644 --- a/g4f/Provider/needs_auth/Replicate.py +++ b/g4f/Provider/needs_auth/Replicate.py @@ -25,7 +25,7 @@ class Replicate(AsyncGeneratorProvider, ProviderModelMixin): proxy: str = None, timeout: int = 180, system_prompt: str = None, - max_new_tokens: int = None, + max_tokens: int = None, temperature: 
float = None, top_p: float = None, top_k: float = None, @@ -55,7 +55,7 @@ class Replicate(AsyncGeneratorProvider, ProviderModelMixin): "prompt": format_prompt(messages), **filter_none( system_prompt=system_prompt, - max_new_tokens=max_new_tokens, + max_new_tokens=max_tokens, temperature=temperature, top_p=top_p, top_k=top_k, diff --git a/g4f/Provider/needs_auth/__init__.py b/g4f/Provider/needs_auth/__init__.py index 3a50aafd..d0acd6fc 100644 --- a/g4f/Provider/needs_auth/__init__.py +++ b/g4f/Provider/needs_auth/__init__.py @@ -1,5 +1,6 @@ from .gigachat import * +from .Anthropic import Anthropic from .BingCreateImages import BingCreateImages from .Cerebras import Cerebras from .CopilotAccount import CopilotAccount -- cgit v1.2.3