From b35dfcd1b01c575b65e0299ef71d285dc8f41459 Mon Sep 17 00:00:00 2001 From: Heiner Lohaus Date: Sun, 7 Apr 2024 10:36:13 +0200 Subject: Add local models to gui, Fix You Provider, add AsyncClient --- g4f/client/__init__.py | 3 + g4f/client/async.py | 198 -------------------------------------------- g4f/client/async_client.py | 202 +++++++++++++++++++++++++++++++++++++++++++++ g4f/client/client.py | 169 +++++++++++++++++++++++++++++++++++++ g4f/client/helper.py | 20 ++++- g4f/client/image_models.py | 10 ++- g4f/client/service.py | 114 +++++++++++++++++++++++++ g4f/client/types.py | 12 ++- 8 files changed, 519 insertions(+), 209 deletions(-) delete mode 100644 g4f/client/async.py create mode 100644 g4f/client/async_client.py create mode 100644 g4f/client/client.py create mode 100644 g4f/client/service.py (limited to 'g4f/client') diff --git a/g4f/client/__init__.py b/g4f/client/__init__.py index e69de29b..5bb4ba35 100644 --- a/g4f/client/__init__.py +++ b/g4f/client/__init__.py @@ -0,0 +1,3 @@ +from .stubs import ChatCompletion, ChatCompletionChunk, ImagesResponse +from .client import Client +from .async_client import AsyncClient \ No newline at end of file diff --git a/g4f/client/async.py b/g4f/client/async.py deleted file mode 100644 index 76e410fc..00000000 --- a/g4f/client/async.py +++ /dev/null @@ -1,198 +0,0 @@ -from __future__ import annotations - -import re -import os -import time -import random -import string - -from .types import Client as BaseClient -from .types import BaseProvider, ProviderType, FinishReason -from .stubs import ChatCompletion, ChatCompletionChunk, Image, ImagesResponse -from ..typing import Union, Iterator, Messages, ImageType, AsyncIerator -from ..image import ImageResponse as ImageProviderResponse -from ..errors import NoImageResponseError, RateLimitError, MissingAuthError -from .. 
import get_model_and_provider, get_last_provider -from .helper import read_json, find_stop, filter_none -รค -async def iter_response( - response: AsyncIterator[str], - stream: bool, - response_format: dict = None, - max_tokens: int = None, - stop: list = None -) -> AsyncIterResponse: - content = "" - finish_reason = None - completion_id = ''.join(random.choices(string.ascii_letters + string.digits, k=28)) - count: int = 0 - async for chunk in response: - if isinstance(chunk, FinishReason): - finish_reason = chunk.reason - break - content += str(chunk) - count += 1 - if max_tokens is not None and count >= max_tokens: - finish_reason = "length" - first, content, chunk = find_stop(stop, content, chunk) - if first != -1: - finish_reason = "stop" - if stream: - yield ChatCompletionChunk(chunk, None, completion_id, int(time.time())) - if finish_reason is not None: - break - finish_reason = "stop" if finish_reason is None else finish_reason - if stream: - yield ChatCompletionChunk(None, finish_reason, completion_id, int(time.time())) - else: - if response_format is not None and "type" in response_format: - if response_format["type"] == "json_object": - content = read_json(content) - yield ChatCompletion(content, finish_reason, completion_id, int(time.time())) - -async def iter_append_model_and_provider(response: AsyncIterResponse) -> IterResponse: - last_provider = None - async for chunk in response: - last_provider = get_last_provider(True) if last_provider is None else last_provider - chunk.model = last_provider.get("model") - chunk.provider = last_provider.get("name") - yield chunk - -class Client(BaseClient): - def __init__( - self, - **kwargs - ): - super().__init__(**kwargs) - self.chat: Chat = Chat(self, provider) - self.images: Images = Images(self, image_provider) - -async def cast_iter_async(iter): - for chunk in iter: - yield chunk - -def create_response( - messages: Messages, - model: str, - provider: ProviderType = None, - stream: bool = False, - 
response_format: dict = None, - max_tokens: int = None, - stop: Union[list[str], str] = None, - api_key: str = None, - **kwargs -): - if hasattr(provider, "create_async_generator): - create = provider.create_async_generator - else: - create = provider.create_completion - response = create( - model, messages, stream, - **filter_none( - proxy=self.client.get_proxy(), - max_tokens=max_tokens, - stop=stop, - api_key=self.client.api_key if api_key is None else api_key - ), - **kwargs - ) - if not hasattr(provider, "create_async_generator") - response = cast_iter_async(response) - return response - -class Completions(): - def __init__(self, client: Client, provider: ProviderType = None): - self.client: Client = client - self.provider: ProviderType = provider - - def create( - self, - messages: Messages, - model: str, - provider: ProviderType = None, - stream: bool = False, - response_format: dict = None, - ignored : list[str] = None, - ignore_working: bool = False, - ignore_stream: bool = False, - **kwargs - ) -> Union[ChatCompletion, AsyncIterator[ChatCompletionChunk]]: - model, provider = get_model_and_provider( - model, - self.provider if provider is None else provider, - stream, - ignored, - ignore_working, - ignore_stream, - **kwargs - ) - stop = [stop] if isinstance(stop, str) else stop - response = create_response(messages, model, provider, stream, **kwargs) - response = iter_response(response, stream, response_format, max_tokens, stop) - response = iter_append_model_and_provider(response) - return response if stream else anext(response) - -class Chat(): - completions: Completions - - def __init__(self, client: Client, provider: ProviderType = None): - self.completions = Completions(client, provider) - -async def iter_image_response(response: Iterator) -> Union[ImagesResponse, None]: - async for chunk in list(response): - if isinstance(chunk, ImageProviderResponse): - return ImagesResponse([Image(image) for image in chunk.get_list()]) - -def create_image(client: 
Client, provider: ProviderType, prompt: str, model: str = "", **kwargs) -> AsyncIterator: - prompt = f"create a image with: {prompt}" - return provider.create_async_generator( - model, - [{"role": "user", "content": prompt}], - True, - proxy=client.get_proxy(), - **kwargs - ) - -class Images(): - def __init__(self, client: Client, provider: ImageProvider = None): - self.client: Client = client - self.provider: ImageProvider = provider - self.models: ImageModels = ImageModels(client) - - async def generate(self, prompt, model: str = None, **kwargs) -> ImagesResponse: - provider = self.models.get(model, self.provider) - if isinstance(provider, type) and issubclass(provider, BaseProvider): - response = create_image(self.client, provider, prompt, **kwargs) - else: - try: - response = list(provider.create(prompt)) - except (RateLimitError, MissingAuthError) as e: - # Fallback for default provider - if self.provider is None: - response = create_image(self.client, self.models.you, prompt, model or "dall-e", **kwargs) - else: - raise e - image = iter_image_response(response) - if image is None: - raise NoImageResponseError() - return image - - async def create_variation(self, image: ImageType, model: str = None, **kwargs): - provider = self.models.get(model, self.provider) - result = None - if isinstance(provider, type) and issubclass(provider, BaseProvider): - response = provider.create_async_generator( - "", - [{"role": "user", "content": "create a image like this"}], - True, - image=image, - proxy=self.client.get_proxy(), - **kwargs - ) - async for chunk in response: - if isinstance(chunk, ImageProviderResponse): - result = ([chunk.images] if isinstance(chunk.images, str) else chunk.images) - result = ImagesResponse([Image(image)for image in result]) - if result is None: - raise NoImageResponseError() - return result diff --git a/g4f/client/async_client.py b/g4f/client/async_client.py new file mode 100644 index 00000000..25de1c76 --- /dev/null +++ 
b/g4f/client/async_client.py @@ -0,0 +1,202 @@ +from __future__ import annotations + +import time +import random +import string + +from .types import Client as BaseClient +from .types import ProviderType, FinishReason +from .stubs import ChatCompletion, ChatCompletionChunk, ImagesResponse, Image +from .types import AsyncIterResponse, ImageProvider +from .image_models import ImageModels +from .helper import filter_json, find_stop, filter_none, cast_iter_async +from .service import get_last_provider, get_model_and_provider +from ..typing import Union, Iterator, Messages, AsyncIterator, ImageType +from ..errors import NoImageResponseError +from ..image import ImageResponse as ImageProviderResponse +from ..providers.base_provider import AsyncGeneratorProvider + +async def iter_response( + response: AsyncIterator[str], + stream: bool, + response_format: dict = None, + max_tokens: int = None, + stop: list = None +) -> AsyncIterResponse: + content = "" + finish_reason = None + completion_id = ''.join(random.choices(string.ascii_letters + string.digits, k=28)) + count: int = 0 + async for chunk in response: + if isinstance(chunk, FinishReason): + finish_reason = chunk.reason + break + content += str(chunk) + count += 1 + if max_tokens is not None and count >= max_tokens: + finish_reason = "length" + first, content, chunk = find_stop(stop, content, chunk) + if first != -1: + finish_reason = "stop" + if stream: + yield ChatCompletionChunk(chunk, None, completion_id, int(time.time())) + if finish_reason is not None: + break + finish_reason = "stop" if finish_reason is None else finish_reason + if stream: + yield ChatCompletionChunk(None, finish_reason, completion_id, int(time.time())) + else: + if response_format is not None and "type" in response_format: + if response_format["type"] == "json_object": + content = filter_json(content) + yield ChatCompletion(content, finish_reason, completion_id, int(time.time())) + +async def iter_append_model_and_provider(response: 
AsyncIterResponse) -> AsyncIterResponse: + last_provider = None + async for chunk in response: + last_provider = get_last_provider(True) if last_provider is None else last_provider + chunk.model = last_provider.get("model") + chunk.provider = last_provider.get("name") + yield chunk + +class AsyncClient(BaseClient): + def __init__( + self, + provider: ProviderType = None, + image_provider: ImageProvider = None, + **kwargs + ): + super().__init__(**kwargs) + self.chat: Chat = Chat(self, provider) + self.images: Images = Images(self, image_provider) + +def create_response( + messages: Messages, + model: str, + provider: ProviderType = None, + stream: bool = False, + proxy: str = None, + max_tokens: int = None, + stop: list[str] = None, + api_key: str = None, + **kwargs +): + has_asnyc = isinstance(provider, type) and issubclass(provider, AsyncGeneratorProvider) + if has_asnyc: + create = provider.create_async_generator + else: + create = provider.create_completion + response = create( + model, messages, stream, + **filter_none( + proxy=proxy, + max_tokens=max_tokens, + stop=stop, + api_key=api_key + ), + **kwargs + ) + if not has_asnyc: + response = cast_iter_async(response) + return response + +class Completions(): + def __init__(self, client: AsyncClient, provider: ProviderType = None): + self.client: AsyncClient = client + self.provider: ProviderType = provider + + def create( + self, + messages: Messages, + model: str, + provider: ProviderType = None, + stream: bool = False, + proxy: str = None, + max_tokens: int = None, + stop: Union[list[str], str] = None, + api_key: str = None, + response_format: dict = None, + ignored : list[str] = None, + ignore_working: bool = False, + ignore_stream: bool = False, + **kwargs + ) -> Union[ChatCompletion, AsyncIterator[ChatCompletionChunk]]: + model, provider = get_model_and_provider( + model, + self.provider if provider is None else provider, + stream, + ignored, + ignore_working, + ignore_stream + ) + stop = [stop] if 
isinstance(stop, str) else stop + response = create_response( + messages, model, + provider, stream, + proxy=self.client.get_proxy() if proxy is None else proxy, + max_tokens=max_tokens, + stop=stop, + api_key=self.client.api_key if api_key is None else api_key, + **kwargs + ) + response = iter_response(response, stream, response_format, max_tokens, stop) + response = iter_append_model_and_provider(response) + return response if stream else anext(response) + +class Chat(): + completions: Completions + + def __init__(self, client: AsyncClient, provider: ProviderType = None): + self.completions = Completions(client, provider) + +async def iter_image_response(response: Iterator) -> Union[ImagesResponse, None]: + async for chunk in response: + if isinstance(chunk, ImageProviderResponse): + return ImagesResponse([Image(image) for image in chunk.get_list()]) + +def create_image(client: AsyncClient, provider: ProviderType, prompt: str, model: str = "", **kwargs) -> AsyncIterator: + prompt = f"create a image with: {prompt}" + if provider.__name__ == "You": + kwargs["chat_mode"] = "create" + return provider.create_async_generator( + model, + [{"role": "user", "content": prompt}], + stream=True, + proxy=client.get_proxy(), + **kwargs + ) + +class Images(): + def __init__(self, client: AsyncClient, provider: ImageProvider = None): + self.client: AsyncClient = client + self.provider: ImageProvider = provider + self.models: ImageModels = ImageModels(client) + + async def generate(self, prompt, model: str = "", **kwargs) -> ImagesResponse: + provider = self.models.get(model, self.provider) + if isinstance(provider, type) and issubclass(provider, AsyncGeneratorProvider): + response = create_image(self.client, provider, prompt, **kwargs) + else: + response = await provider.create_async(prompt) + return ImagesResponse([Image(image) for image in response.get_list()]) + image = await iter_image_response(response) + if image is None: + raise NoImageResponseError() + return image + + 
async def create_variation(self, image: ImageType, model: str = None, **kwargs): + provider = self.models.get(model, self.provider) + result = None + if isinstance(provider, type) and issubclass(provider, AsyncGeneratorProvider): + response = provider.create_async_generator( + "", + [{"role": "user", "content": "create a image like this"}], + True, + image=image, + proxy=self.client.get_proxy(), + **kwargs + ) + result = await iter_image_response(response) + if result is None: + raise NoImageResponseError() + return result \ No newline at end of file diff --git a/g4f/client/client.py b/g4f/client/client.py new file mode 100644 index 00000000..8ce5d932 --- /dev/null +++ b/g4f/client/client.py @@ -0,0 +1,169 @@ +from __future__ import annotations + +import time +import random +import string + +from ..typing import Union, Iterator, Messages, ImageType +from ..providers.types import BaseProvider, ProviderType, FinishReason +from ..image import ImageResponse as ImageProviderResponse +from ..errors import NoImageResponseError +from .stubs import ChatCompletion, ChatCompletionChunk, Image, ImagesResponse +from .image_models import ImageModels +from .types import IterResponse, ImageProvider +from .types import Client as BaseClient +from .service import get_model_and_provider, get_last_provider +from .helper import find_stop, filter_json, filter_none + +def iter_response( + response: iter[str], + stream: bool, + response_format: dict = None, + max_tokens: int = None, + stop: list = None +) -> IterResponse: + content = "" + finish_reason = None + completion_id = ''.join(random.choices(string.ascii_letters + string.digits, k=28)) + for idx, chunk in enumerate(response): + if isinstance(chunk, FinishReason): + finish_reason = chunk.reason + break + content += str(chunk) + if max_tokens is not None and idx + 1 >= max_tokens: + finish_reason = "length" + first, content, chunk = find_stop(stop, content, chunk if stream else None) + if first != -1: + finish_reason = "stop" + if stream: 
+ yield ChatCompletionChunk(chunk, None, completion_id, int(time.time())) + if finish_reason is not None: + break + finish_reason = "stop" if finish_reason is None else finish_reason + if stream: + yield ChatCompletionChunk(None, finish_reason, completion_id, int(time.time())) + else: + if response_format is not None and "type" in response_format: + if response_format["type"] == "json_object": + content = filter_json(content) + yield ChatCompletion(content, finish_reason, completion_id, int(time.time())) + +def iter_append_model_and_provider(response: IterResponse) -> IterResponse: + last_provider = None + for chunk in response: + last_provider = get_last_provider(True) if last_provider is None else last_provider + chunk.model = last_provider.get("model") + chunk.provider = last_provider.get("name") + yield chunk + +class Client(BaseClient): + def __init__( + self, + provider: ProviderType = None, + image_provider: ImageProvider = None, + **kwargs + ) -> None: + super().__init__(**kwargs) + self.chat: Chat = Chat(self, provider) + self.images: Images = Images(self, image_provider) + +class Completions(): + def __init__(self, client: Client, provider: ProviderType = None): + self.client: Client = client + self.provider: ProviderType = provider + + def create( + self, + messages: Messages, + model: str, + provider: ProviderType = None, + stream: bool = False, + proxy: str = None, + response_format: dict = None, + max_tokens: int = None, + stop: Union[list[str], str] = None, + api_key: str = None, + ignored : list[str] = None, + ignore_working: bool = False, + ignore_stream: bool = False, + **kwargs + ) -> Union[ChatCompletion, Iterator[ChatCompletionChunk]]: + model, provider = get_model_and_provider( + model, + self.provider if provider is None else provider, + stream, + ignored, + ignore_working, + ignore_stream, + ) + stop = [stop] if isinstance(stop, str) else stop + response = provider.create_completion( + model, messages, stream, + **filter_none( + 
proxy=self.client.get_proxy() if proxy is None else proxy, + max_tokens=max_tokens, + stop=stop, + api_key=self.client.api_key if api_key is None else api_key + ), + **kwargs + ) + response = iter_response(response, stream, response_format, max_tokens, stop) + response = iter_append_model_and_provider(response) + return response if stream else next(response) + +class Chat(): + completions: Completions + + def __init__(self, client: Client, provider: ProviderType = None): + self.completions = Completions(client, provider) + +def iter_image_response(response: Iterator) -> Union[ImagesResponse, None]: + for chunk in list(response): + if isinstance(chunk, ImageProviderResponse): + return ImagesResponse([Image(image) for image in chunk.get_list()]) + +def create_image(client: Client, provider: ProviderType, prompt: str, model: str = "", **kwargs) -> Iterator: + prompt = f"create a image with: {prompt}" + if provider.__name__ == "You": + kwargs["chat_mode"] = "create" + return provider.create_completion( + model, + [{"role": "user", "content": prompt}], + stream=True, + proxy=client.get_proxy(), + **kwargs + ) + +class Images(): + def __init__(self, client: Client, provider: ImageProvider = None): + self.client: Client = client + self.provider: ImageProvider = provider + self.models: ImageModels = ImageModels(client) + + def generate(self, prompt, model: str = None, **kwargs) -> ImagesResponse: + provider = self.models.get(model, self.provider) + if isinstance(provider, type) and issubclass(provider, BaseProvider): + response = create_image(self.client, provider, prompt, **kwargs) + else: + response = list(provider.create(prompt)) + image = iter_image_response(response) + if image is None: + raise NoImageResponseError() + return image + + def create_variation(self, image: ImageType, model: str = None, **kwargs): + provider = self.models.get(model, self.provider) + result = None + if isinstance(provider, type) and issubclass(provider, BaseProvider): + response = 
provider.create_completion( + "", + [{"role": "user", "content": "create a image like this"}], + True, + image=image, + proxy=self.client.get_proxy(), + **kwargs + ) + result = iter_image_response(response) + if result is None: + raise NoImageResponseError() + return result \ No newline at end of file diff --git a/g4f/client/helper.py b/g4f/client/helper.py index 32aa9183..c502d478 100644 --- a/g4f/client/helper.py +++ b/g4f/client/helper.py @@ -1,6 +1,9 @@ +from __future__ import annotations + import re +from typing import Iterable, AsyncIterator -def read_json(text: str) -> dict: +def filter_json(text: str) -> str: """ Parses JSON code block from a string. @@ -15,7 +18,7 @@ def read_json(text: str) -> dict: return match.group("code") return text -def find_stop(stop, content: str, chunk: str): +def find_stop(stop, content: str, chunk: str = None): first = -1 word = None if stop is not None: @@ -24,10 +27,21 @@ def find_stop(stop, content: str, chunk: str): if first != -1: content = content[:first] break - if stream and first != -1: + if chunk is not None and first != -1: first = chunk.find(word) if first != -1: chunk = chunk[:first] else: first = 0 return first, content, chunk + +def filter_none(**kwargs) -> dict: + return { + key: value + for key, value in kwargs.items() + if value is not None + } + +async def cast_iter_async(iter: Iterable) -> AsyncIterator: + for chunk in iter: + yield chunk \ No newline at end of file diff --git a/g4f/client/image_models.py b/g4f/client/image_models.py index 1fd2d0b5..db2ce09a 100644 --- a/g4f/client/image_models.py +++ b/g4f/client/image_models.py @@ -1,8 +1,10 @@ -from .Provider.BingCreateImages import BingCreateImages -from .Provider.needs_auth import Gemini, OpenaiChat -from ..Provider.You import You +from __future__ import annotations + +from .types import Client, ImageProvider -from .types import Client +from ..Provider.BingCreateImages import BingCreateImages +from ..Provider.needs_auth import Gemini, OpenaiChat +from 
..Provider.You import You class ImageModels(): gemini = Gemini diff --git a/g4f/client/service.py b/g4f/client/service.py new file mode 100644 index 00000000..f3565f6d --- /dev/null +++ b/g4f/client/service.py @@ -0,0 +1,114 @@ +from __future__ import annotations + +from typing import Union + +from .. import debug, version +from ..errors import ProviderNotFoundError, ModelNotFoundError, ProviderNotWorkingError, StreamNotSupportedError +from ..models import Model, ModelUtils +from ..Provider import ProviderUtils +from ..providers.types import BaseRetryProvider, ProviderType +from ..providers.retry_provider import IterProvider + +def convert_to_provider(provider: str) -> ProviderType: + if " " in provider: + provider_list = [ProviderUtils.convert[p] for p in provider.split() if p in ProviderUtils.convert] + if not provider_list: + raise ProviderNotFoundError(f'Providers not found: {provider}') + provider = IterProvider(provider_list) + elif provider in ProviderUtils.convert: + provider = ProviderUtils.convert[provider] + elif provider: + raise ProviderNotFoundError(f'Provider not found: {provider}') + return provider + +def get_model_and_provider(model : Union[Model, str], + provider : Union[ProviderType, str, None], + stream : bool, + ignored : list[str] = None, + ignore_working: bool = False, + ignore_stream: bool = False) -> tuple[str, ProviderType]: + """ + Retrieves the model and provider based on input parameters. + + Args: + model (Union[Model, str]): The model to use, either as an object or a string identifier. + provider (Union[ProviderType, str, None]): The provider to use, either as an object, a string identifier, or None. + stream (bool): Indicates if the operation should be performed as a stream. + ignored (list[str], optional): List of provider names to be ignored. + ignore_working (bool, optional): If True, ignores the working status of the provider. + ignore_stream (bool, optional): If True, ignores the streaming capability of the provider. 
+ + Returns: + tuple[str, ProviderType]: A tuple containing the model name and the provider type. + + Raises: + ProviderNotFoundError: If the provider is not found. + ModelNotFoundError: If the model is not found. + ProviderNotWorkingError: If the provider is not working. + StreamNotSupportedError: If streaming is not supported by the provider. + """ + if debug.version_check: + debug.version_check = False + version.utils.check_version() + + if isinstance(provider, str): + provider = convert_to_provider(provider) + + if isinstance(model, str): + if model in ModelUtils.convert: + model = ModelUtils.convert[model] + + if not provider: + if isinstance(model, str): + raise ModelNotFoundError(f'Model not found: {model}') + provider = model.best_provider + + if not provider: + raise ProviderNotFoundError(f'No provider found for model: {model}') + + if isinstance(model, Model): + model = model.name + + if not ignore_working and not provider.working: + raise ProviderNotWorkingError(f'{provider.__name__} is not working') + + if not ignore_working and isinstance(provider, BaseRetryProvider): + provider.providers = [p for p in provider.providers if p.working] + + if ignored and isinstance(provider, BaseRetryProvider): + provider.providers = [p for p in provider.providers if p.__name__ not in ignored] + + if not ignore_stream and not provider.supports_stream and stream: + raise StreamNotSupportedError(f'{provider.__name__} does not support "stream" argument') + + if debug.logging: + if model: + print(f'Using {provider.__name__} provider and {model} model') + else: + print(f'Using {provider.__name__} provider') + + debug.last_provider = provider + debug.last_model = model + + return model, provider + +def get_last_provider(as_dict: bool = False) -> Union[ProviderType, dict[str, str]]: + """ + Retrieves the last used provider. + + Args: + as_dict (bool, optional): If True, returns the provider information as a dictionary. 
+ + Returns: + Union[ProviderType, dict[str, str]]: The last used provider, either as an object or a dictionary. + """ + last = debug.last_provider + if isinstance(last, BaseRetryProvider): + last = last.last_provider + if last and as_dict: + return { + "name": last.__name__, + "url": last.url, + "model": debug.last_model, + } + return last \ No newline at end of file diff --git a/g4f/client/types.py b/g4f/client/types.py index b21ff03a..100be432 100644 --- a/g4f/client/types.py +++ b/g4f/client/types.py @@ -1,9 +1,15 @@ +from __future__ import annotations + +import os + +from .stubs import ChatCompletion, ChatCompletionChunk from ..providers.types import BaseProvider, ProviderType, FinishReason -from typing import Union, Iterator +from typing import Union, Iterator, AsyncIterator ImageProvider = Union[BaseProvider, object] Proxies = Union[dict, str] IterResponse = Iterator[Union[ChatCompletion, ChatCompletionChunk]] +AsyncIterResponse = AsyncIterator[Union[ChatCompletion, ChatCompletionChunk]] class ClientProxyMixin(): def get_proxy(self) -> Union[str, None]: @@ -21,9 +27,7 @@ class Client(ClientProxyMixin): self, api_key: str = None, proxies: Proxies = None, - provider: ProviderType = None, - image_provider: ImageProvider = None, **kwargs ) -> None: self.api_key: str = api_key - self.proxies: Proxies = proxies + self.proxies: Proxies = proxies \ No newline at end of file -- cgit v1.2.3