| author | kqlio67 <kqlio67@users.noreply.github.com> | 2024-10-29 21:03:05 +0100 |
|---|---|---|
| committer | kqlio67 <kqlio67@users.noreply.github.com> | 2024-10-29 21:03:05 +0100 |
| commit | e79c8b01f58d21502c962f38c804bf81196f89fb (patch) | |
| tree | e8f0d3f94d7a8834d45173272bd1307c91ba06d4 | /g4f/client/client.py |
| parent | Update (g4f/gui/server/api.py) (diff) | |
Diffstat (limited to 'g4f/client/client.py')
-rw-r--r-- | g4f/client/client.py | 31 |
1 file changed, 5 insertions(+), 26 deletions(-)
```diff
diff --git a/g4f/client/client.py b/g4f/client/client.py
index 2772f9bb..44d99d60 100644
--- a/g4f/client/client.py
+++ b/g4f/client/client.py
@@ -149,7 +149,6 @@ class Completions:
         self,
         messages: Messages,
         model: str,
-        system: str = None, # Added system parameter
         provider: ProviderType = None,
         stream: bool = False,
         proxy: str = None,
@@ -162,12 +161,6 @@ class Completions:
         ignore_stream: bool = False,
         **kwargs
     ) -> Union[ChatCompletion, Iterator[ChatCompletionChunk]]:
-        # If a system prompt is provided, prepend it to the messages
-        if system:
-            system_message = {"role": "system", "content": system}
-            messages = [system_message] + messages
-
-        # Existing implementation continues...
         model, provider = get_model_and_provider(
             model,
             self.provider if provider is None else provider,
@@ -228,7 +221,6 @@ class Completions:
         self,
         messages: Messages,
         model: str,
-        system: str = None, # Added system parameter
         provider: ProviderType = None,
         stream: bool = False,
         proxy: str = None,
@@ -241,12 +233,6 @@ class Completions:
         ignore_stream: bool = False,
         **kwargs
     ) -> Union[ChatCompletion, AsyncIterator[ChatCompletionChunk]]:
-        # If a system prompt is provided, prepend it to the messages
-        if system:
-            system_message = {"role": "system", "content": system}
-            messages = [system_message] + messages
-
-        # Existing implementation continues...
         model, provider = get_model_and_provider(
             model,
             self.provider if provider is None else provider,
@@ -285,18 +271,16 @@ class Completions:
             **kwargs
         )

-        # Handle streaming or non-streaming responses
+        # Removed 'await' here since 'async_iter_response' returns an async generator
+        response = async_iter_response(response, stream, response_format, max_tokens, stop)
+        response = async_iter_append_model_and_provider(response)
+
         if stream:
-            response = async_iter_response(response, stream, response_format, max_tokens, stop)
-            response = async_iter_append_model_and_provider(response)
             return response
         else:
-            response = async_iter_response(response, stream, response_format, max_tokens, stop)
-            response = async_iter_append_model_and_provider(response)
             async for result in response:
                 return result

-
 class Chat:
     completions: Completions
@@ -417,12 +401,6 @@ class Image:
     def __repr__(self):
         return f"Image(url={self.url}, b64_json={'<base64 data>' if self.b64_json else None})"

-    def to_dict(self):
-        return {
-            "url": self.url,
-            "b64_json": self.b64_json
-        }
-
 class ImagesResponse:
     def __init__(self, data: list[Image]):
         self.data = data
@@ -530,3 +508,4 @@ class Images:
     async def create_variation(self, image: Union[str, bytes], model: str = None, response_format: str = "url", **kwargs):
         # Existing implementation, adjust if you want to support b64_json here as well
         pass
+
```
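In summary, the `system` keyword argument is gone from both the synchronous and asynchronous create methods of `Completions`, so a system prompt is now passed as a regular entry in `messages`, in the OpenAI-style schema the client already uses. A minimal sketch of a call after this change, assuming the public `g4f.client.Client` interface and an illustrative model name:

```python
from g4f.client import Client

client = Client()

# The system prompt is no longer a separate `system=` argument;
# the caller prepends it to `messages` instead.
response = client.chat.completions.create(
    model="gpt-4o-mini",  # illustrative; any supported model name works
    messages=[
        {"role": "system", "content": "You are a concise assistant."},
        {"role": "user", "content": "Explain what an async generator is."},
    ],
)
print(response.choices[0].message.content)
```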
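The other behavioural change is in the async path: `async_iter_response` is an async generator function, so calling it returns an async generator and must not be awaited; the wrapped response is either returned as-is for streaming or drained with `async for` to produce a single result. The sketch below illustrates that pattern with self-contained stand-ins (`numbers`, `wrap`, and `create` are hypothetical names, not the library's helpers):

```python
import asyncio
from typing import AsyncIterator


async def numbers() -> AsyncIterator[int]:
    # Stand-in for the raw provider response stream.
    for i in range(3):
        yield i


def wrap(source: AsyncIterator[int]) -> AsyncIterator[int]:
    # Calling an async generator function needs no `await`:
    # it immediately returns an async generator object.
    async def gen() -> AsyncIterator[int]:
        async for item in source:
            yield item * 10
    return gen()


async def create(stream: bool):
    response = wrap(numbers())  # no `await`, mirroring the diff
    if stream:
        return response  # caller iterates with `async for`
    async for result in response:
        return result  # non-streaming: first yielded item only


async def main():
    print(await create(stream=False))       # 0
    async for chunk in await create(True):  # 0, 10, 20
        print(chunk)


asyncio.run(main())
```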