diff options
author | Tekky <98614666+xtekky@users.noreply.github.com> | 2023-08-25 18:16:25 +0200 |
---|---|---|
committer | GitHub <noreply@github.com> | 2023-08-25 18:16:25 +0200 |
commit | d765551b97b671b7b532371b31f2da1dc22dedc1 (patch) | |
tree | 339f0a91136f6611c186f5730d3c4e58477df25e /g4f/Provider/Hugchat.py | |
parent | ~ | Merge pull request #829 (diff) | |
parent | Add OpenaiChat and Hugchat Provider (diff) | |
download | gpt4free-d765551b97b671b7b532371b31f2da1dc22dedc1.tar gpt4free-d765551b97b671b7b532371b31f2da1dc22dedc1.tar.gz gpt4free-d765551b97b671b7b532371b31f2da1dc22dedc1.tar.bz2 gpt4free-d765551b97b671b7b532371b31f2da1dc22dedc1.tar.lz gpt4free-d765551b97b671b7b532371b31f2da1dc22dedc1.tar.xz gpt4free-d765551b97b671b7b532371b31f2da1dc22dedc1.tar.zst gpt4free-d765551b97b671b7b532371b31f2da1dc22dedc1.zip |
Diffstat (limited to '')
-rw-r--r-- | g4f/Provider/Hugchat.py | 67 |
1 file changed, 67 insertions, 0 deletions
# Optional dependency probe: the provider only works when the third-party
# `hugchat` package is importable.  The flag must be set True on success —
# setting it False in both branches (as before) disabled the provider forever.
try:
    from hugchat.hugchat import ChatBot
    has_module = True
except ImportError:
    has_module = False

from .base_provider import BaseProvider, get_cookies
from g4f.typing import CreateResult


class Hugchat(BaseProvider):
    """Provider backed by the HuggingChat web service (huggingface.co/chat).

    Requires an authenticated huggingface.co session (cookies) and the
    optional ``hugchat`` package; ``working`` reflects whether the package
    could be imported.
    """

    url = "https://huggingface.co/chat/"
    needs_auth = True        # needs huggingface.co session cookies
    working = has_module     # usable only when `hugchat` imported successfully
    llms = [
        "OpenAssistant/oasst-sft-6-llama-30b-xor",
        "meta-llama/Llama-2-70b-chat-hf",
    ]

    @classmethod
    def create_completion(
        cls,
        model: str,
        messages: list[dict[str, str]],
        stream: bool = False,
        proxy: str = None,
        cookies: dict = None,
        **kwargs
    ) -> CreateResult:
        """Yield one completion from HuggingChat.

        Args:
            model: Either an index into ``cls.llms`` or a model name
                contained in ``cls.llms``.
            messages: Chat history as ``{"role": ..., "content": ...}`` dicts.
            stream: Accepted for interface compatibility; the response is
                always yielded as a single chunk.
            proxy: Optional proxy host (scheme ``http://`` is prepended when
                no scheme is given).
            cookies: huggingface.co cookies; fetched from the local browser
                via ``get_cookies`` when omitted.  (Previously this was a
                default-argument call, evaluated once at import time.)

        Raises:
            RuntimeError: If ``model`` is not one of the supported LLMs.
        """
        # Fetch cookies lazily, per call, instead of at class definition.
        if cookies is None:
            cookies = get_cookies(".huggingface.co")

        bot = ChatBot(cookies=cookies)

        # Only touch the session proxies when a proxy was actually given;
        # the old code assigned {"http": None, "https": None} otherwise.
        if proxy:
            if "://" not in proxy:
                proxy = f"http://{proxy}"
            bot.session.proxies = {"http": proxy, "https": proxy}

        if model:
            try:
                if not isinstance(model, int):
                    model = cls.llms.index(model)
                bot.switch_llm(model)
            except Exception as err:
                # Narrowed from a bare `except:`; chain the cause so the
                # underlying ValueError / hugchat error stays visible.
                raise RuntimeError(f"Model is not supported: {model}") from err

        if len(messages) > 1:
            formatted = "\n".join(
                f"{message['role']}: {message['content']}" for message in messages
            )
            prompt = f"{formatted}\nAssistant:"
        else:
            # Read the last message instead of pop(): never mutate the
            # caller's list.
            prompt = messages[-1]["content"]

        try:
            yield bot.chat(prompt, **kwargs)
        finally:
            # Always delete the server-side conversation, even when chat()
            # raised, so no orphan conversations accumulate.
            bot.delete_conversation(bot.current_conversation)
            bot.current_conversation = ""

    @classmethod
    @property
    def params(cls):
        """Human-readable summary of the supported call parameters."""
        params = [
            ("model", "str"),
            ("messages", "list[dict[str, str]]"),
            ("stream", "bool"),
            ("proxy", "str"),
        ]
        param = ", ".join(": ".join(p) for p in params)
        return f"g4f.provider.{cls.__name__} supports: ({param})"