From 8fcf618bbe164e6c6f3e0df5e6c9809f35a50e6c Mon Sep 17 00:00:00 2001
From: Heiner Lohaus
Date: Sun, 5 May 2024 23:38:31 +0200
Subject: Add Ollama provider, Add vision support to Openai

---
 g4f/Provider/Ollama.py | 33 +++++++++++++++++++++++++++++++++
 1 file changed, 33 insertions(+)
 create mode 100644 g4f/Provider/Ollama.py

diff --git a/g4f/Provider/Ollama.py b/g4f/Provider/Ollama.py
new file mode 100644
index 00000000..a44aaacd
--- /dev/null
+++ b/g4f/Provider/Ollama.py
@@ -0,0 +1,33 @@
+from __future__ import annotations
+
+import requests
+
+from .needs_auth.Openai import Openai
+from ..typing import AsyncResult, Messages
+
+class Ollama(Openai):
+    label = "Ollama"
+    url = "https://ollama.com"
+    needs_auth = False
+    working = True
+
+    @classmethod
+    def get_models(cls):
+        if not cls.models:
+            url = 'http://127.0.0.1:11434/api/tags'
+            models = requests.get(url).json()["models"]
+            cls.models = [model['name'] for model in models]
+            cls.default_model = cls.models[0]
+        return cls.models
+
+    @classmethod
+    def create_async_generator(
+        cls,
+        model: str,
+        messages: Messages,
+        api_base: str = "http://localhost:11434/v1",
+        **kwargs
+    ) -> AsyncResult:
+        return super().create_async_generator(
+            model, messages, api_base=api_base, **kwargs
+        )
\ No newline at end of file
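
Usage note (not part of the patch): a minimal sketch of how the new provider could be exercised, assuming the g4f package is installed and a local Ollama server is running on its default port with at least one model pulled. The message format and the string-chunk check are assumptions based on the OpenAI-compatible parent class; only get_models and create_async_generator are taken from the patch itself.

import asyncio

from g4f.Provider.Ollama import Ollama

async def main():
    # Discover locally pulled models via the Ollama tags endpoint
    # (http://127.0.0.1:11434/api/tags), as done in get_models above.
    models = Ollama.get_models()
    print("Available models:", models)

    # OpenAI-style message list; the provider forwards it to Ollama's
    # OpenAI-compatible API at http://localhost:11434/v1 (assumed running).
    messages = [{"role": "user", "content": "Hello, which model are you?"}]

    # create_async_generator returns an async generator that streams
    # response chunks; non-string items (if any) are skipped here.
    async for chunk in Ollama.create_async_generator(models[0], messages):
        if isinstance(chunk, str):
            print(chunk, end="", flush=True)

asyncio.run(main())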