from __future__ import annotations

import base64
import json

from aiohttp import ClientSession, BaseConnector

from ..typing import AsyncResult, Messages
from ..requests.raise_for_status import raise_for_status
from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
from .helper import get_connector

class Ecosia(AsyncGeneratorProvider, ProviderModelMixin):
    """Async streaming provider backed by the Ecosia chat API."""
    url = "https://www.ecosia.org"
    working = True
    supports_gpt_35_turbo = True
    default_model = "gpt-3.5-turbo-0125"
    # Resolve the generic alias to the pinned snapshot the API expects.
    model_aliases = {"gpt-3.5-turbo": "gpt-3.5-turbo-0125"}
    @classmethod
    async def create_async_generator(
        cls,
        model: str,
        messages: Messages,
        connector: BaseConnector = None,
        green: bool = False,
        proxy: str = None,
        **kwargs
    ) -> AsyncResult:
        # Validate the model name and resolve aliases; the API itself does not
        # accept a model parameter, so the resolved value is not sent.
        cls.get_model(model)
        headers = {
            "authority": "api.ecosia.org",
            "accept": "*/*",
            "origin": cls.url,
            "referer": f"{cls.url}/",
            "user-agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.114 Safari/537.36",
        }
        async with ClientSession(headers=headers, connector=get_connector(connector, proxy)) as session:
            # The API expects the message list as a base64-encoded JSON string.
            data = {
                "messages": base64.b64encode(json.dumps(messages).encode()).decode()
            }
            # The "sp" query parameter selects the chat persona:
            # "eco" when green mode is requested, "productivity" otherwise.
            api_url = f"https://api.ecosia.org/v2/chat/?sp={'eco' if green else 'productivity'}"
            async with session.post(api_url, json=data) as response:
                await raise_for_status(response)
                # Stream the body and yield decoded text chunks as they arrive.
                async for chunk in response.content.iter_any():
                    if chunk:
                        yield chunk.decode(errors="ignore")
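
# Minimal usage sketch (an assumption, not part of the provider itself): it
# presumes this module lives inside the g4f provider package, so the relative
# imports above resolve. The generator yields decoded text fragments as the
# API streams them.
#
#     import asyncio
#
#     async def demo():
#         async for chunk in Ecosia.create_async_generator(
#             "gpt-3.5-turbo",
#             [{"role": "user", "content": "Hello"}],
#         ):
#             print(chunk, end="", flush=True)
#
#     asyncio.run(demo())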