Diffstat:
 g4f/Provider/PI.py       | 111
 g4f/Provider/__init__.py |   3
 g4f/__init__.py          |   2
 g4f/api/__init__.py      |  30
 g4f/api/run.py           |   2
 g4f/models.py            |   8
 6 files changed, 144 insertions, 12 deletions
diff --git a/g4f/Provider/PI.py b/g4f/Provider/PI.py
new file mode 100644
index 00000000..1e2edde8
--- /dev/null
+++ b/g4f/Provider/PI.py
@@ -0,0 +1,111 @@
+from __future__ import annotations
+
+from ..typing import AsyncResult, Messages
+from .base_provider import AsyncGeneratorProvider
+
+import json
+import cloudscraper
+
+class PI(AsyncGeneratorProvider):
+    url = "https://pi.ai"
+    working = True
+
+    @classmethod
+    async def create_async_generator(
+        cls,
+        model: str,
+        messages: Messages,
+        proxy: str = None,
+        **kwargs
+    ) -> AsyncResult:
+        Conversation = kwargs.get('conversation')
+        UserPrompt = messages[-1]
+        if UserPrompt['role'] == 'user':
+            UserPrompt = UserPrompt['content']
+        else:
+            UserPrompt = messages[-2]['content']
+        if Conversation is None:
+            Conversation = PI.Start_Conversation()
+        Answer = Ask_PI(UserPrompt, Conversation['sid'], Conversation['cookies'])
+
+        yield Answer[0]['text']
+
+    def Start_Conversation():
+        scraper.headers = {
+            'accept-type': 'application/json'
+        }
+        response = scraper.post('https://pi.ai/api/chat/start', data="{}", headers={'x-api-version': '3'})
+        cookies = response.cookies
+
+        if 'Just a moment' in response.text:
+            return {
+                'error': 'cloudflare detected',
+                'sid': None,
+                'cookies': None,
+            }
+        return {
+            'sid': response.json()['conversations'][0]['sid'],
+            'cookies': cookies
+        }
+
+    def GetConversationTitle(Conversation):
+        response = scraper.post('https://pi.ai/api/chat/start', data="{}", headers={'x-api-version': '3'}, cookies=Conversation['cookies'])
+        if 'Just a moment' in response.text:
+            return {
+                'error': 'cloudflare detected',
+                'title': "Couldn't get the title",
+            }
+        return {
+            'title': response.json()['conversations'][0]['title']
+        }
+
+    def GetChatHistory(Conversation):
+        params = {
+            'conversation': Conversation['sid'],
+        }
+        response = scraper.get('https://pi.ai/api/chat/history', params=params, cookies=Conversation['cookies'])
+        if 'Just a moment' in response.text:
+            return {
+                'error': 'cloudflare detected',
+                'traceback': "Couldn't get the chat history"
+            }
+        return response.json()
+
+session = cloudscraper.session()
+
+scraper = cloudscraper.create_scraper(
+    browser={
+        'browser': 'chrome',
+        'platform': 'windows',
+        'desktop': True
+    },
+    sess=session
+)
+
+scraper.headers = {
+    'Accept': '*/*',
+    'Accept-Encoding': 'deflate,gzip,br',
+}
+
+def Ask_PI(message, sid, cookies):
+    json_data = {
+        'text': message,
+        'conversation': sid,
+        'mode': 'BASE',
+    }
+    response = scraper.post('https://pi.ai/api/chat', json=json_data, cookies=cookies)
+
+    if 'Just a moment' in response.text:
+        return [{
+            'error': 'cloudflare detected',
+            'text': "Couldn't generate the answer because we were detected by Cloudflare; please try again later"
+        }
+        ]
+    result = []
+    for line in response.iter_lines(chunk_size=1024, decode_unicode=True):
+        if line.startswith('data: {"text":'):
+            result.append(json.loads(line.split('data: ')[1].encode('utf-8')))
+        if line.startswith('data: {"title":'):
+            result.append(json.loads(line.split('data: ')[1].encode('utf-8')))
+
+    return result
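
For reference, here is a minimal usage sketch (not part of the diff) showing how the helpers added above could be driven directly, outside the async generator. It assumes the patched package is importable as g4f.Provider.PI and that cloudscraper gets past the Cloudflare check; the dict shapes simply mirror the return values of Start_Conversation and Ask_PI.

from g4f.Provider.PI import PI, Ask_PI

conversation = PI.Start_Conversation()         # {'sid': ..., 'cookies': ...} or an error dict
if conversation.get('error'):
    raise RuntimeError(conversation['error'])  # Cloudflare blocked the start request

for prompt in ("Hello, who are you?", "What can you help me with?"):
    answer = Ask_PI(prompt, conversation['sid'], conversation['cookies'])
    print(answer[0]['text'])                   # first parsed chunk of the SSE response
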
diff --git a/g4f/Provider/__init__.py b/g4f/Provider/__init__.py
index 2b47b071..199b4f27 100644
--- a/g4f/Provider/__init__.py
+++ b/g4f/Provider/__init__.py
@@ -70,4 +70,5 @@ __map__: dict[str, BaseProvider] = dict([
])

class ProviderUtils:
-    convert: dict[str, BaseProvider] = __map__
\ No newline at end of file
+    convert: dict[str, BaseProvider] = __map__
+from .PI import PI
\ No newline at end of file
diff --git a/g4f/__init__.py b/g4f/__init__.py
index b96ebf9c..4c47fe7d 100644
--- a/g4f/__init__.py
+++ b/g4f/__init__.py
@@ -118,4 +118,4 @@ class Completion:
        return result if stream else ''.join(result)

if version_check:
-    check_pypi_version()
+    check_pypi_version()
\ No newline at end of file
diff --git a/g4f/api/__init__.py b/g4f/api/__init__.py
index d2244ff5..410fcc5f 100644
--- a/g4f/api/__init__.py
+++ b/g4f/api/__init__.py
@@ -40,12 +40,15 @@ class Api:
        @self.app.get("/v1/models")
        async def models():
-            model_list = [{
+            model_list = []
+            for model in g4f.Model.__all__():
+                model_info = g4f.ModelUtils.convert[model]
+                model_list.append({
                'id': model,
                'object': 'model',
                'created': 0,
-                'owned_by': 'g4f'} for model in g4f.Model.__all__()]
-
+                'owned_by': model_info.base_provider}
+            )
            return Response(content=json.dumps({
                'object': 'list',
                'data': model_list}, indent=4), media_type="application/json")
@@ -80,17 +83,25 @@ class Api:
            model = item_data.get('model')
            stream = True if item_data.get("stream") == "True" else False
            messages = item_data.get('messages')
+            conversation = item_data.get('conversation')
            try:
-                response = g4f.ChatCompletion.create(
-                    model=model,
-                    stream=stream,
-                    messages=messages,
-                    ignored=self.list_ignored_providers)
+                if model == 'pi':
+                    response = g4f.ChatCompletion.create(
+                        model=model,
+                        stream=stream,
+                        messages=messages,
+                        conversation=conversation,
+                        ignored=self.list_ignored_providers)
+                else:
+                    response = g4f.ChatCompletion.create(
+                        model=model,
+                        stream=stream,
+                        messages=messages,
+                        ignored=self.list_ignored_providers)
            except Exception as e:
                logging.exception(e)
                return Response(content=json.dumps({"error": "An error occurred while generating the response."}, indent=4), media_type="application/json")
-
            completion_id = ''.join(random.choices(string.ascii_letters + string.digits, k=28))
            completion_timestamp = int(time.time())
@@ -134,6 +145,7 @@ class Api:
                                {
                                    'index': 0,
                                    'delta': {
+                                        'role': 'assistant',
                                        'content': chunk,
                                    },
                                    'finish_reason': None,
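
A minimal client-side sketch (not part of the diff) of exercising the 'pi' model against this API. It assumes the server exposes the usual OpenAI-style /v1/chat/completions route on the address configured in run.py below, and that the non-streaming response follows the standard chat-completion shape; note the handler above only enables streaming when "stream" arrives as the string "True".

import requests

payload = {
    'model': 'pi',
    'messages': [{'role': 'user', 'content': 'Hello Pi!'}],
    # 'conversation' is optional: when it is missing the handler passes None through,
    # and the PI provider then starts a fresh Pi conversation on its own.
}
resp = requests.post('http://127.0.0.1:10000/v1/chat/completions', json=payload)
print(resp.json()['choices'][0]['message']['content'])
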
diff --git a/g4f/api/run.py b/g4f/api/run.py
index 88f34741..83bb0bdd 100644
--- a/g4f/api/run.py
+++ b/g4f/api/run.py
@@ -3,4 +3,4 @@ import g4f.api
if __name__ == "__main__":
    print(f'Starting server... [g4f v-{g4f.version}]')
-    g4f.api.Api(engine = g4f, debug = True).run(ip = "127.0.0.1:1337")
+    g4f.api.Api(engine = g4f, debug = True).run(ip = "0.0.0.0:10000")
diff --git a/g4f/models.py b/g4f/models.py
index 46eb49a0..e4b20ae5 100644
--- a/g4f/models.py
+++ b/g4f/models.py
@@ -27,6 +27,7 @@ from .Provider import (
    Bing,
    You,
    H2o,
+    PI,
)

@dataclass(unsafe_hash=True)
@@ -260,6 +261,11 @@ llama70b_v2_chat = Model(
    base_provider = 'replicate',
    best_provider = Vercel)

+pi = Model(
+    name = 'pi',
+    base_provider = 'inflection',
+    best_provider = PI
+)

class ModelUtils:
    convert: dict[str, Model] = {
@@ -315,6 +321,8 @@ class ModelUtils:
        'oasst-sft-1-pythia-12b' : oasst_sft_1_pythia_12b,
        'oasst-sft-4-pythia-12b-epoch-3.5' : oasst_sft_4_pythia_12b_epoch_35,
        'command-light-nightly' : command_light_nightly,
+
+        'pi': pi
    }

_all_models = list(ModelUtils.convert.keys())
\ No newline at end of file
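
Finally, a minimal sketch (not part of the diff) of selecting the new 'pi' model through g4f directly. 'conversation' is the extra keyword the PI provider reads: None (or omitting it) starts a fresh Pi conversation, and getting past Cloudflare via cloudscraper is assumed to succeed.

import g4f

response = g4f.ChatCompletion.create(
    model='pi',
    messages=[{'role': 'user', 'content': 'Hello Pi!'}],
    conversation=None,   # or a dict from PI.Start_Conversation() to continue an existing chat
)
print(response)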