Diffstat (limited to 'gpt4free')
-rw-r--r--  gpt4free/forefront/__init__.py | 79
-rw-r--r--  gpt4free/italygpt/README.md    | 18
-rw-r--r--  gpt4free/italygpt/__init__.py  | 28
-rw-r--r--  gpt4free/quora/__init__.py     |  4
-rw-r--r--  gpt4free/test.py               |  4
-rw-r--r--  gpt4free/theb/__init__.py      |  2
6 files changed, 32 insertions(+), 103 deletions(-)
diff --git a/gpt4free/forefront/__init__.py b/gpt4free/forefront/__init__.py
index 23978501..dbf1730b 100644
--- a/gpt4free/forefront/__init__.py
+++ b/gpt4free/forefront/__init__.py
@@ -1,49 +1,27 @@
-import os
-import pickle
from json import loads
+from xtempmail import Email
from re import findall
+from typing import Optional, Generator
+from faker import Faker
from time import time, sleep
-from typing import Generator, Optional
from uuid import uuid4
-
from fake_useragent import UserAgent
-from pymailtm import MailTm, Message
from requests import post
from tls_client import Session
-
from .typing import ForeFrontResponse
class Account:
- COOKIES_FILE_NAME = 'cookies.pickle'
-
@staticmethod
- def login(proxy: Optional[str] = None, logging: bool = False) -> str:
- if not os.path.isfile(Account.COOKIES_FILE_NAME):
- return Account.create(proxy, logging)
-
- with open(Account.COOKIES_FILE_NAME, 'rb') as f:
- cookies = pickle.load(f)
- proxies = {'http': 'http://' + proxy, 'https': 'http://' + proxy} if proxy else False
-
- client = Session(client_identifier='chrome110')
- client.proxies = proxies
- client.cookies.update(cookies)
-
- if Account.is_cookie_enabled(client):
- response = client.get('https://clerk.forefront.ai/v1/client?_clerk_js_version=4.38.4')
- return response.json()['response']['sessions'][0]['last_active_token']['jwt']
- else:
- return Account.create(proxy, logging)
-
- @staticmethod
- def create(proxy: Optional[str] = None, logging: bool = False, save_cookies: bool = False) -> str:
+ def create(proxy: Optional[str] = None, logging: bool = False):
proxies = {'http': 'http://' + proxy, 'https': 'http://' + proxy} if proxy else False
+ faker = Faker()
+ name = (faker.name().replace(' ', '_')).lower()
start = time()
- mail_client = MailTm().get_account()
- mail_address = mail_client.address
+ mail_client = Email(name=name)
+ mail_address = mail_client.email
client = Session(client_identifier='chrome110')
client.proxies = proxies
@@ -66,7 +44,10 @@ class Account:
response = client.post(
f'https://clerk.forefront.ai/v1/client/sign_ups/{trace_token}/prepare_verification?_clerk_js_version=4.38.4',
- data={'strategy': 'email_link', 'redirect_url': 'https://accounts.forefront.ai/sign-up/verify'},
+ data={
+ 'strategy': 'email_link',
+ 'redirect_url': 'https://accounts.forefront.ai/sign-up/verify'
+ },
)
if logging:
@@ -74,31 +55,23 @@ class Account:
if 'sign_up_attempt' not in response.text:
return 'Failed to create account!'
-
- while True:
- sleep(1)
- new_message: Message = mail_client.wait_for_message()
- if logging:
- print(new_message.data['id'])
-
- verification_url = findall(r'https:\/\/clerk\.forefront\.ai\/v1\/verify\?token=\w.+', new_message.text)[0]
-
+ verification_url = None
+ new_message = mail_client.get_new_message(5)
+ for msg in new_message:
+ verification_url = findall(r'https:\/\/clerk\.forefront\.ai\/v1\/verify\?token=\w.+', msg.text)[0]
if verification_url:
break
-
+
+ if verification_url is None or not verification_url:
+ raise RuntimeError('Error while obtaining verification URL!')
if logging:
print(verification_url)
-
response = client.get(verification_url)
response = client.get('https://clerk.forefront.ai/v1/client?_clerk_js_version=4.38.4')
token = response.json()['response']['sessions'][0]['last_active_token']['jwt']
- if save_cookies:
- with open(Account.COOKIES_FILE_NAME, 'wb') as f:
- pickle.dump(client.cookies, f)
-
with open('accounts.txt', 'a') as f:
f.write(f'{mail_address}:{token}\n')
@@ -107,11 +80,6 @@ class Account:
return token
- @staticmethod
- def is_cookie_enabled(client: Session) -> bool:
- response = client.get('https://chat.forefront.ai/')
- return 'window.startClerk' in response.text
-
class StreamingCompletion:
@staticmethod
@@ -122,14 +90,14 @@ class StreamingCompletion:
action_type='new',
default_persona='607e41fe-95be-497e-8e97-010a59b2e2c0', # default
model='gpt-4',
- proxy=None,
+ proxy=None
) -> Generator[ForeFrontResponse, None, None]:
if not token:
raise Exception('Token is required!')
if not chat_id:
chat_id = str(uuid4())
- proxies = {'http': 'http://' + proxy, 'https': 'http://' + proxy} if proxy else None
+ proxies = { 'http': 'http://' + proxy, 'https': 'http://' + proxy } if proxy else None
headers = {
'authority': 'chat-server.tenant-forefront-default.knative.chi.coreweave.com',
@@ -197,7 +165,7 @@ class Completion:
action_type='new',
default_persona='607e41fe-95be-497e-8e97-010a59b2e2c0', # default
model='gpt-4',
- proxy=None,
+ proxy=None
) -> ForeFrontResponse:
text = ''
final_response = None
@@ -208,7 +176,7 @@ class Completion:
action_type=action_type,
default_persona=default_persona,
model=model,
- proxy=proxy,
+ proxy=proxy
):
if response:
final_response = response
@@ -220,3 +188,4 @@ class Completion:
raise Exception('Unable to get the response, Please try again')
return final_response
+ \ No newline at end of file
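For readers skimming the forefront changes: the account flow now creates a throwaway mailbox with xtempmail instead of pymailtm and polls it for the Clerk verification link. Below is a minimal, hedged sketch of that polling step written defensively; it assumes only the calls already visible in the hunk above (`Email(name=...)`, `get_new_message(timeout)`, `message.text`), and the helper name is illustrative, not part of the module.

```python
from re import findall
from typing import Optional

from xtempmail import Email  # disposable-mailbox client introduced by this diff


def wait_for_clerk_link(mail: Email, timeout: int = 5) -> Optional[str]:
    """Illustrative helper: return the first Clerk verification URL found in new mail."""
    for msg in mail.get_new_message(timeout):
        # guard the lookup: findall(...)[0] on a mail without a link would raise IndexError
        links = findall(r'https:\/\/clerk\.forefront\.ai\/v1\/verify\?token=\w.+', msg.text)
        if links:
            return links[0]
    return None
```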
diff --git a/gpt4free/italygpt/README.md b/gpt4free/italygpt/README.md
deleted file mode 100644
index 984eff3a..00000000
--- a/gpt4free/italygpt/README.md
+++ /dev/null
@@ -1,18 +0,0 @@
-### Example: `italygpt`
-
-```python
-# create an instance
-from gpt4free import italygpt
-italygpt = italygpt.Completion()
-
-# initialize api
-italygpt.init()
-
-# get an answer
-italygpt.create(prompt="What is the meaning of life?")
-print(italygpt.answer) # html formatted
-
-# keep the old conversation
-italygpt.create(prompt="Are you a human?", messages=italygpt.messages)
-print(italygpt.answer)
-``` \ No newline at end of file
diff --git a/gpt4free/italygpt/__init__.py b/gpt4free/italygpt/__init__.py
deleted file mode 100644
index 27a965f1..00000000
--- a/gpt4free/italygpt/__init__.py
+++ /dev/null
@@ -1,28 +0,0 @@
-import requests, time, ast, json
-from bs4 import BeautifulSoup
-from hashlib import sha256
-
-class Completion:
- # answer is returned with html formatting
- next_id = None
- messages = []
- answer = None
-
- def init(self):
- r = requests.get("https://italygpt.it")
- soup = BeautifulSoup(r.text, "html.parser")
- self.next_id = soup.find("input", {"name": "next_id"})["value"]
-
- def create(self, prompt: str, messages: list = []):
- try:
- r = requests.get("https://italygpt.it/question", params={"hash": sha256(self.next_id.encode()).hexdigest(), "prompt": prompt, "raw_messages": json.dumps(messages)}).json()
- except:
- r = requests.get("https://italygpt.it/question", params={"hash": sha256(self.next_id.encode()).hexdigest(), "prompt": prompt, "raw_messages": json.dumps(messages)}).text
- if "too many requests" in r.lower():
- # rate limit is 17 requests per 1 minute
- time.sleep(20)
- return self.create(prompt, messages)
- self.next_id = r["next_id"]
- self.messages = ast.literal_eval(r["raw_messages"])
- self.answer = r["response"]
- return self \ No newline at end of file
diff --git a/gpt4free/quora/__init__.py b/gpt4free/quora/__init__.py
index bc17ea5d..5d9e80c1 100644
--- a/gpt4free/quora/__init__.py
+++ b/gpt4free/quora/__init__.py
@@ -104,8 +104,8 @@ class Model:
def create(
token: str,
model: str = 'gpt-3.5-turbo', # claude-instant
- system_prompt: str = 'You are ChatGPT a large language model developed by Openai. Answer as consisely as possible',
- description: str = 'gpt-3.5 language model from openai, skidded by poe.com',
+ system_prompt: str = 'You are ChatGPT, a large language model. Answer as concisely as possible',
+ description: str = 'gpt-3.5 language model',
handle: str = None,
) -> ModelResponse:
if not handle:
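For reference, the signature touched here is `Model.create(token, model, system_prompt, description, handle) -> ModelResponse`. A hedged usage sketch follows; the token value is a placeholder, and nothing about `ModelResponse` beyond its name is assumed:

```python
from gpt4free import quora

poe_token = '<your poe token>'  # placeholder; obtain a real token out of band

bot = quora.Model.create(
    token=poe_token,
    model='gpt-3.5-turbo',  # or 'claude-instant', per the comment in the signature
    system_prompt='You are ChatGPT, a large language model. Answer as concisely as possible',
    description='gpt-3.5 language model',
)
```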
diff --git a/gpt4free/test.py b/gpt4free/test.py
new file mode 100644
index 00000000..b2516748
--- /dev/null
+++ b/gpt4free/test.py
@@ -0,0 +1,4 @@
+import forefront
+token = forefront.Account.create()
+response = forefront.Completion.create(token=token, prompt='Hello!')
+print(response) \ No newline at end of file
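The new gpt4free/test.py above doubles as a usage example. It imports `forefront` as a top-level module, so it presumably runs from inside the `gpt4free/` directory; run from the repository root, the equivalent (assuming the package layout shown in the diffstat) would be:

```python
from gpt4free import forefront

token = forefront.Account.create(logging=True)  # logging flag as defined in the diff above
response = forefront.Completion.create(token=token, prompt='Hello!')
print(response)
```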
diff --git a/gpt4free/theb/__init__.py b/gpt4free/theb/__init__.py
index 4513ea6e..0177194e 100644
--- a/gpt4free/theb/__init__.py
+++ b/gpt4free/theb/__init__.py
@@ -72,3 +72,5 @@ class Completion:
for message in Completion.create(prompt, proxy):
response_list.append(message)
return ''.join(response_list)
+
+ Completion.message_queue.put(response.decode(errors='replace'))
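The line appended to theb/__init__.py references a `response` variable and a `Completion.message_queue` that do not appear in this hunk's context, so it presumably belongs inside a stream-handling callback defined elsewhere in the class. A heavily hedged sketch of such a callback; every name other than the appended line itself is an assumption:

```python
from queue import Queue


class Completion:
    # assumed: a class-level queue that streamed chunks are pushed into
    message_queue: Queue = Queue()

    @staticmethod
    def handle_stream_response(response: bytes) -> None:
        # the appended line: decode each streamed chunk and enqueue it for the reader
        Completion.message_queue.put(response.decode(errors='replace'))
```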