author    sagadav <artursagadeev97@gmail.com>    2023-05-01 14:53:32 +0200
committer sagadav <artursagadeev97@gmail.com>    2023-05-01 14:53:32 +0200
commit    fb054408bf2b15b22c524d9c9556ef0af7ac8ed9 (patch)
tree      ef7c1e264381b4dc1455498c7067a8b661684988 /gpt4free
parent    Typo correction in forefront README (diff)
Diffstat (limited to 'gpt4free')
-rw-r--r--  gpt4free/theb/README.md      9
-rw-r--r--  gpt4free/theb/__init__.py   14
2 files changed, 17 insertions, 6 deletions
diff --git a/gpt4free/theb/README.md b/gpt4free/theb/README.md
index a4abdf62..a7af9dd8 100644
--- a/gpt4free/theb/README.md
+++ b/gpt4free/theb/README.md
@@ -5,7 +5,10 @@
 from gpt4free import theb
 
 # simple streaming completion
-for token in theb.Completion.create('hello world'):
-    print(token, end='', flush=True)
-print("")
+
+while True:
+    x = input()
+    for token in theb.Completion.create(x):
+        print(token, end='', flush=True)
+    print("")
 ```
diff --git a/gpt4free/theb/__init__.py b/gpt4free/theb/__init__.py
index 75a15068..741de34d 100644
--- a/gpt4free/theb/__init__.py
+++ b/gpt4free/theb/__init__.py
@@ -17,6 +17,7 @@ class Completion:
     timer = None
     message_queue = Queue()
     stream_completed = False
+    last_msg_id = None
 
     @staticmethod
     def request(prompt: str, proxy: Optional[str]=None):
@@ -28,26 +29,33 @@ class Completion:
         }
 
         proxies = {'http': 'http://' + proxy, 'https': 'http://' + proxy} if proxy else None
-
+
+        options = {}
+        if Completion.last_msg_id:
+            options['parentMessageId'] = Completion.last_msg_id
+
         requests.post(
             'https://chatbot.theb.ai/api/chat-process',
             headers=headers,
             proxies=proxies,
             content_callback=Completion.handle_stream_response,
-            json={'prompt': prompt, 'options': {}},
+            json={'prompt': prompt, 'options': options},
         )
 
         Completion.stream_completed = True
 
     @staticmethod
     def create(prompt: str, proxy: Optional[str]=None) -> Generator[str, None, None]:
+        Completion.stream_completed = False
         Thread(target=Completion.request, args=[prompt, proxy]).start()
 
         while not Completion.stream_completed or not Completion.message_queue.empty():
             try:
                 message = Completion.message_queue.get(timeout=0.01)
                 for message in findall(Completion.regex, message):
-                    yield loads(Completion.part1 + message + Completion.part2)['delta']
+                    message_json = loads(Completion.part1 + message + Completion.part2)
+                    Completion.last_msg_id = message_json['id']
+                    yield message_json['delta']
 
             except Empty:
                 pass
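
The net effect of this commit is that successive calls to `theb.Completion.create` now share conversation state: every parsed chunk updates `Completion.last_msg_id`, and the next request sends it back as `options['parentMessageId']`. A minimal caller-side sketch of that behaviour, assuming the `gpt4free` package from this tree is importable and chatbot.theb.ai is reachable (the prompts are illustrative only):

```python
# Minimal sketch only: assumes `gpt4free` from this tree is on the path and
# that https://chatbot.theb.ai is reachable; no error handling is shown.
from gpt4free import theb

# The first call answers normally; because create() records the id of the
# last streamed chunk in Completion.last_msg_id and request() sends it back
# as options['parentMessageId'], the second call is answered in the context
# of the first.
for prompt in ("My favourite colour is green.", "What is my favourite colour?"):
    for token in theb.Completion.create(prompt):
        print(token, end='', flush=True)
    print()
```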