summaryrefslogtreecommitdiffstats
path: root/unfinished/vercelai/test.py
diff options
context:
space:
mode:
authort.me/xtekky <98614666+xtekky@users.noreply.github.com>2023-04-29 01:04:16 +0200
committerGitHub <noreply@github.com>2023-04-29 01:04:16 +0200
commitc65875f3b0752d63ea1416a67c9685cbc794bd7e (patch)
treeae1fda94ca7fee7e381c41acda25e62d1d26592a /unfinished/vercelai/test.py
parentmade changes (diff)
parentreadme (diff)
downloadgpt4free-c65875f3b0752d63ea1416a67c9685cbc794bd7e.tar
gpt4free-c65875f3b0752d63ea1416a67c9685cbc794bd7e.tar.gz
gpt4free-c65875f3b0752d63ea1416a67c9685cbc794bd7e.tar.bz2
gpt4free-c65875f3b0752d63ea1416a67c9685cbc794bd7e.tar.lz
gpt4free-c65875f3b0752d63ea1416a67c9685cbc794bd7e.tar.xz
gpt4free-c65875f3b0752d63ea1416a67c9685cbc794bd7e.tar.zst
gpt4free-c65875f3b0752d63ea1416a67c9685cbc794bd7e.zip
Diffstat (limited to 'unfinished/vercelai/test.py')
-rw-r--r--unfinished/vercelai/test.py67
1 file changed, 67 insertions, 0 deletions
diff --git a/unfinished/vercelai/test.py b/unfinished/vercelai/test.py
new file mode 100644
index 00000000..318e71c3
--- /dev/null
+++ b/unfinished/vercelai/test.py
@@ -0,0 +1,67 @@
+import requests
+from base64 import b64decode, b64encode
+from json import loads
+from json import dumps
+
+headers = {
+ 'Accept': '*/*',
+ 'Accept-Language': 'en-GB,en-US;q=0.9,en;q=0.8',
+ 'Connection': 'keep-alive',
+ 'Referer': 'https://play.vercel.ai/',
+ 'Sec-Fetch-Dest': 'empty',
+ 'Sec-Fetch-Mode': 'cors',
+ 'Sec-Fetch-Site': 'same-origin',
+ 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.0.0 Safari/537.36',
+ 'sec-ch-ua': '"Chromium";v="110", "Google Chrome";v="110", "Not:A-Brand";v="99"',
+ 'sec-ch-ua-mobile': '?0',
+ 'sec-ch-ua-platform': '"macOS"',
+}
+
+response = requests.get('https://play.vercel.ai/openai.jpeg', headers=headers)
+
+token_data = loads(b64decode(response.text))
+print(token_data)
+
+raw_token = {
+ 'a': token_data['a'] * .1 * .2,
+ 't': token_data['t']
+}
+
+print(raw_token)
+
+new_token = b64encode(dumps(raw_token, separators=(',', ':')).encode()).decode()
+print(new_token)
+
+import requests
+
+headers = {
+ 'authority': 'play.vercel.ai',
+ 'accept': '*/*',
+ 'accept-language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
+ 'content-type': 'application/json',
+ 'custom-encoding': new_token,
+ 'origin': 'https://play.vercel.ai',
+ 'referer': 'https://play.vercel.ai/',
+ 'sec-ch-ua': '"Chromium";v="112", "Google Chrome";v="112", "Not:A-Brand";v="99"',
+ 'sec-ch-ua-mobile': '?0',
+ 'sec-ch-ua-platform': '"macOS"',
+ 'sec-fetch-dest': 'empty',
+ 'sec-fetch-mode': 'cors',
+ 'sec-fetch-site': 'same-origin',
+ 'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36',
+}
+
+json_data = {
+ 'prompt': 'hello\n',
+ 'model': 'openai:gpt-3.5-turbo',
+ 'temperature': 0.7,
+ 'maxTokens': 200,
+ 'topK': 1,
+ 'topP': 1,
+ 'frequencyPenalty': 1,
+ 'presencePenalty': 1,
+ 'stopSequences': [],
+}
+
+response = requests.post('https://play.vercel.ai/api/generate', headers=headers, json=json_data)
+print(response.text) \ No newline at end of file