path: root/unfinished/vercelai/test.py
author     t.me/xtekky <98614666+xtekky@users.noreply.github.com>  2023-05-04 18:29:09 +0200
committer  t.me/xtekky <98614666+xtekky@users.noreply.github.com>  2023-05-04 18:29:09 +0200
commit     824d7259d030f7c5d35004729eee68a0e4359304 (patch)
tree       909b5bc4b10461e1fe2e82aa2cfd9da9724bcec6  /unfinished/vercelai/test.py
parent     Update README.md (diff)
Diffstat (limited to 'unfinished/vercelai/test.py')
-rw-r--r--  unfinished/vercelai/test.py  67
1 file changed, 0 insertions, 67 deletions
diff --git a/unfinished/vercelai/test.py b/unfinished/vercelai/test.py
deleted file mode 100644
index 318e71c3..00000000
--- a/unfinished/vercelai/test.py
+++ /dev/null
@@ -1,67 +0,0 @@
-import requests
-from base64 import b64decode, b64encode
-from json import loads
-from json import dumps
-
-headers = {
- 'Accept': '*/*',
- 'Accept-Language': 'en-GB,en-US;q=0.9,en;q=0.8',
- 'Connection': 'keep-alive',
- 'Referer': 'https://play.vercel.ai/',
- 'Sec-Fetch-Dest': 'empty',
- 'Sec-Fetch-Mode': 'cors',
- 'Sec-Fetch-Site': 'same-origin',
- 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.0.0 Safari/537.36',
- 'sec-ch-ua': '"Chromium";v="110", "Google Chrome";v="110", "Not:A-Brand";v="99"',
- 'sec-ch-ua-mobile': '?0',
- 'sec-ch-ua-platform': '"macOS"',
-}
-
-response = requests.get('https://play.vercel.ai/openai.jpeg', headers=headers)
-
-token_data = loads(b64decode(response.text))
-print(token_data)
-
-raw_token = {
- 'a': token_data['a'] * .1 * .2,
- 't': token_data['t']
-}
-
-print(raw_token)
-
-new_token = b64encode(dumps(raw_token, separators=(',', ':')).encode()).decode()
-print(new_token)
-
-import requests
-
-headers = {
- 'authority': 'play.vercel.ai',
- 'accept': '*/*',
- 'accept-language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
- 'content-type': 'application/json',
- 'custom-encoding': new_token,
- 'origin': 'https://play.vercel.ai',
- 'referer': 'https://play.vercel.ai/',
- 'sec-ch-ua': '"Chromium";v="112", "Google Chrome";v="112", "Not:A-Brand";v="99"',
- 'sec-ch-ua-mobile': '?0',
- 'sec-ch-ua-platform': '"macOS"',
- 'sec-fetch-dest': 'empty',
- 'sec-fetch-mode': 'cors',
- 'sec-fetch-site': 'same-origin',
- 'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36',
-}
-
-json_data = {
- 'prompt': 'hello\n',
- 'model': 'openai:gpt-3.5-turbo',
- 'temperature': 0.7,
- 'maxTokens': 200,
- 'topK': 1,
- 'topP': 1,
- 'frequencyPenalty': 1,
- 'presencePenalty': 1,
- 'stopSequences': [],
-}
-
-response = requests.post('https://play.vercel.ai/api/generate', headers=headers, json=json_data)
-print(response.text) \ No newline at end of file
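
For reference, the deleted script fetched an obfuscated token from https://play.vercel.ai/openai.jpeg, base64-decoded the JSON blob, scaled its 'a' field by 0.1 * 0.2, re-encoded the result, and sent it in a custom-encoding header alongside the generation request. Below is a minimal consolidated sketch of that same flow, not the committed file: it assumes the endpoints and token scheme still behave as they did when this script was written, and it trims the browser headers to the essentials.

# Hypothetical cleanup of the deleted test.py; endpoints and token handling
# are taken from the diff above and may no longer work.
from base64 import b64decode, b64encode
from json import dumps, loads

import requests

BASE_HEADERS = {
    'Accept': '*/*',
    'Referer': 'https://play.vercel.ai/',
    'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) '
                  'AppleWebKit/537.36 (KHTML, like Gecko) '
                  'Chrome/112.0.0.0 Safari/537.36',
}

# Step 1: the playground serves a base64-encoded JSON blob at /openai.jpeg.
token_data = loads(b64decode(
    requests.get('https://play.vercel.ai/openai.jpeg', headers=BASE_HEADERS).text))

# Step 2: scale the 'a' field as the original script did, then re-encode
# the compact JSON as base64 for the custom-encoding header.
raw_token = {'a': token_data['a'] * .1 * .2, 't': token_data['t']}
custom_encoding = b64encode(dumps(raw_token, separators=(',', ':')).encode()).decode()

# Step 3: request a completion, passing the re-encoded token.
json_data = {
    'prompt': 'hello\n',
    'model': 'openai:gpt-3.5-turbo',
    'temperature': 0.7,
    'maxTokens': 200,
    'topK': 1,
    'topP': 1,
    'frequencyPenalty': 1,
    'presencePenalty': 1,
    'stopSequences': [],
}

response = requests.post('https://play.vercel.ai/api/generate',
                         headers={**BASE_HEADERS, 'custom-encoding': custom_encoding},
                         json=json_data)
print(response.text)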