From 824d7259d030f7c5d35004729eee68a0e4359304 Mon Sep 17 00:00:00 2001
From: "t.me/xtekky" <98614666+xtekky@users.noreply.github.com>
Date: Thu, 4 May 2023 17:29:09 +0100
Subject: updates

---
 unfinished/vercelai/__init__.py      | 41 ----------------------
 unfinished/vercelai/test.js          | 33 ------------------
 unfinished/vercelai/test.py          | 67 ------------------------------------
 unfinished/vercelai/token.py         |  0
 unfinished/vercelai/vercelai_test.py |  5 ---
 5 files changed, 146 deletions(-)
 delete mode 100644 unfinished/vercelai/__init__.py
 delete mode 100644 unfinished/vercelai/test.js
 delete mode 100644 unfinished/vercelai/test.py
 delete mode 100644 unfinished/vercelai/token.py
 delete mode 100644 unfinished/vercelai/vercelai_test.py

(limited to 'unfinished/vercelai')

diff --git a/unfinished/vercelai/__init__.py b/unfinished/vercelai/__init__.py
deleted file mode 100644
index 1dcb5b39..00000000
--- a/unfinished/vercelai/__init__.py
+++ /dev/null
@@ -1,41 +0,0 @@
-import requests
-
-class Completion:
-    def create(prompt: str,
-               model: str = 'openai:gpt-3.5-turbo',
-               temperature: float = 0.7,
-               max_tokens: int = 200,
-               top_p: float = 1,
-               top_k: int = 1,
-               frequency_penalty: float = 1,
-               presence_penalty: float = 1,
-               stopSequences: list = []):
-
-        token = requests.get('https://play.vercel.ai/openai.jpeg', headers={
-            'authority': 'play.vercel.ai',
-            'accept-language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
-            'referer': 'https://play.vercel.ai/',
-            'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36'}).text.replace('=','')
-
-        print(token)
-
-        headers = {
-            'authority': 'play.vercel.ai',
-            'custom-encoding': token,
-            'origin': 'https://play.vercel.ai',
-            'referer': 'https://play.vercel.ai/',
-            'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36'
-        }
-
-        for chunk in requests.post('https://play.vercel.ai/api/generate', headers=headers, stream=True, json={
-            'prompt': prompt,
-            'model': model,
-            'temperature': temperature,
-            'maxTokens': max_tokens,
-            'topK': top_p,
-            'topP': top_k,
-            'frequencyPenalty': frequency_penalty,
-            'presencePenalty': presence_penalty,
-            'stopSequences': stopSequences}).iter_lines():
-
-            yield (chunk)
\ No newline at end of file
diff --git a/unfinished/vercelai/test.js b/unfinished/vercelai/test.js
deleted file mode 100644
index 0f822cfd..00000000
--- a/unfinished/vercelai/test.js
+++ /dev/null
@@ -1,33 +0,0 @@
-(async () => {
-
-    let response = await fetch("https://play.vercel.ai/openai.jpeg", {
-        "headers": {
-            "accept": "*/*",
-            "accept-language": "en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3",
-            "sec-ch-ua": "\"Chromium\";v=\"112\", \"Google Chrome\";v=\"112\", \"Not:A-Brand\";v=\"99\"",
-            "sec-ch-ua-mobile": "?0",
-            "sec-ch-ua-platform": "\"macOS\"",
-            "sec-fetch-dest": "empty",
-            "sec-fetch-mode": "cors",
-            "sec-fetch-site": "same-origin"
-        },
-        "referrer": "https://play.vercel.ai/",
-        "referrerPolicy": "strict-origin-when-cross-origin",
-        "body": null,
-        "method": "GET",
-        "mode": "cors",
-        "credentials": "omit"
-    });
-
-
-    let data = JSON.parse(atob(await response.text()))
-    let ret = eval("(".concat(data.c, ")(data.a)"));
-
-    botPreventionToken = btoa(JSON.stringify({
-        r: ret,
-        t: data.t
-    }))
-
-    console.log(botPreventionToken);
-
-})()
\ No newline at end of file
diff --git a/unfinished/vercelai/test.py b/unfinished/vercelai/test.py
deleted file mode 100644
index 318e71c3..00000000
--- a/unfinished/vercelai/test.py
+++ /dev/null
@@ -1,67 +0,0 @@
-import requests
-from base64 import b64decode, b64encode
-from json import loads
-from json import dumps
-
-headers = {
-    'Accept': '*/*',
-    'Accept-Language': 'en-GB,en-US;q=0.9,en;q=0.8',
-    'Connection': 'keep-alive',
-    'Referer': 'https://play.vercel.ai/',
-    'Sec-Fetch-Dest': 'empty',
-    'Sec-Fetch-Mode': 'cors',
-    'Sec-Fetch-Site': 'same-origin',
-    'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.0.0 Safari/537.36',
-    'sec-ch-ua': '"Chromium";v="110", "Google Chrome";v="110", "Not:A-Brand";v="99"',
-    'sec-ch-ua-mobile': '?0',
-    'sec-ch-ua-platform': '"macOS"',
-}
-
-response = requests.get('https://play.vercel.ai/openai.jpeg', headers=headers)
-
-token_data = loads(b64decode(response.text))
-print(token_data)
-
-raw_token = {
-    'a': token_data['a'] * .1 * .2,
-    't': token_data['t']
-}
-
-print(raw_token)
-
-new_token = b64encode(dumps(raw_token, separators=(',', ':')).encode()).decode()
-print(new_token)
-
-import requests
-
-headers = {
-    'authority': 'play.vercel.ai',
-    'accept': '*/*',
-    'accept-language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
-    'content-type': 'application/json',
-    'custom-encoding': new_token,
-    'origin': 'https://play.vercel.ai',
-    'referer': 'https://play.vercel.ai/',
-    'sec-ch-ua': '"Chromium";v="112", "Google Chrome";v="112", "Not:A-Brand";v="99"',
-    'sec-ch-ua-mobile': '?0',
-    'sec-ch-ua-platform': '"macOS"',
-    'sec-fetch-dest': 'empty',
-    'sec-fetch-mode': 'cors',
-    'sec-fetch-site': 'same-origin',
-    'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36',
-}
-
-json_data = {
-    'prompt': 'hello\n',
-    'model': 'openai:gpt-3.5-turbo',
-    'temperature': 0.7,
-    'maxTokens': 200,
-    'topK': 1,
-    'topP': 1,
-    'frequencyPenalty': 1,
-    'presencePenalty': 1,
-    'stopSequences': [],
-}
-
-response = requests.post('https://play.vercel.ai/api/generate', headers=headers, json=json_data)
-print(response.text)
\ No newline at end of file
diff --git a/unfinished/vercelai/token.py b/unfinished/vercelai/token.py
deleted file mode 100644
index e69de29b..00000000
diff --git a/unfinished/vercelai/vercelai_test.py b/unfinished/vercelai/vercelai_test.py
deleted file mode 100644
index 24cbe0bc..00000000
--- a/unfinished/vercelai/vercelai_test.py
+++ /dev/null
@@ -1,5 +0,0 @@
-import vercelai
-
-for token in vercelai.Completion.create('summarize the gnu gpl 1.0'):
-    print(token, end='', flush=True)
-
--
cgit v1.2.3
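Note for anyone still referencing the removed files: the flow they implemented was GET https://play.vercel.ai/openai.jpeg to fetch a base64-encoded anti-bot challenge, re-encode an answer into the custom-encoding header, then POST the generation parameters to https://play.vercel.ai/api/generate. Below is a minimal, non-streaming sketch of that flow, assuming the playground endpoints, header names, and the simplified token transform from the deleted test.py still behave as they did when these files were written; the arithmetic answer is an approximation carried over from test.py, not a documented API.

import requests
from base64 import b64decode, b64encode
from json import dumps, loads

BASE = 'https://play.vercel.ai'
UA = ('Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 '
      '(KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36')


def get_token() -> str:
    # openai.jpeg returns a base64-encoded anti-bot challenge with fields 'a' and 't'.
    challenge = loads(b64decode(requests.get(
        f'{BASE}/openai.jpeg',
        headers={'referer': f'{BASE}/', 'user-agent': UA}).text))
    # Assumption from the deleted test.py: a fixed arithmetic transform of
    # challenge['a'] stands in for the answer the real JS challenge computes.
    answer = {'a': challenge['a'] * .1 * .2, 't': challenge['t']}
    return b64encode(dumps(answer, separators=(',', ':')).encode()).decode()


def generate(prompt: str) -> str:
    # The token is sent back in the custom-encoding header alongside the prompt.
    headers = {
        'content-type': 'application/json',
        'custom-encoding': get_token(),
        'origin': BASE,
        'referer': f'{BASE}/',
        'user-agent': UA,
    }
    payload = {
        'prompt': prompt,
        'model': 'openai:gpt-3.5-turbo',
        'temperature': 0.7,
        'maxTokens': 200,
        'topK': 1,
        'topP': 1,
        'frequencyPenalty': 1,
        'presencePenalty': 1,
        'stopSequences': [],
    }
    return requests.post(f'{BASE}/api/generate', headers=headers, json=payload).text


if __name__ == '__main__':
    print(generate('summarize the gnu gpl 1.0'))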