This commit is contained in:
parent 8fe493a38a
commit dc912e0fc9
6 changed files with 146 additions and 27 deletions
unfinished/vercelai/__init__.py (new file, 41 lines)
@@ -0,0 +1,41 @@
import requests


class Completion:

    @staticmethod
    def create(prompt: str,
               model: str = 'openai:gpt-3.5-turbo',
               temperature: float = 0.7,
               max_tokens: int = 200,
               top_p: float = 1,
               top_k: int = 1,
               frequency_penalty: float = 1,
               presence_penalty: float = 1,
               stopSequences: list = []):

        # Fetch the anti-bot token from /openai.jpeg and strip the base64 padding.
        token = requests.get('https://play.vercel.ai/openai.jpeg', headers={
            'authority': 'play.vercel.ai',
            'accept-language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
            'referer': 'https://play.vercel.ai/',
            'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36'}).text.replace('=', '')

        print(token)

        headers = {
            'authority': 'play.vercel.ai',
            'custom-encoding': token,
            'origin': 'https://play.vercel.ai',
            'referer': 'https://play.vercel.ai/',
            'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36'
        }

        # Stream the completion and yield each raw line as it arrives.
        for chunk in requests.post('https://play.vercel.ai/api/generate', headers=headers, stream=True, json={
                'prompt': prompt,
                'model': model,
                'temperature': temperature,
                'maxTokens': max_tokens,
                'topK': top_k,
                'topP': top_p,
                'frequencyPenalty': frequency_penalty,
                'presencePenalty': presence_penalty,
                'stopSequences': stopSequences}).iter_lines():

            yield chunk
unfinished/vercelai/test.js (new file, 33 lines)
@@ -0,0 +1,33 @@
(async () => {

    let response = await fetch("https://play.vercel.ai/openai.jpeg", {
        "headers": {
            "accept": "*/*",
            "accept-language": "en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3",
            "sec-ch-ua": "\"Chromium\";v=\"112\", \"Google Chrome\";v=\"112\", \"Not:A-Brand\";v=\"99\"",
            "sec-ch-ua-mobile": "?0",
            "sec-ch-ua-platform": "\"macOS\"",
            "sec-fetch-dest": "empty",
            "sec-fetch-mode": "cors",
            "sec-fetch-site": "same-origin"
        },
        "referrer": "https://play.vercel.ai/",
        "referrerPolicy": "strict-origin-when-cross-origin",
        "body": null,
        "method": "GET",
        "mode": "cors",
        "credentials": "omit"
    });

    // The response body is a base64-encoded JSON payload:
    // c is JavaScript source for a function, a is its argument, t is passed through unchanged.
    let data = JSON.parse(atob(await response.text()));

    // Evaluate the challenge function against its argument.
    let ret = eval("(".concat(data.c, ")(data.a)"));

    let botPreventionToken = btoa(JSON.stringify({
        r: ret,
        t: data.t
    }));

    console.log(botPreventionToken);

})()
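For comparison with test.py below, here is a minimal Python sketch of the same token derivation. It is not part of the commit: it assumes the js2py package can execute the challenge function carried in the payload's c field and that the challenge returns a JSON-serializable value (e.g. a number); neither assumption is confirmed by this diff.

# Hypothetical Python port of test.js, assuming js2py can run the challenge code.
import requests
import js2py
from base64 import b64decode, b64encode
from json import loads, dumps

# Fetch the base64-encoded challenge (headers trimmed to a minimum here).
raw = requests.get('https://play.vercel.ai/openai.jpeg', headers={
    'referer': 'https://play.vercel.ai/',
    'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36'}).text

payload = loads(b64decode(raw))

# payload['c'] is JavaScript source for a function and payload['a'] is its argument,
# mirroring what test.js evaluates with eval(); the result is assumed JSON-serializable.
challenge = js2py.eval_js('(' + payload['c'] + ')')
result = challenge(payload['a'])

# Same token shape as test.js: base64 of {"r": <challenge result>, "t": <payload t>}.
token = b64encode(dumps({'r': result, 't': payload['t']}, separators=(',', ':')).encode()).decode()
print(token)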
unfinished/vercelai/test.py (new file, 67 lines)
@@ -0,0 +1,67 @@
import requests
from base64 import b64decode, b64encode
from json import loads, dumps

headers = {
    'Accept': '*/*',
    'Accept-Language': 'en-GB,en-US;q=0.9,en;q=0.8',
    'Connection': 'keep-alive',
    'Referer': 'https://play.vercel.ai/',
    'Sec-Fetch-Dest': 'empty',
    'Sec-Fetch-Mode': 'cors',
    'Sec-Fetch-Site': 'same-origin',
    'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.0.0 Safari/537.36',
    'sec-ch-ua': '"Chromium";v="110", "Google Chrome";v="110", "Not:A-Brand";v="99"',
    'sec-ch-ua-mobile': '?0',
    'sec-ch-ua-platform': '"macOS"',
}

# Fetch and decode the base64-encoded anti-bot challenge.
response = requests.get('https://play.vercel.ai/openai.jpeg', headers=headers)

token_data = loads(b64decode(response.text))
print(token_data)

# Rebuild the value expected in the 'custom-encoding' header.
raw_token = {
    'a': token_data['a'] * .1 * .2,
    't': token_data['t']
}

print(raw_token)

new_token = b64encode(dumps(raw_token, separators=(',', ':')).encode()).decode()
print(new_token)

headers = {
    'authority': 'play.vercel.ai',
    'accept': '*/*',
    'accept-language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
    'content-type': 'application/json',
    'custom-encoding': new_token,
    'origin': 'https://play.vercel.ai',
    'referer': 'https://play.vercel.ai/',
    'sec-ch-ua': '"Chromium";v="112", "Google Chrome";v="112", "Not:A-Brand";v="99"',
    'sec-ch-ua-mobile': '?0',
    'sec-ch-ua-platform': '"macOS"',
    'sec-fetch-dest': 'empty',
    'sec-fetch-mode': 'cors',
    'sec-fetch-site': 'same-origin',
    'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36',
}

json_data = {
    'prompt': 'hello\n',
    'model': 'openai:gpt-3.5-turbo',
    'temperature': 0.7,
    'maxTokens': 200,
    'topK': 1,
    'topP': 1,
    'frequencyPenalty': 1,
    'presencePenalty': 1,
    'stopSequences': [],
}

response = requests.post('https://play.vercel.ai/api/generate', headers=headers, json=json_data)
print(response.text)
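The same request can also be consumed as a stream, following the stream=True / iter_lines() pattern that __init__.py and the removed token.py use. A minimal sketch, assuming the endpoint emits UTF-8 text chunks (the diff itself only ever prints the raw bytes):

# Streaming variant of the request above, reusing headers and json_data from this script.
with requests.post('https://play.vercel.ai/api/generate', headers=headers, json=json_data, stream=True) as streamed:
    for line in streamed.iter_lines():
        if line:
            # Assumes UTF-8 text chunks; adjust if the endpoint returns something else.
            print(line.decode('utf-8', errors='replace'), end='', flush=True)
    print()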
unfinished/vercelai/token.py (removed, 27 lines deleted)
@@ -1,27 +0,0 @@
import requests

token = requests.get('https://play.vercel.ai/openai.jpeg', headers={
    'authority': 'play.vercel.ai',
    'accept-language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
    'referer': 'https://play.vercel.ai/',
    'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36'}).text + '.'

headers = {
    'authority': 'play.vercel.ai',
    'custom-encoding': token,
    'origin': 'https://play.vercel.ai',
    'referer': 'https://play.vercel.ai/',
    'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36'
}

for chunk in requests.post('https://play.vercel.ai/api/generate', headers=headers, stream=True, json={
        'prompt': 'hi',
        'model': 'openai:gpt-3.5-turbo',
        'temperature': 0.7,
        'maxTokens': 200,
        'topK': 1,
        'topP': 1,
        'frequencyPenalty': 1,
        'presencePenalty': 1,
        'stopSequences': []}).iter_lines():
    print(chunk)
unfinished/vercelai/vercelai_test.py (new file, 5 lines)
@@ -0,0 +1,5 @@
import vercelai

for token in vercelai.Completion.create('summarize the gnu gpl 1.0'):
    print(token, end='', flush=True)