opena, bard & bing [experimental]
parent 9bc8e2ec77
commit 0fed30596a
4 changed files with 281 additions and 0 deletions
experimental/bard/__init__.py (new file, 115 additions)
@@ -0,0 +1,115 @@
from requests import Session
from re import search
from random import randint
from json import dumps, loads
from urllib.parse import urlencode
from dotenv import load_dotenv; load_dotenv()
from os import getenv

from bard.typings import BardResponse

token = getenv('1psid')   # value of the __Secure-1PSID cookie, read from the environment / .env
proxy = getenv('proxy')

# prompt prefixes used to emulate a temperature setting
temperatures = {
    0  : "Generate text strictly following known patterns, with no creativity.",
    0.1: "Produce text adhering closely to established patterns, allowing minimal creativity.",
    0.2: "Create text with modest deviations from familiar patterns, injecting a slight creative touch.",
    0.3: "Craft text with a mild level of creativity, deviating somewhat from common patterns.",
    0.4: "Formulate text balancing creativity and recognizable patterns for coherent results.",
    0.5: "Generate text with a moderate level of creativity, allowing for a mix of familiarity and novelty.",
    0.6: "Compose text with an increased emphasis on creativity, while partially maintaining familiar patterns.",
    0.7: "Produce text favoring creativity over typical patterns for more original results.",
    0.8: "Create text heavily focused on creativity, with limited concern for familiar patterns.",
    0.9: "Craft text with a strong emphasis on unique and inventive ideas, largely ignoring established patterns.",
    1  : "Generate text with maximum creativity, disregarding any constraints of known patterns or structures."
}


class Completion:
    # def __init__(self, _token, proxy: str or None = None) -> None:
    #     self.client = Session()
    #     self.client.proxies = {
    #         'http' : f'http://{proxy}',
    #         'https': f'http://{proxy}'} if proxy else None

    #     self.client.headers = {
    #         'authority'     : 'bard.google.com',
    #         'content-type'  : 'application/x-www-form-urlencoded;charset=UTF-8',
    #         'origin'        : 'https://bard.google.com',
    #         'referer'       : 'https://bard.google.com/',
    #         'user-agent'    : 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36',
    #         'x-same-domain' : '1',
    #         'cookie'        : f'__Secure-1PSID={_token}'
    #     }

    #     self.snlm0e          = self.__init_client()
    #     self.conversation_id = ''
    #     self.response_id     = ''
    #     self.choice_id       = ''
    #     self.reqid           = randint(1111, 9999)

    def create(
            prompt          : str   = 'hello world',
            temperature     : float = None,
            conversation_id : str   = '',
            response_id     : str   = '',
            choice_id       : str   = '') -> BardResponse:

        if temperature:
            prompt = f'''settings: follow these settings for your response: [temperature: {temperature} - {temperatures[temperature]}] | prompt : {prompt}'''

        client = Session()
        client.proxies = {
            'http' : f'http://{proxy}',
            'https': f'http://{proxy}'} if proxy else None

        client.headers = {
            'authority'     : 'bard.google.com',
            'content-type'  : 'application/x-www-form-urlencoded;charset=UTF-8',
            'origin'        : 'https://bard.google.com',
            'referer'       : 'https://bard.google.com/',
            'user-agent'    : 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36',
            'x-same-domain' : '1',
            'cookie'        : f'__Secure-1PSID={token}'
        }

        # the SNlM0e value scraped from the landing page is required as the 'at' form field
        snlm0e = search(r'SNlM0e\":\"(.*?)\"', client.get('https://bard.google.com/').text).group(1)

        params = urlencode({
            'bl'     : 'boq_assistant-bard-web-server_20230326.21_p0',
            '_reqid' : randint(1111, 9999),
            'rt'     : 'c',
        })

        response = client.post(
            f'https://bard.google.com/_/BardChatUi/data/assistant.lamda.BardFrontendService/StreamGenerate?{params}',
            data = {
                'at'    : snlm0e,
                'f.req' : dumps([None, dumps([
                    [prompt],
                    None,
                    [conversation_id, response_id, choice_id],
                ])])
            }
        )

        chat_data = loads(response.content.splitlines()[3])[0][2]
        if not chat_data:
            print('error, retrying')
            return Completion.create(prompt, temperature, conversation_id, response_id, choice_id)

        json_chat_data = loads(chat_data)
        results = {
            'content'           : json_chat_data[0][0],
            'conversation_id'   : json_chat_data[1][0],
            'response_id'       : json_chat_data[1][1],
            'factualityQueries' : json_chat_data[3],
            'textQuery'         : json_chat_data[2][0] if json_chat_data[2] is not None else '',
            'choices'           : [{'id': i[0], 'content': i[1]} for i in json_chat_data[4]],
        }

        # self.conversation_id = results['conversation_id']
        # self.response_id     = results['response_id']
        # self.choice_id       = results['choices'][0]['id']
        # self.reqid          += 100000

        return BardResponse(results)
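
For reference, a minimal usage sketch (not part of this commit). It assumes the `1psid` cookie value is set in the environment or a `.env` file, and that `experimental/` is on the import path so the package resolves as `bard`:

    # hypothetical usage of the Bard client above
    from bard import Completion

    response = Completion.create(
        prompt      = 'what is the capital of france?',
        temperature = 0.5)                  # any key of the temperatures dict

    print(response.content)                 # answer text
    print(response.conversation_id)         # id usable for follow-up turns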

experimental/bard/typings.py (new file, 15 additions)
@@ -0,0 +1,15 @@
class BardResponse:
    def __init__(self, json_dict):
        self.json = json_dict

        self.content            = json_dict.get('content')
        self.conversation_id    = json_dict.get('conversation_id')
        self.response_id        = json_dict.get('response_id')
        self.factuality_queries = json_dict.get('factualityQueries', [])
        self.text_query         = json_dict.get('textQuery', '')
        self.choices            = [self.BardChoice(choice) for choice in json_dict.get('choices', [])]

    class BardChoice:
        def __init__(self, choice_dict):
            self.id      = choice_dict.get('id')
            self.content = choice_dict.get('content')[0]
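
A follow-up sketch for multi-turn use (an assumption, not shown in the commit): the ids that `BardResponse` exposes can be passed back into `Completion.create` to continue the same conversation.

    # hypothetical second turn, reusing ids from the first BardResponse
    first  = Completion.create(prompt = 'recommend a novel about the sea')
    second = Completion.create(
        prompt          = 'summarize it in one sentence',
        conversation_id = first.conversation_id,
        response_id     = first.response_id,
        choice_id       = first.choices[0].id)

    print(second.content)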

experimental/bing/__ini__.py (new file, 151 additions)
@@ -0,0 +1,151 @@
from requests import get
from browser_cookie3 import edge, chrome
from ssl import create_default_context
from certifi import where
from uuid import uuid4
from random import randint
from json import dumps, loads

import asyncio
import websockets

ssl_context = create_default_context()
ssl_context.load_verify_locations(where())


def format(msg: dict) -> str:
    # ChatHub frames are JSON records terminated by the 0x1e record separator
    return dumps(msg) + '\x1e'


def get_token():
    # read the _U auth cookie from the local Edge profile
    cookies = {c.name: c.value for c in edge(domain_name='bing.com')}
    return cookies['_U']


class AsyncCompletion:
    async def create(
            prompt     : str  = 'hello world',
            optionSets : list = [
                'deepleo',
                'enable_debug_commands',
                'disable_emoji_spoken_text',
                'enablemm',
                'h3relaxedimg'
            ],
            token      : str  = get_token()):

        # 1. create a conversation over HTTP
        create = get('https://edgeservices.bing.com/edgesvc/turing/conversation/create',
            headers = {
                'host'       : 'edgeservices.bing.com',
                'authority'  : 'edgeservices.bing.com',
                'cookie'     : f'_U={token}',
                'user-agent' : 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.0.0 Safari/537.36 Edg/110.0.1587.69',
            }
        )

        conversationId        = create.json()['conversationId']
        clientId              = create.json()['clientId']
        conversationSignature = create.json()['conversationSignature']

        wss: websockets.WebSocketClientProtocol or None = None

        # 2. open the ChatHub websocket and perform the protocol handshake
        wss = await websockets.connect('wss://sydney.bing.com/sydney/ChatHub', max_size = None, ssl = ssl_context,
            extra_headers = {
                'accept': 'application/json',
                'accept-language': 'en-US,en;q=0.9',
                'content-type': 'application/json',
                'sec-ch-ua': '"Not_A Brand";v="99", "Microsoft Edge";v="110", "Chromium";v="110"',
                'sec-ch-ua-arch': '"x86"',
                'sec-ch-ua-bitness': '"64"',
                'sec-ch-ua-full-version': '"109.0.1518.78"',
                'sec-ch-ua-full-version-list': '"Chromium";v="110.0.5481.192", "Not A(Brand";v="24.0.0.0", "Microsoft Edge";v="110.0.1587.69"',
                'sec-ch-ua-mobile': '?0',
                'sec-ch-ua-model': '',
                'sec-ch-ua-platform': '"Windows"',
                'sec-ch-ua-platform-version': '"15.0.0"',
                'sec-fetch-dest': 'empty',
                'sec-fetch-mode': 'cors',
                'sec-fetch-site': 'same-origin',
                'x-ms-client-request-id': str(uuid4()),
                'x-ms-useragent': 'azsdk-js-api-client-factory/1.0.0-beta.1 core-rest-pipeline/1.10.0 OS/Win32',
                'Referer': 'https://www.bing.com/search?q=Bing+AI&showconv=1&FORM=hpcodx',
                'Referrer-Policy': 'origin-when-cross-origin',
                'x-forwarded-for': f'13.{randint(104, 107)}.{randint(0, 255)}.{randint(0, 255)}'
            }
        )

        await wss.send(format({'protocol': 'json', 'version': 1}))
        await wss.recv()

        struct = {
            'arguments': [
                {
                    'source': 'cib',
                    'optionsSets': optionSets,
                    'isStartOfSession': True,
                    'message': {
                        'author': 'user',
                        'inputMethod': 'Keyboard',
                        'text': prompt,
                        'messageType': 'Chat'
                    },
                    'conversationSignature': conversationSignature,
                    'participant': {
                        'id': clientId
                    },
                    'conversationId': conversationId
                }
            ],
            'invocationId': '0',
            'target': 'chat',
            'type': 4
        }

        await wss.send(format(struct))

        # 3. stream the reply: type 1 frames carry partial text, type 2 marks the end
        base_string = ''

        final = False
        while not final:
            objects = str(await wss.recv()).split('\x1e')
            for obj in objects:
                if obj is None or obj == '':
                    continue

                response = loads(obj)
                if response.get('type') == 1 and response['arguments'][0].get('messages'):
                    response_text = response['arguments'][0]['messages'][0]['adaptiveCards'][0]['body'][0].get('text')

                    # yield only the newly generated delta since the previous frame
                    yield response_text.replace(base_string, '')
                    base_string = response_text

                elif response.get('type') == 2:
                    final = True

        await wss.close()


async def run():
    async for value in AsyncCompletion.create(
        prompt = 'summarize cinderella with each word beginning with a consecutive letter of the alphabet, a-z',
        # optionSets = [
        #     "deepleo",
        #     "enable_debug_commands",
        #     "disable_emoji_spoken_text",
        #     "enablemm"
        # ]
        optionSets = [
            #"nlu_direct_response_filter",
            #"deepleo",
            #"disable_emoji_spoken_text",
            # "responsible_ai_policy_235",
            #"enablemm",
            "galileo",
            #"dtappid",
            # "cricinfo",
            # "cricinfov2",
            # "dv3sugg",
        ]
    ):
        print(value, end = '', flush=True)

asyncio.run(run())
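
An alternative way to consume the stream (a sketch, not in the commit): accumulate the yielded deltas into one string rather than printing them. Since the module calls `asyncio.run(run())` at import time, this would replace that demo rather than run alongside it.

    # hypothetical: gather the streamed deltas into a single response string
    async def collect(prompt: str) -> str:
        parts = []
        async for delta in AsyncCompletion.create(prompt = prompt):
            parts.append(delta)
        return ''.join(parts)

    # print(asyncio.run(collect('hello world')))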