
Resolved merge conflicts and merged pr_218 into STREAMLIT_CHAT_IMPLEMENTATION

This commit is contained in:
noptuno 2023-04-27 20:40:47 -04:00
commit 6f6a739872
8444 changed files with 2932665 additions and 482 deletions

6
.gitignore vendored
View file

@ -18,8 +18,12 @@ accounts.txt
.idea/
*/__pycache__/
**/__pycache__/
*.log
cookie.json
*.pyc
dist/

View file

@ -38,7 +38,7 @@ Please note the following:
| **Copyright** | Copyright information | [![Link to Section](https://img.shields.io/badge/Link-Go%20to%20Section-blue)](#copyright) | - |
| **Star History** | Star History | [![Link to Section](https://img.shields.io/badge/Link-Go%20to%20Section-blue)](#star-history) | - |
| **Usage Examples** | | | |
| `theb` | Example usage for theb (gpt-3.5) | [![Link to File](https://img.shields.io/badge/Link-Go%20to%20File-blue)](./theb/README.md) | ![Active](https://img.shields.io/badge/Active-brightgreen) |
| `theb` | Example usage for theb (gpt-3.5) | [![Link to File](https://img.shields.io/badge/Link-Go%20to%20File-blue)](openai_rev/theb/README.md) | ![Active](https://img.shields.io/badge/Active-brightgreen) |
| `forefront` | Example usage for forefront (gpt-4) | [![Link to File](https://img.shields.io/badge/Link-Go%20to%20File-blue)](./forefront/README.md) | ![Active](https://img.shields.io/badge/Active-brightgreen) |
| `quora (poe)` | Example usage for quora | [![Link to File](https://img.shields.io/badge/Link-Go%20to%20File-blue)](./quora/README.md) | ![Active](https://img.shields.io/badge/Active-brightgreen) |
| `you` | Example usage for you | [![Link to File](https://img.shields.io/badge/Link-Go%20to%20File-blue)](./you/README.md) | ![Active](https://img.shields.io/badge/Active-brightgreen) |

3
accounts.txt Normal file
View file

@ -0,0 +1,3 @@
wayfyspwloeuqb@bugfoo.com:eyJhbGciOiJSUzI1NiIsImtpZCI6Imluc18yTzZ3UTFYd3dxVFdXUWUyQ1VYZHZ2bnNaY2UiLCJ0eXAiOiJKV1QifQ.eyJhenAiOiJodHRwczovL2FjY291bnRzLmZvcmVmcm9udC5haSIsImV4cCI6MTY4MjYzNTYyMSwiaWF0IjoxNjgyNjM1NTYxLCJpc3MiOiJodHRwczovL2NsZXJrLmZvcmVmcm9udC5haSIsIm5iZiI6MTY4MjYzNTU1MSwic2lkIjoic2Vzc18yUDFyakFuZm0wRGU2b04ydUNZcWU4VmZueGYiLCJzdWIiOiJ1c2VyXzJQMXJqRU5sT2RPQ0N5WktwaVREZjZTenR5SiJ9.MVImDU8Z2rc2Pk154BBUJZnviqw6rwd1_etJc5BV6D4TYGQkFfbGJPUmtMz2wUKZhXwmIJl-TA5S7v_dweebFKq0UQYyfPFML_5_z76Qj1njfq5E4cQgIx2YNixabyVlQNU02WOurn0EuRIRsrd3w-KKlohj05Hdi_Dn6FsHu5iJEfHB9LK9euKXGCzIWLSZJzcExp-n51WNpmPYnAkKPCQSKJxTvppJ7WP2NC1AQRjDUQ0u-ZHSMC-p9ySpQpBCpTG5rzTfPxd9j0T21FW03rYu3s_lFkLVh9p5kLadZ1DBJuXcIv3HPLsQjm_cuXi9iJFGCLs0paURxJofm-5xdg
ldldpmbdjnivjk@bugfoo.com:eyJhbGciOiJSUzI1NiIsImtpZCI6Imluc18yTzZ3UTFYd3dxVFdXUWUyQ1VYZHZ2bnNaY2UiLCJ0eXAiOiJKV1QifQ.eyJhenAiOiJodHRwczovL2FjY291bnRzLmZvcmVmcm9udC5haSIsImV4cCI6MTY4MjYzNTg1OSwiaWF0IjoxNjgyNjM1Nzk5LCJpc3MiOiJodHRwczovL2NsZXJrLmZvcmVmcm9udC5haSIsIm5iZiI6MTY4MjYzNTc4OSwic2lkIjoic2Vzc18yUDFzREFjeTAzU2U3VTNGMXg4c0RXcDBkMU8iLCJzdWIiOiJ1c2VyXzJQMXNEOU5aQlpxYU45eHAyekNSaFhpUmpyQSJ9.QK1OxFJU2CPyXrhLaCTA18chdYmaaCStYk1h4P9BYoYlQbW-FoWT7cHeN0frSGdA75Feo6sH47zFQzVcTTfQcQfcaVBYf_lXtnn01jhip581njgRBLwpoDwHMOJkynF5dedyuW5a7zDPQ_UvCKInTzyeKSTjewYNZTBOuZ6S07TwAO57_mwgrb52RuIqZhnZOVkzWEsuMYgygRM5PUnsJGh9nRWgITO8-VKU3E42MNEAnB5OYnjIcvN_v0-urGHVJyJHO6r5bSVhnZDYmuGK08hIqnpNzFQdQHl1J8dyPwEC5u83Q71xU3dNwRduMAjlw-cuFC0076gTi_TyWaz0gA
hqlwbdmugkkfjr@bugfoo.com:eyJhbGciOiJSUzI1NiIsImtpZCI6Imluc18yTzZ3UTFYd3dxVFdXUWUyQ1VYZHZ2bnNaY2UiLCJ0eXAiOiJKV1QifQ.eyJhenAiOiJodHRwczovL2FjY291bnRzLmZvcmVmcm9udC5haSIsImV4cCI6MTY4MjY0MDkzNywiaWF0IjoxNjgyNjQwODc3LCJpc3MiOiJodHRwczovL2NsZXJrLmZvcmVmcm9udC5haSIsIm5iZiI6MTY4MjY0MDg2Nywic2lkIjoic2Vzc18yUDIyVkhTVjFMd0dDU2FTNkZpMXhPNHNRQmMiLCJzdWIiOiJ1c2VyXzJQMjJWRjFYV3I4em9LVHh3V2J2cTROTWUwVSJ9.RDdjina2RyJzyoVzBTGqGaW4hdHoS9_HM40mOOw3DScPqqFTcGkY_JVpo_g-_vE0Zuobc5HT6A2d0kL-iEPi0m6UlNJELrN1LsmKfgiKHb-4IlVcKcd_al1Exho2qYi9Sn7iPc9cFJfBfQRFAJdMIZGtGVgAkflLSGQDIYbLIAH4gv9FCU3gqAJJaXAtdewUo232qNyjoNHLRaUIP_d8qrWm7eTs8woI-fWOEx4CoA_lsytm20dC3FcftiRrBwUzneMPV4SiNgyiH2ithIn12cWOm7aIeveGmbQlRiXYu9Hmb4pTpLQvoBY8U2ZVbJDKX9YcfUqlM5rps8Zpkeo3RQ

BIN
conversations.pkl Normal file

Binary file not shown.

View file

@ -1,15 +0,0 @@
### Example: `forefront` (use like openai pypi package) <a name="example-forefront"></a>
```python
import forefront
# create an account
token = forefront.Account.create(logging=True)
print(token)
# get a response
for response in forefront.StreamingCompletion.create(token = token,
prompt = 'hello world', model='gpt-4'):
print(response.completion.choices[0].text, end = '')
```

View file

@ -1,152 +0,0 @@
from json import loads
from re import match
from time import time, sleep
from uuid import uuid4
from requests import post
from tls_client import Session
from forefront.mail import Mail
from forefront.typing import ForeFrontResponse
class Account:
@staticmethod
def create(proxy=None, logging=False):
proxies = {
'http': 'http://' + proxy,
'https': 'http://' + proxy} if proxy else False
start = time()
mail = Mail(proxies)
mail_token = None
mail_adress = mail.get_mail()
# print(mail_adress)
client = Session(client_identifier='chrome110')
client.proxies = proxies
client.headers = {
"origin": "https://accounts.forefront.ai",
"user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36",
}
response = client.post('https://clerk.forefront.ai/v1/client/sign_ups?_clerk_js_version=4.32.6',
data={
"email_address": mail_adress
}
)
trace_token = response.json()['response']['id']
if logging: print(trace_token)
response = client.post(
f"https://clerk.forefront.ai/v1/client/sign_ups/{trace_token}/prepare_verification?_clerk_js_version=4.32.6",
data={
"strategy": "email_code",
}
)
if logging: print(response.text)
if not 'sign_up_attempt' in response.text:
return 'Failed to create account!'
while True:
sleep(1)
for _ in mail.fetch_inbox():
print(mail.get_message_content(_["id"]))
mail_token = match(r"(\d){5,6}", mail.get_message_content(_["id"])).group(0)
if mail_token:
break
if logging: print(mail_token)
response = client.post(
f'https://clerk.forefront.ai/v1/client/sign_ups/{trace_token}/attempt_verification?_clerk_js_version=4.38.4',
data={
'code': mail_token,
'strategy': 'email_code'
})
if logging: print(response.json())
token = response.json()['client']['sessions'][0]['last_active_token']['jwt']
with open('accounts.txt', 'a') as f:
f.write(f'{mail_adress}:{token}\n')
if logging: print(time() - start)
return token
class StreamingCompletion:
@staticmethod
def create(
token=None,
chatId=None,
prompt='',
actionType='new',
defaultPersona='607e41fe-95be-497e-8e97-010a59b2e2c0', # default
model='gpt-4') -> ForeFrontResponse:
if not token: raise Exception('Token is required!')
if not chatId: chatId = str(uuid4())
headers = {
'authority': 'chat-server.tenant-forefront-default.knative.chi.coreweave.com',
'accept': '*/*',
'accept-language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
'authorization': 'Bearer ' + token,
'cache-control': 'no-cache',
'content-type': 'application/json',
'origin': 'https://chat.forefront.ai',
'pragma': 'no-cache',
'referer': 'https://chat.forefront.ai/',
'sec-ch-ua': '"Chromium";v="112", "Google Chrome";v="112", "Not:A-Brand";v="99"',
'sec-ch-ua-mobile': '?0',
'sec-ch-ua-platform': '"macOS"',
'sec-fetch-dest': 'empty',
'sec-fetch-mode': 'cors',
'sec-fetch-site': 'cross-site',
'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36',
}
json_data = {
'text': prompt,
'action': actionType,
'parentId': chatId,
'workspaceId': chatId,
'messagePersona': defaultPersona,
'model': model
}
for chunk in post('https://chat-server.tenant-forefront-default.knative.chi.coreweave.com/chat',
headers=headers, json=json_data, stream=True).iter_lines():
if b'finish_reason":null' in chunk:
data = loads(chunk.decode('utf-8').split('data: ')[1])
token = data['choices'][0]['delta'].get('content')
if token != None:
yield ForeFrontResponse({
'id': chatId,
'object': 'text_completion',
'created': int(time()),
'model': model,
'choices': [{
'text': token,
'index': 0,
'logprobs': None,
'finish_reason': 'stop'
}],
'usage': {
'prompt_tokens': len(prompt),
'completion_tokens': len(token),
'total_tokens': len(prompt) + len(token)
}
})

View file

@ -1,36 +0,0 @@
class ForeFrontResponse:
class Completion:
class Choices:
def __init__(self, choice: dict) -> None:
self.text = choice['text']
self.content = self.text.encode()
self.index = choice['index']
self.logprobs = choice['logprobs']
self.finish_reason = choice['finish_reason']
def __repr__(self) -> str:
return f'''<__main__.APIResponse.Completion.Choices(\n text = {self.text.encode()},\n index = {self.index},\n logprobs = {self.logprobs},\n finish_reason = {self.finish_reason})object at 0x1337>'''
def __init__(self, choices: dict) -> None:
self.choices = [self.Choices(choice) for choice in choices]
class Usage:
def __init__(self, usage_dict: dict) -> None:
self.prompt_tokens = usage_dict['prompt_tokens']
self.completion_tokens = usage_dict['completion_tokens']
self.total_tokens = usage_dict['total_tokens']
def __repr__(self):
return f'''<__main__.APIResponse.Usage(\n prompt_tokens = {self.prompt_tokens},\n completion_tokens = {self.completion_tokens},\n total_tokens = {self.total_tokens})object at 0x1337>'''
def __init__(self, response_dict: dict) -> None:
self.response_dict = response_dict
self.id = response_dict['id']
self.object = response_dict['object']
self.created = response_dict['created']
self.model = response_dict['model']
self.completion = self.Completion(response_dict['choices'])
self.usage = self.Usage(response_dict['usage'])
def json(self) -> dict:
return self.response_dict

View file

@ -1,8 +1,8 @@
import forefront, quora, theb, you
import openai_rev
from openai_rev import forefront, quora, theb, you
import random
def query_forefront(question: str) -> str:
# create an account
token = forefront.Account.create(logging=True)
@ -47,8 +47,7 @@ def query_you(question: str) -> str:
try:
result = you.Completion.create(
prompt = question)
return result['response']
return result.text
except Exception as e:
# Return error message if an exception occurs
@ -57,7 +56,7 @@ def query_you(question: str) -> str:
# Define a dictionary containing all query methods
avail_query_methods = {
"Forefront": query_forefront,
"Quora": query_quora,
"Poe": query_quora,
"Theb": query_theb,
"You": query_you,
# "Writesonic": query_writesonic,

View file

@ -4,19 +4,21 @@ import sys
sys.path.append(os.path.join(os.path.dirname(__file__), os.path.pardir))
import streamlit as st
import you
from openai_rev import you
def get_answer(question: str) -> str:
# Set cloudflare clearance cookie and get answer from GPT-4 model
try:
result = you.Completion.create(
prompt = question)
return result['response']
result = you.Completion.create(prompt=question)
return result.text
except Exception as e:
# Return error message if an exception occurs
return f'An error occurred: {e}. Please make sure you are using a valid cloudflare clearance token and user agent.'
return (
f'An error occurred: {e}. Please make sure you are using a valid cloudflare clearance token and user agent.'
)
# Set page configuration and add header
@ -27,14 +29,13 @@ st.set_page_config(
menu_items={
'Get Help': 'https://github.com/xtekky/gpt4free/blob/main/README.md',
'Report a bug': "https://github.com/xtekky/gpt4free/issues",
'About': "### gptfree GUI"
}
'About': "### gptfree GUI",
},
)
st.header('GPT4free GUI')
# Add text area for user input and button to get answer
question_text_area = st.text_area(
'🤖 Ask Any Question :', placeholder='Explain quantum computing in 50 words')
question_text_area = st.text_area('🤖 Ask Any Question :', placeholder='Explain quantum computing in 50 words')
if st.button('🧠 Think'):
answer = get_answer(question_text_area)
# Display answer

View file

@ -74,8 +74,7 @@ if st.sidebar.button("New Conversation"):
print(openai_rev.Provider.__members__.keys())
st.session_state['query_method'] = st.sidebar.selectbox(
"Select API:",
options=avail_query_methods.keys(),
options=openai_rev.Provider.__members__.keys(),
index=0
)

1
openai_rev/__init__.py Normal file
View file

@ -0,0 +1 @@
from .openai_rev import Provider

View file

@ -0,0 +1,15 @@
### Example: `forefront` (use like openai pypi package) <a name="example-forefront"></a>
```python
from openai_rev import forefront
# create an account
token = forefront.Account.create(logging=True)
print(token)
# get a response
for response in forefront.StreamingCompletion.create(token=token,
prompt='hello world', model='gpt-4'):
print(response.completion.choices[0].text, end='')
```

View file

@ -0,0 +1,189 @@
from json import loads
from re import match
from time import time, sleep
from typing import Generator, Optional
from uuid import uuid4
from fake_useragent import UserAgent
from requests import post
from tls_client import Session
from .mail import Mail
from .models import ForeFrontResponse
class Account:
@staticmethod
def create(proxy: Optional[str] = None, logging: bool = False):
proxies = {'http': 'http://' + proxy, 'https': 'http://' + proxy} if proxy else False
start = time()
mail_client = Mail(proxies)
mail_token = None
mail_address = mail_client.get_mail()
# print(mail_address)
client = Session(client_identifier='chrome110')
client.proxies = proxies
client.headers = {
'origin': 'https://accounts.forefront.ai',
'user-agent': UserAgent().random,
}
response = client.post(
'https://clerk.forefront.ai/v1/client/sign_ups?_clerk_js_version=4.32.6',
data={'email_address': mail_address},
)
trace_token = response.json()['response']['id']
if logging:
print(trace_token)
response = client.post(
f'https://clerk.forefront.ai/v1/client/sign_ups/{trace_token}/prepare_verification?_clerk_js_version=4.32.6',
data={
'strategy': 'email_code',
},
)
if logging:
print(response.text)
if 'sign_up_attempt' not in response.text:
return 'Failed to create account!'
while True:
sleep(1)
for _ in mail_client.fetch_inbox():
if logging:
print(mail_client.get_message_content(_['id']))
mail_token = match(r'(\d){5,6}', mail_client.get_message_content(_['id'])).group(0)
if mail_token:
break
if logging:
print(mail_token)
response = client.post(
f'https://clerk.forefront.ai/v1/client/sign_ups/{trace_token}/attempt_verification?_clerk_js_version=4.38.4',
data={'code': mail_token, 'strategy': 'email_code'},
)
if logging:
print(response.json())
token = response.json()['client']['sessions'][0]['last_active_token']['jwt']
with open('accounts.txt', 'a') as f:
f.write(f'{mail_address}:{token}\n')
if logging:
print(time() - start)
return token
class StreamingCompletion:
@staticmethod
def create(
token=None,
chat_id=None,
prompt='',
action_type='new',
default_persona='607e41fe-95be-497e-8e97-010a59b2e2c0', # default
model='gpt-4',
) -> Generator[ForeFrontResponse, None, None]:
if not token:
raise Exception('Token is required!')
if not chat_id:
chat_id = str(uuid4())
headers = {
'authority': 'chat-server.tenant-forefront-default.knative.chi.coreweave.com',
'accept': '*/*',
'accept-language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
'authorization': 'Bearer ' + token,
'cache-control': 'no-cache',
'content-type': 'application/json',
'origin': 'https://chat.forefront.ai',
'pragma': 'no-cache',
'referer': 'https://chat.forefront.ai/',
'sec-ch-ua': '"Chromium";v="112", "Google Chrome";v="112", "Not:A-Brand";v="99"',
'sec-ch-ua-mobile': '?0',
'sec-ch-ua-platform': '"macOS"',
'sec-fetch-dest': 'empty',
'sec-fetch-mode': 'cors',
'sec-fetch-site': 'cross-site',
'user-agent': UserAgent().random,
}
json_data = {
'text': prompt,
'action': action_type,
'parentId': chat_id,
'workspaceId': chat_id,
'messagePersona': default_persona,
'model': model,
}
for chunk in post(
'https://chat-server.tenant-forefront-default.knative.chi.coreweave.com/chat',
headers=headers,
json=json_data,
stream=True,
).iter_lines():
if b'finish_reason":null' in chunk:
data = loads(chunk.decode('utf-8').split('data: ')[1])
token = data['choices'][0]['delta'].get('content')
if token is not None:
yield ForeFrontResponse(
**{
'id': chat_id,
'object': 'text_completion',
'created': int(time()),
'text': token,
'model': model,
'choices': [{'text': token, 'index': 0, 'logprobs': None, 'finish_reason': 'stop'}],
'usage': {
'prompt_tokens': len(prompt),
'completion_tokens': len(token),
'total_tokens': len(prompt) + len(token),
},
}
)
class Completion:
@staticmethod
def create(
token=None,
chat_id=None,
prompt='',
action_type='new',
default_persona='607e41fe-95be-497e-8e97-010a59b2e2c0', # default
model='gpt-4',
) -> ForeFrontResponse:
text = ''
final_response = None
for response in StreamingCompletion.create(
token=token,
chat_id=chat_id,
prompt=prompt,
action_type=action_type,
default_persona=default_persona,
model=model,
):
if response:
final_response = response
text += response.text
if final_response:
final_response.text = text
else:
raise Exception('Unable to get the response, Please try again')
return final_response
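
The `Completion` class above simply drains `StreamingCompletion` and stitches the chunks into one `ForeFrontResponse`. A minimal usage sketch, assuming network access to forefront.ai and a successful `Account.create` call (the printed fields come from the `ForeFrontResponse` model introduced in this commit):

```python
from openai_rev import forefront

# register a throwaway account and grab a session token (network call)
token = forefront.Account.create(logging=False)

# non-streaming: accumulate the stream into a single ForeFrontResponse
response = forefront.Completion.create(token=token, prompt='hello world', model='gpt-4')

print(response.text)                 # full completion text
print(response.usage.total_tokens)   # naive "token" count (character lengths, per the code above)
```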

View file

@ -23,21 +23,17 @@ class Mail:
"sec-fetch-dest": "empty",
"referer": "https://mail.tm/",
"accept-encoding": "gzip, deflate, br",
"accept-language": "en-GB,en-US;q=0.9,en;q=0.8"
"accept-language": "en-GB,en-US;q=0.9,en;q=0.8",
}
def get_mail(self) -> str:
token = ''.join(choices(ascii_letters, k=14)).lower()
init = self.client.post("https://api.mail.tm/accounts", json={
"address": f"{token}@bugfoo.com",
"password": token
})
init = self.client.post(
"https://api.mail.tm/accounts", json={"address": f"{token}@bugfoo.com", "password": token}
)
if init.status_code == 201:
resp = self.client.post("https://api.mail.tm/token", json={
**init.json(),
"password": token
})
resp = self.client.post("https://api.mail.tm/token", json={**init.json(), "password": token})
self.client.headers['authorization'] = 'Bearer ' + resp.json()['token']
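
`Mail` is the disposable-mailbox helper that `Account.create` polls for the verification code. A rough sketch of those calls, assuming the module lives at `openai_rev/forefront/mail.py` (inferred from the `from .mail import Mail` import in the forefront package) and that mail.tm is reachable:

```python
from openai_rev.forefront.mail import Mail  # module path assumed from the relative import

mail_client = Mail(False)                # False is what Account.create passes when no proxy is set
address = mail_client.get_mail()         # registers a random inbox, e.g. '<random>@bugfoo.com'
print(address)

# poll the inbox and read messages until the 5-6 digit verification code arrives
for message in mail_client.fetch_inbox():
    print(mail_client.get_message_content(message['id']))
```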

View file

@ -0,0 +1,26 @@
from typing import Any, List
from pydantic import BaseModel
class Choice(BaseModel):
text: str
index: int
logprobs: Any
finish_reason: str
class Usage(BaseModel):
prompt_tokens: int
completion_tokens: int
total_tokens: int
class ForeFrontResponse(BaseModel):
id: str
object: str
created: int
model: str
choices: List[Choice]
usage: Usage
text: str
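
These pydantic models replace the hand-rolled `ForeFrontResponse` class deleted earlier in this commit. A small illustration with made-up values, assuming the file is `openai_rev/forefront/models.py` (inferred from `from .models import ForeFrontResponse`):

```python
from openai_rev.forefront.models import ForeFrontResponse  # assumed module path

# hypothetical payload in the shape StreamingCompletion.create builds
payload = {
    'id': 'example-chat-id',
    'object': 'text_completion',
    'created': 1682635561,
    'model': 'gpt-4',
    'text': 'Hello!',
    'choices': [{'text': 'Hello!', 'index': 0, 'logprobs': None, 'finish_reason': 'stop'}],
    'usage': {'prompt_tokens': 11, 'completion_tokens': 6, 'total_tokens': 17},
}

response = ForeFrontResponse(**payload)
print(response.choices[0].text)      # 'Hello!'
print(response.usage.total_tokens)   # 17
```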

42
openai_rev/openai_rev.py Normal file
View file

@ -0,0 +1,42 @@
from enum import Enum
from openai_rev import forefront
from openai_rev import quora
from openai_rev import theb
from openai_rev import you
class Provider(Enum):
You = 'you'
Poe = 'poe'
ForeFront = 'fore_front'
Theb = 'theb'
class Completion:
@staticmethod
def create(provider: Provider, prompt: str, **kwargs):
if provider == Provider.Poe:
return Completion.__poe_service(prompt, **kwargs)
elif provider == Provider.You:
return Completion.__you_service(prompt, **kwargs)
elif provider == Provider.ForeFront:
return Completion.__fore_front_service(prompt, **kwargs)
elif provider == Provider.Theb:
return Completion.__theb_service(prompt, **kwargs)
@classmethod
def __you_service(cls, prompt: str, **kwargs) -> str:
return you.Completion.create(prompt, **kwargs).text
@classmethod
def __poe_service(cls, prompt: str, **kwargs) -> str:
return quora.Completion.create(prompt=prompt, **kwargs).text
@classmethod
def __fore_front_service(cls, prompt: str, **kwargs) -> str:
return forefront.Completion.create(prompt=prompt, **kwargs).text
@classmethod
def __theb_service(cls, prompt: str, **kwargs):
return ''.join(theb.Completion.create(prompt=prompt))
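
The new `openai_rev/openai_rev.py` module is a thin facade: `Completion.create` dispatches on the `Provider` enum and hands back plain text, forwarding any provider-specific keyword arguments (such as `token`) through `**kwargs`. A minimal sketch of calling it, assuming the chosen backends are reachable:

```python
from openai_rev.openai_rev import Completion, Provider

# same prompt, different backends; each branch returns the response text
print(Completion.create(Provider.You, prompt='hello world'))
print(Completion.create(Provider.Theb, prompt='hello world'))
```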

View file

@ -20,37 +20,37 @@ models = {
```python
# import quora (poe) package
import quora
from openai_rev import quora
# create account
# make sure to set enable_bot_creation to True
token = quora.Account.create(logging = True, enable_bot_creation=True)
token = quora.Account.create(logging=True, enable_bot_creation=True)
model = quora.Model.create(
token = token,
model = 'gpt-3.5-turbo', # or claude-instant-v1.0
system_prompt = 'you are ChatGPT a large language model ...'
token=token,
model='gpt-3.5-turbo', # or claude-instant-v1.0
system_prompt='you are ChatGPT a large language model ...'
)
print(model.name) # gptx....
print(model.name) # gptx....
# streaming response
for response in quora.StreamingCompletion.create(
custom_model = model.name,
prompt ='hello world',
token = token):
print(response.completion.choices[0].text)
custom_model=model.name,
prompt='hello world',
token=token):
print(response.text)
```
#### Normal Response:
```python
import quora
response = quora.Completion.create(model = 'gpt-4',
prompt = 'hello world',
token = token)
token = 'token')
print(response.completion.choices[0].text)
print(response.text)
```
#### Update Use This For Poe

View file

@ -6,11 +6,12 @@ from pathlib import Path
from random import choice, choices, randint
from re import search, findall
from string import ascii_letters, digits
from typing import Optional, Union
from typing import Optional, Union, List, Any, Generator
from urllib.parse import unquote
import selenium.webdriver.support.expected_conditions as EC
from fake_useragent import UserAgent
from pydantic import BaseModel
from pypasser import reCaptchaV3
from requests import Session
from selenium.webdriver import Firefox, Chrome, FirefoxOptions, ChromeOptions
@ -18,8 +19,8 @@ from selenium.webdriver.common.by import By
from selenium.webdriver.support.wait import WebDriverWait
from tls_client import Session as TLS
from quora.api import Client as PoeClient
from quora.mail import Emailnator
from .api import Client as PoeClient
from .mail import Emailnator
SELENIUM_WEB_DRIVER_ERROR_MSG = b'''The error message you are receiving is due to the `geckodriver` executable not
being found in your system\'s PATH. To resolve this issue, you need to download the geckodriver and add its location
@ -67,42 +68,27 @@ def extract_formkey(html):
return formkey
class PoeResponse:
class Completion:
class Choices:
def __init__(self, choice: dict) -> None:
self.text = choice['text']
self.content = self.text.encode()
self.index = choice['index']
self.logprobs = choice['logprobs']
self.finish_reason = choice['finish_reason']
class Choice(BaseModel):
text: str
index: int
logprobs: Any
finish_reason: str
def __repr__(self) -> str:
return f'''<__main__.APIResponse.Completion.Choices(\n text = {self.text.encode()},\n index = {self.index},\n logprobs = {self.logprobs},\n finish_reason = {self.finish_reason})object at 0x1337>'''
def __init__(self, choices: dict) -> None:
self.choices = [self.Choices(choice) for choice in choices]
class Usage(BaseModel):
prompt_tokens: int
completion_tokens: int
total_tokens: int
class Usage:
def __init__(self, usage_dict: dict) -> None:
self.prompt_tokens = usage_dict['prompt_tokens']
self.completion_tokens = usage_dict['completion_tokens']
self.total_tokens = usage_dict['total_tokens']
def __repr__(self):
return f'''<__main__.APIResponse.Usage(\n prompt_tokens = {self.prompt_tokens},\n completion_tokens = {self.completion_tokens},\n total_tokens = {self.total_tokens})object at 0x1337>'''
def __init__(self, response_dict: dict) -> None:
self.response_dict = response_dict
self.id = response_dict['id']
self.object = response_dict['object']
self.created = response_dict['created']
self.model = response_dict['model']
self.completion = self.Completion(response_dict['choices'])
self.usage = self.Usage(response_dict['usage'])
def json(self) -> dict:
return self.response_dict
class PoeResponse(BaseModel):
id: int
object: str
created: int
model: str
choices: List[Choice]
usage: Usage
text: str
class ModelResponse:
@ -116,17 +102,12 @@ class ModelResponse:
class Model:
@staticmethod
def create(
token: str,
model: str = 'gpt-3.5-turbo', # claude-instant
system_prompt: str = 'You are ChatGPT a large language model developed by Openai. Answer as consisely as possible',
description: str = 'gpt-3.5 language model from openai, skidded by poe.com',
handle: str = None,
token: str,
model: str = 'gpt-3.5-turbo', # claude-instant
system_prompt: str = 'You are ChatGPT a large language model developed by Openai. Answer as consisely as possible',
description: str = 'gpt-3.5 language model from openai, skidded by poe.com',
handle: str = None,
) -> ModelResponse:
models = {
'gpt-3.5-turbo': 'chinchilla',
'claude-instant-v1.0': 'a2',
'gpt-4': 'beaver',
}
if not handle:
handle = f'gptx{randint(1111111, 9999999)}'
@ -162,7 +143,7 @@ class Model:
obj={
'queryName': 'CreateBotMain_poeBotCreate_Mutation',
'variables': {
'model': models[model],
'model': MODELS[model],
'handle': handle,
'prompt': system_prompt,
'isPromptPublic': True,
@ -202,9 +183,9 @@ class Model:
class Account:
@staticmethod
def create(
proxy: Optional[str] = None,
logging: bool = False,
enable_bot_creation: bool = False,
proxy: Optional[str] = None,
logging: bool = False,
enable_bot_creation: bool = False,
):
client = TLS(client_identifier='chrome110')
client.proxies = {'http': f'http://{proxy}', 'https': f'http://{proxy}'} if proxy else None
@ -309,22 +290,23 @@ class Account:
class StreamingCompletion:
@staticmethod
def create(
model: str = 'gpt-4',
custom_model: bool = None,
prompt: str = 'hello world',
token: str = '',
):
model: str = 'gpt-4',
custom_model: bool = None,
prompt: str = 'hello world',
token: str = '',
) -> Generator[PoeResponse, None, None]:
_model = MODELS[model] if not custom_model else custom_model
client = PoeClient(token)
for chunk in client.send_message(_model, prompt):
yield PoeResponse(
{
**{
'id': chunk['messageId'],
'object': 'text_completion',
'created': chunk['creationTime'],
'model': _model,
'text': chunk['text_new'],
'choices': [
{
'text': chunk['text_new'],
@ -343,33 +325,28 @@ class StreamingCompletion:
class Completion:
@staticmethod
def create(
model: str = 'gpt-4',
custom_model: str = None,
prompt: str = 'hello world',
token: str = '',
):
models = {
'sage': 'capybara',
'gpt-4': 'beaver',
'claude-v1.2': 'a2_2',
'claude-instant-v1.0': 'a2',
'gpt-3.5-turbo': 'chinchilla',
}
_model = models[model] if not custom_model else custom_model
model: str = 'gpt-4',
custom_model: str = None,
prompt: str = 'hello world',
token: str = '',
) -> PoeResponse:
_model = MODELS[model] if not custom_model else custom_model
client = PoeClient(token)
for chunk in client.send_message(_model, prompt):
pass
chunk = None
for response in client.send_message(_model, prompt):
chunk = response
return PoeResponse(
{
**{
'id': chunk['messageId'],
'object': 'text_completion',
'created': chunk['creationTime'],
'model': _model,
'text': chunk['text'],
'choices': [
{
'text': chunk['text'],
@ -389,12 +366,12 @@ class Completion:
class Poe:
def __init__(
self,
model: str = 'ChatGPT',
driver: str = 'firefox',
download_driver: bool = False,
driver_path: Optional[str] = None,
cookie_path: str = './quora/cookie.json',
self,
model: str = 'ChatGPT',
driver: str = 'firefox',
download_driver: bool = False,
driver_path: Optional[str] = None,
cookie_path: str = './quora/cookie.json',
):
# validating the model
if model and model not in MODELS:
@ -473,12 +450,12 @@ class Poe:
return response
def create_bot(
self,
name: str,
/,
prompt: str = '',
base_model: str = 'ChatGPT',
description: str = '',
self,
name: str,
/,
prompt: str = '',
base_model: str = 'ChatGPT',
description: str = '',
) -> None:
if base_model not in MODELS:
raise RuntimeError('Sorry, the base_model you provided does not exist. Please check and try again.')

View file

@ -384,7 +384,7 @@ class Client:
continue
# update info about response
message["text_new"] = message["text"][len(last_text):]
message["text_new"] = message["text"][len(last_text) :]
last_text = message["text"]
message_id = message["messageId"]
@ -456,21 +456,21 @@ class Client:
logger.info(f"No more messages left to delete.")
def create_bot(
self,
handle,
prompt="",
base_model="chinchilla",
description="",
intro_message="",
api_key=None,
api_bot=False,
api_url=None,
prompt_public=True,
pfp_url=None,
linkification=False,
markdown_rendering=True,
suggested_replies=False,
private=False,
self,
handle,
prompt="",
base_model="chinchilla",
description="",
intro_message="",
api_key=None,
api_bot=False,
api_url=None,
prompt_public=True,
pfp_url=None,
linkification=False,
markdown_rendering=True,
suggested_replies=False,
private=False,
):
result = self.send_query(
"PoeBotCreateMutation",
@ -499,21 +499,21 @@ class Client:
return data
def edit_bot(
self,
bot_id,
handle,
prompt="",
base_model="chinchilla",
description="",
intro_message="",
api_key=None,
api_url=None,
private=False,
prompt_public=True,
pfp_url=None,
linkification=False,
markdown_rendering=True,
suggested_replies=False,
self,
bot_id,
handle,
prompt="",
base_model="chinchilla",
description="",
intro_message="",
api_key=None,
api_url=None,
private=False,
prompt_public=True,
pfp_url=None,
linkification=False,
markdown_rendering=True,
suggested_replies=False,
):
result = self.send_query(
"PoeBotEditMutation",

View file

@ -42,9 +42,7 @@ class Emailnator:
while True:
sleep(2)
mail_token = self.client.post(
"https://www.emailnator.com/message-list", json={"email": self.email}
)
mail_token = self.client.post("https://www.emailnator.com/message-list", json={"email": self.email})
mail_token = loads(mail_token.text)["messageData"]

View file

@ -1,9 +1,8 @@
### Example: `theb` (use like openai pypi package) <a name="example-theb"></a>
```python
# import library
import theb
from openai_rev import theb
# simple streaming completion
for token in theb.Completion.create('hello world'):

View file

@ -1,9 +1,11 @@
from re import findall
from json import loads
from queue import Queue, Empty
from re import findall
from threading import Thread
from curl_cffi import requests
class Completion:
# experimental
part1 = '{"role":"assistant","id":"chatcmpl'
@ -14,7 +16,8 @@ class Completion:
message_queue = Queue()
stream_completed = False
def request(prompt: str):
@classmethod
def request(cls, prompt: str):
headers = {
'authority': 'chatbot.theb.ai',
'content-type': 'application/json',
@ -22,12 +25,11 @@ class Completion:
'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36',
}
requests.post('https://chatbot.theb.ai/api/chat-process', headers=headers,
content_callback = Completion.handle_stream_response,
json = {
'prompt': prompt,
'options': {}
}
requests.post(
'https://chatbot.theb.ai/api/chat-process',
headers=headers,
content_callback=Completion.handle_stream_response,
json={'prompt': prompt, 'options': {}},
)
Completion.stream_completed = True
@ -36,7 +38,7 @@ class Completion:
def create(prompt: str):
Thread(target=Completion.request, args=[prompt]).start()
while Completion.stream_completed != True or not Completion.message_queue.empty():
while not Completion.stream_completed or not Completion.message_queue.empty():
try:
message = Completion.message_queue.get(timeout=0.01)
for message in findall(Completion.regex, message):

View file

@ -0,0 +1,4 @@
from openai_rev import theb
for token in theb.Completion.create('hello world'):
print(token, end='', flush=True)

View file

@ -1,7 +1,8 @@
### Example: `you` (use like openai pypi package) <a name="example-you"></a>
```python
import you
from openai_rev import you
# simple request with links and details
response = you.Completion.create(

View file

@ -1,28 +1,36 @@
import json
import re
from json import loads
from typing import Optional, List, Dict, Any
from uuid import uuid4
from fake_useragent import UserAgent
from pydantic import BaseModel
from tls_client import Session
class PoeResponse(BaseModel):
text: Optional[str] = None
links: List[str] = []
extra: Dict[str, Any] = {}
class Completion:
@staticmethod
def create(
prompt: str,
page: int = 1,
count: int = 10,
safe_search: str = 'Moderate',
on_shopping_page: bool = False,
mkt: str = '',
response_filter: str = 'WebPages,Translations,TimeZone,Computation,RelatedSearches',
domain: str = 'youchat',
query_trace_id: str = None,
chat: list = None,
include_links: bool = False,
detailed: bool = False,
debug: bool = False,
) -> dict:
prompt: str,
page: int = 1,
count: int = 10,
safe_search: str = 'Moderate',
on_shopping_page: bool = False,
mkt: str = '',
response_filter: str = 'WebPages,Translations,TimeZone,Computation,RelatedSearches',
domain: str = 'youchat',
query_trace_id: str = None,
chat: list = None,
include_links: bool = False,
detailed: bool = False,
debug: bool = False,
) -> PoeResponse:
if chat is None:
chat = []
@ -57,23 +65,25 @@ class Completion:
r'(?<=event: youChatSerpResults\ndata:)(.*\n)*?(?=event: )', response.text
).group()
third_party_search_results = re.search(
r'(?<=event: thirdPartySearchResults\ndata:)(.*\n)*?(?=event: )', response.text).group()
r'(?<=event: thirdPartySearchResults\ndata:)(.*\n)*?(?=event: )', response.text
).group()
# slots = findall(r"slots\ndata: (.*)\n\nevent", response.text)[0]
text = ''.join(re.findall(r'{\"youChatToken\": \"(.*?)\"}', response.text))
extra = {
'youChatSerpResults': loads(you_chat_serp_results),
'youChatSerpResults': json.loads(you_chat_serp_results),
# 'slots' : loads(slots)
}
return {
'response': text.replace('\\n', '\n').replace('\\\\', '\\').replace('\\"', '"'),
'links': loads(third_party_search_results)['search']['third_party_search_results']
if include_links
else None,
'extra': extra if detailed else None,
}
response = PoeResponse(text=text.replace('\\n', '\n').replace('\\\\', '\\').replace('\\"', '"'))
if include_links:
response.links = json.loads(third_party_search_results)['search']['third_party_search_results']
if detailed:
response.extra = extra
return response
@classmethod
def __get_headers(cls) -> dict:
@ -94,5 +104,5 @@ class Completion:
}
@classmethod
def __get_failure_response(cls) -> dict:
return dict(response='Unable to fetch the response, Please try again.', links=[], extra={})
def __get_failure_response(cls) -> PoeResponse:
return PoeResponse(text='Unable to fetch the response, Please try again.')
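
With this change `you.Completion.create` returns a `PoeResponse` pydantic object instead of a raw dict, which is why the GUI code earlier in the diff switched from `result['response']` to `result.text`. A rough usage sketch, assuming you.com is reachable; `links` and `extra` keep their empty defaults unless the corresponding flags are set:

```python
from openai_rev import you

response = you.Completion.create(
    prompt='What is the capital of France?',
    include_links=True,   # fill response.links from the third-party search results
    detailed=True,        # fill response.extra with the raw youChatSerpResults payload
)

print(response.text)    # cleaned answer text
print(response.links)   # list of search-result links
print(response.extra)   # extra serp data
```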

702
poetry.lock generated Normal file
View file

@ -0,0 +1,702 @@
# This file is automatically @generated by Poetry 1.4.2 and should not be changed by hand.
[[package]]
name = "async-generator"
version = "1.10"
description = "Async generators and context managers for Python 3.5+"
category = "main"
optional = false
python-versions = ">=3.5"
files = [
{file = "async_generator-1.10-py3-none-any.whl", hash = "sha256:01c7bf666359b4967d2cda0000cc2e4af16a0ae098cbffcb8472fb9e8ad6585b"},
{file = "async_generator-1.10.tar.gz", hash = "sha256:6ebb3d106c12920aaae42ccb6f787ef5eefdcdd166ea3d628fa8476abe712144"},
]
[[package]]
name = "attrs"
version = "23.1.0"
description = "Classes Without Boilerplate"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"},
{file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"},
]
[package.dependencies]
importlib-metadata = {version = "*", markers = "python_version < \"3.8\""}
[package.extras]
cov = ["attrs[tests]", "coverage[toml] (>=5.3)"]
dev = ["attrs[docs,tests]", "pre-commit"]
docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"]
tests = ["attrs[tests-no-zope]", "zope-interface"]
tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
[[package]]
name = "certifi"
version = "2022.12.7"
description = "Python package for providing Mozilla's CA Bundle."
category = "main"
optional = false
python-versions = ">=3.6"
files = [
{file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"},
{file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"},
]
[[package]]
name = "cffi"
version = "1.15.1"
description = "Foreign Function Interface for Python calling C code."
category = "main"
optional = false
python-versions = "*"
files = [
{file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"},
{file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"},
{file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"},
{file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"},
{file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"},
{file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"},
{file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"},
{file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"},
{file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"},
{file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"},
{file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"},
{file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"},
{file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"},
{file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"},
{file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"},
{file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"},
{file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"},
{file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"},
{file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"},
{file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"},
{file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"},
{file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"},
{file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"},
{file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"},
{file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"},
{file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"},
{file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"},
{file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"},
{file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"},
{file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"},
{file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"},
{file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"},
{file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"},
{file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"},
{file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"},
{file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"},
{file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"},
{file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"},
{file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"},
{file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"},
{file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"},
{file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"},
{file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"},
{file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"},
{file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"},
{file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"},
{file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"},
{file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"},
{file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"},
{file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"},
{file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"},
{file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"},
{file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"},
{file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"},
{file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"},
{file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"},
{file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"},
{file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"},
{file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"},
{file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"},
{file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"},
{file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"},
{file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"},
{file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"},
]
[package.dependencies]
pycparser = "*"
[[package]]
name = "charset-normalizer"
version = "3.1.0"
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
category = "main"
optional = false
python-versions = ">=3.7.0"
files = [
{file = "charset-normalizer-3.1.0.tar.gz", hash = "sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5"},
{file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b"},
{file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60"},
{file = "charset_normalizer-3.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1"},
{file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0"},
{file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f"},
{file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0"},
{file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795"},
{file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c"},
{file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203"},
{file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1"},
{file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137"},
{file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce"},
{file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a"},
{file = "charset_normalizer-3.1.0-cp310-cp310-win32.whl", hash = "sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448"},
{file = "charset_normalizer-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8"},
{file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19"},
{file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017"},
{file = "charset_normalizer-3.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df"},
{file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a"},
{file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41"},
{file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1"},
{file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62"},
{file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6"},
{file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5"},
{file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be"},
{file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb"},
{file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac"},
{file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324"},
{file = "charset_normalizer-3.1.0-cp311-cp311-win32.whl", hash = "sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909"},
{file = "charset_normalizer-3.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755"},
{file = "charset_normalizer-3.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373"},
{file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab"},
{file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9"},
{file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f"},
{file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28"},
{file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d"},
{file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d"},
{file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d"},
{file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6"},
{file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84"},
{file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c"},
{file = "charset_normalizer-3.1.0-cp37-cp37m-win32.whl", hash = "sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974"},
{file = "charset_normalizer-3.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23"},
{file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531"},
{file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c"},
{file = "charset_normalizer-3.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14"},
{file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb"},
{file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1"},
{file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b"},
{file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0"},
{file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649"},
{file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326"},
{file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11"},
{file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b"},
{file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd"},
{file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8"},
{file = "charset_normalizer-3.1.0-cp38-cp38-win32.whl", hash = "sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0"},
{file = "charset_normalizer-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59"},
{file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e"},
{file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31"},
{file = "charset_normalizer-3.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f"},
{file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e"},
{file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f"},
{file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854"},
{file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706"},
{file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e"},
{file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0"},
{file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230"},
{file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7"},
{file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e"},
{file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f"},
{file = "charset_normalizer-3.1.0-cp39-cp39-win32.whl", hash = "sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1"},
{file = "charset_normalizer-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b"},
{file = "charset_normalizer-3.1.0-py3-none-any.whl", hash = "sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d"},
]
[[package]]
name = "colorama"
version = "0.4.6"
description = "Cross-platform colored terminal text."
category = "main"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
files = [
{file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
{file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
]
[[package]]
name = "curl-cffi"
version = "0.5.5"
description = "libcurl ffi bindings for Python, with impersonation support"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "curl_cffi-0.5.5-cp37-abi3-macosx_10_9_x86_64.whl", hash = "sha256:4322f330167a5c87f6913d32b73eb7da9fe3e3dd86b28f137469f432b346d9bb"},
{file = "curl_cffi-0.5.5-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:48b0dcc6e91d68694e6472fa47b7f3457d8bd24e42c91e15d6e2b650f0d9d206"},
{file = "curl_cffi-0.5.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fda4c35f03ae593b7667d1a09bcd718d1399a5596b936cacb65dcd4bd705e95f"},
{file = "curl_cffi-0.5.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:660b2174b71d86bd7b136a6b91434a4c3edbcb1de718f3d337b688955872fbcc"},
{file = "curl_cffi-0.5.5-cp37-abi3-win_amd64.whl", hash = "sha256:7adb44515cb165ac661a8e5453c41d75bc284f494921051b64f2889c2c518544"},
{file = "curl_cffi-0.5.5.tar.gz", hash = "sha256:db94b8d0ad52f3b5c55d32225c29a8219a19592d882075965a78aa9e1a0dead1"},
]
[package.dependencies]
cffi = ">=1.12.0"
[package.extras]
build = ["cibuildwheel", "wheel"]
dev = ["autoflake (==1.4)", "black (==22.8.0)", "coverage (==6.4.1)", "cryptography (==38.0.3)", "flake8 (==6.0.0)", "flake8-bugbear (==22.7.1)", "flake8-pie (==0.15.0)", "httpx (==0.23.1)", "isort (==5.10.1)", "mypy (==0.971)", "pytest (==7.1.2)", "pytest-asyncio (==0.19.0)", "pytest-trio (==0.7.0)", "trio (==0.21.0)", "trio-typing (==0.7.0)", "trustme (==0.9.0)", "types-certifi (==2021.10.8.2)", "uvicorn (==0.18.3)"]
test = ["cryptography (==38.0.3)", "httpx (==0.23.1)", "pytest (==7.1.2)", "pytest-asyncio (==0.19.0)", "pytest-trio (==0.7.0)", "trio (==0.21.0)", "trio-typing (==0.7.0)", "trustme (==0.9.0)", "types-certifi (==2021.10.8.2)", "uvicorn (==0.18.3)"]
[[package]]
name = "exceptiongroup"
version = "1.1.1"
description = "Backport of PEP 654 (exception groups)"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "exceptiongroup-1.1.1-py3-none-any.whl", hash = "sha256:232c37c63e4f682982c8b6459f33a8981039e5fb8756b2074364e5055c498c9e"},
{file = "exceptiongroup-1.1.1.tar.gz", hash = "sha256:d484c3090ba2889ae2928419117447a14daf3c1231d5e30d0aae34f354f01785"},
]
[package.extras]
test = ["pytest (>=6)"]
[[package]]
name = "fake-useragent"
version = "1.1.3"
description = "Up-to-date simple useragent faker with real world database"
category = "main"
optional = false
python-versions = "*"
files = [
{file = "fake-useragent-1.1.3.tar.gz", hash = "sha256:1c06f0aa7d6e4894b919b30b9c7ebd72ff497325191057fbb5df3d5db06b93fc"},
{file = "fake_useragent-1.1.3-py3-none-any.whl", hash = "sha256:695d3b1bf7d11d04ab0f971fb73b0ca8de98b78bbadfbc8bacbc9a48423f7531"},
]
[package.dependencies]
importlib-metadata = {version = ">=4.0,<5.0", markers = "python_version < \"3.8\""}
importlib-resources = {version = ">=5.0", markers = "python_version < \"3.10\""}
[[package]]
name = "h11"
version = "0.14.0"
description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"},
{file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"},
]
[package.dependencies]
typing-extensions = {version = "*", markers = "python_version < \"3.8\""}
[[package]]
name = "idna"
version = "3.4"
description = "Internationalized Domain Names in Applications (IDNA)"
category = "main"
optional = false
python-versions = ">=3.5"
files = [
{file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"},
{file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"},
]
[[package]]
name = "importlib-metadata"
version = "4.13.0"
description = "Read metadata from Python packages"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "importlib_metadata-4.13.0-py3-none-any.whl", hash = "sha256:8a8a81bcf996e74fee46f0d16bd3eaa382a7eb20fd82445c3ad11f4090334116"},
{file = "importlib_metadata-4.13.0.tar.gz", hash = "sha256:dd0173e8f150d6815e098fd354f6414b0f079af4644ddfe90c71e2fc6174346d"},
]
[package.dependencies]
typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""}
zipp = ">=0.5"
[package.extras]
docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"]
perf = ["ipython"]
testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"]
[[package]]
name = "importlib-resources"
version = "5.12.0"
description = "Read resources from Python packages"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "importlib_resources-5.12.0-py3-none-any.whl", hash = "sha256:7b1deeebbf351c7578e09bf2f63fa2ce8b5ffec296e0d349139d43cca061a81a"},
{file = "importlib_resources-5.12.0.tar.gz", hash = "sha256:4be82589bf5c1d7999aedf2a45159d10cb3ca4f19b2271f8792bc8e6da7b22f6"},
]
[package.dependencies]
zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""}
[package.extras]
docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
testing = ["flake8 (<5)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"]
[[package]]
name = "names"
version = "0.3.0"
description = "Generate random names"
category = "main"
optional = false
python-versions = "*"
files = [
{file = "names-0.3.0.tar.gz", hash = "sha256:726e46254f2ed03f1ffb5d941dae3bc67c35123941c29becd02d48d0caa2a671"},
]
[[package]]
name = "outcome"
version = "1.2.0"
description = "Capture the outcome of Python function calls."
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "outcome-1.2.0-py2.py3-none-any.whl", hash = "sha256:c4ab89a56575d6d38a05aa16daeaa333109c1f96167aba8901ab18b6b5e0f7f5"},
{file = "outcome-1.2.0.tar.gz", hash = "sha256:6f82bd3de45da303cf1f771ecafa1633750a358436a8bb60e06a1ceb745d2672"},
]
[package.dependencies]
attrs = ">=19.2.0"
[[package]]
name = "pycparser"
version = "2.21"
description = "C parser in Python"
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
files = [
{file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"},
{file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"},
]
[[package]]
name = "pydantic"
version = "1.10.7"
description = "Data validation and settings management using python type hints"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "pydantic-1.10.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e79e999e539872e903767c417c897e729e015872040e56b96e67968c3b918b2d"},
{file = "pydantic-1.10.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:01aea3a42c13f2602b7ecbbea484a98169fb568ebd9e247593ea05f01b884b2e"},
{file = "pydantic-1.10.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:516f1ed9bc2406a0467dd777afc636c7091d71f214d5e413d64fef45174cfc7a"},
{file = "pydantic-1.10.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae150a63564929c675d7f2303008d88426a0add46efd76c3fc797cd71cb1b46f"},
{file = "pydantic-1.10.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ecbbc51391248116c0a055899e6c3e7ffbb11fb5e2a4cd6f2d0b93272118a209"},
{file = "pydantic-1.10.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f4a2b50e2b03d5776e7f21af73e2070e1b5c0d0df255a827e7c632962f8315af"},
{file = "pydantic-1.10.7-cp310-cp310-win_amd64.whl", hash = "sha256:a7cd2251439988b413cb0a985c4ed82b6c6aac382dbaff53ae03c4b23a70e80a"},
{file = "pydantic-1.10.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:68792151e174a4aa9e9fc1b4e653e65a354a2fa0fed169f7b3d09902ad2cb6f1"},
{file = "pydantic-1.10.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dfe2507b8ef209da71b6fb5f4e597b50c5a34b78d7e857c4f8f3115effaef5fe"},
{file = "pydantic-1.10.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10a86d8c8db68086f1e30a530f7d5f83eb0685e632e411dbbcf2d5c0150e8dcd"},
{file = "pydantic-1.10.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d75ae19d2a3dbb146b6f324031c24f8a3f52ff5d6a9f22f0683694b3afcb16fb"},
{file = "pydantic-1.10.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:464855a7ff7f2cc2cf537ecc421291b9132aa9c79aef44e917ad711b4a93163b"},
{file = "pydantic-1.10.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:193924c563fae6ddcb71d3f06fa153866423ac1b793a47936656e806b64e24ca"},
{file = "pydantic-1.10.7-cp311-cp311-win_amd64.whl", hash = "sha256:b4a849d10f211389502059c33332e91327bc154acc1845f375a99eca3afa802d"},
{file = "pydantic-1.10.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cc1dde4e50a5fc1336ee0581c1612215bc64ed6d28d2c7c6f25d2fe3e7c3e918"},
{file = "pydantic-1.10.7-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0cfe895a504c060e5d36b287ee696e2fdad02d89e0d895f83037245218a87fe"},
{file = "pydantic-1.10.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:670bb4683ad1e48b0ecb06f0cfe2178dcf74ff27921cdf1606e527d2617a81ee"},
{file = "pydantic-1.10.7-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:950ce33857841f9a337ce07ddf46bc84e1c4946d2a3bba18f8280297157a3fd1"},
{file = "pydantic-1.10.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c15582f9055fbc1bfe50266a19771bbbef33dd28c45e78afbe1996fd70966c2a"},
{file = "pydantic-1.10.7-cp37-cp37m-win_amd64.whl", hash = "sha256:82dffb306dd20bd5268fd6379bc4bfe75242a9c2b79fec58e1041fbbdb1f7914"},
{file = "pydantic-1.10.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8c7f51861d73e8b9ddcb9916ae7ac39fb52761d9ea0df41128e81e2ba42886cd"},
{file = "pydantic-1.10.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6434b49c0b03a51021ade5c4daa7d70c98f7a79e95b551201fff682fc1661245"},
{file = "pydantic-1.10.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64d34ab766fa056df49013bb6e79921a0265204c071984e75a09cbceacbbdd5d"},
{file = "pydantic-1.10.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:701daea9ffe9d26f97b52f1d157e0d4121644f0fcf80b443248434958fd03dc3"},
{file = "pydantic-1.10.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:cf135c46099ff3f919d2150a948ce94b9ce545598ef2c6c7bf55dca98a304b52"},
{file = "pydantic-1.10.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b0f85904f73161817b80781cc150f8b906d521fa11e3cdabae19a581c3606209"},
{file = "pydantic-1.10.7-cp38-cp38-win_amd64.whl", hash = "sha256:9f6f0fd68d73257ad6685419478c5aece46432f4bdd8d32c7345f1986496171e"},
{file = "pydantic-1.10.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c230c0d8a322276d6e7b88c3f7ce885f9ed16e0910354510e0bae84d54991143"},
{file = "pydantic-1.10.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:976cae77ba6a49d80f461fd8bba183ff7ba79f44aa5cfa82f1346b5626542f8e"},
{file = "pydantic-1.10.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d45fc99d64af9aaf7e308054a0067fdcd87ffe974f2442312372dfa66e1001d"},
{file = "pydantic-1.10.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d2a5ebb48958754d386195fe9e9c5106f11275867051bf017a8059410e9abf1f"},
{file = "pydantic-1.10.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:abfb7d4a7cd5cc4e1d1887c43503a7c5dd608eadf8bc615413fc498d3e4645cd"},
{file = "pydantic-1.10.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:80b1fab4deb08a8292d15e43a6edccdffa5377a36a4597bb545b93e79c5ff0a5"},
{file = "pydantic-1.10.7-cp39-cp39-win_amd64.whl", hash = "sha256:d71e69699498b020ea198468e2480a2f1e7433e32a3a99760058c6520e2bea7e"},
{file = "pydantic-1.10.7-py3-none-any.whl", hash = "sha256:0cd181f1d0b1d00e2b705f1bf1ac7799a2d938cce3376b8007df62b29be3c2c6"},
{file = "pydantic-1.10.7.tar.gz", hash = "sha256:cfc83c0678b6ba51b0532bea66860617c4cd4251ecf76e9846fa5a9f3454e97e"},
]
[package.dependencies]
typing-extensions = ">=4.2.0"
[package.extras]
dotenv = ["python-dotenv (>=0.10.4)"]
email = ["email-validator (>=1.0.3)"]
[[package]]
name = "pydub"
version = "0.25.1"
description = "Manipulate audio with an simple and easy high level interface"
category = "main"
optional = false
python-versions = "*"
files = [
{file = "pydub-0.25.1-py2.py3-none-any.whl", hash = "sha256:65617e33033874b59d87db603aa1ed450633288aefead953b30bded59cb599a6"},
{file = "pydub-0.25.1.tar.gz", hash = "sha256:980a33ce9949cab2a569606b65674d748ecbca4f0796887fd6f46173a7b0d30f"},
]
[[package]]
name = "pypasser"
version = "0.0.5"
description = "Bypassing reCaptcha V3 by sending HTTP requests & solving reCaptcha V2 using speech to text."
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "PyPasser-0.0.5.tar.gz", hash = "sha256:72b0ded34edcfa885a13ecc825c5a058503b68521ab87294205d7ff5cd569515"},
]
[package.dependencies]
pydub = "0.25.1"
PySocks = "1.7.1"
requests = ">=2.25.1,<3.0"
selenium = "*"
SpeechRecognition = "3.8.1"
[[package]]
name = "pysocks"
version = "1.7.1"
description = "A Python SOCKS client module. See https://github.com/Anorov/PySocks for more information."
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
files = [
{file = "PySocks-1.7.1-py27-none-any.whl", hash = "sha256:08e69f092cc6dbe92a0fdd16eeb9b9ffbc13cadfe5ca4c7bd92ffb078b293299"},
{file = "PySocks-1.7.1-py3-none-any.whl", hash = "sha256:2725bd0a9925919b9b51739eea5f9e2bae91e83288108a9ad338b2e3a4435ee5"},
{file = "PySocks-1.7.1.tar.gz", hash = "sha256:3f8804571ebe159c380ac6de37643bb4685970655d3bba243530d6558b799aa0"},
]
[[package]]
name = "requests"
version = "2.29.0"
description = "Python HTTP for Humans."
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "requests-2.29.0-py3-none-any.whl", hash = "sha256:e8f3c9be120d3333921d213eef078af392fba3933ab7ed2d1cba3b56f2568c3b"},
{file = "requests-2.29.0.tar.gz", hash = "sha256:f2e34a75f4749019bb0e3effb66683630e4ffeaf75819fb51bebef1bf5aef059"},
]
[package.dependencies]
certifi = ">=2017.4.17"
charset-normalizer = ">=2,<4"
idna = ">=2.5,<4"
urllib3 = ">=1.21.1,<1.27"
[package.extras]
socks = ["PySocks (>=1.5.6,!=1.5.7)"]
use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
[[package]]
name = "selenium"
version = "4.9.0"
description = ""
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "selenium-4.9.0-py3-none-any.whl", hash = "sha256:4c19e6aac202719373108d53a5a8e9336ba8d2b25822ca32ae6ff37acbabbdbe"},
{file = "selenium-4.9.0.tar.gz", hash = "sha256:478fae77cdfaec32adb1e68d59632c8c191f920535282abcaa2d1a3d98655624"},
]
[package.dependencies]
certifi = ">=2021.10.8"
trio = ">=0.17,<1.0"
trio-websocket = ">=0.9,<1.0"
urllib3 = {version = ">=1.26,<2.0", extras = ["socks"]}
[[package]]
name = "sniffio"
version = "1.3.0"
description = "Sniff out which async library your code is running under"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"},
{file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"},
]
[[package]]
name = "sortedcontainers"
version = "2.4.0"
description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set"
category = "main"
optional = false
python-versions = "*"
files = [
{file = "sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0"},
{file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"},
]
[[package]]
name = "speechrecognition"
version = "3.8.1"
description = "Library for performing speech recognition, with support for several engines and APIs, online and offline."
category = "main"
optional = false
python-versions = "*"
files = [
{file = "SpeechRecognition-3.8.1-py2.py3-none-any.whl", hash = "sha256:4d8f73a0c05ec70331c3bacaa89ecc06dfa8d9aba0899276664cda06ab597e8e"},
]
[[package]]
name = "tls-client"
version = "0.2"
description = "Advanced Python HTTP Client."
category = "main"
optional = false
python-versions = "*"
files = [
{file = "tls_client-0.2-py3-none-any.whl", hash = "sha256:26012084e753d7531d32960ec706f81188bb7d825586675fa300b2b44f791412"},
{file = "tls_client-0.2.tar.gz", hash = "sha256:eef3860c6f186fa866dc782f1b9e43ae837e40e831f50831c3515cee7c84fd0f"},
]
[[package]]
name = "trio"
version = "0.22.0"
description = "A friendly Python library for async concurrency and I/O"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "trio-0.22.0-py3-none-any.whl", hash = "sha256:f1dd0780a89bfc880c7c7994519cb53f62aacb2c25ff487001c0052bd721cdf0"},
{file = "trio-0.22.0.tar.gz", hash = "sha256:ce68f1c5400a47b137c5a4de72c7c901bd4e7a24fbdebfe9b41de8c6c04eaacf"},
]
[package.dependencies]
async-generator = ">=1.9"
attrs = ">=19.2.0"
cffi = {version = ">=1.14", markers = "os_name == \"nt\" and implementation_name != \"pypy\""}
exceptiongroup = {version = ">=1.0.0rc9", markers = "python_version < \"3.11\""}
idna = "*"
outcome = "*"
sniffio = "*"
sortedcontainers = "*"
[[package]]
name = "trio-websocket"
version = "0.10.2"
description = "WebSocket library for Trio"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "trio-websocket-0.10.2.tar.gz", hash = "sha256:af13e9393f9051111300287947ec595d601758ce3d165328e7d36325135a8d62"},
{file = "trio_websocket-0.10.2-py3-none-any.whl", hash = "sha256:0908435e4eecc49d830ae1c4d6c47b978a75f00594a2be2104d58b61a04cdb53"},
]
[package.dependencies]
exceptiongroup = "*"
trio = ">=0.11"
wsproto = ">=0.14"
[[package]]
name = "twocaptcha"
version = "0.0.1"
description = "2Captcha Python3 API Wrapper"
category = "main"
optional = false
python-versions = "*"
files = [
{file = "TwoCaptcha-0.0.1.tar.gz", hash = "sha256:fd04127de71ca4bd31c22add84a5bcb7c683cf9ee5bf503ca14a8f372ac76a0e"},
]
[package.dependencies]
requests = "*"
[[package]]
name = "typing-extensions"
version = "4.5.0"
description = "Backported and Experimental Type Hints for Python 3.7+"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "typing_extensions-4.5.0-py3-none-any.whl", hash = "sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4"},
{file = "typing_extensions-4.5.0.tar.gz", hash = "sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb"},
]
[[package]]
name = "urllib3"
version = "1.26.15"
description = "HTTP library with thread-safe connection pooling, file post, and more."
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
files = [
{file = "urllib3-1.26.15-py2.py3-none-any.whl", hash = "sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42"},
{file = "urllib3-1.26.15.tar.gz", hash = "sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305"},
]
[package.dependencies]
PySocks = {version = ">=1.5.6,<1.5.7 || >1.5.7,<2.0", optional = true, markers = "extra == \"socks\""}
[package.extras]
brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"]
secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"]
socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
[[package]]
name = "websocket-client"
version = "1.5.1"
description = "WebSocket client for Python with low level API options"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "websocket-client-1.5.1.tar.gz", hash = "sha256:3f09e6d8230892547132177f575a4e3e73cfdf06526e20cc02aa1c3b47184d40"},
{file = "websocket_client-1.5.1-py3-none-any.whl", hash = "sha256:cdf5877568b7e83aa7cf2244ab56a3213de587bbe0ce9d8b9600fc77b455d89e"},
]
[package.extras]
docs = ["Sphinx (>=3.4)", "sphinx-rtd-theme (>=0.5)"]
optional = ["python-socks", "wsaccel"]
test = ["websockets"]
[[package]]
name = "wsproto"
version = "1.2.0"
description = "WebSockets state-machine based protocol implementation"
category = "main"
optional = false
python-versions = ">=3.7.0"
files = [
{file = "wsproto-1.2.0-py3-none-any.whl", hash = "sha256:b9acddd652b585d75b20477888c56642fdade28bdfd3579aa24a4d2c037dd736"},
{file = "wsproto-1.2.0.tar.gz", hash = "sha256:ad565f26ecb92588a3e43bc3d96164de84cd9902482b130d0ddbaa9664a85065"},
]
[package.dependencies]
h11 = ">=0.9.0,<1"
[[package]]
name = "zipp"
version = "3.15.0"
description = "Backport of pathlib-compatible object wrapper for zip files"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "zipp-3.15.0-py3-none-any.whl", hash = "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556"},
{file = "zipp-3.15.0.tar.gz", hash = "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b"},
]
[package.extras]
docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"]
[metadata]
lock-version = "2.0"
python-versions = "^3.7"
content-hash = "d22aa72ee6bd554c0676a3b6b723090d156a720d03d3b05422a01aa9bf22dda2"
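Editor's note: several entries in the lock above are backports guarded by python_version markers (importlib-metadata and typing-extensions for Python < 3.8, importlib-resources for < 3.10, exceptiongroup for < 3.11). The short sketch below is illustrative only and not part of this commit; it shows, assuming an environment installed from this lock, the usual fallback pattern those markers support and confirms the exact requests pin.
import sys
# On Python 3.8+ the standard-library module is available; on 3.7 the backport
# pinned above (importlib-metadata >=4.0,<5.0, marker python_version < "3.8")
# is imported under its package name instead.
if sys.version_info >= (3, 8):
    from importlib import metadata
else:
    import importlib_metadata as metadata  # backport listed in the lock file
# requests is locked to 2.29.0 above, so an environment installed from this
# lock should report that version here.
print(metadata.version("requests"))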

28
pyproject.toml Normal file
View file

@ -0,0 +1,28 @@
[tool.poetry]
name = "openai-rev"
version = "0.1.0"
description = ""
authors = ["Raju Komati <komatiraju032@gmail.com>"]
license = "GPL-3.0"
readme = "README.md"
packages = [{ include = "openai_rev" }]
exclude = ["**/*.txt"]
[tool.poetry.dependencies]
python = "^3.7"
websocket-client = "^1.5.1"
requests = "2.29.0"
tls-client = "^0.2"
pypasser = "^0.0.5"
names = "^0.3.0"
colorama = "^0.4.6"
curl-cffi = "^0.5.5"
selenium = "^4.9.0"
fake-useragent = "^1.1.3"
twocaptcha = "^0.0.1"
pydantic = "^1.10.7"
[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
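Editor's note: the packages = [{ include = "openai_rev" }] entry above is what places the provider modules under a single package namespace. The two lines below are an illustrative sketch, not part of this commit; they show the import style the updated test files further down rely on once the project is installed (for example via poetry install).
# Providers are imported from the openai_rev package rather than as
# top-level modules, matching the updated test imports in this commit.
from openai_rev import forefront, quora, you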

View file

@ -10,3 +10,4 @@ selenium
fake-useragent
twocaptcha
https://github.com/AI-Yash/st-chat/archive/refs/pull/24/head.zip
pydantic

20
testing/accounts.txt Normal file
View file

@ -0,0 +1,20 @@
idkaovprvbrjbb@bugfoo.com:eyJhbGciOiJSUzI1NiIsImtpZCI6Imluc18yTzZ3UTFYd3dxVFdXUWUyQ1VYZHZ2bnNaY2UiLCJ0eXAiOiJKV1QifQ.eyJhenAiOiJodHRwczovL2FjY291bnRzLmZvcmVmcm9udC5haSIsImV4cCI6MTY4MjYxNDUxOSwiaWF0IjoxNjgyNjE0NDU5LCJpc3MiOiJodHRwczovL2NsZXJrLmZvcmVmcm9udC5haSIsIm5iZiI6MTY4MjYxNDQ0OSwic2lkIjoic2Vzc18yUDFBeFNHNnFHdzhWSFNla2lhMVB0NWJYdW8iLCJzdWIiOiJ1c2VyXzJQMUF4TWlNRzFLb00xeFFUQVExcFFjd0d3dyJ9.Nw82ExUQMvJE8oVAmUYl81dS9J43YK0AQYwLJjE5fCozanqQi946q_6DRRQz-9kkm-Zd60zuFzl_ANgZ9lLOOSMd89oJArbu3VUOA8Q-BOtUE0INmwvPCP7KtLDEVRGEsF8oIddwE1Da1rEToBEHrwBuzOVbRvoKqbJ7KZrRB7tHzUVyik3n7K3bddyecIA5Gtj8Nmc_NvwVaBelHOfjH0eoU7y0oza-bOJPNCRosjV3kFdoYhHNtNvQkfIF7MSmqyeC-NHhDVdmv57ehkVdr_H0_MyoT2kNWEk1ETzST36Lhu06yaaWzAPhjpcThN8Q7OFNXSHUAhTJibMHER6UGw
vpmysvdoydnumi@bugfoo.com:eyJhbGciOiJSUzI1NiIsImtpZCI6Imluc18yTzZ3UTFYd3dxVFdXUWUyQ1VYZHZ2bnNaY2UiLCJ0eXAiOiJKV1QifQ.eyJhenAiOiJodHRwczovL2FjY291bnRzLmZvcmVmcm9udC5haSIsImV4cCI6MTY4MjYyMDM4MSwiaWF0IjoxNjgyNjIwMzIxLCJpc3MiOiJodHRwczovL2NsZXJrLmZvcmVmcm9udC5haSIsIm5iZiI6MTY4MjYyMDMxMSwic2lkIjoic2Vzc18yUDFNcTM1Tmtld3lIa2NOYUJvNDcyV1haR0MiLCJzdWIiOiJ1c2VyXzJQMU1xNFRYcmJlYVhSYVdKUWFjZGRZTVNyMiJ9.PVQlpsxJxgTOaizJ34AGkP3yJzhDAUFBw29gEP0cHCuUZcAUk1MVKeXYoXr32nqQmUmPO5-kmbp3p639idzA3E7EYnOCJ6xOTs6qB4QvmtEfAJQLD84-h5cYXkLzpqcWZeszDkoAQAZKhmI5kGn_OJzblYVodSjz92klrGSASuqIwqywvGNKb2NwQ80yRrNHUcGeI6tRPtMdZ0ieOi04SphXpyB7-JLZuco8nwqivs3uX97Jtkw3EYsTjhp5MKwfG9hGjHk54IwxAJtbMCSMDxDcOYxO2vxVL4W4syCHCB3yUJANmuFjL7mIv76uy4w0YIJtDFAYn3oad7dbHWKtSQ
xjwollilwtumif@bugfoo.com:eyJhbGciOiJSUzI1NiIsImtpZCI6Imluc18yTzZ3UTFYd3dxVFdXUWUyQ1VYZHZ2bnNaY2UiLCJ0eXAiOiJKV1QifQ.eyJhenAiOiJodHRwczovL2FjY291bnRzLmZvcmVmcm9udC5haSIsImV4cCI6MTY4MjYyMDcwNywiaWF0IjoxNjgyNjIwNjQ3LCJpc3MiOiJodHRwczovL2NsZXJrLmZvcmVmcm9udC5haSIsIm5iZiI6MTY4MjYyMDYzNywic2lkIjoic2Vzc18yUDFOVjBjMm5JdWVLak82SjVWUTVoTHVZTWQiLCJzdWIiOiJ1c2VyXzJQMU5VelpRWjZUZXRETkhnT0dWOHlaZ09pciJ9.O6GsP0QtekV5UMxvMEpUa65I5E_2ahihRbLGAfjSAnyWNY72fQgCgbi75n4L85ej9V9qeDxMqCPF0BA5hXN606ybdlBY_HvlvwRIgyKU2r5JyaltOow7bg6vu_qwj91YejvTDHR7w_cwq-FF4VRYysiJfoc-8jcO2HZl8HM0R8aaSf8xDHy0wCfpBYKN2Mw3-mP23Z08NxlehKXNdKoQq_zKQOJbl0DxfWaoFB6b-xocIl8RO2PR6r1yf0VzHaPNxa1PUrPcs94iaqXuOjpCzqEqO9kZf6WM0rWKRx6-u9A0BECaeirfYpNvdaNQHzIRSndzpTO-Kvnww6HtjDD-tg
idivbwwtapqzsk@bugfoo.com:eyJhbGciOiJSUzI1NiIsImtpZCI6Imluc18yTzZ3UTFYd3dxVFdXUWUyQ1VYZHZ2bnNaY2UiLCJ0eXAiOiJKV1QifQ.eyJhenAiOiJodHRwczovL2FjY291bnRzLmZvcmVmcm9udC5haSIsImV4cCI6MTY4MjYyMjk0NSwiaWF0IjoxNjgyNjIyODg1LCJpc3MiOiJodHRwczovL2NsZXJrLmZvcmVmcm9udC5haSIsIm5iZiI6MTY4MjYyMjg3NSwic2lkIjoic2Vzc18yUDFTMkp1emJkOGhLcGkyNnpLR3JZRmJua1UiLCJzdWIiOiJ1c2VyXzJQMVMySFRuTXlWbkhIaXBMQjNCMTc2N1JiWCJ9.Aa1uXE_lLN3hbb1zvEdMtM6j9V6LGV7iQ1v0e7X_NkIrUw0-WxhvKBj07AuiTe4AXgMy9rmbJegTO0ZN6t9xpZqiz6NYmSfqY-d5icf_OvQXJEap701JW81Wk3jeUYuJHwa34ZpTiRb3p6lkkcI4n4steLvoanGKTpXMMqB4fChX9qogcnw-kNe1c8jq3BVlyj7Q6twInKnT33BxyW4AnAiqFeXCzQ8g4kZkYhSIFEhM3Wkrvv_82rhtoJ0US1s20SMW0BVjfu5pXI0mXr2EFG50BRAvtnRIrQeYi4kzjCP9IPVni-AheFaUdSZ1jKsW-Qq_1u2TQY586qqs-nARTw
ldbkpwhqiypxth@bugfoo.com:eyJhbGciOiJSUzI1NiIsImtpZCI6Imluc18yTzZ3UTFYd3dxVFdXUWUyQ1VYZHZ2bnNaY2UiLCJ0eXAiOiJKV1QifQ.eyJhenAiOiJodHRwczovL2FjY291bnRzLmZvcmVmcm9udC5haSIsImV4cCI6MTY4MjYyMjk2NSwiaWF0IjoxNjgyNjIyOTA1LCJpc3MiOiJodHRwczovL2NsZXJrLmZvcmVmcm9udC5haSIsIm5iZiI6MTY4MjYyMjg5NSwic2lkIjoic2Vzc18yUDFTNGIyS1JQS2lhcTVYOHNjeU1NT3dzUEYiLCJzdWIiOiJ1c2VyXzJQMVM0ZHlwT2NKTmRFb3VlNTFoaWY5ek5KZyJ9.i5zyh-lEgMlhClGhtY92m_8HxAzMwth3Rpgv7KbWba7ABrcDUmwt5PP555PpBPxB98bF32XmakSYgOkVJ0UYRBkOPjce90AVBObmoBLA2-iD7ftL9-ijqwHIlGX-uDiNv-p2CJyUUdKx6qj2q_8Eeh8fqSWj0eI1sXp4aJENqDlmoA1bGnGaMuBYlE8BGDf0OcqpvDJbf9puV-K0JqV_fkttOBWqhQkEmyIXSil4NnuCgraFlD9PdFpH0m9PmedZvZpJ6qNPxOIighWrFjDx71MUg3eGXcBX2tX0neo9hPAskg016jhaIeLZXedmDMYMR3X1zsC10Z5RNGulJNh-hA
poqxbjcnzlacln@bugfoo.com:eyJhbGciOiJSUzI1NiIsImtpZCI6Imluc18yTzZ3UTFYd3dxVFdXUWUyQ1VYZHZ2bnNaY2UiLCJ0eXAiOiJKV1QifQ.eyJhenAiOiJodHRwczovL2FjY291bnRzLmZvcmVmcm9udC5haSIsImV4cCI6MTY4MjYyMzExNiwiaWF0IjoxNjgyNjIzMDU2LCJpc3MiOiJodHRwczovL2NsZXJrLmZvcmVmcm9udC5haSIsIm5iZiI6MTY4MjYyMzA0Niwic2lkIjoic2Vzc18yUDFTTm82ZklpSDRTQ09NV2N5aGxaZ1FWMVIiLCJzdWIiOiJ1c2VyXzJQMVNObEZqS3E3V0RPWU5WOEo0eGtjWXN4YyJ9.Z2XhJHoUGPZ_N9W_iwJfBmoaen1vVsnIlM5_ioyzTrGKjfusoBDSYiEx8qEG2-L5cLbUlaqlF4_dge0NSxdmLoTNKfGlBWyxVQV40hqZyV1FEPkLlkwuuE5nytrC1IuamNycZxp9DBpe20PuAc2fMysTAJPZ6ATuvj2L8lzeydi_uHZ6rB-C0-D8N84doYSeiBmwCS199Qlro1VCR5qEMowx00yq-4-oJ6i8ey6DXkZOyppdlv7zgHVcLEFdhUtgvJ0WXvEQtwvYhMOsxv83d2E1Z-XIlmpInbIPguijxm6FkDhG8fCiMZXgO7sHiQ6LmTBnp2o7E2ipYcodQ0hWBQ
mfrvfrzhhbaepu@bugfoo.com:eyJhbGciOiJSUzI1NiIsImtpZCI6Imluc18yTzZ3UTFYd3dxVFdXUWUyQ1VYZHZ2bnNaY2UiLCJ0eXAiOiJKV1QifQ.eyJhenAiOiJodHRwczovL2FjY291bnRzLmZvcmVmcm9udC5haSIsImV4cCI6MTY4MjYyMzE3NCwiaWF0IjoxNjgyNjIzMTE0LCJpc3MiOiJodHRwczovL2NsZXJrLmZvcmVmcm9udC5haSIsIm5iZiI6MTY4MjYyMzEwNCwic2lkIjoic2Vzc18yUDFTVjNzczIwcmtMRVZJWGpkTVBjVGJBdEwiLCJzdWIiOiJ1c2VyXzJQMVNWMTRad2xkNTZrTjF1WG9EOFNpSW51VSJ9.m5dEhTWPV3BH9rNqDEtl7w4nP4oHbwkc6eLchn68_zg8dJjpwshL1KR8OUHGsOCsBZ8UsGO2vm22o46cV-FPwnmSJGJihF7utyJ2ZVumdvkmUz3vMwOmudDMkQKc8n0Bx05QedYI_TbaJvMIjd6elJDQwkVrt05O3__0BawT2936HBRRI3Owp7gbwwc-P_0nnFxqgKTFawi-Aeh0GB26W--HpnGYzyaS7RdF9HVeVNGYTEcSLVkz7TvxtqS-QSABEGyaAinnOGuP_9BVN1jJbpDMp6EWZWmfb_oEbJ6yB7DwqrDacyMMocnPb9M4j713NXJ8GD1kLDJOyOs1neoy9A
dcgvznfsmollao@bugfoo.com:eyJhbGciOiJSUzI1NiIsImtpZCI6Imluc18yTzZ3UTFYd3dxVFdXUWUyQ1VYZHZ2bnNaY2UiLCJ0eXAiOiJKV1QifQ.eyJhenAiOiJodHRwczovL2FjY291bnRzLmZvcmVmcm9udC5haSIsImV4cCI6MTY4MjYyMzMzNiwiaWF0IjoxNjgyNjIzMjc2LCJpc3MiOiJodHRwczovL2NsZXJrLmZvcmVmcm9udC5haSIsIm5iZiI6MTY4MjYyMzI2Niwic2lkIjoic2Vzc18yUDFTcE9ZejR3ZFNYS3VaRTJCYkhaNUJpaWEiLCJzdWIiOiJ1c2VyXzJQMVNwTTJ1SE04aWtrTTlIaGs5VzhuNERVciJ9.HT8tB6BH7dyMTcT8b2zWw_KDt_F2mGIYQqnsAtoytuWwzxyt4VpXNcddsBHqSQsV1Qh5QLBhL6zHVgJIXZo4OXrBJcDZxIcAkG6PUbeloL-2QQmMExBgEzIVNuWum7xBmeDHVYU2W_b5Eaqihsvs8pDrhaDkPSQu8p3Qwxxswds7APca6YAGyCzvObGX-XBOlHJY07TZR3t7zhnkLrY91yAbkkogkQJbGiMJv7NZb3er1JpjFHRr-kCv5v2z8KeF_Q9R8D8hRkuXANWOElapwLNRw7CbL4itvHUKq9bl2wd4ABu6vGWaPvLeG3V5islsjwo7D7NIsUOOTjUTq9BjeQ
gduxptzcxwotgq@bugfoo.com:eyJhbGciOiJSUzI1NiIsImtpZCI6Imluc18yTzZ3UTFYd3dxVFdXUWUyQ1VYZHZ2bnNaY2UiLCJ0eXAiOiJKV1QifQ.eyJhenAiOiJodHRwczovL2FjY291bnRzLmZvcmVmcm9udC5haSIsImV4cCI6MTY4MjYyMzUwNywiaWF0IjoxNjgyNjIzNDQ3LCJpc3MiOiJodHRwczovL2NsZXJrLmZvcmVmcm9udC5haSIsIm5iZiI6MTY4MjYyMzQzNywic2lkIjoic2Vzc18yUDFUQXNPOUJyT01GSmFPc2YxNExlYmo1U3kiLCJzdWIiOiJ1c2VyXzJQMVRBc0h6UHdVaHdHWVhBM1NnOHltdDY4diJ9.DfQMvyTdDba4F3In2mLefXWG01wMGUzqbPRXlmVryEkcWfLCS8AdaUGvwsm51phV7SvTs7J4PJ3uOThxaPvIQ7iSXeSU_PdvYXaz7Ira9lDSJehPs3sRjprow30a5W_HkUo4Jdr89Je9DtsPX2Tx_qD394Q3ksS4FjFWzxgkOCHuE-AdvHCIP_AExOBePm7vH0bjqSMEjo5YLjWjT2rMhQNiUTP8Gao0yFqB0EjgkosCH1FW3drOCGH5PzhPcZSaNhbauOWabmeyi88tBCFpIWbPq8GXbg3q-CS_aWNwzYrHdiV0m2d7bBOVXHy6o0WNT0sLKl0PBjvo5rg5DWuleQ
ilurrzfuyfgwxg@bugfoo.com:eyJhbGciOiJSUzI1NiIsImtpZCI6Imluc18yTzZ3UTFYd3dxVFdXUWUyQ1VYZHZ2bnNaY2UiLCJ0eXAiOiJKV1QifQ.eyJhenAiOiJodHRwczovL2FjY291bnRzLmZvcmVmcm9udC5haSIsImV4cCI6MTY4MjYyMzgxNywiaWF0IjoxNjgyNjIzNzU3LCJpc3MiOiJodHRwczovL2NsZXJrLmZvcmVmcm9udC5haSIsIm5iZiI6MTY4MjYyMzc0Nywic2lkIjoic2Vzc18yUDFUbm4zeUVOanFDT1prc2NRVTN0aHdKak0iLCJzdWIiOiJ1c2VyXzJQMVRudDF4Q2VrZzJhaWt4WXdZTGszU3JhMyJ9.OGQAXq97bD3cA4aWrcxQHwVPRDbNEi_CqVRts0T3qtO8Ol_vL5EkMe1MBQSeSdetrXr8Oyeqwc6TFkc7JKLyQHg6uFL2Sli7GGhOkWnKysBZLrm-kSi-wBfxdd2gR0ZL3ZyoWIgQLldxqa127tQLI85DtNUT16e2XpzRprmqDCvQvZTiL9GdphjAZ26DP_PpGBwldOWHDelrUpSmHm7n35GB80DAGOlAoEPS04ksNsym9E7847lJnVEXUuiXpC8_zJZup6GDFqZTZ9NY_itQOzUheYQt1EuW8teEOMsJAGCeWYajJpfkjXzMz4Qg9RbVU32Zsne6lmYvjcMNC_iomg
oguaaixijvkhus@bugfoo.com:eyJhbGciOiJSUzI1NiIsImtpZCI6Imluc18yTzZ3UTFYd3dxVFdXUWUyQ1VYZHZ2bnNaY2UiLCJ0eXAiOiJKV1QifQ.eyJhenAiOiJodHRwczovL2FjY291bnRzLmZvcmVmcm9udC5haSIsImV4cCI6MTY4MjYyNjY2MSwiaWF0IjoxNjgyNjI2NjAxLCJpc3MiOiJodHRwczovL2NsZXJrLmZvcmVmcm9udC5haSIsIm5iZiI6MTY4MjYyNjU5MSwic2lkIjoic2Vzc18yUDFaWkY0T2FyekVCbEZaVFl6VTFyMWlucjAiLCJzdWIiOiJ1c2VyXzJQMVpaQllWSFVUVmRHWEx3dlZpU01ZbGVXdCJ9.TO7-QvuRh8RgEStltI64WcoAxCz9WBbs5xaUs6M0NbKLZX0f5eIAJ4NtO4BUet9anWYkozI0dK7E7BvKBaJLC0UJH8bSoVotZuws4U1ravXImNxc4mzRPns3EH7dk2nZDbbnRp2f_SZbl-DfH1vKK183acAogWwBJZoXBSUucIdSugfg25kOKBrj4W4aUYwtpE82H4zS13Lk5qdfpwetRHZ-tXqxzyZSzgxHEdcSvvWG0xAVSzhXkahb-gQQs3FmLHW8rh-SvsUFjHWrhnsRTwEPfVz9F5rzwlEnhkpEY5oAqn0rIXrOz0n2zrElJGmArCDaP0dtJqMEa0zeW6pyIg
soeaazqscgotbe@bugfoo.com:eyJhbGciOiJSUzI1NiIsImtpZCI6Imluc18yTzZ3UTFYd3dxVFdXUWUyQ1VYZHZ2bnNaY2UiLCJ0eXAiOiJKV1QifQ.eyJhenAiOiJodHRwczovL2FjY291bnRzLmZvcmVmcm9udC5haSIsImV4cCI6MTY4MjYyNjczMywiaWF0IjoxNjgyNjI2NjczLCJpc3MiOiJodHRwczovL2NsZXJrLmZvcmVmcm9udC5haSIsIm5iZiI6MTY4MjYyNjY2Mywic2lkIjoic2Vzc18yUDFaaU00RzhGRVFsNzZEaUs4VWVVc0RDYUgiLCJzdWIiOiJ1c2VyXzJQMVppTDlsMWZObjg3RDdXeWo3aGpsa3JFTyJ9.ooDGCVxddQXhEwKUTgRr6MiWFDyinzb_ioRTMTFd5kUUBKY7mdVtSUcm3AFTRYaMKI09YIIZZI2c6OpFyITGNPL6NMwvEq0EWR9iziaeosg8OW1CKwB3k_h7m92lB2APvvRXmn6fgm00T_xkD7ySpp0xDPFFLKT2c6UmfBdb0s1azTu-Sv3r862GNM1CnzXrIa2YqDdpCeSCZdK4L189yihN3eiWHTksDIBcp0lmAA4O5dSELfPqXuzXTe-ltc9QXyQhASdJ9v-555_ZDTH5l2G6mK4iXLZEIrIGieXPBohz4RWCbO04a_o9k8kHbNNcaXb4lVqunyorKpzW80J46A
nspnxbtkiernuh@bugfoo.com:eyJhbGciOiJSUzI1NiIsImtpZCI6Imluc18yTzZ3UTFYd3dxVFdXUWUyQ1VYZHZ2bnNaY2UiLCJ0eXAiOiJKV1QifQ.eyJhenAiOiJodHRwczovL2FjY291bnRzLmZvcmVmcm9udC5haSIsImV4cCI6MTY4MjYyNjgzMSwiaWF0IjoxNjgyNjI2NzcxLCJpc3MiOiJodHRwczovL2NsZXJrLmZvcmVmcm9udC5haSIsIm5iZiI6MTY4MjYyNjc2MSwic2lkIjoic2Vzc18yUDFadVJXaHM0b05hWjhNcWZ4cEhhMzVXTGsiLCJzdWIiOiJ1c2VyXzJQMVp1VHZhTEY1MWtmZ1FZTW5CVTdiNERLZyJ9.o9adEdZ4ND80Me8X7VF2Q1Of0dxwKsyBHOc29-NkuHQXQSQc9nhHiF8Odblyeb2JwhuzfRbFiFK2Sb7gQFw4hzoj7-xb2mWJ5U5R-xwXQAoLA4Vtt3yJIBJ4myx00btgqlELmzHRrbanuM258wVNqHbc6PvP-CLNL6KJCb7EY_rAM5HdU6zX-nWDiP3ccE6PDcBBdfopgmyhGm1b2hA1FRAlgijD0gq8zX1_HEZW5nLGzSKiGUIJ6ozvYriPSAXNAv9-Mhjje7VhpjWNxQLfpiEEErEbAYFV6MyG7Lg6-LcT-bcvVc2ckEf3grqFkzTwqhUyWic-ECZbFIcAabXebA
trbfmqhnokevio@bugfoo.com:eyJhbGciOiJSUzI1NiIsImtpZCI6Imluc18yTzZ3UTFYd3dxVFdXUWUyQ1VYZHZ2bnNaY2UiLCJ0eXAiOiJKV1QifQ.eyJhenAiOiJodHRwczovL2FjY291bnRzLmZvcmVmcm9udC5haSIsImV4cCI6MTY4MjYyNjg4MSwiaWF0IjoxNjgyNjI2ODIxLCJpc3MiOiJodHRwczovL2NsZXJrLmZvcmVmcm9udC5haSIsIm5iZiI6MTY4MjYyNjgxMSwic2lkIjoic2Vzc18yUDFhMHdPc3E1VWx2SXp2ZTF1bGthTjZMNjMiLCJzdWIiOiJ1c2VyXzJQMWEwdjBDMjdhR0NRM0dxekNWMmdaY1d6ViJ9.ArPSGwcr6zQDxc2M-PQv-bXcyP7ElOsf3nOruDtplHZBLfF_PBgZ8dazkzboqMzS1qZ2F5gGCY5e4tl1-jOBP951VSQszNgYcv1Xcn5QBc5-8j2r1AdAhAyilZIHrQ-sSodcXWk5d-qVReSRKP-kqx_zVdo7OGhbNxrfJh6gr-6j09eZ4HA5EttL5T-d38ws_qaOns-k1NYh4nxmr6Q8xMXmrw9pjmH3phQ60XbdWoq7MWvQAwouqCmVhAE81UMWFBovOPjG0o-aaGbx254SRWUR5awegbpl2GcPgMdEp4yl7AbyHedGx3VJ4NPBJKSqtunipJfq1gMtcMtVBGPPkA
gprzdsftrckwbj@bugfoo.com:eyJhbGciOiJSUzI1NiIsImtpZCI6Imluc18yTzZ3UTFYd3dxVFdXUWUyQ1VYZHZ2bnNaY2UiLCJ0eXAiOiJKV1QifQ.eyJhenAiOiJodHRwczovL2FjY291bnRzLmZvcmVmcm9udC5haSIsImV4cCI6MTY4MjYyNzM3NiwiaWF0IjoxNjgyNjI3MzE2LCJpc3MiOiJodHRwczovL2NsZXJrLmZvcmVmcm9udC5haSIsIm5iZiI6MTY4MjYyNzMwNiwic2lkIjoic2Vzc18yUDFiMTkxUUdYZ3YxZ2VDTHRJUXA2QjZVWjIiLCJzdWIiOiJ1c2VyXzJQMWIxNkRSdmpNS2FQaGV6aGFSWFF3WDhDRyJ9.MzKp7sLGFxWyupd6V2Ohl9btKV0SOMGixeV-qPxpNi4uh4r0CMSctSLR0SfxArsYskFwUF9DNqAp2YbrPP4c9rEiL8ShNimvUzCncG7pG1I4roPxQcz_8zm1mquh3BdDQD7L5-RvTm4_KLCrJ6d9QGIoNxHYhbotCfr_7T90iMzLCYs1hPjVU0XGtTMB9cQI_1b49tHswcWouqpP7RdqMupAACo1lxC9JjG1LHs2WIqHNkbyD8q3WepbjH3_4d4AoP4-XdD5lO7FHTr4xcc-fKJLV7D6EThH2BRDjuRNywUTa_jc4UlIpxMWT11RV8G9DE-ExckUm7e0EUO-0eNiVg
hsecrzgezneibp@bugfoo.com:eyJhbGciOiJSUzI1NiIsImtpZCI6Imluc18yTzZ3UTFYd3dxVFdXUWUyQ1VYZHZ2bnNaY2UiLCJ0eXAiOiJKV1QifQ.eyJhenAiOiJodHRwczovL2FjY291bnRzLmZvcmVmcm9udC5haSIsImV4cCI6MTY4MjYyODU4NiwiaWF0IjoxNjgyNjI4NTI2LCJpc3MiOiJodHRwczovL2NsZXJrLmZvcmVmcm9udC5haSIsIm5iZiI6MTY4MjYyODUxNiwic2lkIjoic2Vzc18yUDFkVERHT3I5b2l5NktRajRWUnQ0YVg1UHYiLCJzdWIiOiJ1c2VyXzJQMWRUN2FUZUJtUndjcVlMMDNtaVlnamVaNSJ9.UKqZKGsCY-wWuPPMYnziaB-3cVKAvBWtddjNnuCakbl_nh2BnQugb1iPp0Ov8IKbHQr7vIE14CHp_07-BK_1GRCoFlmk4bhOrtZ8FeKIqVR_1JpcF6YYiZwc0ebV_rtEoD9mSuFytjcG_cufdKCTZoS_ddAckQZDduzfK1LtenbUMNigd9YjTXBeJV6tNOJIyiKXJCA58_puT7_baMKKnCiSlOA4OazatbLBv8vwKrCz1zdFZvkvPLmXoZ4-EA16CWxbmaJmi-G7fzdfohqxuZn7vguWumSgx8P3YDODggYfqvy3cd2-PAcpde066UgWCvRqXWn8s-01EropKbI_lg
ogokkecbrckufe@bugfoo.com:eyJhbGciOiJSUzI1NiIsImtpZCI6Imluc18yTzZ3UTFYd3dxVFdXUWUyQ1VYZHZ2bnNaY2UiLCJ0eXAiOiJKV1QifQ.eyJhenAiOiJodHRwczovL2FjY291bnRzLmZvcmVmcm9udC5haSIsImV4cCI6MTY4MjYyODgwMSwiaWF0IjoxNjgyNjI4NzQxLCJpc3MiOiJodHRwczovL2NsZXJrLmZvcmVmcm9udC5haSIsIm5iZiI6MTY4MjYyODczMSwic2lkIjoic2Vzc18yUDFkdThHWkdJUHR4bXZvY2ZQMVJaSU12TnEiLCJzdWIiOiJ1c2VyXzJQMWR1N1RIN0VEeGx6NTQwTktITHdFalZSWCJ9.S-VMHfS9U-OYRBJsL6VjQzQawnV3opwYf0AQNQYiXaieMQTD8Op351zx9X6wKUk7JWeNZykP9-D8eNMBR8MeQ4ePxIRWmLtVnTBxJsoTNkXpC9FpbRzhTr84fXwkFccg-Yr3uPqdYuDz2EiPpROi3D2o072cNU1ZE2P4gSf6MOkatXP2l7ZYyNyI9WJEjkDJ5nRuGErPk9HLUuWzpqefmJSoo0PyrCT0OZSSGaNeB-QrcrLrw19UxqHcBdMYsay0HicX8A94HFerVlGz14s8aM4TdZNHWRZAgI627-HP23DZe53HUVTBCsRA6KYv4D7TI1PzVGrZVMweG1oaMkvr0A
xprysdgqgcljde@bugfoo.com:eyJhbGciOiJSUzI1NiIsImtpZCI6Imluc18yTzZ3UTFYd3dxVFdXUWUyQ1VYZHZ2bnNaY2UiLCJ0eXAiOiJKV1QifQ.eyJhenAiOiJodHRwczovL2FjY291bnRzLmZvcmVmcm9udC5haSIsImV4cCI6MTY4MjYzMDMxNSwiaWF0IjoxNjgyNjMwMjU1LCJpc3MiOiJodHRwczovL2NsZXJrLmZvcmVmcm9udC5haSIsIm5iZiI6MTY4MjYzMDI0NSwic2lkIjoic2Vzc18yUDFneVNHbWNFYzFjUmJwaGVPcVBNaEZNeWciLCJzdWIiOiJ1c2VyXzJQMWd5Uk45VXZHOHpzSjB5RGV2dW9IM0lRNiJ9.l4_UxRJy33HCl2hsjA3GBVrRH9gE8j9zusPvxYt531ze70UuXT8kaKXolDEJmub32LMEyGFjpTqvvKUdKAVfdhLnlIWvTbaIrwP5IQ1x38RA5ADNFGXRIizLjU2unsJR6Y_50yTuLerux9AVn_o2B0777FSm9aFAip0JdQjP_LHPCGE5fa6VU93VkP90qHf-m-WpimhIf8aqO4N8YeIr_E39TNM_RSWmRj6nqNp8aeY6PQYTq4csd57kMRCM4bgL_B9VeNI0khdQE42qbLvFqWQu2Cf1j2a7ZX6Qng2m6IauykzmJ8zMlbqGBTBIVHUviX_RuEYFWcCECQLVtOIlJQ
othjyimaedmaum@bugfoo.com:eyJhbGciOiJSUzI1NiIsImtpZCI6Imluc18yTzZ3UTFYd3dxVFdXUWUyQ1VYZHZ2bnNaY2UiLCJ0eXAiOiJKV1QifQ.eyJhenAiOiJodHRwczovL2FjY291bnRzLmZvcmVmcm9udC5haSIsImV4cCI6MTY4MjYzMDUwNywiaWF0IjoxNjgyNjMwNDQ3LCJpc3MiOiJodHRwczovL2NsZXJrLmZvcmVmcm9udC5haSIsIm5iZiI6MTY4MjYzMDQzNywic2lkIjoic2Vzc18yUDFoTWFCaEw5Nzg3MzJJWTVFcFpMamZWOVAiLCJzdWIiOiJ1c2VyXzJQMWhNYUhYaXF3dnV3SUZQZTYwNDducDcwQyJ9.xKg5zVlDlLC0ZVT21g9WmJrm6dW4Z9-WmMdBjskyaK_Gp7zytF0QFaCit_nmI-lWZ5mfUaiVm1sMAJcbAP8uyV1SJcv2dbBQ7vsYSoklDRSdiQCwHS5YL_fH77a7Bk5z3QMnssouIne3oJLD1qadD5Dv68uumvyZ5iq4a-PB8kV0cprl44GiMOkgBsoseZ8kgAkUUMmktScgtEFDO5ALBbNAPFZzt8nsrKDZO_fD9hQE9eWhF50Htchj3HE27gRGY5zC6n_XyMxsFmxoHSIt0vi74atqbFtlzp4Ct7YuDm_PFA6Dy0VoWx2FtkyjFow4ZcqHUubn2Nw3r8BlwpUReA
vcwbipuximmpzo@bugfoo.com:eyJhbGciOiJSUzI1NiIsImtpZCI6Imluc18yTzZ3UTFYd3dxVFdXUWUyQ1VYZHZ2bnNaY2UiLCJ0eXAiOiJKV1QifQ.eyJhenAiOiJodHRwczovL2FjY291bnRzLmZvcmVmcm9udC5haSIsImV4cCI6MTY4MjYzMDg2MywiaWF0IjoxNjgyNjMwODAzLCJpc3MiOiJodHRwczovL2NsZXJrLmZvcmVmcm9udC5haSIsIm5iZiI6MTY4MjYzMDc5Mywic2lkIjoic2Vzc18yUDFpNUhDM0JHUTRlRXY2VnFkcUdCQnNMUzgiLCJzdWIiOiJ1c2VyXzJQMWk1R2U5VFRzQjFCMDVXTFRIcXBjNzA2ZSJ9.vZt1afDAZ1UANpZwW30p4GWEezpQ2pCtQ6oD7bKxl68-_tsyOHlTd_Tqo1EChkqHKfaKdiSN2opjl3EpPqOq1DEwn7Oc0xFGod489juO-KSpbT5zyJtuUQn8t56UnOvC3ofCJvveE51aaPNt2it7-8vyhqDgkrEfw03PGFCv8PFK1zhqpjERuhBA_Cw8lFCtsc-uDcgMMSrenYmGnlPt8cD49k7OGNzE-GAXjVoxaVUFNQSloP76C-Nh919ycDId6x8U6P9DkOojLRUhEaGRNPI5SopO7IAOWt3S7jgpczzRW70pC_tn3EEcxFPm67tb2c41PdXk2vxrOwRo38cXPA

View file

@ -1,10 +1,9 @@
import forefront
from openai_rev import forefront
# create an account
token = forefront.Account.create(logging=True)
print(token)
# get a response
for response in forefront.StreamingCompletion.create(token=token,
prompt='hello world', model='gpt-4'):
print(response.completion.choices[0].text, end='')
for response in forefront.StreamingCompletion.create(token=token, prompt='hello world', model='gpt-4'):
print(response.text, end='')

View file

@ -1,27 +1,28 @@
from hashlib import md5
from json import dumps
from re import findall
from typing import Optional
from tls_client import Session as TLS
from twocaptcha import TwoCaptcha
from quora import extract_formkey
from quora.mail import Emailnator
from openai_rev.quora import extract_formkey
from openai_rev.quora.mail import Emailnator
solver = TwoCaptcha('72747bf24a9d89b4dcc1b24875efd358')
class Account:
def create(proxy: None or str = None, logging: bool = False, enable_bot_creation: bool = False):
@staticmethod
def create(proxy: Optional[str] = None, logging: bool = False, enable_bot_creation: bool = False):
client = TLS(client_identifier='chrome110')
client.proxies = {
'http': f'http://{proxy}',
'https': f'http://{proxy}'} if proxy else None
client.proxies = {'http': f'http://{proxy}', 'https': f'http://{proxy}'} if proxy else None
mail_client = Emailnator()
mail_address = mail_client.get_mail()
if logging: print('email', mail_address)
if logging:
print('email', mail_address)
client.headers = {
'authority': 'poe.com',
@ -39,29 +40,30 @@ class Account:
'sec-fetch-dest': 'empty',
'sec-fetch-mode': 'cors',
'sec-fetch-site': 'same-origin',
'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36'
'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36',
}
client.headers["poe-formkey"] = extract_formkey(client.get('https://poe.com/login').text)
client.headers["poe-tchannel"] = client.get('https://poe.com/api/settings').json()['tchannelData']['channel']
# token = reCaptchaV3('https://www.recaptcha.net/recaptcha/enterprise/anchor?ar=1&k=6LflhEElAAAAAI_ewVwRWI9hsyV4mbZnYAslSvlG&co=aHR0cHM6Ly9wb2UuY29tOjQ0Mw..&hl=en&v=4PnKmGB9wRHh1i04o7YUICeI&size=invisible&cb=bi6ivxoskyal')
token = solver.recaptcha(sitekey='6LflhEElAAAAAI_ewVwRWI9hsyV4mbZnYAslSvlG',
url='https://poe.com/login?redirect_url=%2F',
version='v3',
enterprise=1,
invisible=1,
action='login', )['code']
token = solver.recaptcha(
sitekey='6LflhEElAAAAAI_ewVwRWI9hsyV4mbZnYAslSvlG',
url='https://poe.com/login?redirect_url=%2F',
version='v3',
enterprise=1,
invisible=1,
action='login',
)['code']
payload = dumps(separators=(',', ':'), obj={
'queryName': 'MainSignupLoginSection_sendVerificationCodeMutation_Mutation',
'variables': {
'emailAddress': mail_address,
'phoneNumber': None,
'recaptchaToken': token
payload = dumps(
separators=(',', ':'),
obj={
'queryName': 'MainSignupLoginSection_sendVerificationCodeMutation_Mutation',
'variables': {'emailAddress': mail_address, 'phoneNumber': None, 'recaptchaToken': token},
'query': 'mutation MainSignupLoginSection_sendVerificationCodeMutation_Mutation(\n $emailAddress: String\n $phoneNumber: String\n $recaptchaToken: String\n) {\n sendVerificationCode(verificationReason: login, emailAddress: $emailAddress, phoneNumber: $phoneNumber, recaptchaToken: $recaptchaToken) {\n status\n errorMessage\n }\n}\n',
},
'query': 'mutation MainSignupLoginSection_sendVerificationCodeMutation_Mutation(\n $emailAddress: String\n $phoneNumber: String\n $recaptchaToken: String\n) {\n sendVerificationCode(verificationReason: login, emailAddress: $emailAddress, phoneNumber: $phoneNumber, recaptchaToken: $recaptchaToken) {\n status\n errorMessage\n }\n}\n',
})
)
base_string = payload + client.headers["poe-formkey"] + 'WpuLMiXEKKE98j56k'
client.headers["poe-tag-id"] = md5(base_string.encode()).hexdigest()
@ -74,31 +76,34 @@ class Account:
print('please try using a proxy / wait for fix')
if 'Bad Request' in response.text:
if logging: print('bad request, retrying...', response.json())
if logging:
print('bad request, retrying...', response.json())
quit()
if logging: print('send_code', response.json())
if logging:
print('send_code', response.json())
mail_content = mail_client.get_message()
mail_token = findall(r';">(\d{6,7})</div>', mail_content)[0]
if logging: print('code', mail_token)
if logging:
print('code', mail_token)
payload = dumps(separators=(',', ':'), obj={
"queryName": "SignupOrLoginWithCodeSection_signupWithVerificationCodeMutation_Mutation",
"variables": {
"verificationCode": str(mail_token),
"emailAddress": mail_address,
"phoneNumber": None
payload = dumps(
separators=(',', ':'),
obj={
"queryName": "SignupOrLoginWithCodeSection_signupWithVerificationCodeMutation_Mutation",
"variables": {"verificationCode": str(mail_token), "emailAddress": mail_address, "phoneNumber": None},
"query": "mutation SignupOrLoginWithCodeSection_signupWithVerificationCodeMutation_Mutation(\n $verificationCode: String!\n $emailAddress: String\n $phoneNumber: String\n) {\n signupWithVerificationCode(verificationCode: $verificationCode, emailAddress: $emailAddress, phoneNumber: $phoneNumber) {\n status\n errorMessage\n }\n}\n",
},
"query": "mutation SignupOrLoginWithCodeSection_signupWithVerificationCodeMutation_Mutation(\n $verificationCode: String!\n $emailAddress: String\n $phoneNumber: String\n) {\n signupWithVerificationCode(verificationCode: $verificationCode, emailAddress: $emailAddress, phoneNumber: $phoneNumber) {\n status\n errorMessage\n }\n}\n"
})
)
base_string = payload + client.headers["poe-formkey"] + 'WpuLMiXEKKE98j56k'
client.headers["poe-tag-id"] = md5(base_string.encode()).hexdigest()
response = client.post('https://poe.com/api/gql_POST', data=payload)
if logging: print('verify_code', response.json())
if logging:
print('verify_code', response.json())
Account.create(proxy='xtekky:wegwgwegwed_streaming-1@geo.iproyal.com:12321', logging=True)

View file

@ -1,13 +1,11 @@
from time import sleep
import quora
from openai_rev import quora
token = quora.Account.create(proxy=None, logging=True)
print('token', token)
sleep(2)
for response in quora.StreamingCompletion.create(model='gpt-3.5-turbo',
prompt='hello world',
token=token):
print(response.completion.choices[0].text, end="", flush=True)
for response in quora.StreamingCompletion.create(model='ChatGPT', prompt='hello world', token=token):
print(response.text, flush=True)

View file

@ -1,17 +1,12 @@
import quora
from openai_rev import quora
token = quora.Account.create(logging=True, enable_bot_creation=True)
model = quora.Model.create(
token=token,
model='gpt-3.5-turbo', # or claude-instant-v1.0
system_prompt='you are ChatGPT a large language model ...'
token=token, model='ChatGPT', system_prompt='you are ChatGPT a large language model ...' # or claude-instant-v1.0
)
print(model.name)
for response in quora.StreamingCompletion.create(
custom_model=model.name,
prompt='hello world',
token=token):
print(response.completion.choices[0].text)
for response in quora.StreamingCompletion.create(custom_model=model.name, prompt='hello world', token=token):
print(response.text)

View file

@ -1,6 +1,4 @@
import sqlchat
for response in sqlchat.StreamCompletion.create(
prompt='write python code to reverse a string',
messages=[]):
for response in sqlchat.StreamCompletion.create(prompt='write python code to reverse a string', messages=[]):
print(response.completion.choices[0].text, end='')

View file

@ -1,6 +1,4 @@
import t3nsor
for response in t3nsor.StreamCompletion.create(
prompt='write python code to reverse a string',
messages=[]):
for response in t3nsor.StreamCompletion.create(prompt='write python code to reverse a string', messages=[]):
print(response.completion.choices[0].text)

24
testing/test_main.py Normal file
View file

@ -0,0 +1,24 @@
from openai_rev import openai_rev, Provider, quora, forefront
# usage You
response = openai_rev.Completion.create(Provider.You, prompt='Write a poem on Lionel Messi')
print(response)
# usage Poe
token = quora.Account.create(logging=False)
response = openai_rev.Completion.create(
Provider.Poe, prompt='Write a poem on Lionel Messi', token=token, model='ChatGPT'
)
print(response)
# usage forefront
token = forefront.Account.create(logging=False)
response = openai_rev.Completion.create(
Provider.ForeFront, prompt='Write a poem on Lionel Messi', model='gpt-4', token=token
)
print(response)
print(f'END')
# usage theb
response = openai_rev.Completion.create(Provider.Theb, prompt='Write a poem on Lionel Messi')
print(response)

View file

@ -4,17 +4,14 @@ import writesonic
# create account (3-4s)
account = writesonic.Account.create(logging=True)
# with logging:
# 2023-04-06 21:50:25 INFO __main__ -> register success : '{"id":"51aa0809-3053-44f7-922a...' (2s)
# 2023-04-06 21:50:25 INFO __main__ -> id : '51aa0809-3053-44f7-922a-2b85d8d07edf'
# 2023-04-06 21:50:25 INFO __main__ -> token : 'eyJhbGciOiJIUzI1NiIsInR5cCI6Ik...'
# 2023-04-06 21:50:28 INFO __main__ -> got key : '194158c4-d249-4be0-82c6-5049e869533c' (2s)
# simple completion
response = writesonic.Completion.create(
api_key=account.key,
prompt='hello world'
)
response = writesonic.Completion.create(api_key=account.key, prompt='hello world')
print(response.completion.choices[0].text) # Hello! How may I assist you today?
@ -24,16 +21,7 @@ response = writesonic.Completion.create(
api_key=account.key,
prompt='what is my name ?',
enable_memory=True,
history_data=[
{
'is_sent': True,
'message': 'my name is Tekky'
},
{
'is_sent': False,
'message': 'hello Tekky'
}
]
history_data=[{'is_sent': True, 'message': 'my name is Tekky'}, {'is_sent': False, 'message': 'hello Tekky'}],
)
print(response.completion.choices[0].text) # Your name is Tekky.
@ -41,9 +29,7 @@ print(response.completion.choices[0].text) # Your name is Tekky.
# enable internet
response = writesonic.Completion.create(
api_key=account.key,
prompt='who won the quatar world cup ?',
enable_google_results=True
api_key=account.key, prompt='who won the quatar world cup ?', enable_google_results=True
)
print(response.completion.choices[0].text) # Argentina won the 2022 FIFA World Cup tournament held in Qatar ...

View file

@ -1,4 +1,4 @@
import you
from openai_rev import you
# simple request with links and details
response = you.Completion.create(prompt="hello world", detailed=True, include_links=True)

View file

@ -1,4 +0,0 @@
import theb
for token in theb.Completion.create('hello world'):
print(token, end='', flush=True)

241
venv/bin/Activate.ps1 Normal file
View file

@ -0,0 +1,241 @@
<#
.Synopsis
Activate a Python virtual environment for the current PowerShell session.
.Description
Pushes the python executable for a virtual environment to the front of the
$Env:PATH environment variable and sets the prompt to signify that you are
in a Python virtual environment. Makes use of the command line switches as
well as the `pyvenv.cfg` file values present in the virtual environment.
.Parameter VenvDir
Path to the directory that contains the virtual environment to activate. The
default value for this is the parent of the directory that the Activate.ps1
script is located within.
.Parameter Prompt
The prompt prefix to display when this virtual environment is activated. By
default, this prompt is the name of the virtual environment folder (VenvDir)
surrounded by parentheses and followed by a single space (ie. '(.venv) ').
.Example
Activate.ps1
Activates the Python virtual environment that contains the Activate.ps1 script.
.Example
Activate.ps1 -Verbose
Activates the Python virtual environment that contains the Activate.ps1 script,
and shows extra information about the activation as it executes.
.Example
Activate.ps1 -VenvDir C:\Users\MyUser\Common\.venv
Activates the Python virtual environment located in the specified location.
.Example
Activate.ps1 -Prompt "MyPython"
Activates the Python virtual environment that contains the Activate.ps1 script,
and prefixes the current prompt with the specified string (surrounded in
parentheses) while the virtual environment is active.
.Notes
On Windows, it may be required to enable this Activate.ps1 script by setting the
execution policy for the user. You can do this by issuing the following PowerShell
command:
PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser
For more information on Execution Policies:
https://go.microsoft.com/fwlink/?LinkID=135170
#>
Param(
[Parameter(Mandatory = $false)]
[String]
$VenvDir,
[Parameter(Mandatory = $false)]
[String]
$Prompt
)
<# Function declarations --------------------------------------------------- #>
<#
.Synopsis
Remove all shell session elements added by the Activate script, including the
addition of the virtual environment's Python executable from the beginning of
the PATH variable.
.Parameter NonDestructive
If present, do not remove this function from the global namespace for the
session.
#>
function global:deactivate ([switch]$NonDestructive) {
# Revert to original values
# The prior prompt:
if (Test-Path -Path Function:_OLD_VIRTUAL_PROMPT) {
Copy-Item -Path Function:_OLD_VIRTUAL_PROMPT -Destination Function:prompt
Remove-Item -Path Function:_OLD_VIRTUAL_PROMPT
}
# The prior PYTHONHOME:
if (Test-Path -Path Env:_OLD_VIRTUAL_PYTHONHOME) {
Copy-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME -Destination Env:PYTHONHOME
Remove-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME
}
# The prior PATH:
if (Test-Path -Path Env:_OLD_VIRTUAL_PATH) {
Copy-Item -Path Env:_OLD_VIRTUAL_PATH -Destination Env:PATH
Remove-Item -Path Env:_OLD_VIRTUAL_PATH
}
# Just remove the VIRTUAL_ENV altogether:
if (Test-Path -Path Env:VIRTUAL_ENV) {
Remove-Item -Path env:VIRTUAL_ENV
}
# Just remove the _PYTHON_VENV_PROMPT_PREFIX altogether:
if (Get-Variable -Name "_PYTHON_VENV_PROMPT_PREFIX" -ErrorAction SilentlyContinue) {
Remove-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Scope Global -Force
}
# Leave deactivate function in the global namespace if requested:
if (-not $NonDestructive) {
Remove-Item -Path function:deactivate
}
}
<#
.Description
Get-PyVenvConfig parses the values from the pyvenv.cfg file located in the
given folder, and returns them in a map.
For each line in the pyvenv.cfg file, if that line can be parsed into exactly
two strings separated by `=` (with any amount of whitespace surrounding the =)
then it is considered a `key = value` line. The left hand string is the key,
the right hand is the value.
If the value starts with a `'` or a `"` then the first and last character is
stripped from the value before being captured.
.Parameter ConfigDir
Path to the directory that contains the `pyvenv.cfg` file.
#>
function Get-PyVenvConfig(
[String]
$ConfigDir
) {
Write-Verbose "Given ConfigDir=$ConfigDir, obtain values in pyvenv.cfg"
# Ensure the file exists, and issue a warning if it doesn't (but still allow the function to continue).
$pyvenvConfigPath = Join-Path -Resolve -Path $ConfigDir -ChildPath 'pyvenv.cfg' -ErrorAction Continue
# An empty map will be returned if no config file is found.
$pyvenvConfig = @{ }
if ($pyvenvConfigPath) {
Write-Verbose "File exists, parse `key = value` lines"
$pyvenvConfigContent = Get-Content -Path $pyvenvConfigPath
$pyvenvConfigContent | ForEach-Object {
$keyval = $PSItem -split "\s*=\s*", 2
if ($keyval[0] -and $keyval[1]) {
$val = $keyval[1]
# Remove extraneous quotations around a string value.
if ("'""".Contains($val.Substring(0, 1))) {
$val = $val.Substring(1, $val.Length - 2)
}
$pyvenvConfig[$keyval[0]] = $val
Write-Verbose "Adding Key: '$($keyval[0])'='$val'"
}
}
}
return $pyvenvConfig
}
<# Begin Activate script --------------------------------------------------- #>
# Determine the containing directory of this script
$VenvExecPath = Split-Path -Parent $MyInvocation.MyCommand.Definition
$VenvExecDir = Get-Item -Path $VenvExecPath
Write-Verbose "Activation script is located in path: '$VenvExecPath'"
Write-Verbose "VenvExecDir Fullname: '$($VenvExecDir.FullName)"
Write-Verbose "VenvExecDir Name: '$($VenvExecDir.Name)"
# Set values required in priority: CmdLine, ConfigFile, Default
# First, get the location of the virtual environment, it might not be
# VenvExecDir if specified on the command line.
if ($VenvDir) {
Write-Verbose "VenvDir given as parameter, using '$VenvDir' to determine values"
}
else {
Write-Verbose "VenvDir not given as a parameter, using parent directory name as VenvDir."
$VenvDir = $VenvExecDir.Parent.FullName.TrimEnd("\\/")
Write-Verbose "VenvDir=$VenvDir"
}
# Next, read the `pyvenv.cfg` file to determine any required value such
# as `prompt`.
$pyvenvCfg = Get-PyVenvConfig -ConfigDir $VenvDir
# Next, set the prompt from the command line, or the config file, or
# just use the name of the virtual environment folder.
if ($Prompt) {
Write-Verbose "Prompt specified as argument, using '$Prompt'"
}
else {
Write-Verbose "Prompt not specified as argument to script, checking pyvenv.cfg value"
if ($pyvenvCfg -and $pyvenvCfg['prompt']) {
Write-Verbose " Setting based on value in pyvenv.cfg='$($pyvenvCfg['prompt'])'"
$Prompt = $pyvenvCfg['prompt'];
}
else {
Write-Verbose " Setting prompt based on parent's directory's name. (Is the directory name passed to venv module when creating the virtual environment)"
Write-Verbose " Got leaf-name of $VenvDir='$(Split-Path -Path $venvDir -Leaf)'"
$Prompt = Split-Path -Path $venvDir -Leaf
}
}
Write-Verbose "Prompt = '$Prompt'"
Write-Verbose "VenvDir='$VenvDir'"
# Deactivate any currently active virtual environment, but leave the
# deactivate function in place.
deactivate -nondestructive
# Now set the environment variable VIRTUAL_ENV, used by many tools to determine
# that there is an activated venv.
$env:VIRTUAL_ENV = $VenvDir
if (-not $Env:VIRTUAL_ENV_DISABLE_PROMPT) {
Write-Verbose "Setting prompt to '$Prompt'"
# Set the prompt to include the env name
# Make sure _OLD_VIRTUAL_PROMPT is global
function global:_OLD_VIRTUAL_PROMPT { "" }
Copy-Item -Path function:prompt -Destination function:_OLD_VIRTUAL_PROMPT
New-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Description "Python virtual environment prompt prefix" -Scope Global -Option ReadOnly -Visibility Public -Value $Prompt
function global:prompt {
Write-Host -NoNewline -ForegroundColor Green "($_PYTHON_VENV_PROMPT_PREFIX) "
_OLD_VIRTUAL_PROMPT
}
}
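# Illustrative only: with the override above, an interactive session would show a prompt
# along the lines of "(venv) PS /Users/reymarrero/Developer/chatgpt/gpt4free>", i.e. the
# green "($Prompt) " prefix followed by whatever the original prompt function produced.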
# Clear PYTHONHOME
if (Test-Path -Path Env:PYTHONHOME) {
Copy-Item -Path Env:PYTHONHOME -Destination Env:_OLD_VIRTUAL_PYTHONHOME
Remove-Item -Path Env:PYTHONHOME
}
# Add the venv to the PATH
Copy-Item -Path Env:PATH -Destination Env:_OLD_VIRTUAL_PATH
$Env:PATH = "$VenvExecDir$([System.IO.Path]::PathSeparator)$Env:PATH"
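# Illustrative usage (assuming the script lives at venv/bin/Activate.ps1, as in a
# POSIX-style venv like the one committed in this repository):
#   PS> . ./venv/bin/Activate.ps1
#   (venv) PS> deactivate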

66
venv/bin/activate Normal file
View file

@ -0,0 +1,66 @@
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly
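# A typical session looks like the following (illustrative; paths assume the
# venv/ layout committed in this repository):
#   $ source venv/bin/activate
#   (venv) $ which python   # now resolves to the interpreter under venv/bin
#   (venv) $ deactivate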
deactivate () {
# reset old environment variables
if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then
PATH="${_OLD_VIRTUAL_PATH:-}"
export PATH
unset _OLD_VIRTUAL_PATH
fi
if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then
PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}"
export PYTHONHOME
unset _OLD_VIRTUAL_PYTHONHOME
fi
# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands. Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
hash -r 2> /dev/null
fi
if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then
PS1="${_OLD_VIRTUAL_PS1:-}"
export PS1
unset _OLD_VIRTUAL_PS1
fi
unset VIRTUAL_ENV
if [ ! "${1:-}" = "nondestructive" ] ; then
# Self destruct!
unset -f deactivate
fi
}
# unset irrelevant variables
deactivate nondestructive
VIRTUAL_ENV="/Users/reymarrero/Developer/chatgpt/gpt4free/venv"
export VIRTUAL_ENV
_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH
# unset PYTHONHOME if set
# this will fail if PYTHONHOME is set to the empty string (which is bad anyway)
# could use `if (set -u; : $PYTHONHOME) ;` in bash
if [ -n "${PYTHONHOME:-}" ] ; then
_OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}"
unset PYTHONHOME
fi
if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then
_OLD_VIRTUAL_PS1="${PS1:-}"
PS1="(venv) ${PS1:-}"
export PS1
fi
# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands. Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
hash -r 2> /dev/null
fi

25
venv/bin/activate.csh Normal file
View file

@ -0,0 +1,25 @@
# This file must be used with "source bin/activate.csh" *from csh*.
# You cannot run it directly.
# Created by Davide Di Blasi <davidedb@gmail.com>.
# Ported to Python 3.3 venv by Andrew Svetlov <andrew.svetlov@gmail.com>
alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; test "\!:*" != "nondestructive" && unalias deactivate'
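# The alias above undoes activation: it restores PATH and the prompt from the
# _OLD_VIRTUAL_* variables, rehashes, unsets VIRTUAL_ENV, and finally removes
# itself -- unless it was invoked with the "nondestructive" argument, as in the
# `deactivate nondestructive` call just below.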
# Unset irrelevant variables.
deactivate nondestructive
setenv VIRTUAL_ENV "/Users/reymarrero/Developer/chatgpt/gpt4free/venv"
set _OLD_VIRTUAL_PATH="$PATH"
setenv PATH "$VIRTUAL_ENV/bin:$PATH"
set _OLD_VIRTUAL_PROMPT="$prompt"
if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then
set prompt = "(venv) $prompt"
endif
alias pydoc python -m pydoc
rehash

64
venv/bin/activate.fish Normal file
View file

@ -0,0 +1,64 @@
# This file must be used with "source <venv>/bin/activate.fish" *from fish*
# (https://fishshell.com/); you cannot run it directly.
function deactivate -d "Exit virtual environment and return to normal shell environment"
# reset old environment variables
if test -n "$_OLD_VIRTUAL_PATH"
set -gx PATH $_OLD_VIRTUAL_PATH
set -e _OLD_VIRTUAL_PATH
end
if test -n "$_OLD_VIRTUAL_PYTHONHOME"
set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME
set -e _OLD_VIRTUAL_PYTHONHOME
end
if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
functions -e fish_prompt
set -e _OLD_FISH_PROMPT_OVERRIDE
functions -c _old_fish_prompt fish_prompt
functions -e _old_fish_prompt
end
set -e VIRTUAL_ENV
if test "$argv[1]" != "nondestructive"
# Self-destruct!
functions -e deactivate
end
end
# Unset irrelevant variables.
deactivate nondestructive
set -gx VIRTUAL_ENV "/Users/reymarrero/Developer/chatgpt/gpt4free/venv"
set -gx _OLD_VIRTUAL_PATH $PATH
set -gx PATH "$VIRTUAL_ENV/bin" $PATH
# Unset PYTHONHOME if set.
if set -q PYTHONHOME
set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
set -e PYTHONHOME
end
if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
# fish uses a function instead of an env var to generate the prompt.
# Save the current fish_prompt function as the function _old_fish_prompt.
functions -c fish_prompt _old_fish_prompt
# With the original prompt function renamed, we can override with our own.
function fish_prompt
# Save the return status of the last command.
set -l old_status $status
# Output the venv prompt; color taken from the blue of the Python logo.
printf "%s%s%s" (set_color 4B8BBE) "(venv) " (set_color normal)
# Restore the return status of the previous command.
echo "exit $old_status" | .
# Output the original/"old" prompt.
_old_fish_prompt
end
set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
end

8
venv/bin/f2py Executable file
View file

@ -0,0 +1,8 @@
#!/Users/reymarrero/Developer/chatgpt/gpt4free/venv/bin/python3.9
# -*- coding: utf-8 -*-
import re
import sys
from numpy.f2py.f2py2e import main
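# Standard console-script shim: the substitution below strips a trailing
# "-script.pyw" or ".exe" suffix from sys.argv[0] (only relevant on Windows
# installs) so the tool reports a clean program name, then hands control to
# the package's main() entry point. The same pattern is repeated by the other
# venv/bin entry points further down.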
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())

8
venv/bin/f2py3 Executable file
View file

@ -0,0 +1,8 @@
#!/Users/reymarrero/Developer/chatgpt/gpt4free/venv/bin/python3.9
# -*- coding: utf-8 -*-
import re
import sys
from numpy.f2py.f2py2e import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())

8
venv/bin/f2py3.9 Executable file
View file

@ -0,0 +1,8 @@
#!/Users/reymarrero/Developer/chatgpt/gpt4free/venv/bin/python3.9
# -*- coding: utf-8 -*-
import re
import sys
from numpy.f2py.f2py2e import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())

8
venv/bin/jsonschema Executable file
View file

@ -0,0 +1,8 @@
#!/Users/reymarrero/Developer/chatgpt/gpt4free/venv/bin/python3.9
# -*- coding: utf-8 -*-
import re
import sys
from jsonschema.cli import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())

8
venv/bin/markdown-it Executable file
View file

@ -0,0 +1,8 @@
#!/Users/reymarrero/Developer/chatgpt/gpt4free/venv/bin/python3.9
# -*- coding: utf-8 -*-
import re
import sys
from markdown_it.cli.parse import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())

8
venv/bin/names Executable file
View file

@ -0,0 +1,8 @@
#!/Users/reymarrero/Developer/chatgpt/gpt4free/venv/bin/python3.9
# -*- coding: utf-8 -*-
import re
import sys
from names.main import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())

8
venv/bin/normalizer Executable file
View file

@ -0,0 +1,8 @@
#!/Users/reymarrero/Developer/chatgpt/gpt4free/venv/bin/python3.9
# -*- coding: utf-8 -*-
import re
import sys
from charset_normalizer.cli.normalizer import cli_detect
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(cli_detect())

8
venv/bin/pip Executable file
View file

@ -0,0 +1,8 @@
#!/Users/reymarrero/Developer/chatgpt/gpt4free/venv/bin/python3.9
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())

8
venv/bin/pip3 Executable file
View file

@ -0,0 +1,8 @@
#!/Users/reymarrero/Developer/chatgpt/gpt4free/venv/bin/python3.9
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())

8
venv/bin/pip3.9 Executable file
View file

@ -0,0 +1,8 @@
#!/Users/reymarrero/Developer/chatgpt/gpt4free/venv/bin/python3.9
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())

8
venv/bin/plasma_store Executable file
View file

@ -0,0 +1,8 @@
#!/Users/reymarrero/Developer/chatgpt/gpt4free/venv/bin/python3.9
# -*- coding: utf-8 -*-
import re
import sys
from pyarrow import _plasma_store_entry_point
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(_plasma_store_entry_point())

8
venv/bin/pygmentize Executable file
View file

@ -0,0 +1,8 @@
#!/Users/reymarrero/Developer/chatgpt/gpt4free/venv/bin/python3.9
# -*- coding: utf-8 -*-
import re
import sys
from pygments.cmdline import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())

1
venv/bin/python Symbolic link
View file

@ -0,0 +1 @@
python3.9

1
venv/bin/python3 Symbolic link
View file

@ -0,0 +1 @@
python3.9

1
venv/bin/python3.9 Symbolic link
View file

@ -0,0 +1 @@
/usr/local/opt/python@3.9/bin/python3.9

8
venv/bin/streamlit Executable file
View file

@ -0,0 +1,8 @@
#!/Users/reymarrero/Developer/chatgpt/gpt4free/venv/bin/python3.9
# -*- coding: utf-8 -*-
import re
import sys
from streamlit.web.cli import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())

16
venv/bin/streamlit.cmd Executable file
View file

@ -0,0 +1,16 @@
rem Copyright (c) Streamlit Inc. (2018-2022) Snowflake Inc. (2022)
rem
rem Licensed under the Apache License, Version 2.0 (the "License");
rem you may not use this file except in compliance with the License.
rem You may obtain a copy of the License at
rem
rem http://www.apache.org/licenses/LICENSE-2.0
rem
rem Unless required by applicable law or agreed to in writing, software
rem distributed under the License is distributed on an "AS IS" BASIS,
rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
rem See the License for the specific language governing permissions and
rem limitations under the License.
@echo OFF
python -m streamlit %*

8
venv/bin/watchmedo Executable file
View file

@ -0,0 +1,8 @@
#!/Users/reymarrero/Developer/chatgpt/gpt4free/venv/bin/python3.9
# -*- coding: utf-8 -*-
import re
import sys
from watchdog.watchmedo import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())

8
venv/bin/wsdump Executable file
View file

@ -0,0 +1,8 @@
#!/Users/reymarrero/Developer/chatgpt/gpt4free/venv/bin/python3.9
# -*- coding: utf-8 -*-
import re
import sys
from websocket._wsdump import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())

View file

@ -0,0 +1,5 @@
{
    "load_extensions": {
        "pydeck/extension": true
    }
}

View file

@ -0,0 +1,54 @@
GitPython was originally written by Michael Trier.
GitPython 0.2 was partially (re)written by Sebastian Thiel, based on 0.1.6 and git-dulwich.
Contributors are:
-Michael Trier <mtrier _at_ gmail.com>
-Alan Briolat
-Florian Apolloner <florian _at_ apolloner.eu>
-David Aguilar <davvid _at_ gmail.com>
-Jelmer Vernooij <jelmer _at_ samba.org>
-Steve Frécinaux <code _at_ istique.net>
-Kai Lautaportti <kai _at_ lautaportti.fi>
-Paul Sowden <paul _at_ idontsmoke.co.uk>
-Sebastian Thiel <byronimo _at_ gmail.com>
-Jonathan Chu <jonathan.chu _at_ me.com>
-Vincent Driessen <me _at_ nvie.com>
-Phil Elson <pelson _dot_ pub _at_ gmail.com>
-Bernard `Guyzmo` Pratz <guyzmo+gitpython+pub@m0g.net>
-Timothy B. Hartman <tbhartman _at_ gmail.com>
-Konstantin Popov <konstantin.popov.89 _at_ yandex.ru>
-Peter Jones <pjones _at_ redhat.com>
-Anson Mansfield <anson.mansfield _at_ gmail.com>
-Ken Odegard <ken.odegard _at_ gmail.com>
-Alexis Horgix Chotard
-Piotr Babij <piotr.babij _at_ gmail.com>
-Mikuláš Poul <mikulaspoul _at_ gmail.com>
-Charles Bouchard-Légaré <cblegare.atl _at_ ntis.ca>
-Yaroslav Halchenko <debian _at_ onerussian.com>
-Tim Swast <swast _at_ google.com>
-William Luc Ritchie
-David Host <hostdm _at_ outlook.com>
-A. Jesse Jiryu Davis <jesse _at_ emptysquare.net>
-Steven Whitman <ninloot _at_ gmail.com>
-Stefan Stancu <stefan.stancu _at_ gmail.com>
-César Izurieta <cesar _at_ caih.org>
-Arthur Milchior <arthur _at_ milchior.fr>
-Anil Khatri <anil.soccer.khatri _at_ gmail.com>
-JJ Graham <thetwoj _at_ gmail.com>
-Ben Thayer <ben _at_ benthayer.com>
-Dries Kennes <admin _at_ dries007.net>
-Pratik Anurag <panurag247365 _at_ gmail.com>
-Harmon <harmon.public _at_ gmail.com>
-Liam Beguin <liambeguin _at_ gmail.com>
-Ram Rachum <ram _at_ rachum.com>
-Alba Mendez <me _at_ alba.sh>
-Robert Westman <robert _at_ byteflux.io>
-Hugo van Kemenade
-Hiroki Tokunaga <tokusan441 _at_ gmail.com>
-Julien Mauroy <pro.julien.mauroy _at_ gmail.com>
-Patrick Gerard
-Luke Twist <itsluketwist@gmail.com>
-Joseph Hale <me _at_ jhale.dev>
-Santos Gallegos <stsewd _at_ proton.me>
Portions derived from other open source works and are clearly marked.

Some files were not shown because too many files have changed in this diff.