From 2e6b523a70bf3ab2a94a470eb238d210c9c1cd7d Mon Sep 17 00:00:00 2001 From: Mishal <91066601+mishalhossin@users.noreply.github.com> Date: Sat, 29 Apr 2023 19:23:12 +0600 Subject: [PATCH 01/12] Update README.md --- gpt4free/quora/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gpt4free/quora/README.md b/gpt4free/quora/README.md index c6eeac3..9c652c5 100644 --- a/gpt4free/quora/README.md +++ b/gpt4free/quora/README.md @@ -55,7 +55,7 @@ print(response.completion.choices[0].text) ### Update Use This For Poe ```python -from quora import Poe +from gpt4free.quora import Poe # available models: ['Sage', 'GPT-4', 'Claude+', 'Claude-instant', 'ChatGPT', 'Dragonfly', 'NeevaAI'] From 3af895a0b625cd237dbc41fc4dc5b650173999cd Mon Sep 17 00:00:00 2001 From: Daniel Shemesh Date: Sat, 29 Apr 2023 17:19:04 +0300 Subject: [PATCH 02/12] Update README.md fixed theb link to file --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 0445951..9f42aa8 100644 --- a/README.md +++ b/README.md @@ -91,7 +91,7 @@ Please note the following: | **Copyright** | Copyright information | [![Link to Section](https://img.shields.io/badge/Link-Go%20to%20Section-blue)](#copyright) | - | | **Star History** | Star History | [![Link to Section](https://img.shields.io/badge/Link-Go%20to%20Section-blue)](#star-history) | - | | **Usage Examples** | | | | -| `theb` | Example usage for theb (gpt-3.5) | [![Link to File](https://img.shields.io/badge/Link-Go%20to%20File-blue)](openai_rev/theb/README.md) | ![Active](https://img.shields.io/badge/Active-brightgreen) | +| `theb` | Example usage for theb (gpt-3.5) | [![Link to File](https://img.shields.io/badge/Link-Go%20to%20File-blue)](gpt4free/theb/README.md) | ![Active](https://img.shields.io/badge/Active-brightgreen) | | `forefront` | Example usage for forefront (gpt-4) | [![Link to File](https://img.shields.io/badge/Link-Go%20to%20File-blue)](gpt4free/forefront/README.md) | ![Active](https://img.shields.io/badge/Active-brightgreen) | || | `quora (poe)` | Example usage for quora | [![Link to File](https://img.shields.io/badge/Link-Go%20to%20File-blue)](gpt4free/quora/README.md) | ![Active](https://img.shields.io/badge/Active-brightgreen) | | `you` | Example usage for you | [![Link to File](https://img.shields.io/badge/Link-Go%20to%20File-blue)](gpt4free/you/README.md) | ![Active](https://img.shields.io/badge/Active-brightgreen) | From d6819fd6316453f8e09cdca19dbf03f30b6e8a92 Mon Sep 17 00:00:00 2001 From: "t.me/xtekky" <98614666+xtekky@users.noreply.github.com> Date: Sat, 29 Apr 2023 23:48:53 +0100 Subject: [PATCH 03/12] _ --- unfinished/writesonic/README.md | 53 ---------- unfinished/writesonic/__init__.py | 163 ------------------------------ 2 files changed, 216 deletions(-) delete mode 100644 unfinished/writesonic/README.md delete mode 100644 unfinished/writesonic/__init__.py diff --git a/unfinished/writesonic/README.md b/unfinished/writesonic/README.md deleted file mode 100644 index a658a87..0000000 --- a/unfinished/writesonic/README.md +++ /dev/null @@ -1,53 +0,0 @@ -### Example: `writesonic` (use like openai pypi package) - -```python -# import writesonic -import writesonic - -# create account (3-4s) -account = writesonic.Account.create(logging = True) - -# with loging: - # 2023-04-06 21:50:25 INFO __main__ -> register success : '{"id":"51aa0809-3053-44f7-922a...' 
(2s) - # 2023-04-06 21:50:25 INFO __main__ -> id : '51aa0809-3053-44f7-922a-2b85d8d07edf' - # 2023-04-06 21:50:25 INFO __main__ -> token : 'eyJhbGciOiJIUzI1NiIsInR5cCI6Ik...' - # 2023-04-06 21:50:28 INFO __main__ -> got key : '194158c4-d249-4be0-82c6-5049e869533c' (2s) - -# simple completion -response = writesonic.Completion.create( - api_key = account.key, - prompt = 'hello world' -) - -print(response.completion.choices[0].text) # Hello! How may I assist you today? - -# conversation - -response = writesonic.Completion.create( - api_key = account.key, - prompt = 'what is my name ?', - enable_memory = True, - history_data = [ - { - 'is_sent': True, - 'message': 'my name is Tekky' - }, - { - 'is_sent': False, - 'message': 'hello Tekky' - } - ] -) - -print(response.completion.choices[0].text) # Your name is Tekky. - -# enable internet - -response = writesonic.Completion.create( - api_key = account.key, - prompt = 'who won the quatar world cup ?', - enable_google_results = True -) - -print(response.completion.choices[0].text) # Argentina won the 2022 FIFA World Cup tournament held in Qatar ... -``` \ No newline at end of file diff --git a/unfinished/writesonic/__init__.py b/unfinished/writesonic/__init__.py deleted file mode 100644 index ce68491..0000000 --- a/unfinished/writesonic/__init__.py +++ /dev/null @@ -1,163 +0,0 @@ -from random import choice -from time import time - -from colorama import Fore, init; -from names import get_first_name, get_last_name -from requests import Session -from requests import post - -init() - - -class logger: - @staticmethod - def info(string) -> print: - import datetime - now = datetime.datetime.now() - return print( - f"{Fore.CYAN}{now.strftime('%Y-%m-%d %H:%M:%S')} {Fore.BLUE}INFO {Fore.MAGENTA}__main__ -> {Fore.RESET}{string}") - - -class SonicResponse: - class Completion: - class Choices: - def __init__(self, choice: dict) -> None: - self.text = choice['text'] - self.content = self.text.encode() - self.index = choice['index'] - self.logprobs = choice['logprobs'] - self.finish_reason = choice['finish_reason'] - - def __repr__(self) -> str: - return f'''<__main__.APIResponse.Completion.Choices(\n text = {self.text.encode()},\n index = {self.index},\n logprobs = {self.logprobs},\n finish_reason = {self.finish_reason})object at 0x1337>''' - - def __init__(self, choices: dict) -> None: - self.choices = [self.Choices(choice) for choice in choices] - - class Usage: - def __init__(self, usage_dict: dict) -> None: - self.prompt_tokens = usage_dict['prompt_chars'] - self.completion_tokens = usage_dict['completion_chars'] - self.total_tokens = usage_dict['total_chars'] - - def __repr__(self): - return f'''<__main__.APIResponse.Usage(\n prompt_tokens = {self.prompt_tokens},\n completion_tokens = {self.completion_tokens},\n total_tokens = {self.total_tokens})object at 0x1337>''' - - def __init__(self, response_dict: dict) -> None: - self.response_dict = response_dict - self.id = response_dict['id'] - self.object = response_dict['object'] - self.created = response_dict['created'] - self.model = response_dict['model'] - self.completion = self.Completion(response_dict['choices']) - self.usage = self.Usage(response_dict['usage']) - - def json(self) -> dict: - return self.response_dict - - -class Account: - session = Session() - session.headers = { - "connection": "keep-alive", - "sec-ch-ua": "\"Not_A Brand\";v=\"99\", \"Google Chrome\";v=\"109\", \"Chromium\";v=\"109\"", - "accept": "application/json, text/plain, */*", - "content-type": "application/json", - 
"sec-ch-ua-mobile": "?0", - "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/109.0.0.0 Safari/537.36", - "sec-ch-ua-platform": "\"Windows\"", - "sec-fetch-site": "same-origin", - "sec-fetch-mode": "cors", - "sec-fetch-dest": "empty", - # "accept-encoding" : "gzip, deflate, br", - "accept-language": "en-GB,en-US;q=0.9,en;q=0.8", - "cookie": "" - } - - @staticmethod - def get_user(): - password = f'0opsYouGoTme@1234' - f_name = get_first_name() - l_name = get_last_name() - hosts = ['gmail.com', 'protonmail.com', 'proton.me', 'outlook.com'] - - return { - "email": f"{f_name.lower()}.{l_name.lower()}@{choice(hosts)}", - "password": password, - "confirm_password": password, - "full_name": f'{f_name} {l_name}' - } - - @staticmethod - def create(logging: bool = False): - while True: - try: - user = Account.get_user() - start = time() - response = Account.session.post("https://app.writesonic.com/api/session-login", json=user | { - "utmParams": "{}", - "visitorId": "0", - "locale": "en", - "userAgent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/109.0.0.0 Safari/537.36", - "signInWith": "password", - "request_type": "signup", - }) - - if logging: - logger.info(f"\x1b[31mregister success\x1b[0m : '{response.text[:30]}...' ({int(time() - start)}s)") - logger.info(f"\x1b[31mid\x1b[0m : '{response.json()['id']}'") - logger.info(f"\x1b[31mtoken\x1b[0m : '{response.json()['token'][:30]}...'") - - start = time() - response = Account.session.post("https://api.writesonic.com/v1/business/set-business-active", - headers={"authorization": "Bearer " + response.json()['token']}) - key = response.json()["business"]["api_key"] - if logging: logger.info(f"\x1b[31mgot key\x1b[0m : '{key}' ({int(time() - start)}s)") - - return Account.AccountResponse(user['email'], user['password'], key) - - except Exception as e: - if logging: logger.info(f"\x1b[31merror\x1b[0m : '{e}'") - continue - - class AccountResponse: - def __init__(self, email, password, key): - self.email = email - self.password = password - self.key = key - - -class Completion: - def create( - api_key: str, - prompt: str, - enable_memory: bool = False, - enable_google_results: bool = False, - history_data: list = []) -> SonicResponse: - response = post('https://api.writesonic.com/v2/business/content/chatsonic?engine=premium', - headers={"X-API-KEY": api_key}, - json={ - "enable_memory": enable_memory, - "enable_google_results": enable_google_results, - "input_text": prompt, - "history_data": history_data}).json() - - return SonicResponse({ - 'id': f'cmpl-premium-{int(time())}', - 'object': 'text_completion', - 'created': int(time()), - 'model': 'premium', - - 'choices': [{ - 'text': response['message'], - 'index': 0, - 'logprobs': None, - 'finish_reason': 'stop' - }], - - 'usage': { - 'prompt_chars': len(prompt), - 'completion_chars': len(response['message']), - 'total_chars': len(prompt) + len(response['message']) - } - }) From 55990be732f159dd669ac53d478f8c8b94138702 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E2=80=93=20sanz?= Date: Sun, 30 Apr 2023 09:42:57 +0800 Subject: [PATCH 04/12] fixed: bug fix on GUI the known bug has been fixed when trying to load a conversation and etc, the bug on the README --- gui/streamlit_chat_app.py | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/gui/streamlit_chat_app.py b/gui/streamlit_chat_app.py index 6801122..cd8c652 100644 --- a/gui/streamlit_chat_app.py +++ 
b/gui/streamlit_chat_app.py @@ -24,9 +24,9 @@ def load_conversations(): def save_conversations(conversations, current_conversation): updated = False - for i, conversation in enumerate(conversations): + for idx, conversation in enumerate(conversations): if conversation == current_conversation: - conversations[i] = current_conversation + conversations[idx] = current_conversation updated = True break if not updated: @@ -71,19 +71,20 @@ if 'current_conversation' not in st.session_state or st.session_state['current_c input_placeholder = st.empty() user_input = input_placeholder.text_input( - 'You:', key=f'input_text_{len(st.session_state["current_conversation"]["user_inputs"])}' + 'You:', value=st.session_state['input_text'], key=f'input_text_{st.session_state["input_field_key"]}' ) submit_button = st.button("Submit") -if user_input or submit_button: +if (user_input and user_input != st.session_state['input_text']) or submit_button: output = query(user_input, st.session_state['query_method']) escaped_output = output.encode('utf-8').decode('unicode-escape') st.session_state.current_conversation['user_inputs'].append(user_input) st.session_state.current_conversation['generated_responses'].append(escaped_output) save_conversations(st.session_state.conversations, st.session_state.current_conversation) + st.session_state['input_text'] = '' user_input = input_placeholder.text_input( - 'You:', value='', key=f'input_text_{len(st.session_state["current_conversation"]["user_inputs"])}' + 'You:', value=st.session_state['input_text'], key=f'input_text_{st.session_state["input_field_key"]}' ) # Clear the input field # Add a button to create a new conversation @@ -97,10 +98,10 @@ st.session_state['query_method'] = st.sidebar.selectbox("Select API:", options=a # Sidebar st.sidebar.header("Conversation History") -for i, conversation in enumerate(st.session_state.conversations): - if st.sidebar.button(f"Conversation {i + 1}: {conversation['user_inputs'][0]}", key=f"sidebar_btn_{i}"): - st.session_state['selected_conversation'] = i - st.session_state['current_conversation'] = st.session_state.conversations[i] +for idx, conversation in enumerate(st.session_state.conversations): + if st.sidebar.button(f"Conversation {idx + 1}: {conversation['user_inputs'][0]}", key=f"sidebar_btn_{idx}"): + st.session_state['selected_conversation'] = idx + st.session_state['current_conversation'] = st.session_state.conversations[idx] if st.session_state['selected_conversation'] is not None: conversation_to_display = st.session_state.conversations[st.session_state['selected_conversation']] From dece50601f962f392c2a2697721d10bc8da5cc5c Mon Sep 17 00:00:00 2001 From: Grant Hur Date: Sat, 29 Apr 2023 20:16:02 -0700 Subject: [PATCH 05/12] Update README.md --- gpt4free/README.md | 1 - 1 file changed, 1 deletion(-) diff --git a/gpt4free/README.md b/gpt4free/README.md index 23f8178..f3ba27a 100644 --- a/gpt4free/README.md +++ b/gpt4free/README.md @@ -19,7 +19,6 @@ pip install gpt4free ```python import gpt4free -import gpt4free from gpt4free import Provider, quora, forefront # usage You From a86ddffb9ff61e651f372f38028c9a0abad39317 Mon Sep 17 00:00:00 2001 From: taiyi747 <63543716+taiyi747@users.noreply.github.com> Date: Sun, 30 Apr 2023 11:19:14 +0800 Subject: [PATCH 06/12] backup options to Emailnator Emailnator seems to be a bit unstable, and there may be emails already in the original mailbox. This code is completely modeled after the original Mail.py. 
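A rough usage sketch of the fallback mail helper added below (untested; the import name is a placeholder, since the hyphen in `backup-mail.py` means the module cannot be imported under that name without renaming it or loading it via importlib):

```python
# Hypothetical usage of the etempmail.com-backed Mail class added in this patch.
# `backup_mail` is a stand-in module name for the file "backup-mail.py".
from backup_mail import Mail

mail = Mail()
address = mail.get_mail()            # request a disposable address from etempmail.com
# ... use `address` to trigger the sign-up verification e-mail here ...
mail.get_message()                   # polls the inbox every 5 s until a message arrives
code = mail.get_verification_code()  # extracts the 6-7 digit code from the message body
```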
--- gpt4free/quora/backup-mail.py | 37 +++++++++++++++++++++++++++++++++++ 1 file changed, 37 insertions(+) create mode 100644 gpt4free/quora/backup-mail.py diff --git a/gpt4free/quora/backup-mail.py b/gpt4free/quora/backup-mail.py new file mode 100644 index 0000000..0a2a5e9 --- /dev/null +++ b/gpt4free/quora/backup-mail.py @@ -0,0 +1,37 @@ +from requests import Session +from time import sleep +from json import loads +from re import findall +class Mail: + def __init__(self) -> None: + self.client = Session() + self.client.post("https://etempmail.com/") + self.cookies = {'acceptcookie': 'true'} + self.cookies["ci_session"] = self.client.cookies.get_dict()["ci_session"] + self.email = None + def get_mail(self): + respone=self.client.post("https://etempmail.com/getEmailAddress") + #cookies + self.cookies["lisansimo"] = eval(respone.text)["recover_key"] + self.email = eval(respone.text)["address"] + return self.email + def get_message(self): + print("Waiting for message...") + while True: + sleep(5) + respone=self.client.post("https://etempmail.com/getInbox") + mail_token=loads(respone.text) + print(self.client.cookies.get_dict()) + if len(mail_token) == 1: + break + + params = {'id': '1',} + self.mail_context = self.client.post("https://etempmail.com/getInbox",params=params) + self.mail_context = eval(self.mail_context.text)[0]["body"] + return self.mail_context + #,cookies=self.cookies + def get_verification_code(self): + message = self.mail_context + code = findall(r';">(\d{6,7})', message)[0] + print(f"Verification code: {code}") + return code \ No newline at end of file From 70acea30a58c55288c1d31c630801e9498b24114 Mon Sep 17 00:00:00 2001 From: Civitasv Date: Sun, 30 Apr 2023 13:19:46 +0800 Subject: [PATCH 07/12] Feat: add proxy settings in GUI and library. 
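A minimal sketch of how the new argument is meant to be called from the library side (based only on the signatures changed below; the proxy address is a placeholder, and each provider prefixes it with "http://" internally):

```python
# Hedged example of the proxy keyword added to the gpt4free providers in this patch.
from gpt4free import theb, you

proxy = "127.0.0.1:8080"  # hypothetical HTTP proxy, host:port

# you: returns a single response object
result = you.Completion.create(prompt="hello world", proxy=proxy)
print(result.text)

# theb: yields the answer as a stream of chunks
for chunk in theb.Completion.create(prompt="hello world", proxy=proxy):
    print(chunk, end="", flush=True)
```

In the GUI, the value typed into the new "Proxy:" sidebar field is passed through `query(...)` to whichever provider is selected.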
--- gpt4free/forefront/__init__.py | 6 ++++++ gpt4free/quora/__init__.py | 8 +++++++- gpt4free/theb/__init__.py | 11 +++++++---- gpt4free/you/__init__.py | 4 ++++ gui/query_methods.py | 29 +++++++++++++++-------------- gui/streamlit_chat_app.py | 5 ++++- 6 files changed, 43 insertions(+), 20 deletions(-) diff --git a/gpt4free/forefront/__init__.py b/gpt4free/forefront/__init__.py index f0ca1a1..aa78cfa 100644 --- a/gpt4free/forefront/__init__.py +++ b/gpt4free/forefront/__init__.py @@ -98,12 +98,15 @@ class StreamingCompletion: action_type='new', default_persona='607e41fe-95be-497e-8e97-010a59b2e2c0', # default model='gpt-4', + proxy=None ) -> Generator[ForeFrontResponse, None, None]: if not token: raise Exception('Token is required!') if not chat_id: chat_id = str(uuid4()) + proxies = { 'http': 'http://' + proxy, 'https': 'http://' + proxy } if proxy else None + headers = { 'authority': 'chat-server.tenant-forefront-default.knative.chi.coreweave.com', 'accept': '*/*', @@ -135,6 +138,7 @@ class StreamingCompletion: for chunk in post( 'https://chat-server.tenant-forefront-default.knative.chi.coreweave.com/chat', headers=headers, + proxies=proxies, json=json_data, stream=True, ).iter_lines(): @@ -169,6 +173,7 @@ class Completion: action_type='new', default_persona='607e41fe-95be-497e-8e97-010a59b2e2c0', # default model='gpt-4', + proxy=None ) -> ForeFrontResponse: text = '' final_response = None @@ -179,6 +184,7 @@ class Completion: action_type=action_type, default_persona=default_persona, model=model, + proxy=proxy ): if response: final_response = response diff --git a/gpt4free/quora/__init__.py b/gpt4free/quora/__init__.py index f548ff4..afbfb68 100644 --- a/gpt4free/quora/__init__.py +++ b/gpt4free/quora/__init__.py @@ -187,7 +187,7 @@ class Account: enable_bot_creation: bool = False, ): client = TLS(client_identifier='chrome110') - client.proxies = {'http': f'http://{proxy}', 'https': f'http://{proxy}'} if proxy else None + client.proxies = {'http': f'http://{proxy}', 'https': f'http://{proxy}'} if proxy else {} mail_client = Emailnator() mail_address = mail_client.get_mail() @@ -293,10 +293,13 @@ class StreamingCompletion: custom_model: bool = None, prompt: str = 'hello world', token: str = '', + proxy: Optional[str] = None ) -> Generator[PoeResponse, None, None]: _model = MODELS[model] if not custom_model else custom_model + proxies = { 'http': 'http://' + proxy, 'https': 'http://' + proxy } if proxy else False client = PoeClient(token) + client.proxy = proxies for chunk in client.send_message(_model, prompt): yield PoeResponse( @@ -330,10 +333,13 @@ class Completion: custom_model: str = None, prompt: str = 'hello world', token: str = '', + proxy: Optional[str] = None ) -> PoeResponse: _model = MODELS[model] if not custom_model else custom_model + proxies = {'http': 'http://' + proxy, 'https': 'http://' + proxy} if proxy else False client = PoeClient(token) + client.proxy = proxies chunk = None for response in client.send_message(_model, prompt): diff --git a/gpt4free/theb/__init__.py b/gpt4free/theb/__init__.py index 9605387..75a1506 100644 --- a/gpt4free/theb/__init__.py +++ b/gpt4free/theb/__init__.py @@ -2,7 +2,7 @@ from json import loads from queue import Queue, Empty from re import findall from threading import Thread -from typing import Generator +from typing import Generator, Optional from curl_cffi import requests from fake_useragent import UserAgent @@ -19,7 +19,7 @@ class Completion: stream_completed = False @staticmethod - def request(prompt: str): + def request(prompt: 
str, proxy: Optional[str]=None): headers = { 'authority': 'chatbot.theb.ai', 'content-type': 'application/json', @@ -27,9 +27,12 @@ class Completion: 'user-agent': UserAgent().random, } + proxies = {'http': 'http://' + proxy, 'https': 'http://' + proxy} if proxy else None + requests.post( 'https://chatbot.theb.ai/api/chat-process', headers=headers, + proxies=proxies, content_callback=Completion.handle_stream_response, json={'prompt': prompt, 'options': {}}, ) @@ -37,8 +40,8 @@ class Completion: Completion.stream_completed = True @staticmethod - def create(prompt: str) -> Generator[str, None, None]: - Thread(target=Completion.request, args=[prompt]).start() + def create(prompt: str, proxy: Optional[str]=None) -> Generator[str, None, None]: + Thread(target=Completion.request, args=[prompt, proxy]).start() while not Completion.stream_completed or not Completion.message_queue.empty(): try: diff --git a/gpt4free/you/__init__.py b/gpt4free/you/__init__.py index 97b4846..d084a84 100644 --- a/gpt4free/you/__init__.py +++ b/gpt4free/you/__init__.py @@ -30,12 +30,16 @@ class Completion: include_links: bool = False, detailed: bool = False, debug: bool = False, + proxy: Optional[str] = None ) -> PoeResponse: if chat is None: chat = [] + proxies = { 'http': 'http://' + proxy, 'https': 'http://' + proxy } if proxy else {} + client = Session(client_identifier='chrome_108') client.headers = Completion.__get_headers() + client.proxies = proxies response = client.get( f'https://you.com/api/streamingSearch', diff --git a/gui/query_methods.py b/gui/query_methods.py index 6225453..2d6adac 100644 --- a/gui/query_methods.py +++ b/gui/query_methods.py @@ -1,5 +1,6 @@ import os import sys +from typing import Optional sys.path.append(os.path.join(os.path.dirname(__file__), os.path.pardir)) @@ -7,14 +8,14 @@ from gpt4free import quora, forefront, theb, you import random -def query_forefront(question: str) -> str: +def query_forefront(question: str, proxy: Optional[str] = None) -> str: # create an account - token = forefront.Account.create(logging=False) + token = forefront.Account.create(logging=False, proxy=proxy) response = "" # get a response try: - return forefront.Completion.create(token=token, prompt='hello world', model='gpt-4').text + return forefront.Completion.create(token=token, prompt='hello world', model='gpt-4', proxy=proxy).text except Exception as e: # Return error message if an exception occurs return ( @@ -22,16 +23,16 @@ def query_forefront(question: str) -> str: ) -def query_quora(question: str) -> str: - token = quora.Account.create(logging=False, enable_bot_creation=True) - return quora.Completion.create(model='gpt-4', prompt=question, token=token).text +def query_quora(question: str, proxy: Optional[str] = None) -> str: + token = quora.Account.create(logging=False, enable_bot_creation=True, proxy=proxy) + return quora.Completion.create(model='gpt-4', prompt=question, token=token, proxy=proxy).text -def query_theb(question: str) -> str: +def query_theb(question: str, proxy: Optional[str] = None) -> str: # Set cloudflare clearance cookie and get answer from GPT-4 model response = "" try: - return ''.join(theb.Completion.create(prompt=question)) + return ''.join(theb.Completion.create(prompt=question, proxy=proxy)) except Exception as e: # Return error message if an exception occurs @@ -40,11 +41,11 @@ def query_theb(question: str) -> str: ) -def query_you(question: str) -> str: +def query_you(question: str, proxy: Optional[str] = None) -> str: # Set cloudflare clearance cookie and get answer 
from GPT-4 model try: - result = you.Completion.create(prompt=question) - return result["response"] + result = you.Completion.create(prompt=question, proxy=proxy) + return result.text except Exception as e: # Return error message if an exception occurs @@ -66,11 +67,11 @@ avail_query_methods = { } -def query(user_input: str, selected_method: str = "Random") -> str: +def query(user_input: str, selected_method: str = "Random", proxy: Optional[str] = None) -> str: # If a specific query method is selected (not "Random") and the method is in the dictionary, try to call it if selected_method != "Random" and selected_method in avail_query_methods: try: - return avail_query_methods[selected_method](user_input) + return avail_query_methods[selected_method](user_input, proxy=proxy) except Exception as e: print(f"Error with {selected_method}: {e}") return "😵 Sorry, some error occurred please try again." @@ -89,7 +90,7 @@ def query(user_input: str, selected_method: str = "Random") -> str: chosen_query_name = [k for k, v in avail_query_methods.items() if v == chosen_query][0] try: # Try to call the chosen method with the user input - result = chosen_query(user_input) + result = chosen_query(user_input, proxy=proxy) success = True except Exception as e: print(f"Error with {chosen_query_name}: {e}") diff --git a/gui/streamlit_chat_app.py b/gui/streamlit_chat_app.py index 6801122..3d18333 100644 --- a/gui/streamlit_chat_app.py +++ b/gui/streamlit_chat_app.py @@ -76,7 +76,7 @@ user_input = input_placeholder.text_input( submit_button = st.button("Submit") if user_input or submit_button: - output = query(user_input, st.session_state['query_method']) + output = query(user_input, st.session_state['query_method'], st.session_state['proxy']) escaped_output = output.encode('utf-8').decode('unicode-escape') st.session_state.current_conversation['user_inputs'].append(user_input) @@ -94,6 +94,9 @@ if st.sidebar.button("New Conversation"): st.session_state['query_method'] = st.sidebar.selectbox("Select API:", options=avail_query_methods, index=0) +# Proxy +st.session_state['proxy'] = st.sidebar.text_input("Proxy: ") + # Sidebar st.sidebar.header("Conversation History") From 0953981b53fabd11bea74702696470bafcae20f4 Mon Sep 17 00:00:00 2001 From: ezerinz Date: Sun, 30 Apr 2023 14:26:02 +0800 Subject: [PATCH 08/12] usesless test --- testing/usesless_test.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) create mode 100644 testing/usesless_test.py diff --git a/testing/usesless_test.py b/testing/usesless_test.py new file mode 100644 index 0000000..e2e3554 --- /dev/null +++ b/testing/usesless_test.py @@ -0,0 +1,13 @@ +import usesless + +question1 = "Who won the world series in 2020?" +req = usesless.Completion.create(prompt=question1) +answer = req["text"] +message_id = req["parentMessageId"] + +question2 = "Where was it played?" 
+req2 = usesless.Completion.create(prompt=question2, parentMessageId=message_id) +answer2 = req2["text"] + +print(answer) +print(answer2) From 5bbd0f5a6b270379a15b16cb6844638546123f71 Mon Sep 17 00:00:00 2001 From: ezerinz Date: Sun, 30 Apr 2023 14:26:56 +0800 Subject: [PATCH 09/12] ai.usesless.com --- unfinished/usesless/README.md | 24 ++++++++ unfinished/usesless/__init__.py | 52 ++++++++++++++++++ .../__pycache__/__init__.cpython-310.pyc | Bin 0 -> 1768 bytes 3 files changed, 76 insertions(+) create mode 100644 unfinished/usesless/README.md create mode 100644 unfinished/usesless/__init__.py create mode 100644 unfinished/usesless/__pycache__/__init__.cpython-310.pyc diff --git a/unfinished/usesless/README.md b/unfinished/usesless/README.md new file mode 100644 index 0000000..4ff6d94 --- /dev/null +++ b/unfinished/usesless/README.md @@ -0,0 +1,24 @@ +ai.usesless.com + +to do: + +- use random user agent in header +- make the code better I guess (?) + +### Example: `usesless` + +```python +import usesless + +question1 = "Who won the world series in 2020?" +req = usesless.Completion.create(prompt=question1) +answer = req["text"] +message_id = req["parentMessageId"] + +question2 = "Where was it played?" +req2 = usesless.Completion.create(prompt=question2, parentMessageId=message_id) +answer2 = req2["text"] + +print(answer) +print(answer2) +``` diff --git a/unfinished/usesless/__init__.py b/unfinished/usesless/__init__.py new file mode 100644 index 0000000..28207aa --- /dev/null +++ b/unfinished/usesless/__init__.py @@ -0,0 +1,52 @@ +from curl_cffi.requests import headers +import requests +import json + + +class Completion: + headers = { + "authority": "ai.usesless.com", + "accept": "application/json, text/plain, */*", + "accept-language": "en-US,en;q=0.5", + "cache-control": "no-cache", + "sec-fetch-dest": "empty", + "sec-fetch-mode": "cors", + "sec-fetch-site": "same-origin", + "user-agent": "Mozilla/5.0 (X11; Linux x86_64; rv:109.0) Gecko/20100101 Firefox/112.0", + } + + @staticmethod + def create( + systemMessage: str = "You are a helpful assistant", + prompt: str = "", + parentMessageId: str = "", + presence_penalty: float = 1, + temperature: float = 1, + model: str = "gpt-3.5-turbo", + ): + json_data = { + "openaiKey": "", + "prompt": prompt, + "options": { + "parentMessageId": parentMessageId, + "systemMessage": systemMessage, + "completionParams": { + "presence_penalty": presence_penalty, + "temperature": temperature, + "model": model, + }, + }, + } + + url = "https://ai.usesless.com/api/chat-process" + request = requests.post(url, headers=Completion.headers, json=json_data) + content = request.content + response = Completion.__response_to_json(content) + return response + + @classmethod + def __response_to_json(cls, text) -> dict: + text = str(text.decode("utf-8")) + split_text = text.rsplit("\n", 1)[1] + to_json = json.loads(split_text) + return to_json diff --git a/unfinished/usesless/__pycache__/__init__.cpython-310.pyc b/unfinished/usesless/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..12d76fac8942d958abacc529aef19a3804f1eea7 GIT binary patch literal 1768 zcmZWpTW{P%6!wfS>%HaH6k1Br^46`g>uhPN(g2E7lnOy3qzVcAVhuC1SqHD}%#2ZD zd3i}*`3HGO9{Ef2%2WRV3IsUgP3Y2&eU4|&c+T}ZlXbf-3&yFl$A0lF>vywQ95yU= zfaDMuvlvRODGCuXbDZE1*IzrdUs%j$&Z)(m6C64Wzq0!7YdB-|k#0;lVO+>PY6Od8 z2^Kp*asrH7Az~J&L<}2c%w-;HFrPKg{Lto3v(;?F(}AbUK444SWy?S-K&#wiYe2WS zJMOS`cKg&lL1BaY>^2bo=o0&EZz*r`doKI^o*o(_9^g 
z4$Xut+mcNAAQSO^oR+&UvocN+5^N1Odk?-JjkbH=#A$KVJ9_#gdh&R?Cw_i5+Wd65 z+3&sJ^rtL%v^m;@chuXB1s`We!D#ddcKxtP3N-<(V8^>>hvbiW|`yzJKQ z3}}`rktOAl1Jz>oBZP9^`=o)6w2?AqM zG=YUH0FP_fd`kqG%Dx>oGvj*vH9r=nmBL1r8{o+D!9=NCJ_~}k@D-3e4(Noa0c3?j zhWc1wIH?_jwZ5l)!4C^4k_??ZlS-H#od3}<<%9(4`_UT!M-k*O2a7yXSybz2*Y4K- z%)S4AOkvaf6)~bnXcr<08`l=as)+#X&-MLgiCZAJ+zBSxln1=z0&pCBnbBejUdRB7 z{&*}n4~ldgr?H%H7TmN_5JezTQ8dhtwMPXfim!n-`4KSO!U*GQLe7TYsf<_ zxF01v)`9z{NQ>H^~LiU2BGweeP5k zDsERcRNUDs-9n8APc>?Z2AuA@+JqxvrVVPx#fmX$N-^(;+Ko5NIHdj0=8=D4uB1A{9m_{PTC@KJ D*)Y^6 literal 0 HcmV?d00001 From 76e15efe756d5c5eef506cd9452da03b2ef09c9e Mon Sep 17 00:00:00 2001 From: ezerinz Date: Sun, 30 Apr 2023 14:35:53 +0800 Subject: [PATCH 10/12] ai.usesless.com --- .../__pycache__/__init__.cpython-310.pyc | Bin 1768 -> 0 bytes 1 file changed, 0 insertions(+), 0 deletions(-) delete mode 100644 unfinished/usesless/__pycache__/__init__.cpython-310.pyc diff --git a/unfinished/usesless/__pycache__/__init__.cpython-310.pyc b/unfinished/usesless/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index 12d76fac8942d958abacc529aef19a3804f1eea7..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1768 zcmZWpTW{P%6!wfS>%HaH6k1Br^46`g>uhPN(g2E7lnOy3qzVcAVhuC1SqHD}%#2ZD zd3i}*`3HGO9{Ef2%2WRV3IsUgP3Y2&eU4|&c+T}ZlXbf-3&yFl$A0lF>vywQ95yU= zfaDMuvlvRODGCuXbDZE1*IzrdUs%j$&Z)(m6C64Wzq0!7YdB-|k#0;lVO+>PY6Od8 z2^Kp*asrH7Az~J&L<}2c%w-;HFrPKg{Lto3v(;?F(}AbUK444SWy?S-K&#wiYe2WS zJMOS`cKg&lL1BaY>^2bo=o0&EZz*r`doKI^o*o(_9^g z4$Xut+mcNAAQSO^oR+&UvocN+5^N1Odk?-JjkbH=#A$KVJ9_#gdh&R?Cw_i5+Wd65 z+3&sJ^rtL%v^m;@chuXB1s`We!D#ddcKxtP3N-<(V8^>>hvbiW|`yzJKQ z3}}`rktOAl1Jz>oBZP9^`=o)6w2?AqM zG=YUH0FP_fd`kqG%Dx>oGvj*vH9r=nmBL1r8{o+D!9=NCJ_~}k@D-3e4(Noa0c3?j zhWc1wIH?_jwZ5l)!4C^4k_??ZlS-H#od3}<<%9(4`_UT!M-k*O2a7yXSybz2*Y4K- z%)S4AOkvaf6)~bnXcr<08`l=as)+#X&-MLgiCZAJ+zBSxln1=z0&pCBnbBejUdRB7 z{&*}n4~ldgr?H%H7TmN_5JezTQ8dhtwMPXfim!n-`4KSO!U*GQLe7TYsf<_ zxF01v)`9z{NQ>H^~LiU2BGweeP5k zDsERcRNUDs-9n8APc>?Z2AuA@+JqxvrVVPx#fmX$N-^(;+Ko5NIHdj0=8=D4uB1A{9m_{PTC@KJ D*)Y^6 From 8a76728343f04caf73346f04a94ecfbee8f35faf Mon Sep 17 00:00:00 2001 From: ezerinz Date: Sun, 30 Apr 2023 14:58:46 +0800 Subject: [PATCH 11/12] delete module that is not accessed --- unfinished/usesless/__init__.py | 1 - 1 file changed, 1 deletion(-) diff --git a/unfinished/usesless/__init__.py b/unfinished/usesless/__init__.py index 28207aa..6f9a47e 100644 --- a/unfinished/usesless/__init__.py +++ b/unfinished/usesless/__init__.py @@ -1,4 +1,3 @@ -from curl_cffi.requests import headers import requests import json From 3b13c3df921379bd7ffd8f605c723e82ebf26a8f Mon Sep 17 00:00:00 2001 From: ezerinz Date: Sun, 30 Apr 2023 15:05:51 +0800 Subject: [PATCH 12/12] Update example --- unfinished/usesless/README.md | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/unfinished/usesless/README.md b/unfinished/usesless/README.md index 4ff6d94..13e9df8 100644 --- a/unfinished/usesless/README.md +++ b/unfinished/usesless/README.md @@ -10,15 +10,14 @@ to do: ```python import usesless -question1 = "Who won the world series in 2020?" -req = usesless.Completion.create(prompt=question1) -answer = req["text"] -message_id = req["parentMessageId"] +message_id = "" +while True: + prompt = input("Question: ") + if prompt == "!stop": + break -question2 = "Where was it played?" 
-req2 = usesless.Completion.create(prompt=question2, parentMessageId=message_id) -answer2 = req2["text"] + req = usesless.Completion.create(prompt=prompt, parentMessageId=message_id) -print(answer) -print(answer2) + print(f"Answer: {req['text']}") + message_id = req["id"] ```