
Merge pull request #306 from Civitasv/feat-proxy-setting

Feat: add proxy settings in GUI and library.
commit ea9ff431d2
Authored by t.me/xtekky on 2023-04-30 12:12:54 +01:00, committed by GitHub.
6 changed files with 43 additions and 20 deletions
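
Each library module touched by this diff applies the same idea: the caller supplies a single proxy string (assumed to be in host:port form), and the module expands it into the proxies mapping that the HTTP clients expect before making the request. A minimal sketch of that recurring expression; make_proxies is an illustrative name, not a helper that exists in the codebase:

from typing import Dict, Optional

def make_proxies(proxy: Optional[str]) -> Optional[Dict[str, str]]:
    # Expand 'host:port' into the mapping used by the HTTP clients; an empty
    # string or None (no proxy configured) keeps the default direct connection.
    return {'http': 'http://' + proxy, 'https': 'http://' + proxy} if proxy else None

Only the fallback value differs between call sites (None, {}, or False, depending on what the underlying client accepts).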


@@ -98,12 +98,15 @@ class StreamingCompletion:
         action_type='new',
         default_persona='607e41fe-95be-497e-8e97-010a59b2e2c0',  # default
         model='gpt-4',
+        proxy=None
     ) -> Generator[ForeFrontResponse, None, None]:
         if not token:
            raise Exception('Token is required!')
        if not chat_id:
            chat_id = str(uuid4())

+        proxies = { 'http': 'http://' + proxy, 'https': 'http://' + proxy } if proxy else None
+
        headers = {
            'authority': 'chat-server.tenant-forefront-default.knative.chi.coreweave.com',
            'accept': '*/*',
@@ -135,6 +138,7 @@ class StreamingCompletion:
        for chunk in post(
            'https://chat-server.tenant-forefront-default.knative.chi.coreweave.com/chat',
            headers=headers,
+            proxies=proxies,
            json=json_data,
            stream=True,
        ).iter_lines():
@@ -169,6 +173,7 @@ class Completion:
        action_type='new',
        default_persona='607e41fe-95be-497e-8e97-010a59b2e2c0',  # default
        model='gpt-4',
+        proxy=None
    ) -> ForeFrontResponse:
        text = ''
        final_response = None
@@ -179,6 +184,7 @@ class Completion:
            action_type=action_type,
            default_persona=default_persona,
            model=model,
+            proxy=proxy
        ):
            if response:
                final_response = response
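
With the forefront changes above, the proxy can be supplied both when creating an account and when requesting a completion. A rough usage sketch based on the query helper further down in this diff; 127.0.0.1:8080 is a placeholder address:

from gpt4free import forefront

proxy = '127.0.0.1:8080'  # placeholder host:port
token = forefront.Account.create(logging=False, proxy=proxy)
print(forefront.Completion.create(token=token, prompt='hello world', model='gpt-4', proxy=proxy).text)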


@@ -187,7 +187,7 @@ class Account:
        enable_bot_creation: bool = False,
    ):
        client = TLS(client_identifier='chrome110')
-        client.proxies = {'http': f'http://{proxy}', 'https': f'http://{proxy}'} if proxy else None
+        client.proxies = {'http': f'http://{proxy}', 'https': f'http://{proxy}'} if proxy else {}

        mail_client = Emailnator()
        mail_address = mail_client.get_mail()
@@ -293,10 +293,13 @@ class StreamingCompletion:
        custom_model: bool = None,
        prompt: str = 'hello world',
        token: str = '',
+        proxy: Optional[str] = None
    ) -> Generator[PoeResponse, None, None]:
        _model = MODELS[model] if not custom_model else custom_model

+        proxies = { 'http': 'http://' + proxy, 'https': 'http://' + proxy } if proxy else False
        client = PoeClient(token)
+        client.proxy = proxies

        for chunk in client.send_message(_model, prompt):
            yield PoeResponse(
@@ -330,10 +333,13 @@ class Completion:
        custom_model: str = None,
        prompt: str = 'hello world',
        token: str = '',
+        proxy: Optional[str] = None
    ) -> PoeResponse:
        _model = MODELS[model] if not custom_model else custom_model

+        proxies = {'http': 'http://' + proxy, 'https': 'http://' + proxy} if proxy else False
        client = PoeClient(token)
+        client.proxy = proxies

        chunk = None
        for response in client.send_message(_model, prompt):
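
For the quora (Poe) module the same string is forwarded to both account creation and completion, and internally it is attached to the PoeClient via client.proxy. A sketch with a placeholder proxy address:

from gpt4free import quora

proxy = '127.0.0.1:8080'  # placeholder host:port
token = quora.Account.create(logging=False, enable_bot_creation=True, proxy=proxy)
print(quora.Completion.create(model='gpt-4', prompt='hello world', token=token, proxy=proxy).text)

Note the slight asymmetry above: Account.create now falls back to an empty dict when no proxy is given, while the completion paths fall back to False for client.proxy.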


@@ -2,7 +2,7 @@ from json import loads
 from queue import Queue, Empty
 from re import findall
 from threading import Thread
-from typing import Generator
+from typing import Generator, Optional

 from curl_cffi import requests
 from fake_useragent import UserAgent
@@ -19,7 +19,7 @@ class Completion:
    stream_completed = False

    @staticmethod
-    def request(prompt: str):
+    def request(prompt: str, proxy: Optional[str]=None):
        headers = {
            'authority': 'chatbot.theb.ai',
            'content-type': 'application/json',
@@ -27,9 +27,12 @@ class Completion:
            'user-agent': UserAgent().random,
        }

+        proxies = {'http': 'http://' + proxy, 'https': 'http://' + proxy} if proxy else None
+
        requests.post(
            'https://chatbot.theb.ai/api/chat-process',
            headers=headers,
+            proxies=proxies,
            content_callback=Completion.handle_stream_response,
            json={'prompt': prompt, 'options': {}},
        )
@@ -37,8 +40,8 @@ class Completion:
        Completion.stream_completed = True

    @staticmethod
-    def create(prompt: str) -> Generator[str, None, None]:
-        Thread(target=Completion.request, args=[prompt]).start()
+    def create(prompt: str, proxy: Optional[str]=None) -> Generator[str, None, None]:
+        Thread(target=Completion.request, args=[prompt, proxy]).start()

        while not Completion.stream_completed or not Completion.message_queue.empty():
            try:
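
theb.Completion.create streams its answer as string chunks (the request runs in a background thread and feeds a queue), so callers typically join or print the chunks as they arrive. A sketch with a placeholder proxy:

from gpt4free import theb

for chunk in theb.Completion.create(prompt='hello world', proxy='127.0.0.1:8080'):  # placeholder proxy
    print(chunk, end='', flush=True)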


@@ -30,12 +30,16 @@ class Completion:
        include_links: bool = False,
        detailed: bool = False,
        debug: bool = False,
+        proxy: Optional[str] = None
    ) -> PoeResponse:
        if chat is None:
            chat = []

+        proxies = { 'http': 'http://' + proxy, 'https': 'http://' + proxy } if proxy else {}
+
        client = Session(client_identifier='chrome_108')
        client.headers = Completion.__get_headers()
+        client.proxies = proxies

        response = client.get(
            f'https://you.com/api/streamingSearch',

@@ -1,5 +1,6 @@
 import os
 import sys
+from typing import Optional

 sys.path.append(os.path.join(os.path.dirname(__file__), os.path.pardir))
@@ -7,14 +8,14 @@ from gpt4free import quora, forefront, theb, you
 import random


-def query_forefront(question: str) -> str:
+def query_forefront(question: str, proxy: Optional[str] = None) -> str:
     # create an account
-    token = forefront.Account.create(logging=False)
+    token = forefront.Account.create(logging=False, proxy=proxy)

     response = ""
     # get a response
     try:
-        return forefront.Completion.create(token=token, prompt='hello world', model='gpt-4').text
+        return forefront.Completion.create(token=token, prompt='hello world', model='gpt-4', proxy=proxy).text
     except Exception as e:
         # Return error message if an exception occurs
         return (
@@ -22,16 +23,16 @@ def query_forefront(question: str) -> str:
     )


-def query_quora(question: str) -> str:
-    token = quora.Account.create(logging=False, enable_bot_creation=True)
-    return quora.Completion.create(model='gpt-4', prompt=question, token=token).text
+def query_quora(question: str, proxy: Optional[str] = None) -> str:
+    token = quora.Account.create(logging=False, enable_bot_creation=True, proxy=proxy)
+    return quora.Completion.create(model='gpt-4', prompt=question, token=token, proxy=proxy).text


-def query_theb(question: str) -> str:
+def query_theb(question: str, proxy: Optional[str] = None) -> str:
     # Set cloudflare clearance cookie and get answer from GPT-4 model
     response = ""
     try:
-        return ''.join(theb.Completion.create(prompt=question))
+        return ''.join(theb.Completion.create(prompt=question, proxy=proxy))
     except Exception as e:
         # Return error message if an exception occurs
@@ -40,11 +41,11 @@ def query_theb(question: str) -> str:
     )


-def query_you(question: str) -> str:
+def query_you(question: str, proxy: Optional[str] = None) -> str:
     # Set cloudflare clearance cookie and get answer from GPT-4 model
     try:
-        result = you.Completion.create(prompt=question)
-        return result["response"]
+        result = you.Completion.create(prompt=question, proxy=proxy)
+        return result.text
     except Exception as e:
         # Return error message if an exception occurs
@@ -66,11 +67,11 @@ avail_query_methods = {
 }


-def query(user_input: str, selected_method: str = "Random") -> str:
+def query(user_input: str, selected_method: str = "Random", proxy: Optional[str] = None) -> str:
     # If a specific query method is selected (not "Random") and the method is in the dictionary, try to call it
     if selected_method != "Random" and selected_method in avail_query_methods:
         try:
-            return avail_query_methods[selected_method](user_input)
+            return avail_query_methods[selected_method](user_input, proxy=proxy)
         except Exception as e:
             print(f"Error with {selected_method}: {e}")
             return "😵 Sorry, some error occurred please try again."
@@ -89,7 +90,7 @@ def query(user_input: str, selected_method: str = "Random") -> str:
         chosen_query_name = [k for k, v in avail_query_methods.items() if v == chosen_query][0]
         try:
             # Try to call the chosen method with the user input
-            result = chosen_query(user_input)
+            result = chosen_query(user_input, proxy=proxy)
             success = True
         except Exception as e:
             print(f"Error with {chosen_query_name}: {e}")


@@ -76,7 +76,7 @@ user_input = input_placeholder.text_input(
 submit_button = st.button("Submit")

 if user_input or submit_button:
-    output = query(user_input, st.session_state['query_method'])
+    output = query(user_input, st.session_state['query_method'], st.session_state['proxy'])
     escaped_output = output.encode('utf-8').decode('unicode-escape')

     st.session_state.current_conversation['user_inputs'].append(user_input)
@@ -94,6 +94,9 @@ if st.sidebar.button("New Conversation"):
 st.session_state['query_method'] = st.sidebar.selectbox("Select API:", options=avail_query_methods, index=0)

+# Proxy
+st.session_state['proxy'] = st.sidebar.text_input("Proxy: ")
+
 # Sidebar
 st.sidebar.header("Conversation History")
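
A self-contained sketch of the new sidebar control, showing how an empty field behaves: st.sidebar.text_input returns an empty string when nothing is typed, and every "... if proxy else ..." check in the library treats that as no proxy. This only mirrors the diff and is not the full app:

import streamlit as st

# Optional proxy in host:port form; leaving the field blank means no proxy is used.
st.session_state['proxy'] = st.sidebar.text_input("Proxy: ")

st.write("Proxy in use:", st.session_state['proxy'] or "none")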