
Feat: add proxy settings in GUI and library.

Civitasv committed 2023-04-30 13:19:46 +08:00
commit 70acea30a5 (parent 9489dda310)
GPG Key ID: DF474EA79AEF6CD2 (no known key found for this signature in database)
6 changed files with 43 additions and 20 deletions
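
For orientation, a minimal, illustrative usage sketch (not part of the commit): the proxy is passed as a bare host:port string, since each module below prepends 'http://' itself; the address used here is only a placeholder.

# Illustrative sketch only: stream a completion through a local proxy.
from gpt4free import theb

for token in theb.Completion.create(prompt='hello world', proxy='127.0.0.1:7890'):
    print(token, end='', flush=True)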

View File

@@ -98,12 +98,15 @@ class StreamingCompletion:
action_type='new',
default_persona='607e41fe-95be-497e-8e97-010a59b2e2c0', # default
model='gpt-4',
+proxy=None
) -> Generator[ForeFrontResponse, None, None]:
if not token:
raise Exception('Token is required!')
if not chat_id:
chat_id = str(uuid4())
+proxies = { 'http': 'http://' + proxy, 'https': 'http://' + proxy } if proxy else None
headers = {
'authority': 'chat-server.tenant-forefront-default.knative.chi.coreweave.com',
'accept': '*/*',
@@ -135,6 +138,7 @@ class StreamingCompletion:
for chunk in post(
'https://chat-server.tenant-forefront-default.knative.chi.coreweave.com/chat',
headers=headers,
+proxies=proxies,
json=json_data,
stream=True,
).iter_lines():
@@ -169,6 +173,7 @@ class Completion:
action_type='new',
default_persona='607e41fe-95be-497e-8e97-010a59b2e2c0', # default
model='gpt-4',
+proxy=None
) -> ForeFrontResponse:
text = ''
final_response = None
@@ -179,6 +184,7 @@ class Completion:
action_type=action_type,
default_persona=default_persona,
model=model,
+proxy=proxy
):
if response:
final_response = response
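
The forefront flow can then be exercised end to end; the sketch below mirrors the query_forefront helper updated further down in this commit and is illustrative only (the proxy address is a placeholder).

# Illustrative sketch mirroring the query_forefront helper below.
from gpt4free import forefront

proxy = '127.0.0.1:7890'
token = forefront.Account.create(logging=False, proxy=proxy)
print(forefront.Completion.create(token=token, prompt='hello world', model='gpt-4', proxy=proxy).text)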

View File

@@ -187,7 +187,7 @@ class Account:
enable_bot_creation: bool = False,
):
client = TLS(client_identifier='chrome110')
-client.proxies = {'http': f'http://{proxy}', 'https': f'http://{proxy}'} if proxy else None
+client.proxies = {'http': f'http://{proxy}', 'https': f'http://{proxy}'} if proxy else {}
mail_client = Emailnator()
mail_address = mail_client.get_mail()
@@ -293,10 +293,13 @@ class StreamingCompletion:
custom_model: bool = None,
prompt: str = 'hello world',
token: str = '',
+proxy: Optional[str] = None
) -> Generator[PoeResponse, None, None]:
_model = MODELS[model] if not custom_model else custom_model
+proxies = { 'http': 'http://' + proxy, 'https': 'http://' + proxy } if proxy else False
client = PoeClient(token)
+client.proxy = proxies
for chunk in client.send_message(_model, prompt):
yield PoeResponse(
@@ -330,10 +333,13 @@ class Completion:
custom_model: str = None,
prompt: str = 'hello world',
token: str = '',
+proxy: Optional[str] = None
) -> PoeResponse:
_model = MODELS[model] if not custom_model else custom_model
+proxies = {'http': 'http://' + proxy, 'https': 'http://' + proxy} if proxy else False
client = PoeClient(token)
+client.proxy = proxies
chunk = None
for response in client.send_message(_model, prompt):
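
Note the fallback change at the top of this file: client.proxies now falls back to {} rather than None, presumably because the TLS session expects a mapping. Usage mirrors the query_quora helper further down in this commit; the sketch is illustrative only and the proxy address is a placeholder.

# Illustrative sketch mirroring the query_quora helper below.
from gpt4free import quora

proxy = '127.0.0.1:7890'
token = quora.Account.create(logging=False, enable_bot_creation=True, proxy=proxy)
print(quora.Completion.create(model='gpt-4', prompt='hello world', token=token, proxy=proxy).text)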

View File

@@ -2,7 +2,7 @@ from json import loads
from queue import Queue, Empty
from re import findall
from threading import Thread
-from typing import Generator
+from typing import Generator, Optional
from curl_cffi import requests
from fake_useragent import UserAgent
@@ -19,7 +19,7 @@ class Completion:
stream_completed = False
@staticmethod
-def request(prompt: str):
+def request(prompt: str, proxy: Optional[str]=None):
headers = {
'authority': 'chatbot.theb.ai',
'content-type': 'application/json',
@@ -27,9 +27,12 @@
'user-agent': UserAgent().random,
}
+proxies = {'http': 'http://' + proxy, 'https': 'http://' + proxy} if proxy else None
requests.post(
'https://chatbot.theb.ai/api/chat-process',
headers=headers,
+proxies=proxies,
content_callback=Completion.handle_stream_response,
json={'prompt': prompt, 'options': {}},
)
@@ -37,8 +40,8 @@
Completion.stream_completed = True
@staticmethod
-def create(prompt: str) -> Generator[str, None, None]:
-Thread(target=Completion.request, args=[prompt]).start()
+def create(prompt: str, proxy: Optional[str]=None) -> Generator[str, None, None]:
+Thread(target=Completion.request, args=[prompt, proxy]).start()
while not Completion.stream_completed or not Completion.message_queue.empty():
try:
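
The same proxy handling recurs in every module touched by this commit: a bare host:port string is expanded into the mapping that requests-style clients accept. A standalone sketch of that pattern (build_proxies is a hypothetical helper, not code added by this commit):

# Hypothetical helper showing the mapping built throughout this commit.
from typing import Optional

def build_proxies(proxy: Optional[str]) -> Optional[dict]:
    # Both http and https traffic are routed through an http:// proxy URL.
    return {'http': 'http://' + proxy, 'https': 'http://' + proxy} if proxy else None

print(build_proxies('127.0.0.1:7890'))  # {'http': 'http://127.0.0.1:7890', 'https': 'http://127.0.0.1:7890'}
print(build_proxies(None))              # None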

View File

@@ -30,12 +30,16 @@ class Completion:
include_links: bool = False,
detailed: bool = False,
debug: bool = False,
+proxy: Optional[str] = None
) -> PoeResponse:
if chat is None:
chat = []
+proxies = { 'http': 'http://' + proxy, 'https': 'http://' + proxy } if proxy else {}
client = Session(client_identifier='chrome_108')
client.headers = Completion.__get_headers()
+client.proxies = proxies
response = client.get(
f'https://you.com/api/streamingSearch',
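
Usage follows the query_you helper further down in this commit, which also switches from result["response"] to result.text, so the return value is treated as a response object rather than a dict. Illustrative sketch only, with a placeholder proxy:

# Illustrative sketch mirroring the query_you helper below.
from gpt4free import you

result = you.Completion.create(prompt='hello world', proxy='127.0.0.1:7890')
print(result.text)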

View File

@@ -1,5 +1,6 @@
import os
import sys
+from typing import Optional
sys.path.append(os.path.join(os.path.dirname(__file__), os.path.pardir))
@@ -7,14 +8,14 @@ from gpt4free import quora, forefront, theb, you
import random
-def query_forefront(question: str) -> str:
+def query_forefront(question: str, proxy: Optional[str] = None) -> str:
# create an account
-token = forefront.Account.create(logging=False)
+token = forefront.Account.create(logging=False, proxy=proxy)
response = ""
# get a response
try:
-return forefront.Completion.create(token=token, prompt='hello world', model='gpt-4').text
+return forefront.Completion.create(token=token, prompt='hello world', model='gpt-4', proxy=proxy).text
except Exception as e:
# Return error message if an exception occurs
return (
@@ -22,16 +23,16 @@ def query_forefront(question: str) -> str:
)
-def query_quora(question: str) -> str:
-token = quora.Account.create(logging=False, enable_bot_creation=True)
-return quora.Completion.create(model='gpt-4', prompt=question, token=token).text
+def query_quora(question: str, proxy: Optional[str] = None) -> str:
+token = quora.Account.create(logging=False, enable_bot_creation=True, proxy=proxy)
+return quora.Completion.create(model='gpt-4', prompt=question, token=token, proxy=proxy).text
-def query_theb(question: str) -> str:
+def query_theb(question: str, proxy: Optional[str] = None) -> str:
# Set cloudflare clearance cookie and get answer from GPT-4 model
response = ""
try:
-return ''.join(theb.Completion.create(prompt=question))
+return ''.join(theb.Completion.create(prompt=question, proxy=proxy))
except Exception as e:
# Return error message if an exception occurs
@@ -40,11 +41,11 @@ def query_theb(question: str) -> str:
)
-def query_you(question: str) -> str:
+def query_you(question: str, proxy: Optional[str] = None) -> str:
# Set cloudflare clearance cookie and get answer from GPT-4 model
try:
-result = you.Completion.create(prompt=question)
-return result["response"]
+result = you.Completion.create(prompt=question, proxy=proxy)
+return result.text
except Exception as e:
# Return error message if an exception occurs
@@ -66,11 +67,11 @@ avail_query_methods = {
}
-def query(user_input: str, selected_method: str = "Random") -> str:
+def query(user_input: str, selected_method: str = "Random", proxy: Optional[str] = None) -> str:
# If a specific query method is selected (not "Random") and the method is in the dictionary, try to call it
if selected_method != "Random" and selected_method in avail_query_methods:
try:
-return avail_query_methods[selected_method](user_input)
+return avail_query_methods[selected_method](user_input, proxy=proxy)
except Exception as e:
print(f"Error with {selected_method}: {e}")
return "😵 Sorry, some error occurred please try again."
@@ -89,7 +90,7 @@ def query(user_input: str, selected_method: str = "Random") -> str:
chosen_query_name = [k for k, v in avail_query_methods.items() if v == chosen_query][0]
try:
# Try to call the chosen method with the user input
-result = chosen_query(user_input)
+result = chosen_query(user_input, proxy=proxy)
success = True
except Exception as e:
print(f"Error with {chosen_query_name}: {e}")

View File

@@ -76,7 +76,7 @@ user_input = input_placeholder.text_input(
submit_button = st.button("Submit")
if user_input or submit_button:
-output = query(user_input, st.session_state['query_method'])
+output = query(user_input, st.session_state['query_method'], st.session_state['proxy'])
escaped_output = output.encode('utf-8').decode('unicode-escape')
st.session_state.current_conversation['user_inputs'].append(user_input)
@@ -94,6 +94,9 @@ if st.sidebar.button("New Conversation"):
st.session_state['query_method'] = st.sidebar.selectbox("Select API:", options=avail_query_methods, index=0)
+# Proxy
+st.session_state['proxy'] = st.sidebar.text_input("Proxy: ")
# Sidebar
st.sidebar.header("Conversation History")
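
One behavioural detail worth noting: when the sidebar field is left blank, st.sidebar.text_input returns an empty string, which is falsy, so every "if proxy else ..." fallback in the library still applies and no proxy is used. A tiny sketch of that:

# Sketch: a blank "Proxy: " field yields '', so the library's fallbacks kick in.
proxy = ''  # what st.sidebar.text_input("Proxy: ") returns when left empty
proxies = {'http': 'http://' + proxy, 'https': 'http://' + proxy} if proxy else None
assert proxies is None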