diff --git a/README.md b/README.md
index 7e2592a..0445951 100644
--- a/README.md
+++ b/README.md
@@ -92,9 +92,9 @@ Please note the following:
| **Star History** | Star History | [![Link to Section](https://img.shields.io/badge/Link-Go%20to%20Section-blue)](#star-history) | - |
| **Usage Examples** | | | |
| `theb` | Example usage for theb (gpt-3.5) | [![Link to File](https://img.shields.io/badge/Link-Go%20to%20File-blue)](openai_rev/theb/README.md) | ![Active](https://img.shields.io/badge/Active-brightgreen) |
-| `forefront` | Example usage for forefront (gpt-4) | [![Link to File](https://img.shields.io/badge/Link-Go%20to%20File-blue)](./forefront/README.md) | ![Active](https://img.shields.io/badge/Active-brightgreen) | ||
-| `quora (poe)` | Example usage for quora | [![Link to File](https://img.shields.io/badge/Link-Go%20to%20File-blue)](./quora/README.md) | ![Active](https://img.shields.io/badge/Active-brightgreen) |
-| `you` | Example usage for you | [![Link to File](https://img.shields.io/badge/Link-Go%20to%20File-blue)](./you/README.md) | ![Active](https://img.shields.io/badge/Active-brightgreen) |
+| `forefront` | Example usage for forefront (gpt-4) | [![Link to File](https://img.shields.io/badge/Link-Go%20to%20File-blue)](gpt4free/forefront/README.md) | ![Active](https://img.shields.io/badge/Active-brightgreen) |
+| `quora (poe)` | Example usage for quora | [![Link to File](https://img.shields.io/badge/Link-Go%20to%20File-blue)](gpt4free/quora/README.md) | ![Active](https://img.shields.io/badge/Active-brightgreen) |
+| `you` | Example usage for you | [![Link to File](https://img.shields.io/badge/Link-Go%20to%20File-blue)](gpt4free/you/README.md) | ![Active](https://img.shields.io/badge/Active-brightgreen) |
| **Try it Out** | | | |
| Google Colab Jupyter Notebook | Example usage for gpt4free | [![Open in Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/DanielShemesh/gpt4free-colab/blob/main/gpt4free.ipynb) | - |
| replit Example (feel free to fork this repl) | Example usage for gpt4free | [![](https://img.shields.io/badge/Open%20in-Replit-1A1E27?logo=replit)](https://replit.com/@gpt4free/gpt4free-webui) | - |
@@ -126,10 +126,10 @@ Please note the following:
## Best sites
#### gpt-4
-- [`/forefront`](./forefront/README.md)
+- [`/forefront`](gpt4free/forefront/README.md)
#### gpt-3.5
-- [`/you`](./you/README.md)
+- [`/you`](gpt4free/you/README.md)
## Install
Download or clone this GitHub repo
diff --git a/cocalc/__init__.py b/cocalc/__init__.py
deleted file mode 100644
index 5a32274..0000000
--- a/cocalc/__init__.py
+++ /dev/null
@@ -1,57 +0,0 @@
-import requests
-
-class Completion:
- @staticmethod
- def create(prompt:str, cookieInput:str) -> str:
- # Initialize a session with custom headers
- session = Completion._initialize_session(cookieInput)
-
- # Set the data that will be submitted
- payload = Completion._create_payload(prompt, ("ASSUME I HAVE FULL ACCESS TO COCALC. "))
-
- # Submit the request and return the results
- return Completion._submit_request(session, payload)
-
- @classmethod
- def _initialize_session(cls, conversationCookie) -> requests.Session:
- """Initialize a session with custom headers for the request."""
-
- session = requests.Session()
- headers = {
- 'Accept': '*/*',
- 'Accept-Language': 'en-US,en;q=0.5',
- 'Origin': 'https://cocalc.com',
- 'Referer': 'https://cocalc.com/api/v2/openai/chatgpt',
- 'Cookie': conversationCookie,
- 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/108.0.0.0 Safari/537.36',
- }
- session.headers.update(headers)
-
- return session
-
- @classmethod
- def _create_payload(
- cls,
- prompt: str,
- system_prompt: str
- ) -> dict:
-
- return {
- "input": prompt,
- "system": system_prompt,
- "tag": "next:index"
- }
-
- @classmethod
- def _submit_request(
- cls,
- session: requests.Session,
- payload: dict
- ) -> str:
-
- response = session.post(
- "https://cocalc.com/api/v2/openai/chatgpt", json=payload).json()
- return {
- "response":response["output"],
- "success":response["success"]
- }
\ No newline at end of file
diff --git a/forefront/README.md b/forefront/README.md
deleted file mode 100644
index a2be818..0000000
--- a/forefront/README.md
+++ /dev/null
@@ -1,16 +0,0 @@
-### Example: `forefront` (use like openai pypi package)
-
-```python
-import forefront
-
-# create an account
-token = forefront.Account.create(logging=False)
-print(token)
-
-# get a response
-for response in forefront.StreamingCompletion.create(token = token,
- prompt = 'hello world', model='gpt-4'):
-
- print(response.completion.choices[0].text, end = '')
-print("")
-```
diff --git a/forefront/__init__.py b/forefront/__init__.py
deleted file mode 100644
index 25d0544..0000000
--- a/forefront/__init__.py
+++ /dev/null
@@ -1,154 +0,0 @@
-from json import loads
-from re import match
-from time import time, sleep
-from uuid import uuid4
-
-from requests import post
-from tls_client import Session
-
-from forefront.mail import Mail
-from forefront.typing import ForeFrontResponse
-
-
-class Account:
- @staticmethod
- def create(proxy=None, logging=False):
-
- proxies = {
- 'http': 'http://' + proxy,
- 'https': 'http://' + proxy} if proxy else False
-
- start = time()
-
- mail = Mail(proxies)
- mail_token = None
- mail_adress = mail.get_mail()
-
- # print(mail_adress)
-
- client = Session(client_identifier='chrome110')
- client.proxies = proxies
- client.headers = {
- "origin": "https://accounts.forefront.ai",
- "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36",
- }
-
- response = client.post('https://clerk.forefront.ai/v1/client/sign_ups?_clerk_js_version=4.32.6',
- data={
- "email_address": mail_adress
- }
- )
- try:
- trace_token = response.json()['response']['id']
- if logging: print(trace_token)
- except KeyError:
- return 'Failed to create account!'
-
- response = client.post(
- f"https://clerk.forefront.ai/v1/client/sign_ups/{trace_token}/prepare_verification?_clerk_js_version=4.32.6",
- data={
- "strategy": "email_code",
- }
- )
-
- if logging: print(response.text)
-
- if not 'sign_up_attempt' in response.text:
- return 'Failed to create account!'
-
- while True:
- sleep(1)
- for _ in mail.fetch_inbox():
- if logging: print(mail.get_message_content(_["id"]))
- mail_token = match(r"(\d){5,6}", mail.get_message_content(_["id"])).group(0)
-
- if mail_token:
- break
-
- if logging: print(mail_token)
-
- response = client.post(
- f'https://clerk.forefront.ai/v1/client/sign_ups/{trace_token}/attempt_verification?_clerk_js_version=4.38.4',
- data={
- 'code': mail_token,
- 'strategy': 'email_code'
- })
-
- if logging: print(response.json())
-
- token = response.json()['client']['sessions'][0]['last_active_token']['jwt']
-
- with open('accounts.txt', 'a') as f:
- f.write(f'{mail_adress}:{token}\n')
-
- if logging: print(time() - start)
-
- return token
-
-
-class StreamingCompletion:
- @staticmethod
- def create(
- token=None,
- chatId=None,
- prompt='',
- actionType='new',
- defaultPersona='607e41fe-95be-497e-8e97-010a59b2e2c0', # default
- model='gpt-4') -> ForeFrontResponse:
-
- if not token: raise Exception('Token is required!')
- if not chatId: chatId = str(uuid4())
-
- headers = {
- 'authority': 'chat-server.tenant-forefront-default.knative.chi.coreweave.com',
- 'accept': '*/*',
- 'accept-language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
- 'authorization': 'Bearer ' + token,
- 'cache-control': 'no-cache',
- 'content-type': 'application/json',
- 'origin': 'https://chat.forefront.ai',
- 'pragma': 'no-cache',
- 'referer': 'https://chat.forefront.ai/',
- 'sec-ch-ua': '"Chromium";v="112", "Google Chrome";v="112", "Not:A-Brand";v="99"',
- 'sec-ch-ua-mobile': '?0',
- 'sec-ch-ua-platform': '"macOS"',
- 'sec-fetch-dest': 'empty',
- 'sec-fetch-mode': 'cors',
- 'sec-fetch-site': 'cross-site',
- 'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36',
- }
-
- json_data = {
- 'text': prompt,
- 'action': actionType,
- 'parentId': chatId,
- 'workspaceId': chatId,
- 'messagePersona': defaultPersona,
- 'model': model
- }
-
- for chunk in post('https://chat-server.tenant-forefront-default.knative.chi.coreweave.com/chat',
- headers=headers, json=json_data, stream=True).iter_lines():
-
- if b'finish_reason":null' in chunk:
- data = loads(chunk.decode('utf-8').split('data: ')[1])
- token = data['choices'][0]['delta'].get('content')
-
- if token != None:
- yield ForeFrontResponse({
- 'id': chatId,
- 'object': 'text_completion',
- 'created': int(time()),
- 'model': model,
- 'choices': [{
- 'text': token,
- 'index': 0,
- 'logprobs': None,
- 'finish_reason': 'stop'
- }],
- 'usage': {
- 'prompt_tokens': len(prompt),
- 'completion_tokens': len(token),
- 'total_tokens': len(prompt) + len(token)
- }
- })
diff --git a/forefront/typing.py b/forefront/typing.py
deleted file mode 100644
index a11ac49..0000000
--- a/forefront/typing.py
+++ /dev/null
@@ -1,36 +0,0 @@
-class ForeFrontResponse:
- class Completion:
- class Choices:
- def __init__(self, choice: dict) -> None:
- self.text = choice['text']
- self.content = self.text.encode()
- self.index = choice['index']
- self.logprobs = choice['logprobs']
- self.finish_reason = choice['finish_reason']
-
- def __repr__(self) -> str:
- return f'''<__main__.APIResponse.Completion.Choices(\n text = {self.text.encode()},\n index = {self.index},\n logprobs = {self.logprobs},\n finish_reason = {self.finish_reason})object at 0x1337>'''
-
- def __init__(self, choices: dict) -> None:
- self.choices = [self.Choices(choice) for choice in choices]
-
- class Usage:
- def __init__(self, usage_dict: dict) -> None:
- self.prompt_tokens = usage_dict['prompt_tokens']
- self.completion_tokens = usage_dict['completion_tokens']
- self.total_tokens = usage_dict['total_tokens']
-
- def __repr__(self):
- return f'''<__main__.APIResponse.Usage(\n prompt_tokens = {self.prompt_tokens},\n completion_tokens = {self.completion_tokens},\n total_tokens = {self.total_tokens})object at 0x1337>'''
-
- def __init__(self, response_dict: dict) -> None:
- self.response_dict = response_dict
- self.id = response_dict['id']
- self.object = response_dict['object']
- self.created = response_dict['created']
- self.model = response_dict['model']
- self.completion = self.Completion(response_dict['choices'])
- self.usage = self.Usage(response_dict['usage'])
-
- def json(self) -> dict:
- return self.response_dict
diff --git a/gpt4free/__init__.py b/gpt4free/__init__.py
new file mode 100644
index 0000000..5336c82
--- /dev/null
+++ b/gpt4free/__init__.py
@@ -0,0 +1,64 @@
+from enum import Enum
+
+from gpt4free import cocalc
+from gpt4free import forefront
+from gpt4free import quora
+from gpt4free import theb
+from gpt4free import you
+
+
+class Provider(Enum):
+ """An enum representing different providers."""
+
+ You = 'you'
+ Poe = 'poe'
+ ForeFront = 'fore_front'
+ Theb = 'theb'
+ CoCalc = 'cocalc'
+
+
+class Completion:
+ """This class will be used for invoking the given provider"""
+
+ @staticmethod
+ def create(provider: Provider, prompt: str, **kwargs) -> str:
+ """
+        Invokes the given provider with the given prompt and additional arguments and returns the string response
+
+ :param provider: an enum representing the provider to use while invoking
+ :param prompt: input provided by the user
+ :param kwargs: Additional keyword arguments to pass to the provider while invoking
+ :return: A string representing the response from the provider
+ """
+ if provider == Provider.Poe:
+ return Completion.__poe_service(prompt, **kwargs)
+ elif provider == Provider.You:
+ return Completion.__you_service(prompt, **kwargs)
+ elif provider == Provider.ForeFront:
+ return Completion.__fore_front_service(prompt, **kwargs)
+ elif provider == Provider.Theb:
+ return Completion.__theb_service(prompt, **kwargs)
+ elif provider == Provider.CoCalc:
+ return Completion.__cocalc_service(prompt, **kwargs)
+ else:
+ raise Exception('Provider not exist, Please try again')
+
+ @staticmethod
+ def __you_service(prompt: str, **kwargs) -> str:
+ return you.Completion.create(prompt, **kwargs).text
+
+ @staticmethod
+ def __poe_service(prompt: str, **kwargs) -> str:
+ return quora.Completion.create(prompt=prompt, **kwargs).text
+
+ @staticmethod
+ def __fore_front_service(prompt: str, **kwargs) -> str:
+ return forefront.Completion.create(prompt=prompt, **kwargs).text
+
+ @staticmethod
+ def __theb_service(prompt: str, **kwargs):
+ return ''.join(theb.Completion.create(prompt=prompt))
+
+ @staticmethod
+ def __cocalc_service(prompt: str, **kwargs):
+ return cocalc.Completion.create(prompt, cookie_input=kwargs.get('cookie_input', '')).text
diff --git a/gpt4free/cocalc/__init__.py b/gpt4free/cocalc/__init__.py
new file mode 100644
index 0000000..372f29a
--- /dev/null
+++ b/gpt4free/cocalc/__init__.py
@@ -0,0 +1,47 @@
+import requests
+from fake_useragent import UserAgent
+from pydantic import BaseModel
+
+
+class CoCalcResponse(BaseModel):
+ text: str
+ status: bool
+
+
+class Completion:
+ @staticmethod
+ def create(prompt: str, cookie_input: str) -> CoCalcResponse:
+ # Initialize a session with custom headers
+ session = Completion._initialize_session(cookie_input)
+
+ # Set the data that will be submitted
+ payload = Completion._create_payload(prompt, 'ASSUME I HAVE FULL ACCESS TO COCALC. ')
+
+ # Submit the request and return the results
+ return Completion._submit_request(session, payload)
+
+ @classmethod
+ def _initialize_session(cls, conversation_cookie) -> requests.Session:
+ """Initialize a session with custom headers for the request."""
+
+ session = requests.Session()
+ headers = {
+ 'Accept': '*/*',
+ 'Accept-Language': 'en-US,en;q=0.5',
+ 'Origin': 'https://cocalc.com',
+ 'Referer': 'https://cocalc.com/api/v2/openai/chatgpt',
+ 'Cookie': conversation_cookie,
+ 'User-Agent': UserAgent().random,
+ }
+ session.headers.update(headers)
+
+ return session
+
+ @staticmethod
+ def _create_payload(prompt: str, system_prompt: str) -> dict:
+ return {'input': prompt, 'system': system_prompt, 'tag': 'next:index'}
+
+ @staticmethod
+ def _submit_request(session: requests.Session, payload: dict) -> CoCalcResponse:
+ response = session.post('https://cocalc.com/api/v2/openai/chatgpt', json=payload).json()
+ return CoCalcResponse(text=response['output'], status=response['success'])
diff --git a/cocalc/readme.md b/gpt4free/cocalc/readme.md
similarity index 77%
rename from cocalc/readme.md
rename to gpt4free/cocalc/readme.md
index 0409533..f091115 100644
--- a/cocalc/readme.md
+++ b/gpt4free/cocalc/readme.md
@@ -1,11 +1,10 @@
### Example: `cocalc`
-
```python
# import library
-import cocalc
+from gpt4free import cocalc
-cocalc.Completion.create(prompt="How are you!", cookieInput="cookieinput") ## Tutorial
+cocalc.Completion.create(prompt="How are you!", cookie_input="cookieinput") ## Tutorial
```
### How to grab cookie input
diff --git a/gpt4free/forefront/README.md b/gpt4free/forefront/README.md
new file mode 100644
index 0000000..3d0aac4
--- /dev/null
+++ b/gpt4free/forefront/README.md
@@ -0,0 +1,16 @@
+### Example: `forefront` (use like openai pypi package)
+
+```python
+
+from gpt4free import forefront
+
+# create an account
+token = forefront.Account.create(logging=False)
+print(token)
+
+# get a response
+for response in forefront.StreamingCompletion.create(token=token,
+ prompt='hello world', model='gpt-4'):
+ print(response.completion.choices[0].text, end='')
+print("")
+```
diff --git a/gpt4free/forefront/__init__.py b/gpt4free/forefront/__init__.py
new file mode 100644
index 0000000..f0ca1a1
--- /dev/null
+++ b/gpt4free/forefront/__init__.py
@@ -0,0 +1,192 @@
+from json import loads
+from re import match
+from time import time, sleep
+from typing import Generator, Optional
+from uuid import uuid4
+
+from fake_useragent import UserAgent
+from requests import post
+from tls_client import Session
+
+from .mail import Mail
+from .typing import ForeFrontResponse
+
+
+class Account:
+ @staticmethod
+ def create(proxy: Optional[str] = None, logging: bool = False):
+ proxies = {'http': 'http://' + proxy, 'https': 'http://' + proxy} if proxy else False
+
+ start = time()
+
+ mail_client = Mail(proxies)
+ mail_token = None
+ mail_address = mail_client.get_mail()
+
+ # print(mail_address)
+
+ client = Session(client_identifier='chrome110')
+ client.proxies = proxies
+ client.headers = {
+ 'origin': 'https://accounts.forefront.ai',
+ 'user-agent': UserAgent().random,
+ }
+
+ response = client.post(
+ 'https://clerk.forefront.ai/v1/client/sign_ups?_clerk_js_version=4.32.6',
+ data={'email_address': mail_address},
+ )
+
+ try:
+ trace_token = response.json()['response']['id']
+ if logging:
+ print(trace_token)
+ except KeyError:
+ return 'Failed to create account!'
+
+ response = client.post(
+ f'https://clerk.forefront.ai/v1/client/sign_ups/{trace_token}/prepare_verification?_clerk_js_version=4.32.6',
+ data={
+ 'strategy': 'email_code',
+ },
+ )
+
+ if logging:
+ print(response.text)
+
+ if 'sign_up_attempt' not in response.text:
+ return 'Failed to create account!'
+
+ while True:
+ sleep(1)
+ for _ in mail_client.fetch_inbox():
+ if logging:
+ print(mail_client.get_message_content(_['id']))
+ mail_token = match(r'(\d){5,6}', mail_client.get_message_content(_['id'])).group(0)
+
+ if mail_token:
+ break
+
+ if logging:
+ print(mail_token)
+
+ response = client.post(
+ f'https://clerk.forefront.ai/v1/client/sign_ups/{trace_token}/attempt_verification?_clerk_js_version=4.38.4',
+ data={'code': mail_token, 'strategy': 'email_code'},
+ )
+
+ if logging:
+ print(response.json())
+
+ token = response.json()['client']['sessions'][0]['last_active_token']['jwt']
+
+ with open('accounts.txt', 'a') as f:
+ f.write(f'{mail_address}:{token}\n')
+
+ if logging:
+ print(time() - start)
+
+ return token
+
+
+class StreamingCompletion:
+ @staticmethod
+ def create(
+ token=None,
+ chat_id=None,
+ prompt='',
+ action_type='new',
+ default_persona='607e41fe-95be-497e-8e97-010a59b2e2c0', # default
+ model='gpt-4',
+ ) -> Generator[ForeFrontResponse, None, None]:
+ if not token:
+ raise Exception('Token is required!')
+ if not chat_id:
+ chat_id = str(uuid4())
+
+ headers = {
+ 'authority': 'chat-server.tenant-forefront-default.knative.chi.coreweave.com',
+ 'accept': '*/*',
+ 'accept-language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
+ 'authorization': 'Bearer ' + token,
+ 'cache-control': 'no-cache',
+ 'content-type': 'application/json',
+ 'origin': 'https://chat.forefront.ai',
+ 'pragma': 'no-cache',
+ 'referer': 'https://chat.forefront.ai/',
+ 'sec-ch-ua': '"Chromium";v="112", "Google Chrome";v="112", "Not:A-Brand";v="99"',
+ 'sec-ch-ua-mobile': '?0',
+ 'sec-ch-ua-platform': '"macOS"',
+ 'sec-fetch-dest': 'empty',
+ 'sec-fetch-mode': 'cors',
+ 'sec-fetch-site': 'cross-site',
+ 'user-agent': UserAgent().random,
+ }
+
+ json_data = {
+ 'text': prompt,
+ 'action': action_type,
+ 'parentId': chat_id,
+ 'workspaceId': chat_id,
+ 'messagePersona': default_persona,
+ 'model': model,
+ }
+
+ for chunk in post(
+ 'https://chat-server.tenant-forefront-default.knative.chi.coreweave.com/chat',
+ headers=headers,
+ json=json_data,
+ stream=True,
+ ).iter_lines():
+ if b'finish_reason":null' in chunk:
+ data = loads(chunk.decode('utf-8').split('data: ')[1])
+ token = data['choices'][0]['delta'].get('content')
+
+ if token is not None:
+ yield ForeFrontResponse(
+ **{
+ 'id': chat_id,
+ 'object': 'text_completion',
+ 'created': int(time()),
+ 'text': token,
+ 'model': model,
+ 'choices': [{'text': token, 'index': 0, 'logprobs': None, 'finish_reason': 'stop'}],
+ 'usage': {
+ 'prompt_tokens': len(prompt),
+ 'completion_tokens': len(token),
+ 'total_tokens': len(prompt) + len(token),
+ },
+ }
+ )
+
+
+class Completion:
+ @staticmethod
+ def create(
+ token=None,
+ chat_id=None,
+ prompt='',
+ action_type='new',
+ default_persona='607e41fe-95be-497e-8e97-010a59b2e2c0', # default
+ model='gpt-4',
+ ) -> ForeFrontResponse:
+ text = ''
+ final_response = None
+ for response in StreamingCompletion.create(
+ token=token,
+ chat_id=chat_id,
+ prompt=prompt,
+ action_type=action_type,
+ default_persona=default_persona,
+ model=model,
+ ):
+ if response:
+ final_response = response
+ text += response.text
+
+ if final_response:
+ final_response.text = text
+ else:
+ raise Exception('Unable to get the response, Please try again')
+
+ return final_response
diff --git a/forefront/mail.py b/gpt4free/forefront/mail.py
similarity index 100%
rename from forefront/mail.py
rename to gpt4free/forefront/mail.py
diff --git a/gpt4free/forefront/typing.py b/gpt4free/forefront/typing.py
new file mode 100644
index 0000000..d6026e0
--- /dev/null
+++ b/gpt4free/forefront/typing.py
@@ -0,0 +1,26 @@
+from typing import Any, List
+
+from pydantic import BaseModel
+
+
+class Choice(BaseModel):
+ text: str
+ index: int
+ logprobs: Any
+ finish_reason: str
+
+
+class Usage(BaseModel):
+ prompt_tokens: int
+ completion_tokens: int
+ total_tokens: int
+
+
+class ForeFrontResponse(BaseModel):
+ id: str
+ object: str
+ created: int
+ model: str
+ choices: List[Choice]
+ usage: Usage
+ text: str
\ No newline at end of file
diff --git a/quora/README.md b/gpt4free/quora/README.md
similarity index 80%
rename from quora/README.md
rename to gpt4free/quora/README.md
index 2467927..c6eeac3 100644
--- a/quora/README.md
+++ b/gpt4free/quora/README.md
@@ -21,26 +21,25 @@ models = {
```python
# import quora (poe) package
-import quora
+from gpt4free import quora
# create account
# make sure to set enable_bot_creation to True
-token = quora.Account.create(logging = True, enable_bot_creation=True)
+token = quora.Account.create(logging=True, enable_bot_creation=True)
model = quora.Model.create(
- token = token,
- model = 'gpt-3.5-turbo', # or claude-instant-v1.0
- system_prompt = 'you are ChatGPT a large language model ...'
+ token=token,
+ model='gpt-3.5-turbo', # or claude-instant-v1.0
+ system_prompt='you are ChatGPT a large language model ...'
)
-print(model.name) # gptx....
+print(model.name) # gptx....
# streaming response
for response in quora.StreamingCompletion.create(
- custom_model = model.name,
- prompt ='hello world',
- token = token):
-
+ custom_model=model.name,
+ prompt='hello world',
+ token=token):
print(response.completion.choices[0].text)
```
diff --git a/quora/__init__.py b/gpt4free/quora/__init__.py
similarity index 82%
rename from quora/__init__.py
rename to gpt4free/quora/__init__.py
index cd5ec8f..31526e0 100644
--- a/quora/__init__.py
+++ b/gpt4free/quora/__init__.py
@@ -6,11 +6,12 @@ from pathlib import Path
from random import choice, choices, randint
from re import search, findall
from string import ascii_letters, digits
-from typing import Optional, Union
+from typing import Optional, Union, List, Any, Generator
from urllib.parse import unquote
import selenium.webdriver.support.expected_conditions as EC
from fake_useragent import UserAgent
+from pydantic import BaseModel
from pypasser import reCaptchaV3
from requests import Session
from selenium.webdriver import Firefox, Chrome, FirefoxOptions, ChromeOptions
@@ -18,8 +19,8 @@ from selenium.webdriver.common.by import By
from selenium.webdriver.support.wait import WebDriverWait
from tls_client import Session as TLS
-from quora.api import Client as PoeClient
-from quora.mail import Emailnator
+from .api import Client as PoeClient
+from .mail import Emailnator
SELENIUM_WEB_DRIVER_ERROR_MSG = b'''The error message you are receiving is due to the `geckodriver` executable not
being found in your system\'s PATH. To resolve this issue, you need to download the geckodriver and add its location
@@ -67,42 +68,27 @@ def extract_formkey(html):
return formkey
-class PoeResponse:
- class Completion:
- class Choices:
- def __init__(self, choice: dict) -> None:
- self.text = choice['text']
- self.content = self.text.encode()
- self.index = choice['index']
- self.logprobs = choice['logprobs']
- self.finish_reason = choice['finish_reason']
+class Choice(BaseModel):
+ text: str
+ index: int
+ logprobs: Any
+ finish_reason: str
- def __repr__(self) -> str:
- return f'''<__main__.APIResponse.Completion.Choices(\n text = {self.text.encode()},\n index = {self.index},\n logprobs = {self.logprobs},\n finish_reason = {self.finish_reason})object at 0x1337>'''
- def __init__(self, choices: dict) -> None:
- self.choices = [self.Choices(choice) for choice in choices]
+class Usage(BaseModel):
+ prompt_tokens: int
+ completion_tokens: int
+ total_tokens: int
- class Usage:
- def __init__(self, usage_dict: dict) -> None:
- self.prompt_tokens = usage_dict['prompt_tokens']
- self.completion_tokens = usage_dict['completion_tokens']
- self.total_tokens = usage_dict['total_tokens']
- def __repr__(self):
- return f'''<__main__.APIResponse.Usage(\n prompt_tokens = {self.prompt_tokens},\n completion_tokens = {self.completion_tokens},\n total_tokens = {self.total_tokens})object at 0x1337>'''
-
- def __init__(self, response_dict: dict) -> None:
- self.response_dict = response_dict
- self.id = response_dict['id']
- self.object = response_dict['object']
- self.created = response_dict['created']
- self.model = response_dict['model']
- self.completion = self.Completion(response_dict['choices'])
- self.usage = self.Usage(response_dict['usage'])
-
- def json(self) -> dict:
- return self.response_dict
+class PoeResponse(BaseModel):
+ id: int
+ object: str
+ created: int
+ model: str
+ choices: List[Choice]
+ usage: Usage
+ text: str
class ModelResponse:
@@ -116,18 +102,12 @@ class ModelResponse:
class Model:
@staticmethod
def create(
- token: str,
- model: str = 'gpt-3.5-turbo', # claude-instant
- system_prompt: str = 'You are ChatGPT a large language model developed by Openai. Answer as consisely as possible',
- description: str = 'gpt-3.5 language model from openai, skidded by poe.com',
- handle: str = None,
+ token: str,
+ model: str = 'gpt-3.5-turbo', # claude-instant
+ system_prompt: str = 'You are ChatGPT a large language model developed by Openai. Answer as consisely as possible',
+ description: str = 'gpt-3.5 language model from openai, skidded by poe.com',
+ handle: str = None,
) -> ModelResponse:
- models = {
- 'gpt-3.5-turbo': 'chinchilla',
- 'claude-instant-v1.0': 'a2',
- 'gpt-4': 'beaver',
- }
-
if not handle:
handle = f'gptx{randint(1111111, 9999999)}'
@@ -162,7 +142,7 @@ class Model:
obj={
'queryName': 'CreateBotMain_poeBotCreate_Mutation',
'variables': {
- 'model': models[model],
+ 'model': MODELS[model],
'handle': handle,
'prompt': system_prompt,
'isPromptPublic': True,
@@ -202,9 +182,9 @@ class Model:
class Account:
@staticmethod
def create(
- proxy: Optional[str] = None,
- logging: bool = False,
- enable_bot_creation: bool = False,
+ proxy: Optional[str] = None,
+ logging: bool = False,
+ enable_bot_creation: bool = False,
):
client = TLS(client_identifier='chrome110')
client.proxies = {'http': f'http://{proxy}', 'https': f'http://{proxy}'} if proxy else None
@@ -309,22 +289,23 @@ class Account:
class StreamingCompletion:
@staticmethod
def create(
- model: str = 'gpt-4',
- custom_model: bool = None,
- prompt: str = 'hello world',
- token: str = '',
- ):
+ model: str = 'gpt-4',
+ custom_model: bool = None,
+ prompt: str = 'hello world',
+ token: str = '',
+ ) -> Generator[PoeResponse, None, None]:
_model = MODELS[model] if not custom_model else custom_model
client = PoeClient(token)
for chunk in client.send_message(_model, prompt):
yield PoeResponse(
- {
+ **{
'id': chunk['messageId'],
'object': 'text_completion',
'created': chunk['creationTime'],
'model': _model,
+ 'text': chunk['text_new'],
'choices': [
{
'text': chunk['text_new'],
@@ -343,33 +324,28 @@ class StreamingCompletion:
class Completion:
+ @staticmethod
def create(
- model: str = 'gpt-4',
- custom_model: str = None,
- prompt: str = 'hello world',
- token: str = '',
- ):
- models = {
- 'sage': 'capybara',
- 'gpt-4': 'beaver',
- 'claude-v1.2': 'a2_2',
- 'claude-instant-v1.0': 'a2',
- 'gpt-3.5-turbo': 'chinchilla',
- }
-
- _model = models[model] if not custom_model else custom_model
+ model: str = 'gpt-4',
+ custom_model: str = None,
+ prompt: str = 'hello world',
+ token: str = '',
+ ) -> PoeResponse:
+ _model = MODELS[model] if not custom_model else custom_model
client = PoeClient(token)
- for chunk in client.send_message(_model, prompt):
- pass
+ chunk = None
+ for response in client.send_message(_model, prompt):
+ chunk = response
return PoeResponse(
- {
+ **{
'id': chunk['messageId'],
'object': 'text_completion',
'created': chunk['creationTime'],
'model': _model,
+ 'text': chunk['text'],
'choices': [
{
'text': chunk['text'],
@@ -389,12 +365,12 @@ class Completion:
class Poe:
def __init__(
- self,
- model: str = 'ChatGPT',
- driver: str = 'firefox',
- download_driver: bool = False,
- driver_path: Optional[str] = None,
- cookie_path: str = './quora/cookie.json',
+ self,
+ model: str = 'ChatGPT',
+ driver: str = 'firefox',
+ download_driver: bool = False,
+ driver_path: Optional[str] = None,
+ cookie_path: str = './quora/cookie.json',
):
# validating the model
if model and model not in MODELS:
@@ -451,8 +427,8 @@ class Poe:
driver.close()
return cookie
- @classmethod
- def __resolve_driver(cls, driver: str, driver_path: Optional[str] = None) -> Union[Firefox, Chrome]:
+ @staticmethod
+ def __resolve_driver(driver: str, driver_path: Optional[str] = None) -> Union[Firefox, Chrome]:
options = FirefoxOptions() if driver == 'firefox' else ChromeOptions()
options.add_argument('-headless')
@@ -473,12 +449,12 @@ class Poe:
return response
def create_bot(
- self,
- name: str,
- /,
- prompt: str = '',
- base_model: str = 'ChatGPT',
- description: str = '',
+ self,
+ name: str,
+ /,
+ prompt: str = '',
+ base_model: str = 'ChatGPT',
+ description: str = '',
) -> None:
if base_model not in MODELS:
raise RuntimeError('Sorry, the base_model you provided does not exist. Please check and try again.')
@@ -492,4 +468,4 @@ class Poe:
print(f'Successfully created bot with name: {response["bot"]["displayName"]}')
def list_bots(self) -> list:
- return list(self.client.bot_names.values())
+ return list(self.client.bot_names.values())
\ No newline at end of file
diff --git a/quora/api.py b/gpt4free/quora/api.py
similarity index 100%
rename from quora/api.py
rename to gpt4free/quora/api.py
diff --git a/quora/cookies.txt b/gpt4free/quora/cookies.txt
similarity index 100%
rename from quora/cookies.txt
rename to gpt4free/quora/cookies.txt
diff --git a/quora/graphql/AddHumanMessageMutation.graphql b/gpt4free/quora/graphql/AddHumanMessageMutation.graphql
similarity index 100%
rename from quora/graphql/AddHumanMessageMutation.graphql
rename to gpt4free/quora/graphql/AddHumanMessageMutation.graphql
diff --git a/quora/graphql/AddMessageBreakMutation.graphql b/gpt4free/quora/graphql/AddMessageBreakMutation.graphql
similarity index 100%
rename from quora/graphql/AddMessageBreakMutation.graphql
rename to gpt4free/quora/graphql/AddMessageBreakMutation.graphql
diff --git a/quora/graphql/AutoSubscriptionMutation.graphql b/gpt4free/quora/graphql/AutoSubscriptionMutation.graphql
similarity index 100%
rename from quora/graphql/AutoSubscriptionMutation.graphql
rename to gpt4free/quora/graphql/AutoSubscriptionMutation.graphql
diff --git a/quora/graphql/BioFragment.graphql b/gpt4free/quora/graphql/BioFragment.graphql
similarity index 100%
rename from quora/graphql/BioFragment.graphql
rename to gpt4free/quora/graphql/BioFragment.graphql
diff --git a/quora/graphql/ChatAddedSubscription.graphql b/gpt4free/quora/graphql/ChatAddedSubscription.graphql
similarity index 100%
rename from quora/graphql/ChatAddedSubscription.graphql
rename to gpt4free/quora/graphql/ChatAddedSubscription.graphql
diff --git a/quora/graphql/ChatFragment.graphql b/gpt4free/quora/graphql/ChatFragment.graphql
similarity index 100%
rename from quora/graphql/ChatFragment.graphql
rename to gpt4free/quora/graphql/ChatFragment.graphql
diff --git a/quora/graphql/ChatListPaginationQuery.graphql b/gpt4free/quora/graphql/ChatListPaginationQuery.graphql
similarity index 100%
rename from quora/graphql/ChatListPaginationQuery.graphql
rename to gpt4free/quora/graphql/ChatListPaginationQuery.graphql
diff --git a/quora/graphql/ChatPaginationQuery.graphql b/gpt4free/quora/graphql/ChatPaginationQuery.graphql
similarity index 100%
rename from quora/graphql/ChatPaginationQuery.graphql
rename to gpt4free/quora/graphql/ChatPaginationQuery.graphql
diff --git a/quora/graphql/ChatViewQuery.graphql b/gpt4free/quora/graphql/ChatViewQuery.graphql
similarity index 100%
rename from quora/graphql/ChatViewQuery.graphql
rename to gpt4free/quora/graphql/ChatViewQuery.graphql
diff --git a/quora/graphql/DeleteHumanMessagesMutation.graphql b/gpt4free/quora/graphql/DeleteHumanMessagesMutation.graphql
similarity index 100%
rename from quora/graphql/DeleteHumanMessagesMutation.graphql
rename to gpt4free/quora/graphql/DeleteHumanMessagesMutation.graphql
diff --git a/quora/graphql/DeleteMessageMutation.graphql b/gpt4free/quora/graphql/DeleteMessageMutation.graphql
similarity index 100%
rename from quora/graphql/DeleteMessageMutation.graphql
rename to gpt4free/quora/graphql/DeleteMessageMutation.graphql
diff --git a/quora/graphql/HandleFragment.graphql b/gpt4free/quora/graphql/HandleFragment.graphql
similarity index 100%
rename from quora/graphql/HandleFragment.graphql
rename to gpt4free/quora/graphql/HandleFragment.graphql
diff --git a/quora/graphql/LoginWithVerificationCodeMutation.graphql b/gpt4free/quora/graphql/LoginWithVerificationCodeMutation.graphql
similarity index 100%
rename from quora/graphql/LoginWithVerificationCodeMutation.graphql
rename to gpt4free/quora/graphql/LoginWithVerificationCodeMutation.graphql
diff --git a/quora/graphql/MessageAddedSubscription.graphql b/gpt4free/quora/graphql/MessageAddedSubscription.graphql
similarity index 100%
rename from quora/graphql/MessageAddedSubscription.graphql
rename to gpt4free/quora/graphql/MessageAddedSubscription.graphql
diff --git a/quora/graphql/MessageDeletedSubscription.graphql b/gpt4free/quora/graphql/MessageDeletedSubscription.graphql
similarity index 100%
rename from quora/graphql/MessageDeletedSubscription.graphql
rename to gpt4free/quora/graphql/MessageDeletedSubscription.graphql
diff --git a/quora/graphql/MessageFragment.graphql b/gpt4free/quora/graphql/MessageFragment.graphql
similarity index 100%
rename from quora/graphql/MessageFragment.graphql
rename to gpt4free/quora/graphql/MessageFragment.graphql
diff --git a/quora/graphql/MessageRemoveVoteMutation.graphql b/gpt4free/quora/graphql/MessageRemoveVoteMutation.graphql
similarity index 100%
rename from quora/graphql/MessageRemoveVoteMutation.graphql
rename to gpt4free/quora/graphql/MessageRemoveVoteMutation.graphql
diff --git a/quora/graphql/MessageSetVoteMutation.graphql b/gpt4free/quora/graphql/MessageSetVoteMutation.graphql
similarity index 100%
rename from quora/graphql/MessageSetVoteMutation.graphql
rename to gpt4free/quora/graphql/MessageSetVoteMutation.graphql
diff --git a/quora/graphql/PoeBotCreateMutation.graphql b/gpt4free/quora/graphql/PoeBotCreateMutation.graphql
similarity index 100%
rename from quora/graphql/PoeBotCreateMutation.graphql
rename to gpt4free/quora/graphql/PoeBotCreateMutation.graphql
diff --git a/quora/graphql/PoeBotEditMutation.graphql b/gpt4free/quora/graphql/PoeBotEditMutation.graphql
similarity index 100%
rename from quora/graphql/PoeBotEditMutation.graphql
rename to gpt4free/quora/graphql/PoeBotEditMutation.graphql
diff --git a/quora/graphql/SendMessageMutation.graphql b/gpt4free/quora/graphql/SendMessageMutation.graphql
similarity index 100%
rename from quora/graphql/SendMessageMutation.graphql
rename to gpt4free/quora/graphql/SendMessageMutation.graphql
diff --git a/quora/graphql/SendVerificationCodeForLoginMutation.graphql b/gpt4free/quora/graphql/SendVerificationCodeForLoginMutation.graphql
similarity index 100%
rename from quora/graphql/SendVerificationCodeForLoginMutation.graphql
rename to gpt4free/quora/graphql/SendVerificationCodeForLoginMutation.graphql
diff --git a/quora/graphql/ShareMessagesMutation.graphql b/gpt4free/quora/graphql/ShareMessagesMutation.graphql
similarity index 100%
rename from quora/graphql/ShareMessagesMutation.graphql
rename to gpt4free/quora/graphql/ShareMessagesMutation.graphql
diff --git a/quora/graphql/SignupWithVerificationCodeMutation.graphql b/gpt4free/quora/graphql/SignupWithVerificationCodeMutation.graphql
similarity index 100%
rename from quora/graphql/SignupWithVerificationCodeMutation.graphql
rename to gpt4free/quora/graphql/SignupWithVerificationCodeMutation.graphql
diff --git a/quora/graphql/StaleChatUpdateMutation.graphql b/gpt4free/quora/graphql/StaleChatUpdateMutation.graphql
similarity index 100%
rename from quora/graphql/StaleChatUpdateMutation.graphql
rename to gpt4free/quora/graphql/StaleChatUpdateMutation.graphql
diff --git a/quora/graphql/SubscriptionsMutation.graphql b/gpt4free/quora/graphql/SubscriptionsMutation.graphql
similarity index 100%
rename from quora/graphql/SubscriptionsMutation.graphql
rename to gpt4free/quora/graphql/SubscriptionsMutation.graphql
diff --git a/quora/graphql/SummarizePlainPostQuery.graphql b/gpt4free/quora/graphql/SummarizePlainPostQuery.graphql
similarity index 100%
rename from quora/graphql/SummarizePlainPostQuery.graphql
rename to gpt4free/quora/graphql/SummarizePlainPostQuery.graphql
diff --git a/quora/graphql/SummarizeQuotePostQuery.graphql b/gpt4free/quora/graphql/SummarizeQuotePostQuery.graphql
similarity index 100%
rename from quora/graphql/SummarizeQuotePostQuery.graphql
rename to gpt4free/quora/graphql/SummarizeQuotePostQuery.graphql
diff --git a/quora/graphql/SummarizeSharePostQuery.graphql b/gpt4free/quora/graphql/SummarizeSharePostQuery.graphql
similarity index 100%
rename from quora/graphql/SummarizeSharePostQuery.graphql
rename to gpt4free/quora/graphql/SummarizeSharePostQuery.graphql
diff --git a/quora/graphql/UserSnippetFragment.graphql b/gpt4free/quora/graphql/UserSnippetFragment.graphql
similarity index 100%
rename from quora/graphql/UserSnippetFragment.graphql
rename to gpt4free/quora/graphql/UserSnippetFragment.graphql
diff --git a/quora/graphql/ViewerInfoQuery.graphql b/gpt4free/quora/graphql/ViewerInfoQuery.graphql
similarity index 100%
rename from quora/graphql/ViewerInfoQuery.graphql
rename to gpt4free/quora/graphql/ViewerInfoQuery.graphql
diff --git a/quora/graphql/ViewerStateFragment.graphql b/gpt4free/quora/graphql/ViewerStateFragment.graphql
similarity index 100%
rename from quora/graphql/ViewerStateFragment.graphql
rename to gpt4free/quora/graphql/ViewerStateFragment.graphql
diff --git a/quora/graphql/ViewerStateUpdatedSubscription.graphql b/gpt4free/quora/graphql/ViewerStateUpdatedSubscription.graphql
similarity index 100%
rename from quora/graphql/ViewerStateUpdatedSubscription.graphql
rename to gpt4free/quora/graphql/ViewerStateUpdatedSubscription.graphql
diff --git a/quora/graphql/__init__.py b/gpt4free/quora/graphql/__init__.py
similarity index 100%
rename from quora/graphql/__init__.py
rename to gpt4free/quora/graphql/__init__.py
diff --git a/quora/mail.py b/gpt4free/quora/mail.py
similarity index 100%
rename from quora/mail.py
rename to gpt4free/quora/mail.py
diff --git a/theb/README.md b/gpt4free/theb/README.md
similarity index 90%
rename from theb/README.md
rename to gpt4free/theb/README.md
index ca978fc..a4abdf6 100644
--- a/theb/README.md
+++ b/gpt4free/theb/README.md
@@ -1,9 +1,8 @@
### Example: `theb` (use like openai pypi package)
-
```python
# import library
-import theb
+from gpt4free import theb
# simple streaming completion
for token in theb.Completion.create('hello world'):
diff --git a/theb/__init__.py b/gpt4free/theb/__init__.py
similarity index 100%
rename from theb/__init__.py
rename to gpt4free/theb/__init__.py
diff --git a/theb/theb_test.py b/gpt4free/theb/theb_test.py
similarity index 100%
rename from theb/theb_test.py
rename to gpt4free/theb/theb_test.py
diff --git a/you/README.md b/gpt4free/you/README.md
similarity index 96%
rename from you/README.md
rename to gpt4free/you/README.md
index 25c2008..11b4723 100644
--- a/you/README.md
+++ b/gpt4free/you/README.md
@@ -1,7 +1,8 @@
### Example: `you` (use like openai pypi package)
```python
-import you
+
+from gpt4free import you
# simple request with links and details
response = you.Completion.create(
diff --git a/you/__init__.py b/gpt4free/you/__init__.py
similarity index 64%
rename from you/__init__.py
rename to gpt4free/you/__init__.py
index 8bf31f0..97b4846 100644
--- a/you/__init__.py
+++ b/gpt4free/you/__init__.py
@@ -1,28 +1,36 @@
+import json
import re
-from json import loads
+from typing import Optional, List, Dict, Any
from uuid import uuid4
from fake_useragent import UserAgent
+from pydantic import BaseModel
from tls_client import Session
+class PoeResponse(BaseModel):
+ text: Optional[str] = None
+ links: List[str] = []
+ extra: Dict[str, Any] = {}
+
+
class Completion:
@staticmethod
def create(
- prompt: str,
- page: int = 1,
- count: int = 10,
- safe_search: str = 'Moderate',
- on_shopping_page: bool = False,
- mkt: str = '',
- response_filter: str = 'WebPages,Translations,TimeZone,Computation,RelatedSearches',
- domain: str = 'youchat',
- query_trace_id: str = None,
- chat: list = None,
- include_links: bool = False,
- detailed: bool = False,
- debug: bool = False,
- ) -> dict:
+ prompt: str,
+ page: int = 1,
+ count: int = 10,
+ safe_search: str = 'Moderate',
+ on_shopping_page: bool = False,
+ mkt: str = '',
+ response_filter: str = 'WebPages,Translations,TimeZone,Computation,RelatedSearches',
+ domain: str = 'youchat',
+ query_trace_id: str = None,
+ chat: list = None,
+ include_links: bool = False,
+ detailed: bool = False,
+ debug: bool = False,
+ ) -> PoeResponse:
if chat is None:
chat = []
@@ -57,26 +65,28 @@ class Completion:
r'(?<=event: youChatSerpResults\ndata:)(.*\n)*?(?=event: )', response.text
).group()
third_party_search_results = re.search(
- r'(?<=event: thirdPartySearchResults\ndata:)(.*\n)*?(?=event: )', response.text).group()
+ r'(?<=event: thirdPartySearchResults\ndata:)(.*\n)*?(?=event: )', response.text
+ ).group()
# slots = findall(r"slots\ndata: (.*)\n\nevent", response.text)[0]
text = ''.join(re.findall(r'{\"youChatToken\": \"(.*?)\"}', response.text))
extra = {
- 'youChatSerpResults': loads(you_chat_serp_results),
+ 'youChatSerpResults': json.loads(you_chat_serp_results),
# 'slots' : loads(slots)
}
- return {
- 'response': text.replace('\\n', '\n').replace('\\\\', '\\').replace('\\"', '"'),
- 'links': loads(third_party_search_results)['search']['third_party_search_results']
- if include_links
- else None,
- 'extra': extra if detailed else None,
- }
+ response = PoeResponse(text=text.replace('\\n', '\n').replace('\\\\', '\\').replace('\\"', '"'))
+ if include_links:
+ response.links = json.loads(third_party_search_results)['search']['third_party_search_results']
- @classmethod
- def __get_headers(cls) -> dict:
+ if detailed:
+ response.extra = extra
+
+ return response
+
+ @staticmethod
+ def __get_headers() -> dict:
return {
'authority': 'you.com',
'accept': 'text/event-stream',
@@ -93,6 +103,6 @@ class Completion:
'user-agent': UserAgent().random,
}
- @classmethod
- def __get_failure_response(cls) -> dict:
- return dict(response='Unable to fetch the response, Please try again.', links=[], extra={})
+ @staticmethod
+ def __get_failure_response() -> PoeResponse:
+ return PoeResponse(text='Unable to fetch the response, Please try again.')
diff --git a/gui/query_methods.py b/gui/query_methods.py
index 1a4a340..adb4ae9 100644
--- a/gui/query_methods.py
+++ b/gui/query_methods.py
@@ -3,7 +3,7 @@ import sys
sys.path.append(os.path.join(os.path.dirname(__file__), os.path.pardir))
-import forefront, quora, theb, you
+from gpt4free import quora, forefront, theb, you
import random
@@ -15,7 +15,7 @@ def query_forefront(question: str) -> str:
response = ""
# get a response
try:
- for i in forefront.StreamingCompletion.create(token = token, prompt = 'hello world', model='gpt-4'):
+ for i in forefront.StreamingCompletion.create(token=token, prompt='hello world', model='gpt-4'):
response += i.completion.choices[0].text
return response
diff --git a/gui/streamlit_app.py b/gui/streamlit_app.py
index 8a6bcfa..2dba0a7 100644
--- a/gui/streamlit_app.py
+++ b/gui/streamlit_app.py
@@ -4,7 +4,7 @@ import sys
sys.path.append(os.path.join(os.path.dirname(__file__), os.path.pardir))
import streamlit as st
-import you
+from gpt4free import you
def get_answer(question: str) -> str:
diff --git a/pyproject.toml b/pyproject.toml
index 7d3be06..7c2244f 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,7 +5,7 @@ description = ""
authors = ["Raju Komati "]
license = "GPL-3.0"
readme = "README.md"
-packages = [{ include = "openai_rev" }]
+packages = [{ include = "gpt4free" }]
exclude = ["**/*.txt"]
[tool.poetry.dependencies]
diff --git a/test.py b/test.py
index 4b39bd0..0fd2ec8 100644
--- a/test.py
+++ b/test.py
@@ -1,4 +1,4 @@
-import theb
+from gpt4free import theb
for token in theb.Completion.create('hello world'):
print(token, end='', flush=True)
diff --git a/testing/forefront_test.py b/testing/forefront_test.py
index 8d25ed8..b7b5c57 100644
--- a/testing/forefront_test.py
+++ b/testing/forefront_test.py
@@ -1,4 +1,4 @@
-from openai_rev import forefront
+from gpt4free import forefront
# create an account
token = forefront.Account.create(logging=True)
diff --git a/testing/poe_account_create_test.py b/testing/poe_account_create_test.py
index 7072597..ace2306 100644
--- a/testing/poe_account_create_test.py
+++ b/testing/poe_account_create_test.py
@@ -6,8 +6,8 @@ from typing import Optional
from tls_client import Session as TLS
from twocaptcha import TwoCaptcha
-from openai_rev.quora import extract_formkey
-from openai_rev.quora.mail import Emailnator
+from gpt4free.quora import extract_formkey
+from gpt4free.quora.mail import Emailnator
solver = TwoCaptcha('72747bf24a9d89b4dcc1b24875efd358')
diff --git a/testing/poe_test.py b/testing/poe_test.py
index 809804f..22d95f5 100644
--- a/testing/poe_test.py
+++ b/testing/poe_test.py
@@ -1,6 +1,6 @@
from time import sleep
-from openai_rev import quora
+from gpt4free import quora
token = quora.Account.create(proxy=None, logging=True)
print('token', token)
diff --git a/testing/quora_test_2.py b/testing/quora_test_2.py
index 5d06f9e..297ca7a 100644
--- a/testing/quora_test_2.py
+++ b/testing/quora_test_2.py
@@ -1,4 +1,4 @@
-from openai_rev import quora
+from gpt4free import quora
token = quora.Account.create(logging=True, enable_bot_creation=True)
diff --git a/testing/test_main.py b/testing/test_main.py
index 612ec69..bd4d03e 100644
--- a/testing/test_main.py
+++ b/testing/test_main.py
@@ -1,24 +1,28 @@
-from openai_rev import openai_rev, Provider, quora, forefront
+import gpt4free
+from gpt4free import Provider, quora, forefront
# usage You
-response = openai_rev.Completion.create(Provider.You, prompt='Write a poem on Lionel Messi')
+response = gpt4free.Completion.create(Provider.You, prompt='Write a poem on Lionel Messi')
print(response)
# usage Poe
-token = quora.Account.create(logging=False)
-response = openai_rev.Completion.create(
- Provider.Poe, prompt='Write a poem on Lionel Messi', token=token, model='ChatGPT'
-)
+# token = quora.Account.create(logging=False)
+token = 'GKzCahZYGKhp76LfE197xw=='
+response = gpt4free.Completion.create(Provider.Poe, prompt='Write a poem on Lionel Messi', token=token, model='ChatGPT')
print(response)
# usage forefront
token = forefront.Account.create(logging=False)
-response = openai_rev.Completion.create(
+response = gpt4free.Completion.create(
Provider.ForeFront, prompt='Write a poem on Lionel Messi', model='gpt-4', token=token
)
print(response)
print(f'END')
# usage theb
-response = openai_rev.Completion.create(Provider.Theb, prompt='Write a poem on Lionel Messi')
+response = gpt4free.Completion.create(Provider.Theb, prompt='Write a poem on Lionel Messi')
+print(response)
+
+# usage cocalc
+response = gpt4free.Completion.create(Provider.CoCalc, prompt='Write a poem on Lionel Messi', cookie_input='')
print(response)
diff --git a/testing/you_test.py b/testing/you_test.py
index 3480030..1e9f620 100644
--- a/testing/you_test.py
+++ b/testing/you_test.py
@@ -1,4 +1,4 @@
-from openai_rev import you
+from gpt4free import you
# simple request with links and details
response = you.Completion.create(prompt="hello world", detailed=True, include_links=True)
@@ -22,6 +22,6 @@ while True:
response = you.Completion.create(prompt=prompt, chat=chat)
- print("Bot:", response["response"])
+ print("Bot:", response.text)
- chat.append({"question": prompt, "answer": response["response"]})
+ chat.append({"question": prompt, "answer": response.text})