
discontinue ora.sh api

t.me/xtekky 2023-04-25 18:40:17 +01:00
parent c7f00a9dac
commit e341c75e5e
15 changed files with 2 additions and 371 deletions

View file

@ -23,7 +23,6 @@ By the way, thank you so much for `2k` stars and all the support!!
- [`quora (poe)`](./quora/README.md)
- [`phind`](./phind/README.md)
- [`t3nsor`](./t3nsor/README.md)
- [`ora`](./ora/README.md)
- [`writesonic`](./writesonic/README.md)
- [`you`](./you/README.md)
- [`sqlchat`](./sqlchat/README.md)
@ -44,7 +43,6 @@ By the way, thank you so much for `2k` stars and all the support!!
| Website | Model(s) |
| ---------------------------------------------------- | ------------------------------- |
| [ora.sh](https://ora.sh) | GPT-3.5 / 4 |
| [poe.com](https://poe.com) | GPT-4/3.5 |
| [writesonic.com](https://writesonic.com) | GPT-3.5 / Internet |
| [t3nsor.com](https://t3nsor.com) | GPT-3.5 |
@ -64,8 +62,7 @@ By the way, thank you so much for `2k` stars and all the support!!
- why not `ora` anymore? gpt-4 requires login + is limited
#### gpt-3.5
- [`/ora`](./ora/README.md)
- only stable api at the moment (for gpt-3.5, gpt-4 is dead)
- looking for a stable api at the moment
## Install <a name="install"></a>
download or clone this GitHub repo

View file

@ -1,49 +0,0 @@
### Example: `ora` (use like openai pypi package) <a name="example-ora"></a>
#### load model (new)
more gpt-4 chatbot ids are listed in `/testing/ora_gpt4.py`
find your user id by visiting https://ora.sh/api/auth/session (you must be logged in on the site), and take the session token from your cookies; it should be named `__Secure-next-auth.session-token`
```python
import ora

# if using CompletionModel.load, set these first
ora.user_id = '...'
ora.session_token = '...'

# normal gpt-4 chatbot id: b8b12eaa-5d47-44d3-92a6-4d706f2bcacf
chatbot_id = 'b8b12eaa-5d47-44d3-92a6-4d706f2bcacf'
model = ora.CompletionModel.load(chatbot_id, 'gpt-4') # or 'gpt-3.5'
```
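
If you'd rather look the user id up programmatically, the same session endpoint can be queried with the session cookie. This is a minimal sketch, not part of the package; the exact shape of the JSON it returns isn't documented here, so it only prints the payload for inspection:

```python
import requests

session_token = '...'  # value of the __Secure-next-auth.session-token cookie

# query the session endpoint with the session cookie and inspect the payload
resp = requests.get(
    'https://ora.sh/api/auth/session',
    cookies = {'__Secure-next-auth.session-token': session_token})

print(resp.json())  # copy the user id from this payload into ora.user_id
```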
#### create model / chatbot:
```python
# import ora
import ora

# create model
model = ora.CompletionModel.create(
    system_prompt = 'You are ChatGPT, a large language model trained by OpenAI. Answer as concisely as possible',
    description   = 'ChatGPT Openai Language Model',
    name          = 'gpt-3.5')

# init conversation (will give you a conversationId)
init = ora.Completion.create(
    model  = model,
    prompt = 'hello world')

print(init.completion.choices[0].text)

while True:
    # pass in conversationId to continue conversation
    prompt = input('>>> ')
    response = ora.Completion.create(
        model          = model,
        prompt         = prompt,
        includeHistory = True, # remember history
        conversationId = init.id)

    print(response.completion.choices[0].text)
```

View file

@ -1,62 +0,0 @@
from ora.model import CompletionModel
from ora.typing import OraResponse
from requests import post
from time import time
from random import randint
from ora._jwt import do_jwt

user_id       = None
session_token = None

class Completion:
    def create(
        model : CompletionModel,
        prompt: str,
        includeHistory: bool = True,
        conversationId: str or None = None) -> OraResponse:

        extra = {
            'conversationId': conversationId} if conversationId else {}

        cookies = {
            "cookie" : f"__Secure-next-auth.session-token={session_token}"} if session_token else {}

        json_data = extra | {
            'chatbotId'     : model.id,
            'input'         : prompt,
            'userId'        : user_id if user_id else model.createdBy,
            'model'         : model.modelName,
            'provider'      : 'OPEN_AI',
            'includeHistory': includeHistory}

        response = post('https://ora.sh/api/conversation',
            headers = cookies | {
                "host"          : "ora.sh",
                "authorization" : f"Bearer AY0{randint(1111, 9999)}",
                "user-agent"    : "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36",
                "origin"        : "https://ora.sh",
                "referer"       : "https://ora.sh/chat/",
                "x-signed-token": do_jwt(json_data)
            },
            json = json_data).json()

        if response.get('error'):
            raise Exception('''set ora.user_id and ora.session_token\napi response: %s''' % response['error'])

        return OraResponse({
            'id'     : response['conversationId'],
            'object' : 'text_completion',
            'created': int(time()),
            'model'  : model.slug,
            'choices': [{
                'text'          : response['response'],
                'index'         : 0,
                'logprobs'      : None,
                'finish_reason' : 'stop'
            }],
            'usage': {
                'prompt_tokens'     : len(prompt),
                'completion_tokens' : len(response['response']),
                'total_tokens'      : len(prompt) + len(response['response'])
            }
        })
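
The dictionary passed to `OraResponse` above mirrors the OpenAI completion response shape, so the result can be read the same way. A minimal usage sketch (credentials are placeholders; the chatbot id is the gpt-4 one referenced elsewhere in this repo):

```python
import ora

ora.user_id       = '...'  # placeholder
ora.session_token = '...'  # placeholder

model    = ora.CompletionModel.load('b8b12eaa-5d47-44d3-92a6-4d706f2bcacf', 'gpt-4')
response = ora.Completion.create(model = model, prompt = 'ping')

print(response.id)                          # conversationId returned by ora.sh
print(response.completion.choices[0].text)  # completion text
print(response.usage.total_tokens)          # note: "token" counts here are really character counts
```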

View file

@ -1,75 +0,0 @@
import jwt
from datetime import datetime, timedelta

# from cryptography.hazmat.primitives import serialization
# from cryptography.hazmat.primitives.serialization import load_pem_private_key
# from cryptography.hazmat.backends import default_backend

def do_jwt(json_data: dict):
    private_key = b'''-----BEGIN RSA PRIVATE KEY-----
MIIJKAIBAAKCAgEAxv9TLZP2TnsR512LqzT52N6Z9ixKmUA11jy0IXH0dEbdbfBw
eeWrXoTuIYcY8Dkg/+q33ppfujYfb0z22bs/CZ63+jBL2UmxG/0XIzmsQlHSgJd/
rnbERwIt7/ZjOHAcNrAzI0N11AI8AT0+M3XFOGRoIKzoc3Juxl7eyyPPEkNZMkEv
lYfDN5AMD/+4pZ+7SCEzUCyGtBejW2P+NwTvjBxhLjIoG+m7yh81RoIBnO+Z1o5X
ZtospuWZe1L6GNh+zezeHIyBGYgGgYbPboQ8QeHhoh+n0PuZB0GQqorqfxHjB38t
yB4qsRGi10UNcohvFhglZk8kdMYBTd0M5ik5t4sx/ujjF57gX7dCKipHimDy7McY
ElVLTDoSkwD/Lg3tV0utky42dL/iIMePlHfMrw/m2oAm33/dCaiAW8grNkJPjcwo
Y8pnqpFGgAZX+6WalQCfoSStV4kYYlaq11DB6dZjDYoKLRIyH7MCAmMxms9569qe
5gFuyQWTZgXlKoj2Zd7XIaIs5s/A6PFt7sxk8mOY/DspSbygZZCnMH3+or/8trH2
p0fGEkqpzMKAY6TYtdYhOyTbup3VOKQwhk8b5CPuEWZutE6pT0O2O81MkuEEl/Zw
/M1MJERTIjGAThsL0yvEn1Gi5HXl7s/5E61Yvc0ItORqio70PZcToRII27ECAwEA
AQKCAgEAle0H3e78Q2S1uHriH7tqAdq0ZKQ6D/wwk5honkocwv4hFhNwqmY/FpdQ
UjJWt6ZTFnzgyvXD6aedR13VHXXVqInMUtLQUoUSyuOD6yYogk7jKb76k5cnidg6
g/A+EOdmWk2mOYs52uFUFBrwIhU44aPET9n1yAUPMKWJdcMk372eFh7GmwIOMm50
qBkiJKaTk2RwJJdnZYfpq5FKlmlBkW5QSV3AmkcfFMkuelC4pmReoyfa8cKuoY+a
cy+w/ccewkcTkK7LFVFGlY/b+IfoXjqwpFT1Op5UTQM420SOJ+5x/dPzyjHwODfx
V/7OgtwH1b2bb9lwvgnwMZm5fi7RLAOC5BaSrZUb8WtVaaKURzXgdE+5LO/xXYCy
JECbRQ5o4H4CwOc3mvJZL0O/dwPKoTccjELc8HOcogdy+hrJPXFl+oXy3yKUmf5L
Lx13hh/kO4960TcGVQsUPV9oYB8XU5iYC1cMdlMVZAOwoLE1h/Tro0blisq6eafx
+4ZS+COJEM+A7UgFacxdQ9C4bL5ZgjgLxMEsCIjwBN1i/bMEKpj46ulH23I57F1S
jr6/UtMPO73c2bGcxdzRRQSI/LW5Qnb4USQsOIjYDVReLM9hDvI4OyQ2pfcgXlTL
ODky2qivbP6WA4GKCBhaDEaeKFNDiyCqx9ObftCbRk1fWu7IP4ECggEBAOnPs88o
DQLEaColCbh3ziogoANYMKiqaJUacnH5S5e1/aW3jgVK85NsMJT9hsODXyHup/CF
RT3jeJA5cRj+04KI33cH2F5X9MhPB0a2Zo0Io813l95d2Wuk9rnadNCr8+h3b/nM
HR4X+n7l0x6Y8sn60pxesYXKu8NFccUCVcGUvrrL2gsPLPB//3eqgfZuf8BCDzOB
liO8Pzt0ELjxwxUWB9kPKLNZwVa0hq4snJThZQBrlMQcuH8BmitS5vZDVwiRLGVR
L5z+tPJMz5wJ/dGbjyMMONCZgiXypqb1qHIGt8FEPLryQ6u+04ZszxW9QTsWqMqi
ZvoFo0VPGkXGIWcCggEBANnh1tTCfGJSrwK1fWVhBajtn03iE5DuIkPUmL8juBq6
LSYG8tuk+zt0RVNYLYrM2nSzU78IsuR/15XtpheDh3Fy1ZdsAe/boccdZUrLtH9h
hRcAYUfY+E0E4U6j7FVTQCy9eNGnWJ/su2N0GDJll2BQWi8bcnL8dZqsq8pZzAjo
7jBlOEe2xOVbCsBLfCW7tmeKCv4cc8digITGemig4NgCs6W03gJPnvnvvHMnuF3u
8YjD9kWWEpQr37pT6QSdhwzKMAOeHbhh/CQO/sl+fBLbcYikQa0HIuvj+29w0/jv
whVfsJxCvs6fCTMYjQE7GdTcGmCbvs+x7TrXuqeT8ycCggEAWr4Un/KAUjGd57Vm
N2Sv6+OrloC0qdExM6UHA7roHqIwJg++G8nCDNYxaLGYiurCki3Ime1vORy+XuMc
RMIpnoC2kcDGtZ7XTqJ1RXlnBZdz0zt2AoRT7JYid3EUYyRJTlCEceNI7bQKsRNL
Q5XCrKce9DdAGJfdFWUvSXGljLLI70BMiHxESbazlGLle5nZFOnOcoP5nDbkJ5Pd
JZoWx2k8dH6QokLUaW041AJWZuWvSGF4ZEBtTkV16xiKsMrjzVxiaZP/saOc4Gj1
Li8mhiIkhEqrBjJ9s3KgQS4YSODYkjaEh12c69vsxkAWgu5nkaIysiojYyeq/Sw9
GxVRQwKCAQAeYvTHL2iRfd6SjiUy4lkbuighgIoiCFQXCatT3PNsJtLtHsL4BwZS
wGB6wy120iMVa30eg2QPohS7AC3N0bYuCEnpmFKc1RC26E6cI9TEfyFEl/T5RDU8
6JVTlmD7dWTZ2ILlGmWtyCJKOIK3ZJu7/vjU4QsRJkxwiexbiDKAe5vcfAFhXwgO
xKe3Mc/ao1dJEWN/FRDAmeg6nEOuG+G/voC3d4YO5HPTf6/Uj5GS6CQfYtUR12A3
8fZ90f4Jer6+9ePEXWTftiqoDL9T8qPzLU+kMuRF8VzZcS472Ix3h1iWCoZjBJv/
zQZHbgEcTtXHbfrvxkjSRopDTprljCi5AoIBAGc6M8/FH1pLgxOgS6oEGJAtErxv
EnmELzKvfwBryphx8f0S5sHoiqli+5dqFtw5h5yy/pXrNzLi0LfpmFzxbChfO8ai
omC/oqxU0FKqY2msFYdnfwM3PZeZ3c7LALLhWG56/fIYMtV78+cfqkRPM8nRJXaF
Aza2YTTZGfh3x10KnSLWUmhIWUEj8VzCNW7SR0Ecqa+ordAYio4wBsq7sO3sCw8G
Oi0/98ondhGJWL3M6FDGai8dXewt+8o0dlq95mHkNNopCWbPI71pM7u4ABPL50Yd
spd4eADxTm2m0GR7bhVEIbYfc0aAzIoWDpVs4V3vmx+bdRbppFxV1aS/r0g=
-----END RSA PRIVATE KEY-----'''

    header = {
        'alg': 'RS256',
        'typ': 'JWT',
        'kid': '1c8a5da7-527e-4bee-aa8d-aabda16c59ce'
    }

    payload = {
        **json_data,
        'iat': int(datetime.now().timestamp()),
        'exp': int((datetime.now() + timedelta(minutes=10)).timestamp()),
        'iss': 'https://rick.roll'
    }

    return jwt.encode(payload, private_key, algorithm='RS256', headers=header)
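
As a quick sanity check, the token returned by `do_jwt` can be decoded locally without verifying the signature, just to confirm the claims it carries; a minimal sketch:

```python
import jwt
from ora._jwt import do_jwt

token  = do_jwt({'input': 'hello'})
claims = jwt.decode(token, options = {'verify_signature': False})

print(claims['iss'])    # 'https://rick.roll'
print(claims['input'])  # 'hello' (the request payload is embedded in the token)
```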

View file

@ -1,57 +0,0 @@
from uuid import uuid4
from requests import post

class CompletionModel:
    system_prompt = None
    description   = None
    createdBy     = None
    createdAt     = None
    slug          = None
    id            = None
    modelName     = None
    model         = 'gpt-3.5-turbo'

    def create(
        system_prompt: str = 'You are ChatGPT, a large language model trained by OpenAI. Answer as concisely as possible',
        description  : str = 'ChatGPT Openai Language Model',
        name         : str = 'gpt-3.5'):

        CompletionModel.system_prompt = system_prompt
        CompletionModel.description   = description
        CompletionModel.slug          = name

        json_data = {
            'prompt'     : system_prompt,
            'userId'     : f'auto:{uuid4()}',
            'name'       : name,
            'description': description}

        headers = {
            'Origin'    : 'https://ora.sh',
            'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/16.4 Safari/605.1.15',
            'Referer'   : 'https://ora.sh/',
            'Host'      : 'ora.sh',
        }

        response = post('https://ora.sh/api/assistant', headers = headers, json = json_data)
        print(response.json())

        CompletionModel.id        = response.json()['id']
        CompletionModel.createdBy = response.json()['createdBy']
        CompletionModel.createdAt = response.json()['createdAt']

        return CompletionModel

    def load(chatbotId: str, modelName: str = 'gpt-3.5-turbo', userId: str = None):
        if userId is None: userId = f'{uuid4()}'

        CompletionModel.system_prompt = None
        CompletionModel.description   = None
        CompletionModel.slug          = None
        CompletionModel.id            = chatbotId
        CompletionModel.createdBy     = userId
        CompletionModel.createdAt     = None
        CompletionModel.modelName     = modelName

        return CompletionModel
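
Note that `create` and `load` set class-level attributes and return the class itself rather than an instance, so only one model configuration is active per process. A minimal sketch of `load` with an explicit user id (placeholder values):

```python
from ora.model import CompletionModel

# load an existing chatbot by id; userId falls back to a random uuid4 when omitted
model = CompletionModel.load(
    chatbotId = 'b8b12eaa-5d47-44d3-92a6-4d706f2bcacf',
    modelName = 'gpt-4',
    userId    = '...')  # placeholder: your ora.sh user id
```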

View file

@ -1,39 +0,0 @@
class OraResponse:

    class Completion:

        class Choices:
            def __init__(self, choice: dict) -> None:
                self.text          = choice['text']
                self.content       = self.text.encode()
                self.index         = choice['index']
                self.logprobs      = choice['logprobs']
                self.finish_reason = choice['finish_reason']

            def __repr__(self) -> str:
                return f'''<__main__.APIResponse.Completion.Choices(\n text = {self.text.encode()},\n index = {self.index},\n logprobs = {self.logprobs},\n finish_reason = {self.finish_reason})object at 0x1337>'''

        def __init__(self, choices: dict) -> None:
            self.choices = [self.Choices(choice) for choice in choices]

    class Usage:
        def __init__(self, usage_dict: dict) -> None:
            self.prompt_tokens     = usage_dict['prompt_tokens']
            self.completion_tokens = usage_dict['completion_tokens']
            self.total_tokens      = usage_dict['total_tokens']

        def __repr__(self):
            return f'''<__main__.APIResponse.Usage(\n prompt_tokens = {self.prompt_tokens},\n completion_tokens = {self.completion_tokens},\n total_tokens = {self.total_tokens})object at 0x1337>'''

    def __init__(self, response_dict: dict) -> None:
        self.response_dict = response_dict
        self.id            = response_dict['id']
        self.object        = response_dict['object']
        self.created       = response_dict['created']
        self.model         = response_dict['model']
        self.completion    = self.Completion(response_dict['choices'])
        self.usage         = self.Usage(response_dict['usage'])

    def json(self) -> dict:
        return self.response_dict
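
For reference, `OraResponse` just wraps an OpenAI-style dictionary, which is how `Completion.create` builds it; a minimal sketch with made-up values:

```python
from ora.typing import OraResponse

resp = OraResponse({
    'id'     : 'conv-123',  # made-up conversation id
    'object' : 'text_completion',
    'created': 0,
    'model'  : 'gpt-3.5',
    'choices': [{'text': 'hi', 'index': 0, 'logprobs': None, 'finish_reason': 'stop'}],
    'usage'  : {'prompt_tokens': 5, 'completion_tokens': 2, 'total_tokens': 7}})

print(resp.completion.choices[0].text)  # 'hi'
print(resp.json())                      # the original dictionary
```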

View file

@ -1,15 +0,0 @@
import ora

# create model
model = ora.CompletionModel.create(
    system_prompt = 'You are ChatGPT, a large language model trained by OpenAI. Answer as concisely as possible',
    description   = 'ChatGPT Openai Language Model',
    name          = 'gpt-3.5')

# init conversation (will give you a conversationId)
init = ora.Completion.create(
    model  = model,
    prompt = 'hello world')

print(init.completion.choices[0].text)

View file

@ -1,45 +0,0 @@
import ora
ora.user_id = '...'
ora.session_token = '...'
gpt4_chatbot_ids = ['b8b12eaa-5d47-44d3-92a6-4d706f2bcacf', 'fbe53266-673c-4b70-9d2d-d247785ccd91', 'bd5781cf-727a-45e9-80fd-a3cfce1350c6', '993a0102-d397-47f6-98c3-2587f2c9ec3a', 'ae5c524e-d025-478b-ad46-8843a5745261', 'cc510743-e4ab-485e-9191-76960ecb6040', 'a5cd2481-8e24-4938-aa25-8e26d6233390', '6bca5930-2aa1-4bf4-96a7-bea4d32dcdac', '884a5f2b-47a2-47a5-9e0f-851bbe76b57c', 'd5f3c491-0e74-4ef7-bdca-b7d27c59e6b3', 'd72e83f6-ef4e-4702-844f-cf4bd432eef7', '6e80b170-11ed-4f1a-b992-fd04d7a9e78c', '8ef52d68-1b01-466f-bfbf-f25c13ff4a72', 'd0674e11-f22e-406b-98bc-c1ba8564f749', 'a051381d-6530-463f-be68-020afddf6a8f', '99c0afa1-9e32-4566-8909-f4ef9ac06226', '1be65282-9c59-4a96-99f8-d225059d9001', 'dba16bd8-5785-4248-a8e9-b5d1ecbfdd60', '1731450d-3226-42d0-b41c-4129fe009524', '8e74635d-000e-4819-ab2c-4e986b7a0f48', 'afe7ed01-c1ac-4129-9c71-2ca7f3800b30', 'e374c37a-8c44-4f0e-9e9f-1ad4609f24f5']
chatbot_id = gpt4_chatbot_ids[0]
model = ora.CompletionModel.load(chatbot_id, 'gpt-4')
response = ora.Completion.create(model, 'hello')
print(response.completion.choices[0].text)
conversation_id = response.id
while True:
    # pass in conversationId to continue conversation
    prompt = input('>>> ')
    response = ora.Completion.create(
        model          = model,
        prompt         = prompt,
        includeHistory = True, # remember history
        conversationId = conversation_id)

    print(response.completion.choices[0].text)
# bots :
# 1 normal
# 2 solidity contract helper
# 3 swift project helper
# 4 developer gpt
# 5 lawsuit bot for spam call
# 6 p5.js code help bot
# 8 AI professor, for controversial topics
# 9 HustleGPT, your entrepreneurial AI
# 10 midjourney prompts bot
# 11 AI philosophy professor
# 12 TypeScript and JavaScript code review bot
# 13 credit card transaction details to merchant and location bot
# 15 Chemical Compound Similarity and Purchase Tool bot
# 16 expert full-stack developer AI
# 17 Solana development bot
# 18 price guessing game bot
# 19 AI Ethicist and Philosopher

View file

@ -1,24 +0,0 @@
import ora
complex_question = '''
James is talking to two people, his father, and his friend.
Douglas asks him, "What did you do today James?"
James replies, "I went on a fishing trip."
Josh then asks, "Did you catch anything?"
James replies, "Yes, I caught a couple of nice rainbow trout. It was a lot of fun."
Josh replies, "Good job son, tell your mother we should eat them tonight, she'll be very happy."
Douglas then says, "I wish my family would eat fish tonight, my father is making pancakes."
Question: Who is James' father?
'''
# right answer is josh
model = ora.CompletionModel.load('b8b12eaa-5d47-44d3-92a6-4d706f2bcacf', 'gpt-4')
# init conversation (will give you a conversationId)
init = ora.Completion.create(
    model  = model,
    prompt = complex_question)
print(init.completion.choices[0].text) # James' father is Josh.

v2.py Normal file
View file

View file

@ -4,7 +4,7 @@ import you
response = you.Completion.create(
prompt = "hello world",
detailed = True,
includelinks = True,)
includelinks = True)
print(response)