| author | t.me/xtekky <98614666+xtekky@users.noreply.github.com> | 2023-04-22 14:54:01 +0200 |
| --- | --- | --- |
| committer | t.me/xtekky <98614666+xtekky@users.noreply.github.com> | 2023-04-22 14:54:01 +0200 |
| commit | 789b209d03ba45d4dbe9af924f5806abda371cd9 (patch) | |
| tree | 2518e63234b6a437745070d90810098a96fca3e4 /README.md | |
| parent | Merge branch 'main' of https://github.com/xtekky/gpt4free (diff) | |
| download | gpt4free-789b209d03ba45d4dbe9af924f5806abda371cd9 (tar, tar.gz, tar.bz2, tar.lz, tar.xz, tar.zst, zip) | |
Diffstat (limited to 'README.md')
-rw-r--r-- | README.md | 291 |
1 file changed, 11 insertions, 280 deletions
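If you have a local clone, this change can also be inspected with plain git; a minimal sketch, assuming the commit hash above is reachable in your clone (the repository URL is taken from the parent entry):

```sh
# clone the repository referenced in the parent entry
git clone https://github.com/xtekky/gpt4free
cd gpt4free

# show this commit's metadata plus the README.md diff only
git show 789b209d03ba45d4dbe9af924f5806abda371cd9 -- README.md

# diffstat only; should report 11 insertions and 280 deletions for README.md
git show --stat 789b209d03ba45d4dbe9af924f5806abda371cd9 -- README.md
```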
@@ -43,292 +43,23 @@ These sites will be reverse engineered but need account access:
 ## Usage Examples <a name="usage-examples"></a>
 
-### Example: `quora (poe)` (use like openai pypi package) - GPT-4 <a name="example-poe"></a>
-
-```python
-# quora model names: (use left key as argument)
-models = {
-    'sage'                : 'capybara',
-    'gpt-4'               : 'beaver',
-    'claude-v1.2'         : 'a2_2',
-    'claude-instant-v1.0' : 'a2',
-    'gpt-3.5-turbo'       : 'chinchilla'
-}
-```
-
-#### !! new: bot creation
-
-```python
-# import quora (poe) package
-import quora
-
-# create account
-# make sure to set enable_bot_creation to True
-token = quora.Account.create(logging = True, enable_bot_creation = True)
-
-model = quora.Model.create(
-    token         = token,
-    model         = 'gpt-3.5-turbo', # or claude-instant-v1.0
-    system_prompt = 'you are ChatGPT a large language model ...'
-)
-
-print(model.name) # gptx....
-
-# streaming response
-for response in quora.StreamingCompletion.create(
-    custom_model = model.name,
-    prompt       = 'hello world',
-    token        = token):
-
-    print(response.completion.choices[0].text)
-```
-
-#### Normal Response:
-```python
-response = quora.Completion.create(model = 'gpt-4',
-    prompt = 'hello world',
-    token  = token)
-
-print(response.completion.choices[0].text)
-```
-
-### Example: `phind` (use like openai pypi package) <a name="example-phind"></a>
-
-```python
-import phind
-
-# set cf_clearance cookie
-phind.cf_clearance = 'xx.xx-1682166681-0-160'
-
-prompt = 'who won the Qatar world cup'
-
-# help needed: not getting newlines from the stream, please submit a PR if you know how to fix this
-# stream completion
-for result in phind.StreamingCompletion.create(
-    model       = 'gpt-4',
-    prompt      = prompt,
-    results     = phind.Search.create(prompt, actualSearch = True), # create search (set actualSearch to False to disable internet)
-    creative    = False,
-    detailed    = False,
-    codeContext = ''): # up to 3000 chars of code
-
-    print(result.completion.choices[0].text, end='', flush=True)
-
-# normal completion
-result = phind.Completion.create(
-    model       = 'gpt-4',
-    prompt      = prompt,
-    results     = phind.Search.create(prompt, actualSearch = True), # create search (set actualSearch to False to disable internet)
-    creative    = False,
-    detailed    = False,
-    codeContext = '') # up to 3000 chars of code
-
-print(result.completion.choices[0].text)
-```
-
-### Example: `t3nsor` (use like openai pypi package) <a name="example-t3nsor"></a>
-
-```python
-# Import t3nsor
-import t3nsor
-
-# t3nsor.Completion.create
-# t3nsor.StreamCompletion.create
-
[...]
-
-```
-
-#### Example Chatbot
-```python
-messages = []
-
-while True:
-    user = input('you: ')
-
-    t3nsor_cmpl = t3nsor.Completion.create(
-        prompt   = user,
-        messages = messages
-    )
-
-    print('gpt:', t3nsor_cmpl.completion.choices[0].text)
-
-    messages.extend([
-        {'role': 'user', 'content': user},
-        {'role': 'assistant', 'content': t3nsor_cmpl.completion.choices[0].text}
-    ])
-```
-
-#### Streaming Response:
-
-```python
-for response in t3nsor.StreamCompletion.create(
-    prompt   = 'write python code to reverse a string',
-    messages = []):
-
-    print(response.completion.choices[0].text)
-```
-
-### Example: `ora` (use like openai pypi package) <a name="example-ora"></a>
-
-### load model (new)
-
-more gpt4 models in `/testing/ora_gpt4.py`
-
-```python
-# normal gpt-4: b8b12eaa-5d47-44d3-92a6-4d706f2bcacf
-model = ora.CompletionModel.load(chatbot_id, 'gpt-4') # or gpt-3.5
-```
-
-#### create model / chatbot:
-```python
-# import ora
-import ora
-
-# create model
-model = ora.CompletionModel.create(
-    system_prompt = 'You are ChatGPT, a large language model trained by OpenAI. Answer as concisely as possible',
-    description   = 'ChatGPT Openai Language Model',
-    name          = 'gpt-3.5')
-
-# init conversation (will give you a conversationId)
-init = ora.Completion.create(
-    model  = model,
-    prompt = 'hello world')
-
-print(init.completion.choices[0].text)
-
-while True:
-    # pass in conversationId to continue conversation
-
-    prompt = input('>>> ')
-    response = ora.Completion.create(
-        model          = model,
-        prompt         = prompt,
-        includeHistory = True, # remember history
-        conversationId = init.id)
-
-    print(response.completion.choices[0].text)
-```
-
-### Example: `writesonic` (use like openai pypi package) <a name="example-writesonic"></a>
-
-```python
-# import writesonic
-import writesonic
-
-# create account (3-4s)
-account = writesonic.Account.create(logging = True)
-
-# with logging:
-    # 2023-04-06 21:50:25 INFO __main__ -> register success : '{"id":"51aa0809-3053-44f7-922a...' (2s)
-    # 2023-04-06 21:50:25 INFO __main__ -> id : '51aa0809-3053-44f7-922a-2b85d8d07edf'
-    # 2023-04-06 21:50:25 INFO __main__ -> token : 'eyJhbGciOiJIUzI1NiIsInR5cCI6Ik...'
-    # 2023-04-06 21:50:28 INFO __main__ -> got key : '194158c4-d249-4be0-82c6-5049e869533c' (2s)
-
-# simple completion
-response = writesonic.Completion.create(
-    api_key = account.key,
-    prompt  = 'hello world'
-)
-
-print(response.completion.choices[0].text) # Hello! How may I assist you today?
-
-# conversation
-
-response = writesonic.Completion.create(
-    api_key       = account.key,
-    prompt        = 'what is my name ?',
-    enable_memory = True,
-    history_data  = [
-        {
-            'is_sent': True,
-            'message': 'my name is Tekky'
-        },
-        {
-            'is_sent': False,
-            'message': 'hello Tekky'
-        }
-    ]
-)
-
-print(response.completion.choices[0].text) # Your name is Tekky.
-
-# enable internet
-
-response = writesonic.Completion.create(
-    api_key = account.key,
-    prompt  = 'who won the Qatar world cup ?',
-    enable_google_results = True
-)
-
-print(response.completion.choices[0].text) # Argentina won the 2022 FIFA World Cup tournament held in Qatar ...
-```
-
-### Example: `you` (use like openai pypi package) <a name="example-you"></a>
-
-```python
-import you
-
-# simple request with links and details
-response = you.Completion.create(
-    prompt       = "hello world",
-    detailed     = True,
-    includelinks = True)
-
-print(response)
-
-# {
-#     "response": "...",
-#     "links": [...],
-#     "extra": {...},
-#     "slots": {...}
-# }
-
-# chatbot
-
-chat = []
-
-while True:
-    prompt = input("You: ")
-
-    response = you.Completion.create(
-        prompt = prompt,
-        chat   = chat)
-
-    print("Bot:", response["response"])
-
-    chat.append({"question": prompt, "answer": response["response"]})
-```
+| NOTE: the instructions for each module have been moved to the module's own folder |
+| --- |
 
 ## Dependencies
-The repository is written in Python and requires the following packages:
-
 * websocket-client
 * requests
 * tls-client
-
-You can install these packages using the provided `requirements.txt` file.
-
-## Repository structure:
-    .
-    ├── ora/
-    ├── quora/ (/poe)
-    ├── t3nsor/
-    ├── testing/
-    ├── writesonic/
-    ├── you/
-    ├── README.md  <-- this file.
-    └── requirements.txt
-
-
-## Star History
-
-[![Star History Chart](https://api.star-history.com/svg?repos=xtekky/openai-gpt4&type=Timeline)](https://star-history.com/#xtekky/openai-gpt4&Timeline)
-
+* pypasser
+* names
+* colorama
+* curl_cffi
+
+install with:
+```sh
+pip3 install -r requirements.txt
+```
 
 ## Copyright:
 This program is licensed under the [GNU GPL v3](https://www.gnu.org/licenses/gpl-3.0.txt)
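As a supplement to the Dependencies list introduced above, the same packages can also be installed without the requirements file; a rough sketch, assuming each bullet in the README corresponds to a PyPI package of the same name:

```sh
# direct install of the packages listed under Dependencies
# (assumes the PyPI names match the README's bullet list)
pip3 install websocket-client requests tls-client pypasser names colorama curl_cffi
```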