author    kqlio67 <kqlio67@users.noreply.github.com>    2024-10-11 08:33:30 +0200
committer kqlio67 <kqlio67@users.noreply.github.com>    2024-10-11 08:33:30 +0200
commit    a9bc67362f2be529fe9165ebb13347195ba1ddcf (patch)
tree      1a91836eaa94f14c18ad5d55f687ff8a2118c357 /g4f/Provider/nexra/NexraChatGPT.py
parent    feat(g4f/Provider/__init__.py): add new providers and update imports (diff)
Diffstat (limited to '')
-rw-r--r--  g4f/Provider/nexra/NexraChatGPT.py | 97
1 file changed, 60 insertions, 37 deletions
diff --git a/g4f/Provider/nexra/NexraChatGPT.py b/g4f/Provider/nexra/NexraChatGPT.py
index 8ed83f98..f9f49139 100644
--- a/g4f/Provider/nexra/NexraChatGPT.py
+++ b/g4f/Provider/nexra/NexraChatGPT.py
@@ -1,22 +1,60 @@
from __future__ import annotations
+
from aiohttp import ClientSession
+import json
+
from ...typing import AsyncResult, Messages
from ..base_provider import AsyncGeneratorProvider, ProviderModelMixin
from ..helper import format_prompt
-import json
+
class NexraChatGPT(AsyncGeneratorProvider, ProviderModelMixin):
label = "Nexra ChatGPT"
+ url = "https://nexra.aryahcr.cc/documentation/chatgpt/en"
api_endpoint = "https://nexra.aryahcr.cc/api/chat/gpt"
+ working = True
+ supports_gpt_35_turbo = True
+ supports_gpt_4 = True
+ supports_stream = False
+
+ default_model = 'gpt-3.5-turbo'
+ models = ['gpt-4', 'gpt-4-0613', 'gpt-4-0314', 'gpt-4-32k-0314', 'gpt-3.5-turbo', 'gpt-3.5-turbo-16k', 'gpt-3.5-turbo-0613', 'gpt-3.5-turbo-16k-0613', 'gpt-3.5-turbo-0301', 'text-davinci-003', 'text-davinci-002', 'code-davinci-002', 'gpt-3', 'text-curie-001', 'text-babbage-001', 'text-ada-001', 'davinci', 'curie', 'babbage', 'ada', 'babbage-002', 'davinci-002']
+
+ model_aliases = {
+ "gpt-4": "gpt-4-0613",
+ "gpt-4": "gpt-4-32k",
+ "gpt-4": "gpt-4-0314",
+ "gpt-4": "gpt-4-32k-0314",
+
+ "gpt-3.5-turbo": "gpt-3.5-turbo-16k",
+ "gpt-3.5-turbo": "gpt-3.5-turbo-0613",
+ "gpt-3.5-turbo": "gpt-3.5-turbo-16k-0613",
+ "gpt-3.5-turbo": "gpt-3.5-turbo-0301",
+
+ "gpt-3": "text-davinci-003",
+ "gpt-3": "text-davinci-002",
+ "gpt-3": "code-davinci-002",
+ "gpt-3": "text-curie-001",
+ "gpt-3": "text-babbage-001",
+ "gpt-3": "text-ada-001",
+ "gpt-3": "text-ada-001",
+ "gpt-3": "davinci",
+ "gpt-3": "curie",
+ "gpt-3": "babbage",
+ "gpt-3": "ada",
+ "gpt-3": "babbage-002",
+ "gpt-3": "davinci-002",
+ }
- models = [
- 'gpt-4', 'gpt-4-0613', 'gpt-4-32k', 'gpt-4-0314', 'gpt-4-32k-0314',
- 'gpt-3.5-turbo', 'gpt-3.5-turbo-16k', 'gpt-3.5-turbo-0613',
- 'gpt-3.5-turbo-16k-0613', 'gpt-3.5-turbo-0301',
- 'gpt-3', 'text-davinci-003', 'text-davinci-002', 'code-davinci-002',
- 'text-curie-001', 'text-babbage-001', 'text-ada-001',
- 'davinci', 'curie', 'babbage', 'ada', 'babbage-002', 'davinci-002',
- ]
+
+ @classmethod
+ def get_model(cls, model: str) -> str:
+ if model in cls.models:
+ return model
+ elif model in cls.model_aliases:
+ return cls.model_aliases[model]
+ else:
+ return cls.default_model
@classmethod
async def create_async_generator(
@@ -26,41 +64,26 @@ class NexraChatGPT(AsyncGeneratorProvider, ProviderModelMixin):
proxy: str = None,
**kwargs
) -> AsyncResult:
+ model = cls.get_model(model)
+
headers = {
- "Accept": "application/json",
- "Content-Type": "application/json",
- "Referer": f"{cls.url}/chat",
+ "Content-Type": "application/json"
}
-
async with ClientSession(headers=headers) as session:
prompt = format_prompt(messages)
data = {
+ "messages": messages,
"prompt": prompt,
"model": model,
- "markdown": False,
- "messages": messages or [],
+ "markdown": False
}
-
async with session.post(cls.api_endpoint, json=data, proxy=proxy) as response:
response.raise_for_status()
-
- content_type = response.headers.get('Content-Type', '')
- if 'application/json' in content_type:
- result = await response.json()
- if result.get("status"):
- yield result.get("gpt", "")
- else:
- raise Exception(f"Error in response: {result.get('message', 'Unknown error')}")
- elif 'text/plain' in content_type:
- text = await response.text()
- try:
- result = json.loads(text)
- if result.get("status"):
- yield result.get("gpt", "")
- else:
- raise Exception(f"Error in response: {result.get('message', 'Unknown error')}")
- except json.JSONDecodeError:
- yield text # If not JSON, return text
- else:
- raise Exception(f"Unexpected response type: {content_type}. Response text: {await response.text()}")
-
+ response_text = await response.text()
+ try:
+ if response_text.startswith('_'):
+ response_text = response_text[1:]
+ response_data = json.loads(response_text)
+ yield response_data.get('gpt', '')
+ except json.JSONDecodeError:
+ yield ''
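
Worth flagging in the added model_aliases block: a Python dict literal keeps only the last value for a repeated key, so that table silently collapses to three entries ("gpt-4" -> "gpt-4-32k-0314", "gpt-3.5-turbo" -> "gpt-3.5-turbo-0301", "gpt-3" -> "davinci-002"), and since all three keys already appear in models, the new get_model() returns them from the models check before the alias lookup is ever reached. The stand-alone sketch below (identifiers and alias names are illustrative, not the provider's own code) demonstrates the collapse and how unique alias keys would behave under the same resolution order.

# Stand-alone sketch; names here are illustrative, not the committed provider code.

# A dict literal with repeated keys keeps only the last binding per key,
# which is what happens to the model_aliases block added in this diff.
collapsed = {
    "gpt-4": "gpt-4-0613",
    "gpt-4": "gpt-4-32k-0314",               # this binding wins
    "gpt-3.5-turbo": "gpt-3.5-turbo-16k",
    "gpt-3.5-turbo": "gpt-3.5-turbo-0301",   # this binding wins
}
assert collapsed == {
    "gpt-4": "gpt-4-32k-0314",
    "gpt-3.5-turbo": "gpt-3.5-turbo-0301",
}

# One way to keep every mapping: give each alias a unique key that is not
# already present in `models`, so the `model in cls.models` check cannot
# short-circuit it. (Assumed intent, not the committed code.)
models = ["gpt-4-0613", "gpt-3.5-turbo-16k", "text-davinci-003"]
model_aliases = {
    "gpt4": "gpt-4-0613",          # hypothetical alias names
    "gpt35": "gpt-3.5-turbo-16k",
}
default_model = "gpt-3.5-turbo-16k"

def get_model(model: str) -> str:
    # Same resolution order as the get_model() classmethod added in this diff:
    # exact model name first, then alias table, then the default model.
    if model in models:
        return model
    if model in model_aliases:
        return model_aliases[model]
    return default_model

print(get_model("gpt4"))            # -> gpt-4-0613
print(get_model("gpt-4-0613"))      # -> gpt-4-0613
print(get_model("something-else"))  # -> gpt-3.5-turbo-16k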