| Field | Value | Details |
|---|---|---|
| author | kqlio67 <kqlio67@users.noreply.github.com> | 2024-09-29 22:38:25 +0200 |
| committer | kqlio67 <kqlio67@users.noreply.github.com> | 2024-09-29 22:38:25 +0200 |
| commit | 58db9e03f0a25613cf90cf8184ebf159123e7477 (patch) | |
| tree | e926d6f5551b4eb069e35b41479275056999e6c9 | /g4f/Provider/nexra/NexraChatGPT.py |
| parent | docs/providers-and-models.md (diff) | |
Diffstat (limited to 'g4f/Provider/nexra/NexraChatGPT.py')

| Mode | File | Lines |
|---|---|---|
| -rw-r--r-- | g4f/Provider/nexra/NexraChatGPT.py | 66 |

1 file changed, 66 insertions, 0 deletions
diff --git a/g4f/Provider/nexra/NexraChatGPT.py b/g4f/Provider/nexra/NexraChatGPT.py
new file mode 100644
index 00000000..8ed83f98
--- /dev/null
+++ b/g4f/Provider/nexra/NexraChatGPT.py
@@ -0,0 +1,66 @@
+from __future__ import annotations
+
+from aiohttp import ClientSession
+from ...typing import AsyncResult, Messages
+from ..base_provider import AsyncGeneratorProvider, ProviderModelMixin
+from ..helper import format_prompt
+import json
+
+class NexraChatGPT(AsyncGeneratorProvider, ProviderModelMixin):
+    label = "Nexra ChatGPT"
+    api_endpoint = "https://nexra.aryahcr.cc/api/chat/gpt"
+
+    models = [
+        'gpt-4', 'gpt-4-0613', 'gpt-4-32k', 'gpt-4-0314', 'gpt-4-32k-0314',
+        'gpt-3.5-turbo', 'gpt-3.5-turbo-16k', 'gpt-3.5-turbo-0613',
+        'gpt-3.5-turbo-16k-0613', 'gpt-3.5-turbo-0301',
+        'gpt-3', 'text-davinci-003', 'text-davinci-002', 'code-davinci-002',
+        'text-curie-001', 'text-babbage-001', 'text-ada-001',
+        'davinci', 'curie', 'babbage', 'ada', 'babbage-002', 'davinci-002',
+    ]
+
+    @classmethod
+    async def create_async_generator(
+        cls,
+        model: str,
+        messages: Messages,
+        proxy: str = None,
+        **kwargs
+    ) -> AsyncResult:
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+            "Referer": f"{cls.url}/chat",
+        }
+
+        async with ClientSession(headers=headers) as session:
+            prompt = format_prompt(messages)
+            data = {
+                "prompt": prompt,
+                "model": model,
+                "markdown": False,
+                "messages": messages or [],
+            }
+
+            async with session.post(cls.api_endpoint, json=data, proxy=proxy) as response:
+                response.raise_for_status()
+
+                content_type = response.headers.get('Content-Type', '')
+                if 'application/json' in content_type:
+                    result = await response.json()
+                    if result.get("status"):
+                        yield result.get("gpt", "")
+                    else:
+                        raise Exception(f"Error in response: {result.get('message', 'Unknown error')}")
+                elif 'text/plain' in content_type:
+                    text = await response.text()
+                    try:
+                        result = json.loads(text)
+                        if result.get("status"):
+                            yield result.get("gpt", "")
+                        else:
+                            raise Exception(f"Error in response: {result.get('message', 'Unknown error')}")
+                    except json.JSONDecodeError:
+                        yield text  # If not JSON, return text
+                else:
+                    raise Exception(f"Unexpected response type: {content_type}. Response text: {await response.text()}")
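For context, below is a minimal sketch of how the new provider could be exercised on its own. It calls `create_async_generator` exactly as defined in the diff; the import path simply mirrors the new file's location, and the driver code (`main`, the chosen model, the message payload) is illustrative only, not part of this commit. Note also that the class defines `api_endpoint` but no `url`, so the `Referer` header built from `cls.url` depends on a `url` attribute inherited from the provider base class.

```python
import asyncio

# Import path mirrors the file added by this commit; whether the class is also
# re-exported elsewhere (e.g. from g4f.Provider) is not shown in the diff.
from g4f.Provider.nexra.NexraChatGPT import NexraChatGPT


async def main() -> None:
    # Message format follows the `messages` list the provider forwards in its payload.
    messages = [{"role": "user", "content": "Say hello in one sentence."}]

    # create_async_generator yields either the parsed "gpt" field (JSON responses)
    # or the raw body (plain-text responses), per the branching in the diff above.
    async for chunk in NexraChatGPT.create_async_generator(
        model="gpt-3.5-turbo",  # any entry from NexraChatGPT.models
        messages=messages,
    ):
        print(chunk, end="")


if __name__ == "__main__":
    asyncio.run(main())
```

Because the method is an async generator, callers can stream output as it arrives, although this endpoint appears to return the full reply in a single chunk (one `yield` per response).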