diff options
author | Tekky <98614666+xtekky@users.noreply.github.com> | 2024-09-29 22:41:08 +0200 |
---|---|---|
committer | GitHub <noreply@github.com> | 2024-09-29 22:41:08 +0200 |
commit | 0deb0f60dd4985017d3fcb946e108be8d1f63846 (patch) | |
tree | e926d6f5551b4eb069e35b41479275056999e6c9 /g4f/Provider/nexra/NexraChatGPTWeb.py | |
parent | Added gpt-4o provider (diff) | |
parent | feat(g4f/Provider/Nexra.py): enhance model handling and add new providers (diff) | |
download | gpt4free-0deb0f60dd4985017d3fcb946e108be8d1f63846.tar gpt4free-0deb0f60dd4985017d3fcb946e108be8d1f63846.tar.gz gpt4free-0deb0f60dd4985017d3fcb946e108be8d1f63846.tar.bz2 gpt4free-0deb0f60dd4985017d3fcb946e108be8d1f63846.tar.lz gpt4free-0deb0f60dd4985017d3fcb946e108be8d1f63846.tar.xz gpt4free-0deb0f60dd4985017d3fcb946e108be8d1f63846.tar.zst gpt4free-0deb0f60dd4985017d3fcb946e108be8d1f63846.zip |
Diffstat (limited to '')
-rw-r--r-- | g4f/Provider/nexra/NexraChatGPTWeb.py | 53 |
1 file changed, 53 insertions, 0 deletions
diff --git a/g4f/Provider/nexra/NexraChatGPTWeb.py b/g4f/Provider/nexra/NexraChatGPTWeb.py new file mode 100644 index 00000000..e7738665 --- /dev/null +++ b/g4f/Provider/nexra/NexraChatGPTWeb.py @@ -0,0 +1,53 @@ +from __future__ import annotations +from aiohttp import ClientSession +from ...typing import AsyncResult, Messages +from ..base_provider import AsyncGeneratorProvider, ProviderModelMixin +from ..helper import format_prompt +import json + +class NexraChatGPTWeb(AsyncGeneratorProvider, ProviderModelMixin): + label = "Nexra ChatGPT Web" + api_endpoint = "https://nexra.aryahcr.cc/api/chat/gptweb" + models = ['gptweb'] + + @classmethod + async def create_async_generator( + cls, + model: str, + messages: Messages, + proxy: str = None, + **kwargs + ) -> AsyncResult: + headers = { + "Content-Type": "application/json", + } + + async with ClientSession(headers=headers) as session: + prompt = format_prompt(messages) + if prompt is None: + raise ValueError("Prompt cannot be None") + + data = { + "prompt": prompt, + "markdown": False + } + + async with session.post(cls.api_endpoint, json=data, proxy=proxy) as response: + response.raise_for_status() + + full_response = "" + async for chunk in response.content: + if chunk: + result = chunk.decode("utf-8").strip() + + try: + json_data = json.loads(result) + + if json_data.get("status"): + full_response = json_data.get("gpt", "") + else: + full_response = f"Error: {json_data.get('message', 'Unknown error')}" + except json.JSONDecodeError: + full_response = "Error: Invalid JSON response." + + yield full_response.strip() |