author    | kqlio67 <kqlio67@users.noreply.github.com> | 2024-10-21 20:52:27 +0200
committer | kqlio67 <kqlio67@users.noreply.github.com> | 2024-10-21 20:52:27 +0200
commit    | ef6ec5d4ef49ea04a8cda2946fb2fa33c2d43c29 (patch)
tree      | 4c9e202ac4946624d629810977e2f0a08178a5da /g4f/Provider
parent    | Updated providers (g4f/Provider/nexra/NexraChatGPT.py g4f/Provider/nexra/NexraBlackbox.py g4f/Provider/nexra/NexraBing.py g4f/Provider/nexra/NexraChatGptV2.py g4f/Provider/nexra/NexraChatGptWeb.py) (diff)
download  | gpt4free-ef6ec5d4ef49ea04a8cda2946fb2fa33c2d43c29.tar gpt4free-ef6ec5d4ef49ea04a8cda2946fb2fa33c2d43c29.tar.gz gpt4free-ef6ec5d4ef49ea04a8cda2946fb2fa33c2d43c29.tar.bz2 gpt4free-ef6ec5d4ef49ea04a8cda2946fb2fa33c2d43c29.tar.lz gpt4free-ef6ec5d4ef49ea04a8cda2946fb2fa33c2d43c29.tar.xz gpt4free-ef6ec5d4ef49ea04a8cda2946fb2fa33c2d43c29.tar.zst gpt4free-ef6ec5d4ef49ea04a8cda2946fb2fa33c2d43c29.zip
Diffstat
-rw-r--r-- | g4f/Provider/nexra/NexraGeminiPro.py | 81 |
1 file changed, 49 insertions, 32 deletions
diff --git a/g4f/Provider/nexra/NexraGeminiPro.py b/g4f/Provider/nexra/NexraGeminiPro.py
index fb0b096b..2d1ce343 100644
--- a/g4f/Provider/nexra/NexraGeminiPro.py
+++ b/g4f/Provider/nexra/NexraGeminiPro.py
@@ -1,42 +1,41 @@
 from __future__ import annotations
-from aiohttp import ClientSession
 import json
-from ..base_provider import AsyncGeneratorProvider, ProviderModelMixin
-from ..helper import format_prompt
-from ...typing import AsyncResult, Messages
+import requests
+from ...typing import CreateResult, Messages
+from ..base_provider import ProviderModelMixin, AbstractProvider
+from ..helper import format_prompt
 
-class NexraGeminiPro(AsyncGeneratorProvider, ProviderModelMixin):
+class NexraGeminiPro(AbstractProvider, ProviderModelMixin):
     label = "Nexra Gemini PRO"
     url = "https://nexra.aryahcr.cc/documentation/gemini-pro/en"
     api_endpoint = "https://nexra.aryahcr.cc/api/chat/complements"
-    working = False
+    working = True
     supports_stream = True
-    
+
     default_model = 'gemini-pro'
     models = [default_model]
 
     @classmethod
     def get_model(cls, model: str) -> str:
         return cls.default_model
-    
+
     @classmethod
-    async def create_async_generator(
+    def create_completion(
         cls,
         model: str,
         messages: Messages,
-        proxy: str = None,
-        stream: bool = False,
+        stream: bool,
         markdown: bool = False,
         **kwargs
-    ) -> AsyncResult:
+    ) -> CreateResult:
         model = cls.get_model(model)
 
         headers = {
-            "Content-Type": "application/json"
+            'Content-Type': 'application/json'
         }
-        
+
         data = {
             "messages": [
                 {
@@ -44,25 +43,43 @@ class NexraGeminiPro(AsyncGeneratorProvider, ProviderModelMixin):
                     "content": format_prompt(messages)
                 }
             ],
-            "markdown": markdown,
             "stream": stream,
+            "markdown": markdown,
             "model": model
         }
+
+        response = requests.post(cls.api_endpoint, headers=headers, json=data, stream=stream)
+
+        if stream:
+            return cls.process_streaming_response(response)
+        else:
+            return cls.process_non_streaming_response(response)
 
-        async with ClientSession(headers=headers) as session:
-            async with session.post(cls.api_endpoint, json=data, proxy=proxy) as response:
-                response.raise_for_status()
-                buffer = ""
-                async for chunk in response.content.iter_any():
-                    if chunk.strip():  # Check if chunk is not empty
-                        buffer += chunk.decode()
-                        while '\x1e' in buffer:
-                            part, buffer = buffer.split('\x1e', 1)
-                            if part.strip():
-                                try:
-                                    response_json = json.loads(part)
-                                    message = response_json.get("message", "")
-                                    if message:
-                                        yield message
-                                except json.JSONDecodeError as e:
-                                    print(f"JSONDecodeError: {e}")
+    @classmethod
+    def process_non_streaming_response(cls, response):
+        if response.status_code == 200:
+            try:
+                content = response.text.lstrip('`')
+                data = json.loads(content)
+                return data.get('message', '')
+            except json.JSONDecodeError:
+                return "Error: Unable to decode JSON response"
+        else:
+            return f"Error: {response.status_code}"
+
+    @classmethod
+    def process_streaming_response(cls, response):
+        full_message = ""
+        for line in response.iter_lines(decode_unicode=True):
+            if line:
+                try:
+                    line = line.lstrip('`')
+                    data = json.loads(line)
+                    if data.get('finish'):
+                        break
+                    message = data.get('message', '')
+                    if message:
+                        yield message[len(full_message):]
+                    full_message = message
+                except json.JSONDecodeError:
+                    pass
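
For orientation, below is a minimal usage sketch of the rewritten synchronous interface. The import path, the direct classmethod call, and the prompt text are illustrative assumptions; only the method names and parameters come from the diff above.

# Hypothetical usage sketch -- assumes the g4f package is importable and the
# Nexra endpoint is reachable; not part of this commit.
from g4f.Provider.nexra.NexraGeminiPro import NexraGeminiPro

messages = [{"role": "user", "content": "Summarize streaming vs. non-streaming responses."}]

# stream=True: create_completion returns a generator; process_streaming_response
# yields only the newly received portion of each JSON line (the
# message[len(full_message):] slice), so chunks can be printed as they arrive.
for chunk in NexraGeminiPro.create_completion(model="gemini-pro", messages=messages, stream=True):
    print(chunk, end="", flush=True)

# stream=False: process_non_streaming_response returns the complete reply
# (or an "Error: ..." string) as a single value.
reply = NexraGeminiPro.create_completion(model="gemini-pro", messages=messages, stream=False)
print(reply)

In normal use the provider would be reached through g4f's client layer rather than called directly; the direct calls here simply exercise the two code paths introduced in this commit.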