-rw-r--r--  g4f/Provider/Binjie.py   | 65
-rw-r--r--  g4f/Provider/__init__.py |  3
-rw-r--r--  g4f/models.py            |  4
3 files changed, 70 insertions, 2 deletions
diff --git a/g4f/Provider/Binjie.py b/g4f/Provider/Binjie.py
new file mode 100644
index 00000000..90f9ec3c
--- /dev/null
+++ b/g4f/Provider/Binjie.py
@@ -0,0 +1,65 @@
+from __future__ import annotations
+
+import random
+from ..requests import StreamSession
+
+from ..typing import AsyncResult, Messages
+from .base_provider import AsyncGeneratorProvider, format_prompt
+
+
+class Binjie(AsyncGeneratorProvider):
+    url = "https://chat18.aichatos8.com"
+    working = True
+    supports_gpt_4 = True
+    supports_stream = True
+    supports_system_message = True
+    supports_message_history = True
+
+    @staticmethod
+    async def create_async_generator(
+        model: str,
+        messages: Messages,
+        proxy: str = None,
+        timeout: int = 120,
+        **kwargs,
+    ) -> AsyncResult:
+        async with StreamSession(
+            headers=_create_header(), proxies={"https": proxy}, timeout=timeout
+        ) as session:
+            payload = _create_payload(messages, **kwargs)
+            async with session.post("https://api.binjie.fun/api/generateStream", json=payload) as response:
+                response.raise_for_status()
+                async for chunk in response.iter_content():
+                    if chunk:
+                        chunk = chunk.decode()
+                        if "sorry, 您的ip已由于触发防滥用检测而被封禁" in chunk:
+                            raise RuntimeError("IP address is blocked by abuse detection.")
+                        yield chunk
+
+
+def _create_header():
+    return {
+        "accept"       : "application/json, text/plain, */*",
+        "content-type" : "application/json",
+        "origin"       : "https://chat18.aichatos8.com",
+        "referer"      : "https://chat18.aichatos8.com/"
+    }
+
+
+def _create_payload(
+    messages: Messages,
+    system_message: str = "",
+    user_id: int = None,
+    **kwargs
+):
+    if not user_id:
+        user_id = random.randint(1690000544336, 2093025544336)
+    return {
+        "prompt": format_prompt(messages),
+        "network": True,
+        "system": system_message,
+        "withoutContext": False,
+        "stream": True,
+        "userId": f"#/chat/{user_id}"
+    }
diff --git a/g4f/Provider/__init__.py b/g4f/Provider/__init__.py
index 1aa94d07..8fe8cfad 100644
--- a/g4f/Provider/__init__.py
+++ b/g4f/Provider/__init__.py
@@ -16,13 +16,14 @@ from .AiChats import AiChats
 from .Aura import Aura
 from .Bing import Bing
 from .BingCreateImages import BingCreateImages
+from .Binjie import Binjie
 from .Bixin123 import Bixin123
 from .Blackbox import Blackbox
 from .ChatGot import ChatGot
 from .Chatgpt4Online import Chatgpt4Online
 from .Chatgpt4o import Chatgpt4o
 from .ChatgptFree import ChatgptFree
-from .CodeNews import CodeNews
+from .CodeNews import CodeNews
 from .DDG import DDG
 from .DeepInfra import DeepInfra
 from .DeepInfraImage import DeepInfraImage
diff --git a/g4f/models.py b/g4f/models.py
index fe9dee78..61350d91 100644
--- a/g4f/models.py
+++ b/g4f/models.py
@@ -7,6 +7,7 @@ from .Provider import (
     AiChatOnline,
     Allyfy,
     Bing,
+    Binjie,
     Bixin123,
     Blackbox,
     ChatGot,
@@ -84,6 +85,7 @@ default = Model(
         Upstage,
         Blackbox,
         Bixin123,
+        Binjie,
     ])
 )
 
@@ -139,7 +141,7 @@ gpt_4 = Model(
     name          = 'gpt-4',
     base_provider = 'OpenAI',
     best_provider = IterListProvider([
-        Chatgpt4Online, Nexra, Bing,
+        Chatgpt4Online, Nexra, Binjie, Bing,
         gpt_4_turbo.best_provider, gpt_4o.best_provider, gpt_4o_mini.best_provider
     ])
 )
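
For reference, a minimal sketch of how the provider added in this commit could be driven directly through the create_async_generator interface it defines. The model string and prompt are illustrative placeholders, and it is assumed that g4f is installed so that Binjie is importable via the updated g4f/Provider/__init__.py; this is not part of the commit itself.

import asyncio

from g4f.Provider import Binjie


async def main() -> None:
    # Stream a completion from the Binjie provider; chunks arrive as decoded text.
    async for chunk in Binjie.create_async_generator(
        model="gpt-4",  # placeholder model name for illustration
        messages=[{"role": "user", "content": "Say hello in one sentence."}],
        timeout=120,
    ):
        print(chunk, end="", flush=True)


if __name__ == "__main__":
    asyncio.run(main())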