diff options
author | H Lohaus <hlohaus@users.noreply.github.com> | 2024-01-23 02:17:02 +0100 |
---|---|---|
committer | GitHub <noreply@github.com> | 2024-01-23 02:17:02 +0100 |
commit | 2a35052687e9026d90607c7db915e7c5b4e4018e (patch) | |
tree | c28dfef872f8dda791553fe1ae46418990d1c6b1 /g4f/Provider | |
parent | Merge pull request #1501 from eltociear/patch-5 (diff) | |
parent | Fix create copilot comment (diff) | |
download | gpt4free-2a35052687e9026d90607c7db915e7c5b4e4018e.tar gpt4free-2a35052687e9026d90607c7db915e7c5b4e4018e.tar.gz gpt4free-2a35052687e9026d90607c7db915e7c5b4e4018e.tar.bz2 gpt4free-2a35052687e9026d90607c7db915e7c5b4e4018e.tar.lz gpt4free-2a35052687e9026d90607c7db915e7c5b4e4018e.tar.xz gpt4free-2a35052687e9026d90607c7db915e7c5b4e4018e.tar.zst gpt4free-2a35052687e9026d90607c7db915e7c5b4e4018e.zip |
Diffstat (limited to 'g4f/Provider')
-rw-r--r-- | g4f/Provider/PerplexityLabs.py | 97 | ||||
-rw-r--r-- | g4f/Provider/__init__.py | 5 | ||||
-rw-r--r-- | g4f/Provider/bing/upload_image.py | 36 |
3 files changed, 121 insertions, 17 deletions
from __future__ import annotations

import random
import json

from aiohttp import ClientSession, WSMsgType

from ..typing import AsyncResult, Messages
from .base_provider import AsyncGeneratorProvider

# socket.io endpoints used by labs.perplexity.ai (Engine.IO v4 protocol).
API_URL = "https://labs-api.perplexity.ai/socket.io/"
WS_URL = "wss://labs-api.perplexity.ai/socket.io/"
MODELS = [
    'pplx-7b-online', 'pplx-70b-online', 'pplx-7b-chat', 'pplx-70b-chat',
    'mistral-7b-instruct', 'codellama-34b-instruct', 'llama-2-70b-chat',
    'llava-7b-chat', 'mixtral-8x7b-instruct', 'mistral-medium', 'related',
]
DEFAULT_MODEL = MODELS[1]  # 'pplx-70b-online'
# Translates Hugging Face style model ids to the names the Labs API expects.
MODEL_MAP = {
    "mistralai/Mistral-7B-Instruct-v0.1": "mistral-7b-instruct",
    "meta-llama/Llama-2-70b-chat-hf": "llama-2-70b-chat",
    "mistralai/Mixtral-8x7B-Instruct-v0.1": "mixtral-8x7b-instruct",
}


class PerplexityLabs(AsyncGeneratorProvider):
    """Streams chat completions from labs.perplexity.ai over its
    socket.io transport (HTTP long-polling handshake, then websocket)."""
    url = "https://labs.perplexity.ai"
    working = True
    supports_gpt_35_turbo = True

    @classmethod
    async def create_async_generator(
        cls,
        model: str,
        messages: Messages,
        proxy: str = None,
        **kwargs
    ) -> AsyncResult:
        """Yield incremental response text for *messages* from *model*.

        Args:
            model: Model name; empty selects DEFAULT_MODEL, Hugging Face
                style ids are mapped through MODEL_MAP.
            messages: Conversation history to send.
            proxy: Optional proxy URL used for both polling and websocket.

        Raises:
            ValueError: If the model is not supported.
            RuntimeError: On a socket.io handshake failure or a malformed
                server message.
        """
        if not model:
            model = DEFAULT_MODEL
        elif model in MODEL_MAP:
            model = MODEL_MAP[model]
        elif model not in MODELS:
            raise ValueError(f"Model is not supported: {model}")
        headers = {
            "User-Agent": "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:121.0) Gecko/20100101 Firefox/121.0",
            "Accept": "*/*",
            "Accept-Language": "de,en-US;q=0.7,en;q=0.3",
            "Accept-Encoding": "gzip, deflate, br",
            "Origin": cls.url,
            "Connection": "keep-alive",
            "Referer": f"{cls.url}/",
            "Sec-Fetch-Dest": "empty",
            "Sec-Fetch-Mode": "cors",
            "Sec-Fetch-Site": "same-site",
            "TE": "trailers",
        }
        async with ClientSession(headers=headers) as session:
            # Engine.IO handshake: the polling GET returns '0{...}' JSON
            # containing the session id.
            t = format(random.getrandbits(32), '08x')
            async with session.get(
                f"{API_URL}?EIO=4&transport=polling&t={t}",
                proxy=proxy
            ) as response:
                text = await response.text()

            sid = json.loads(text[1:])['sid']
            # Authenticate the socket.io session as an anonymous user.
            post_data = '40{"jwt":"anonymous-ask-user"}'
            async with session.post(
                f'{API_URL}?EIO=4&transport=polling&t={t}&sid={sid}',
                data=post_data,
                proxy=proxy
            ) as response:
                # Explicit check instead of `assert`: asserts are stripped
                # under `python -O`, and this is a runtime protocol error.
                if await response.text() != 'OK':
                    raise RuntimeError("Perplexity Labs session handshake failed")

            # Upgrade to the websocket transport: 2probe -> 3probe -> 5 -> 6.
            async with session.ws_connect(
                f'{WS_URL}?EIO=4&transport=websocket&sid={sid}',
                autoping=False
            ) as ws:
                await ws.send_str('2probe')
                if await ws.receive_str() != '3probe':
                    raise RuntimeError("Websocket probe failed")
                await ws.send_str('5')
                # One frame is read and discarded, then '6' must acknowledge
                # the transport upgrade.
                if not await ws.receive_str():
                    raise RuntimeError("Websocket upgrade failed")
                if await ws.receive_str() != '6':
                    raise RuntimeError("Websocket upgrade was not acknowledged")
                message_data = {
                    'version': '2.2',
                    'source': 'default',
                    'model': model,
                    'messages': messages
                }
                await ws.send_str('42' + json.dumps(['perplexity_playground', message_data]))
                last_message = 0
                while True:
                    message = await ws.receive_str()
                    if message == '2':  # socket.io ping -> answer with pong
                        await ws.send_str('3')
                        continue
                    # Payload frames look like '42["event", {...}]'.
                    # Narrow except (instead of a bare `except:`) so that
                    # GeneratorExit/CancelledError propagate when the consumer
                    # closes the generator, and chain the original cause.
                    try:
                        data = json.loads(message[2:])[1]
                        output = data["output"]
                        final = data["final"]
                    except (json.JSONDecodeError, KeyError, IndexError, TypeError) as e:
                        raise RuntimeError(f"Message: {message}") from e
                    # The server resends the full output; yield only the
                    # newly appended suffix.
                    yield output[last_message:]
                    last_message = len(output)
                    if final:
                        break
def build_image_upload_payload(image_bin: str, tone: str) -> Tuple[str, str]:
    """
    Build the multipart/form-data payload for uploading an image to Bing.

    Args:
        image_bin (str): Base64-encoded image data.
        tone (str): The conversation tone.

    Returns:
        Tuple[str, str]: The payload body and the boundary string.
    """
    # Random suffix so the boundary cannot collide with the payload content.
    boundary = "----WebKitFormBoundary" + ''.join(random.choices(string.ascii_letters + string.digits, k=16))
    # NOTE(review): multipart/form-data requires CRLF ("\r\n") line endings
    # per RFC 2046; a body built with bare "\n" is rejected by strict
    # servers, so the explicit "\r\n" terminators are kept here.
    data = (
        f"--{boundary}\r\n"
        f"Content-Disposition: form-data; name=\"knowledgeRequest\"\r\n\r\n"
        f"{json.dumps(build_knowledge_request(tone), ensure_ascii=False)}\r\n"
        f"--{boundary}\r\n"
        f"Content-Disposition: form-data; name=\"imageBase64\"\r\n\r\n"
        f"{image_bin}\r\n"
        f"--{boundary}--\r\n"
    )
    return data, boundary

def build_knowledge_request(tone: str) -> dict:
    """
    Build the knowledge-request JSON payload for the image upload.

    Args:
        tone (str): The conversation tone.

    Returns:
        dict: The knowledge request payload.
    """
    return {
        "imageInfo": {},
        "knowledgeRequest": {
            'invokedSkills': ["ImageById"],
            'subscriptionId': "Bing.Chat.Multimodal",
            'invokedSkillsRequestData': {
                'enableFaceBlur': True
            },
            'convoData': {
                'convoid': "",
                'convotone': tone
            }
        }
    }