diff options
Diffstat (limited to 'g4f')
-rw-r--r-- | g4f/Provider/You.py          | 20
-rw-r--r-- | g4f/Provider/you/har_file.py | 34
-rw-r--r-- | g4f/models.py                | 9
3 files changed, 50 insertions, 13 deletions
diff --git a/g4f/Provider/You.py b/g4f/Provider/You.py index e7958f71..b4892035 100644 --- a/g4f/Provider/You.py +++ b/g4f/Provider/You.py @@ -1,5 +1,3 @@ -from __future__ import annotations - import re import json import base64 @@ -24,6 +22,7 @@ class You(AsyncGeneratorProvider, ProviderModelMixin): image_models = ["dall-e"] models = [ default_model, + "gpt-4o", "gpt-4", "gpt-4-turbo", "claude-instant", @@ -42,7 +41,8 @@ class You(AsyncGeneratorProvider, ProviderModelMixin): *image_models ] model_aliases = { - "claude-v2": "claude-2" + "claude-v2": "claude-2", + "gpt-4o": "gpt-4o", } _cookies = None _cookies_used = 0 @@ -99,7 +99,9 @@ class You(AsyncGeneratorProvider, ProviderModelMixin): "selectedChatMode": chat_mode, } if chat_mode == "custom": + # print(f"You model: {model}") params["selectedAIModel"] = model.replace("-", "_") + async with (session.post if chat_mode == "default" else session.get)( f"{cls.url}/api/streamingSearch", data=data, @@ -183,7 +185,15 @@ class You(AsyncGeneratorProvider, ProviderModelMixin): @classmethod async def create_cookies(cls, client: StreamSession) -> Cookies: if not cls._telemetry_ids: - cls._telemetry_ids = await get_telemetry_ids() + try: + cls._telemetry_ids = await get_telemetry_ids() + except RuntimeError as e: + if str(e) == "Event loop is closed": + if debug.logging: + print("Event loop is closed error occurred in create_cookies.") + else: + raise + user_uuid = str(uuid.uuid4()) telemetry_id = cls._telemetry_ids.pop() if debug.logging: @@ -212,4 +222,4 @@ class You(AsyncGeneratorProvider, ProviderModelMixin): 'stytch_session_jwt': session["session_jwt"], 'ydc_stytch_session': session["session_token"], 'ydc_stytch_session_jwt': session["session_jwt"], - } + }
\ No newline at end of file diff --git a/g4f/Provider/you/har_file.py b/g4f/Provider/you/har_file.py index 8089d686..cfdca12f 100644 --- a/g4f/Provider/you/har_file.py +++ b/g4f/Provider/you/har_file.py @@ -4,11 +4,14 @@ import json import os import os.path import random +import logging from ...requests import StreamSession, raise_for_status from ...errors import MissingRequirementsError from ... import debug +logging.basicConfig(level=logging.ERROR) + class NoValidHarFileError(Exception): ... @@ -62,10 +65,16 @@ def parseHAREntry(entry) -> arkReq: return tmpArk async def sendRequest(tmpArk: arkReq, proxy: str = None): - async with StreamSession(headers=tmpArk.arkHeaders, cookies=tmpArk.arkCookies, proxy=proxy) as session: - async with session.post(tmpArk.arkURL, data=tmpArk.arkBody) as response: - await raise_for_status(response) - return await response.text() + try: + async with StreamSession(headers=tmpArk.arkHeaders, cookies=tmpArk.arkCookies, proxy=proxy) as session: + async with session.post(tmpArk.arkURL, data=tmpArk.arkBody) as response: + await raise_for_status(response) + return await response.text() + except RuntimeError as e: + if str(e) == "Event loop is closed": + print("Event loop is closed error occurred in sendRequest.") + else: + raise async def create_telemetry_id(proxy: str = None): global chatArks @@ -78,9 +87,9 @@ async def get_telemetry_ids(proxy: str = None) -> list: return [await create_telemetry_id(proxy)] except NoValidHarFileError as e: if debug.logging: - print(e) + logging.error(e) if debug.logging: - print('Getting telemetry_id for you.com with nodriver') + logging.error('Getting telemetry_id for you.com with nodriver') try: from nodriver import start except ImportError: @@ -100,6 +109,15 @@ async def get_telemetry_ids(proxy: str = None) -> list: ) return [await get_telemetry_id()] + finally: - if page is not None: - await page.close()
\ No newline at end of file + try: + if page is not None: + await page.close() + + if browser is not None: + await browser.close() + + except Exception as e: + if debug.logging: + logging.error(e) diff --git a/g4f/models.py b/g4f/models.py index e3da0363..0c5eaa9f 100644 --- a/g4f/models.py +++ b/g4f/models.py @@ -100,6 +100,14 @@ gpt_4 = Model( ]) ) +gpt_4o = Model( + name = 'gpt-4o', + base_provider = 'openai', + best_provider = RetryProvider([ + You + ]) +) + gpt_4_turbo = Model( name = 'gpt-4-turbo', base_provider = 'openai', @@ -330,6 +338,7 @@ class ModelUtils: 'gpt-3.5-long': gpt_35_long, # gpt-4 + 'gpt-4o' : gpt_4o, 'gpt-4' : gpt_4, 'gpt-4-0613' : gpt_4_0613, 'gpt-4-32k' : gpt_4_32k, |