path: root/g4f/Provider
author    H Lohaus <hlohaus@users.noreply.github.com>  2024-04-21 07:45:23 +0200
committer GitHub <noreply@github.com>                  2024-04-21 07:45:23 +0200
commit    0f3935f1c068c723342153dd040e0e72e95aa11b (patch)
tree      c4d1e9c9eb2ebfe13e2679afe8a23d7ffa45fa46 /g4f/Provider
parent    Merge pull request #1865 from hlohaus/carst (diff)
parent    Add MissingRequirementsError to You (diff)
Diffstat (limited to '')
-rw-r--r--  g4f/Provider/Bing.py                     123
-rw-r--r--  g4f/Provider/MetaAI.py                    94
-rw-r--r--  g4f/Provider/MetaAIAccount.py             21
-rw-r--r--  g4f/Provider/You.py                       11
-rw-r--r--  g4f/Provider/__init__.py                   1
-rw-r--r--  g4f/Provider/bing/conversation.py          9
-rw-r--r--  g4f/Provider/needs_auth/OpenaiChat.py     23
-rw-r--r--  g4f/Provider/you/har_file.py              52
8 files changed, 191 insertions, 143 deletions
diff --git a/g4f/Provider/Bing.py b/g4f/Provider/Bing.py
index fd8cac8e..1fe94359 100644
--- a/g4f/Provider/Bing.py
+++ b/g4f/Provider/Bing.py
@@ -7,13 +7,13 @@ import time
import asyncio
from urllib import parse
from datetime import datetime, date
-from aiohttp import ClientSession, ClientTimeout, BaseConnector, WSMsgType
from ..typing import AsyncResult, Messages, ImageType, Cookies
from ..image import ImageRequest
-from ..errors import ResponseStatusError, RateLimitError
+from ..errors import ResponseError, ResponseStatusError, RateLimitError
+from ..requests import StreamSession, DEFAULT_HEADERS
from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
-from .helper import get_connector, get_random_hex
+from .helper import get_random_hex
from .bing.upload_image import upload_image
from .bing.conversation import Conversation, create_conversation, delete_conversation
from .BingCreateImages import BingCreateImages
@@ -49,7 +49,6 @@ class Bing(AsyncGeneratorProvider, ProviderModelMixin):
timeout: int = 900,
api_key: str = None,
cookies: Cookies = None,
- connector: BaseConnector = None,
tone: str = None,
image: ImageType = None,
web_search: bool = False,
@@ -79,7 +78,6 @@ class Bing(AsyncGeneratorProvider, ProviderModelMixin):
return stream_generate(
prompt, tone, image, context, cookies, api_key,
- get_connector(connector, proxy, True),
proxy, web_search, gpt4_turbo, timeout,
**kwargs
)
@@ -102,25 +100,53 @@ def get_ip_address() -> str:
return f"13.{random.randint(104, 107)}.{random.randint(0, 255)}.{random.randint(0, 255)}"
def get_default_cookies():
+ #muid = get_random_hex().upper()
+ sid = get_random_hex().upper()
+ guid = get_random_hex().upper()
+ isodate = date.today().isoformat()
+ timestamp = int(time.time())
+ zdate = "0001-01-01T00:00:00.0000000"
return {
- 'SRCHD' : 'AF=NOFORM',
- 'PPLState' : '1',
- 'KievRPSSecAuth': '',
- 'SUID' : '',
- 'SRCHUSR' : f'DOB={date.today().strftime("%Y%m%d")}&T={int(time.time())}',
- 'SRCHHPGUSR' : f'HV={int(time.time())}',
- 'BCP' : 'AD=1&AL=1&SM=1',
- '_Rwho' : f'u=d&ts={date.today().isoformat()}',
+ "_C_Auth": "",
+ #"MUID": muid,
+ #"MUIDB": muid,
+ "_EDGE_S": f"F=1&SID={sid}",
+ "_EDGE_V": "1",
+ "SRCHD": "AF=hpcodx",
+ "SRCHUID": f"V=2&GUID={guid}&dmnchg=1",
+ "_RwBf": (
+ f"r=0&ilt=1&ihpd=0&ispd=0&rc=3&rb=0&gb=0&rg=200&pc=0&mtu=0&rbb=0&g=0&cid="
+ f"&clo=0&v=1&l={isodate}&lft={zdate}&aof=0&ard={zdate}"
+ f"&rwdbt={zdate}&rwflt={zdate}&o=2&p=&c=&t=0&s={zdate}"
+ f"&ts={isodate}&rwred=0&wls=&wlb="
+ "&wle=&ccp=&cpt=&lka=0&lkt=0&aad=0&TH="
+ ),
+ '_Rwho': f'u=d&ts={isodate}',
+ "_SS": f"SID={sid}&R=3&RB=0&GB=0&RG=200&RP=0",
+ "SRCHUSR": f"DOB={date.today().strftime('%Y%m%d')}&T={timestamp}",
+ "SRCHHPGUSR": f"HV={int(time.time())}",
+ "BCP": "AD=1&AL=1&SM=1",
+ "ipv6": f"hit={timestamp}",
+ '_C_ETH' : '1',
}
-def create_headers(cookies: Cookies = None, api_key: str = None) -> dict:
+async def create_headers(cookies: Cookies = None, api_key: str = None) -> dict:
if cookies is None:
+ # import nodriver as uc
+ # browser = await uc.start(headless=False)
+ # page = await browser.get(Defaults.home)
+ # await asyncio.sleep(10)
+ # cookies = {}
+ # for c in await page.browser.cookies.get_all():
+ # if c.domain.endswith(".bing.com"):
+ # cookies[c.name] = c.value
+ # user_agent = await page.evaluate("window.navigator.userAgent")
+ # await page.close()
cookies = get_default_cookies()
if api_key is not None:
cookies["_U"] = api_key
headers = Defaults.headers.copy()
headers["cookie"] = "; ".join(f"{k}={v}" for k, v in cookies.items())
- headers["x-forwarded-for"] = get_ip_address()
return headers
class Defaults:
@@ -246,25 +272,13 @@ class Defaults:
}
# Default headers for requests
- home = 'https://www.bing.com/chat?q=Bing+AI&FORM=hpcodx'
+ home = "https://www.bing.com/chat?q=Microsoft+Copilot&FORM=hpcodx"
headers = {
- 'sec-ch-ua': '"Chromium";v="122", "Not(A:Brand";v="24", "Google Chrome";v="122"',
- 'sec-ch-ua-mobile': '?0',
- 'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/122.0.0.0 Safari/537.36',
- 'sec-ch-ua-arch': '"x86"',
- 'sec-ch-ua-full-version': '"122.0.6261.69"',
- 'accept': 'application/json',
- 'sec-ch-ua-platform-version': '"15.0.0"',
+ **DEFAULT_HEADERS,
+ "accept": "application/json",
+ "referer": home,
"x-ms-client-request-id": str(uuid.uuid4()),
- 'sec-ch-ua-full-version-list': '"Chromium";v="122.0.6261.69", "Not(A:Brand";v="24.0.0.0", "Google Chrome";v="122.0.6261.69"',
- 'x-ms-useragent': 'azsdk-js-api-client-factory/1.0.0-beta.1 core-rest-pipeline/1.12.3 OS/Windows',
- 'sec-ch-ua-model': '""',
- 'sec-ch-ua-platform': '"Windows"',
- 'sec-fetch-site': 'same-origin',
- 'sec-fetch-mode': 'cors',
- 'sec-fetch-dest': 'empty',
- 'referer': home,
- 'accept-language': 'en-US,en;q=0.9',
+ "x-ms-useragent": "azsdk-js-api-client-factory/1.0.0-beta.1 core-rest-pipeline/1.15.1 OS/Windows",
}
def format_message(msg: dict) -> str:
@@ -368,7 +382,6 @@ async def stream_generate(
context: str = None,
cookies: dict = None,
api_key: str = None,
- connector: BaseConnector = None,
proxy: str = None,
web_search: bool = False,
gpt4_turbo: bool = False,
@@ -393,14 +406,12 @@ async def stream_generate(
:param timeout: Timeout for the request.
:return: An asynchronous generator yielding responses.
"""
- headers = create_headers(cookies, api_key)
+ headers = await create_headers(cookies, api_key)
new_conversation = conversation is None
max_retries = (5 if new_conversation else 0) if max_retries is None else max_retries
- async with ClientSession(
- timeout=ClientTimeout(total=timeout), connector=connector
- ) as session:
- first = True
- while first or conversation is None:
+ first = True
+ while first or conversation is None:
+ async with StreamSession(timeout=timeout, proxy=proxy) as session:
first = False
do_read = True
try:
@@ -408,13 +419,13 @@ async def stream_generate(
conversation = await create_conversation(session, headers, tone)
if return_conversation:
yield conversation
- except ResponseStatusError as e:
+ except (ResponseStatusError, RateLimitError) as e:
max_retries -= 1
if max_retries < 1:
raise e
if debug.logging:
print(f"Bing: Retry: {e}")
- headers = create_headers()
+ headers = await create_headers()
await asyncio.sleep(sleep_retry)
continue
@@ -434,7 +445,7 @@ async def stream_generate(
) as wss:
await wss.send_str(format_message({'protocol': 'json', 'version': 1}))
await wss.send_str(format_message({"type": 6}))
- await wss.receive(timeout=timeout)
+ await wss.receive_str()
await wss.send_str(create_message(
conversation, prompt, tone,
context if new_conversation else None,
@@ -445,16 +456,15 @@ async def stream_generate(
returned_text = ''
message_id = None
while do_read:
- msg = await wss.receive(timeout=timeout)
- if msg.type == WSMsgType.CLOSED:
- break
- if msg.type != WSMsgType.TEXT or not msg.data:
- continue
- objects = msg.data.split(Defaults.delimiter)
+ msg = await wss.receive_str()
+ objects = msg.split(Defaults.delimiter)
for obj in objects:
if obj is None or not obj:
continue
- response = json.loads(obj)
+ try:
+ response = json.loads(obj)
+ except json.JSONDecodeError:
+ continue
if response and response.get('type') == 1 and response['arguments'][0].get('messages'):
message = response['arguments'][0]['messages'][0]
if message_id is not None and message_id != message["messageId"]:
@@ -462,7 +472,7 @@ async def stream_generate(
message_id = message["messageId"]
image_response = None
if (raise_apology and message['contentOrigin'] == 'Apology'):
- raise RuntimeError("Apology Response Error")
+ raise ResponseError("Apology Response Error")
if 'adaptiveCards' in message:
card = message['adaptiveCards'][0]['body'][0]
if "text" in card:
@@ -488,6 +498,7 @@ async def stream_generate(
yield image_response
elif response.get('type') == 2:
result = response['item']['result']
+ do_read = False
if result.get('error'):
max_retries -= 1
if max_retries < 1:
@@ -497,10 +508,12 @@ async def stream_generate(
raise RuntimeError(f"{result['value']}: {result['message']}")
if debug.logging:
print(f"Bing: Retry: {result['value']}: {result['message']}")
- headers = create_headers()
- do_read = False
+ headers = await create_headers()
conversation = None
await asyncio.sleep(sleep_retry)
- break
- return
- await delete_conversation(session, conversation, headers)
+ break
+ elif response.get('type') == 3:
+ do_read = False
+ break
+ if conversation is not None:
+ await delete_conversation(session, conversation, headers)
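
A minimal sketch of how the reworked Bing helpers above fit together after this commit, assuming the g4f package from this repository is importable; the call site itself is illustrative and not part of the patch:

import asyncio
from g4f.Provider.Bing import create_headers, get_default_cookies

async def main():
    # get_default_cookies() now synthesizes a first-visit cookie set,
    # and create_headers() became a coroutine in this commit.
    cookies = get_default_cookies()
    headers = await create_headers(cookies)
    print(headers["cookie"][:80], "...")

asyncio.run(main())
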
diff --git a/g4f/Provider/MetaAI.py b/g4f/Provider/MetaAI.py
index e64a96d5..fb7790f9 100644
--- a/g4f/Provider/MetaAI.py
+++ b/g4f/Provider/MetaAI.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
import json
import uuid
import random
@@ -8,6 +10,8 @@ from aiohttp import ClientSession, BaseConnector
from ..typing import AsyncResult, Messages, Cookies
from ..requests import raise_for_status, DEFAULT_HEADERS
+from ..image import ImageResponse, ImagePreview
+from ..errors import ResponseError
from .base_provider import AsyncGeneratorProvider
from .helper import format_prompt, get_connector
@@ -22,6 +26,7 @@ class AbraGeoBlockedError(Exception):
pass
class MetaAI(AsyncGeneratorProvider):
+ label = "Meta AI"
url = "https://www.meta.ai"
working = True
@@ -38,11 +43,10 @@ class MetaAI(AsyncGeneratorProvider):
proxy: str = None,
**kwargs
) -> AsyncResult:
- #cookies = get_cookies(".meta.ai", False, True)
async for chunk in cls(proxy).prompt(format_prompt(messages)):
yield chunk
- async def get_access_token(self, birthday: str = "1999-01-01") -> str:
+ async def update_access_token(self, birthday: str = "1999-01-01"):
url = "https://www.meta.ai/api/graphql/"
payload = {
@@ -66,25 +70,37 @@ class MetaAI(AsyncGeneratorProvider):
async with self.session.post(url, headers=headers, cookies=self.cookies, data=payload) as response:
await raise_for_status(response, "Fetch access_token failed")
auth_json = await response.json(content_type=None)
- access_token = auth_json["data"]["xab_abra_accept_terms_of_service"]["new_temp_user_auth"]["access_token"]
- return access_token
+ self.access_token = auth_json["data"]["xab_abra_accept_terms_of_service"]["new_temp_user_auth"]["access_token"]
async def prompt(self, message: str, cookies: Cookies = None) -> AsyncResult:
+ if self.cookies is None:
+ await self.update_cookies(cookies)
if cookies is not None:
- self.cookies = cookies
self.access_token = None
- if self.cookies is None:
- self.cookies = await self.get_cookies()
- if self.access_token is None:
- self.access_token = await self.get_access_token()
+ if self.access_token is None and cookies is None:
+ await self.update_access_token()
- url = "https://graph.meta.ai/graphql?locale=user"
- #url = "https://www.meta.ai/api/graphql/"
+ if self.access_token is None:
+ url = "https://www.meta.ai/api/graphql/"
+ payload = {"lsd": self.lsd, 'fb_dtsg': self.dtsg}
+ headers = {'x-fb-lsd': self.lsd}
+ else:
+ url = "https://graph.meta.ai/graphql?locale=user"
+ payload = {"access_token": self.access_token}
+ headers = {}
+ headers = {
+ 'content-type': 'application/x-www-form-urlencoded',
+ 'cookie': "; ".join([f"{k}={v}" for k, v in cookies.items()]),
+ 'origin': 'https://www.meta.ai',
+ 'referer': 'https://www.meta.ai/',
+ 'x-asbd-id': '129477',
+ 'x-fb-friendly-name': 'useAbraSendMessageMutation',
+ **headers
+ }
payload = {
- "access_token": self.access_token,
- #"lsd": cookies["lsd"],
- "fb_api_caller_class": "RelayModern",
- "fb_api_req_friendly_name": "useAbraSendMessageMutation",
+ **payload,
+ 'fb_api_caller_class': 'RelayModern',
+ 'fb_api_req_friendly_name': 'useAbraSendMessageMutation',
"variables": json.dumps({
"message": {"sensitive_string_value": message},
"externalConversationId": str(uuid.uuid4()),
@@ -98,19 +114,16 @@ class MetaAI(AsyncGeneratorProvider):
"__relay_internal__pv__AbraDebugDevOnlyrelayprovider": False,
"__relay_internal__pv__WebPixelRatiorelayprovider": 1,
}),
- "server_timestamps": "true",
- "doc_id": "7783822248314888",
- }
- headers = {
- "x-asbd-id": "129477",
- "x-fb-friendly-name": "useAbraSendMessageMutation",
- #"x-fb-lsd": cookies["lsd"],
+ 'server_timestamps': 'true',
+ 'doc_id': '7783822248314888'
}
- async with self.session.post(url, headers=headers, cookies=self.cookies, data=payload) as response:
+ async with self.session.post(url, headers=headers, data=payload) as response:
await raise_for_status(response, "Fetch response failed")
last_snippet_len = 0
fetch_id = None
async for line in response.content:
+ if b"<h1>Something Went Wrong</h1>" in line:
+ raise ResponseError("Response: Something Went Wrong")
try:
json_line = json.loads(line)
except json.JSONDecodeError:
@@ -119,7 +132,14 @@ class MetaAI(AsyncGeneratorProvider):
streaming_state = bot_response_message.get("streaming_state")
fetch_id = bot_response_message.get("fetch_id") or fetch_id
if streaming_state in ("STREAMING", "OVERALL_DONE"):
- #imagine_card = bot_response_message["imagine_card"]
+ imagine_card = bot_response_message.get("imagine_card")
+ if imagine_card is not None:
+ imagine_session = imagine_card.get("session")
+ if imagine_session is not None:
+ imagine_medias = imagine_session.get("media_sets", {}).pop().get("imagine_media")
+ if imagine_medias is not None:
+ image_class = ImageResponse if streaming_state == "OVERALL_DONE" else ImagePreview
+ yield image_class([media["uri"] for media in imagine_medias], imagine_medias[0]["prompt"])
snippet = bot_response_message["snippet"]
new_snippet_len = len(snippet)
if new_snippet_len > last_snippet_len:
@@ -135,7 +155,7 @@ class MetaAI(AsyncGeneratorProvider):
if sources is not None:
yield sources
- async def get_cookies(self, cookies: Cookies = None) -> Cookies:
+ async def update_cookies(self, cookies: Cookies = None):
async with self.session.get("https://www.meta.ai/", cookies=cookies) as response:
await raise_for_status(response, "Fetch home failed")
text = await response.text()
@@ -148,12 +168,20 @@ class MetaAI(AsyncGeneratorProvider):
"datr": self.extract_value(text, "datr"),
}
self.lsd = self.extract_value(text, start_str='"LSD",[],{"token":"', end_str='"}')
- return cookies
+ self.dtsg = self.extract_value(text, start_str='"DTSGInitialData",[],{"token":"', end_str='"}')
+ self.cookies = cookies
async def fetch_sources(self, fetch_id: str) -> Sources:
- url = "https://graph.meta.ai/graphql?locale=user"
+ if self.access_token is None:
+ url = "https://www.meta.ai/api/graphql/"
+ payload = {"lsd": self.lsd, 'fb_dtsg': self.dtsg}
+ headers = {'x-fb-lsd': self.lsd}
+ else:
+ url = "https://graph.meta.ai/graphql?locale=user"
+ payload = {"access_token": self.access_token}
+ headers = {}
payload = {
- "access_token": self.access_token,
+ **payload,
"fb_api_caller_class": "RelayModern",
"fb_api_req_friendly_name": "AbraSearchPluginDialogQuery",
"variables": json.dumps({"abraMessageFetchID": fetch_id}),
@@ -163,18 +191,22 @@ class MetaAI(AsyncGeneratorProvider):
headers = {
"authority": "graph.meta.ai",
"x-fb-friendly-name": "AbraSearchPluginDialogQuery",
+ **headers
}
async with self.session.post(url, headers=headers, cookies=self.cookies, data=payload) as response:
await raise_for_status(response)
- response_json = await response.json()
+ text = await response.text()
+ if "<h1>Something Went Wrong</h1>" in text:
+ raise ResponseError("Response: Something Went Wrong")
try:
+ response_json = json.loads(text)
message = response_json["data"]["message"]
if message is not None:
searchResults = message["searchResults"]
if searchResults is not None:
return Sources(searchResults["references"])
- except (KeyError, TypeError):
- raise RuntimeError(f"Response: {response_json}")
+ except (KeyError, TypeError, json.JSONDecodeError):
+ raise RuntimeError(f"Response: {text}")
@staticmethod
def extract_value(text: str, key: str = None, start_str = None, end_str = '",') -> str:
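
Since the MetaAI hunks above start yielding image chunks alongside text, here is a hedged consumption sketch; the prompt and the print logic are illustrative, while the chunk classes come from g4f.image as imported in the patch:

import asyncio
from g4f.Provider.MetaAI import MetaAI
from g4f.image import ImagePreview, ImageResponse

async def main():
    messages = [{"role": "user", "content": "Draw a sunset over the sea"}]
    async for chunk in MetaAI.create_async_generator(model="", messages=messages):
        if isinstance(chunk, ImagePreview):
            print("[preview]", chunk)   # yielded while streaming_state == "STREAMING"
        elif isinstance(chunk, ImageResponse):
            print("[final]", chunk)     # yielded once streaming_state == "OVERALL_DONE"
        else:
            print(chunk, end="")        # plain text snippets

asyncio.run(main())
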
diff --git a/g4f/Provider/MetaAIAccount.py b/g4f/Provider/MetaAIAccount.py
new file mode 100644
index 00000000..8be2318e
--- /dev/null
+++ b/g4f/Provider/MetaAIAccount.py
@@ -0,0 +1,21 @@
+from __future__ import annotations
+
+from ..typing import AsyncResult, Messages, Cookies
+from .helper import format_prompt, get_cookies
+from .MetaAI import MetaAI
+
+class MetaAIAccount(MetaAI):
+ needs_auth = True
+
+ @classmethod
+ async def create_async_generator(
+ cls,
+ model: str,
+ messages: Messages,
+ proxy: str = None,
+ cookies: Cookies = None,
+ **kwargs
+ ) -> AsyncResult:
+ cookies = get_cookies(".meta.ai", True, True) if cookies is None else cookies
+ async for chunk in cls(proxy).prompt(format_prompt(messages), cookies):
+        yield chunk
\ No newline at end of file
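
The new MetaAIAccount provider only layers cookie-based authentication on top of MetaAI; a hypothetical invocation with explicitly supplied cookies (the cookie name and value below are placeholders, not documented requirements):

import asyncio
from g4f.Provider import MetaAIAccount

async def main():
    # Placeholder cookie dict; real values come from a logged-in meta.ai browser session.
    cookies = {"abra_sess": "<session cookie value>"}
    messages = [{"role": "user", "content": "Hello"}]
    async for chunk in MetaAIAccount.create_async_generator(
        model="", messages=messages, cookies=cookies
    ):
        print(chunk, end="")

asyncio.run(main())
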
diff --git a/g4f/Provider/You.py b/g4f/Provider/You.py
index 3d3a3513..e90365ce 100644
--- a/g4f/Provider/You.py
+++ b/g4f/Provider/You.py
@@ -10,7 +10,8 @@ from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
from .helper import format_prompt
from ..image import ImageResponse, to_bytes, is_accepted_format
from ..requests import StreamSession, FormData, raise_for_status
-from .you.har_file import get_dfp_telemetry_id
+from .you.har_file import get_telemetry_ids
+from .. import debug
class You(AsyncGeneratorProvider, ProviderModelMixin):
url = "https://you.com"
@@ -35,6 +36,7 @@ class You(AsyncGeneratorProvider, ProviderModelMixin):
}
_cookies = None
_cookies_used = 0
+ _telemetry_ids = []
@classmethod
async def create_async_generator(
@@ -159,7 +161,12 @@ class You(AsyncGeneratorProvider, ProviderModelMixin):
@classmethod
async def create_cookies(cls, client: StreamSession) -> Cookies:
+ if not cls._telemetry_ids:
+ cls._telemetry_ids = await get_telemetry_ids()
user_uuid = str(uuid.uuid4())
+ telemetry_id = cls._telemetry_ids.pop()
+ if debug.logging:
+ print(f"Use telemetry_id: {telemetry_id}")
async with client.post(
"https://web.stytch.com/sdk/v1/passwords",
headers={
@@ -170,7 +177,7 @@ class You(AsyncGeneratorProvider, ProviderModelMixin):
"Referer": "https://you.com/"
},
json={
- "dfp_telemetry_id": await get_dfp_telemetry_id(),
+ "dfp_telemetry_id": telemetry_id,
"email": f"{user_uuid}@gmail.com",
"password": f"{user_uuid}#{user_uuid}",
"session_duration_minutes": 129600
diff --git a/g4f/Provider/__init__.py b/g4f/Provider/__init__.py
index 10249aa2..27c14672 100644
--- a/g4f/Provider/__init__.py
+++ b/g4f/Provider/__init__.py
@@ -43,6 +43,7 @@ from .Liaobots import Liaobots
from .Llama import Llama
from .Local import Local
from .MetaAI import MetaAI
+from .MetaAIAccount import MetaAIAccount
from .PerplexityLabs import PerplexityLabs
from .Pi import Pi
from .ReplicateImage import ReplicateImage
diff --git a/g4f/Provider/bing/conversation.py b/g4f/Provider/bing/conversation.py
index 85292079..4cfeef8e 100644
--- a/g4f/Provider/bing/conversation.py
+++ b/g4f/Provider/bing/conversation.py
@@ -1,7 +1,6 @@
from __future__ import annotations
-from aiohttp import ClientSession
-from ...requests import raise_for_status
+from ...requests import StreamSession, raise_for_status
from ...errors import RateLimitError
from ...providers.conversation import BaseConversation
@@ -22,7 +21,7 @@ class Conversation(BaseConversation):
self.clientId = clientId
self.conversationSignature = conversationSignature
-async def create_conversation(session: ClientSession, headers: dict, tone: str) -> Conversation:
+async def create_conversation(session: StreamSession, headers: dict, tone: str) -> Conversation:
"""
Create a new conversation asynchronously.
@@ -49,7 +48,7 @@ async def create_conversation(session: ClientSession, headers: dict, tone: str)
raise RuntimeError('Empty fields: Failed to create conversation')
return Conversation(conversationId, clientId, conversationSignature)
-async def list_conversations(session: ClientSession) -> list:
+async def list_conversations(session: StreamSession) -> list:
"""
List all conversations asynchronously.
@@ -64,7 +63,7 @@ async def list_conversations(session: ClientSession) -> list:
response = await response.json()
return response["chats"]
-async def delete_conversation(session: ClientSession, conversation: Conversation, headers: dict) -> bool:
+async def delete_conversation(session: StreamSession, conversation: Conversation, headers: dict) -> bool:
"""
Delete a conversation asynchronously.
diff --git a/g4f/Provider/needs_auth/OpenaiChat.py b/g4f/Provider/needs_auth/OpenaiChat.py
index ae028965..36b8bd3c 100644
--- a/g4f/Provider/needs_auth/OpenaiChat.py
+++ b/g4f/Provider/needs_auth/OpenaiChat.py
@@ -24,7 +24,7 @@ except ImportError:
from ..base_provider import AsyncGeneratorProvider, ProviderModelMixin
from ...webdriver import get_browser
from ...typing import AsyncResult, Messages, Cookies, ImageType, AsyncIterator
-from ...requests import get_args_from_browser, raise_for_status
+from ...requests import DEFAULT_HEADERS, get_args_from_browser, raise_for_status
from ...requests.aiohttp import StreamSession
from ...image import to_image, to_bytes, ImageResponse, ImageRequest
from ...errors import MissingAuthError, ResponseError
@@ -360,7 +360,6 @@ class OpenaiChat(AsyncGeneratorProvider, ProviderModelMixin):
if debug.logging:
print("OpenaiChat: Load default_model failed")
print(f"{e.__class__.__name__}: {e}")
-
arkose_token = None
if cls.default_model is None:
@@ -377,7 +376,8 @@ class OpenaiChat(AsyncGeneratorProvider, ProviderModelMixin):
cls.default_model = cls.get_model(await cls.get_default_model(session, cls._headers))
async with session.post(
- f"{cls.url}/backend-anon/sentinel/chat-requirements" if not cls._api_key else
+ f"{cls.url}/backend-anon/sentinel/chat-requirements"
+ if not cls._api_key else
f"{cls.url}/backend-api/sentinel/chat-requirements",
json={"conversation_mode_kind": "primary_assistant"},
headers=cls._headers
@@ -388,7 +388,7 @@ class OpenaiChat(AsyncGeneratorProvider, ProviderModelMixin):
blob = data["arkose"]["dx"]
need_arkose = data["arkose"]["required"]
chat_token = data["token"]
-
+
if debug.logging:
print(f'Arkose: {need_arkose} Turnstile: {data["turnstile"]["required"]}')
@@ -595,8 +595,7 @@ this.fetch = async (url, options) => {
print(f"Open nodriver with user_dir: {user_data_dir}")
browser = await uc.start(user_data_dir=user_data_dir)
page = await browser.get("https://chat.openai.com/")
- while await page.find("[id^=headlessui-menu-button-]") is None:
- await asyncio.sleep(1)
+ await page.select("[id^=headlessui-menu-button-]", 240)
api_key = await page.evaluate(
"(async () => {"
"let session = await fetch('/api/auth/session');"
@@ -662,16 +661,10 @@ this.fetch = async (url, options) => {
@staticmethod
def get_default_headers() -> dict:
return {
- "accept-language": "en-US",
+ **DEFAULT_HEADERS,
"content-type": "application/json",
"oai-device-id": str(uuid.uuid4()),
"oai-language": "en-US",
- "sec-ch-ua": "\"Google Chrome\";v=\"123\", \"Not:A-Brand\";v=\"8\", \"Chromium\";v=\"123\"",
- "sec-ch-ua-mobile": "?0",
- "sec-ch-ua-platform": "\"Linux\"",
- "sec-fetch-dest": "empty",
- "sec-fetch-mode": "cors",
- "sec-fetch-site": "same-origin"
}
@staticmethod
@@ -696,11 +689,11 @@ this.fetch = async (url, options) => {
def _set_api_key(cls, api_key: str):
cls._api_key = api_key
cls._expires = int(time.time()) + 60 * 60 * 4
- cls._headers["Authorization"] = f"Bearer {api_key}"
+ cls._headers["authorization"] = f"Bearer {api_key}"
@classmethod
def _update_cookie_header(cls):
- cls._headers["Cookie"] = cls._format_cookies(cls._cookies)
+ cls._headers["cookie"] = cls._format_cookies(cls._cookies)
class Conversation(BaseConversation):
"""
diff --git a/g4f/Provider/you/har_file.py b/g4f/Provider/you/har_file.py
index a6981296..18a7144b 100644
--- a/g4f/Provider/you/har_file.py
+++ b/g4f/Provider/you/har_file.py
@@ -2,12 +2,13 @@ from __future__ import annotations
import json
import os
+import os.path
import random
-import uuid
-import asyncio
import requests
from ...requests import StreamSession, raise_for_status
+from ...errors import MissingRequirementsError
+from ... import debug
class NoValidHarFileError(Exception):
...
@@ -67,60 +68,41 @@ async def sendRequest(tmpArk: arkReq, proxy: str = None):
return await response.text()
async def get_dfp_telemetry_id(proxy: str = None):
- return await telemetry_id_with_driver(proxy)
global chatArks
if chatArks is None:
chatArks = readHAR()
return await sendRequest(random.choice(chatArks), proxy)
-async def telemetry_id_with_driver(proxy: str = None):
- from ...debug import logging
- if logging:
- print('getting telemetry_id for you.com with nodriver')
+async def get_telemetry_ids(proxy: str = None) -> list:
+ if debug.logging:
+ print('Getting telemetry_id for you.com with nodriver')
try:
- import nodriver as uc
- from nodriver import start, cdp, loop
+ from nodriver import start
except ImportError:
- if logging:
- print('nodriver not found, random uuid (may fail)')
- return str(uuid.uuid4())
-
- CAN_EVAL = False
- payload_received = False
- payload = None
-
+ raise MissingRequirementsError('Install "nodriver" package | pip install -U nodriver')
try:
browser = await start()
tab = browser.main_tab
-
- async def send_handler(event: cdp.network.RequestWillBeSent):
- nonlocal CAN_EVAL, payload_received, payload
- if 'telemetry.js' in event.request.url:
- CAN_EVAL = True
- if "/submit" in event.request.url:
- payload = event.request.post_data
- payload_received = True
-
- tab.add_handler(cdp.network.RequestWillBeSent, send_handler)
await browser.get("https://you.com")
- while not CAN_EVAL:
+ while not await tab.evaluate('"GetTelemetryID" in this'):
await tab.sleep(1)
- await tab.evaluate('window.GetTelemetryID("public-token-live-507a52ad-7e69-496b-aee0-1c9863c7c819", "https://telemetry.stytch.com/submit");')
-
- while not payload_received:
- await tab.sleep(.1)
+ async def get_telemetry_id():
+ public_token = "public-token-live-507a52ad-7e69-496b-aee0-1c9863c7c819"
+ telemetry_url = "https://telemetry.stytch.com/submit"
+ return await tab.evaluate(f'this.GetTelemetryID("{public_token}", "{telemetry_url}");', await_promise=True)
- except Exception as e:
- print(f"Error occurred: {str(e)}")
+ # for _ in range(500):
+ # with open("hardir/you.com_telemetry_ids.txt", "a") as f:
+ # f.write((await get_telemetry_id()) + "\n")
+ return [await get_telemetry_id() for _ in range(4)]
finally:
try:
await tab.close()
except Exception as e:
print(f"Error occurred while closing tab: {str(e)}")
-
try:
await browser.stop()
except Exception as e: