diff options
author | Heiner Lohaus <hlohaus@users.noreply.github.com> | 2024-01-14 07:45:41 +0100 |
---|---|---|
committer | Heiner Lohaus <hlohaus@users.noreply.github.com> | 2024-01-14 07:45:41 +0100 |
commit | 5756586cde6ed6da147119113fb5a5fd640d5f83 (patch) | |
tree | 64a9612c8c32e3c7eb4cdeb6f4ad63a08e5706b0 /g4f/Provider/FreeChatgpt.py | |
parent | Fix process_image in Bing (diff) | |
download | gpt4free-5756586cde6ed6da147119113fb5a5fd640d5f83.tar gpt4free-5756586cde6ed6da147119113fb5a5fd640d5f83.tar.gz gpt4free-5756586cde6ed6da147119113fb5a5fd640d5f83.tar.bz2 gpt4free-5756586cde6ed6da147119113fb5a5fd640d5f83.tar.lz gpt4free-5756586cde6ed6da147119113fb5a5fd640d5f83.tar.xz gpt4free-5756586cde6ed6da147119113fb5a5fd640d5f83.tar.zst gpt4free-5756586cde6ed6da147119113fb5a5fd640d5f83.zip |
Diffstat (limited to '')
-rw-r--r-- | g4f/Provider/FreeChatgpt.py | 15 |
1 file changed, 10 insertions(+), 5 deletions(-)
diff --git a/g4f/Provider/FreeChatgpt.py b/g4f/Provider/FreeChatgpt.py
index 75514118..0f993690 100644
--- a/g4f/Provider/FreeChatgpt.py
+++ b/g4f/Provider/FreeChatgpt.py
@@ -1,16 +1,20 @@
 from __future__ import annotations
-import json
+import json, random
 from aiohttp import ClientSession
 from ..typing import AsyncResult, Messages
 from .base_provider import AsyncGeneratorProvider
-
 models = {
-    "claude-v2": "claude-2.0",
-    "gemini-pro": "google-gemini-pro"
+    "claude-v2": "claude-2.0",
+    "claude-v2.1":"claude-2.1",
+    "gemini-pro": "google-gemini-pro"
 }
+urls = [
+    "https://free.chatgpt.org.uk",
+    "https://ai.chatgpt.org.uk"
+]

 class FreeChatgpt(AsyncGeneratorProvider):
     url = "https://free.chatgpt.org.uk"
@@ -31,6 +35,7 @@ class FreeChatgpt(AsyncGeneratorProvider):
             model = models[model]
         elif not model:
             model = "gpt-3.5-turbo"
+        url = random.choice(urls)
         headers = {
             "Accept": "application/json, text/event-stream",
             "Content-Type":"application/json",
@@ -55,7 +60,7 @@ class FreeChatgpt(AsyncGeneratorProvider):
             "top_p":1,
             **kwargs
         }
-        async with session.post(f'{cls.url}/api/openai/v1/chat/completions', json=data, proxy=proxy) as response:
+        async with session.post(f'{url}/api/openai/v1/chat/completions', json=data, proxy=proxy) as response:
             response.raise_for_status()
             started = False
             async for line in response.content: