author | abc <98614666+xtekky@users.noreply.github.com> | 2023-10-16 15:34:00 +0200 |
---|---|---|
committer | abc <98614666+xtekky@users.noreply.github.com> | 2023-10-16 15:34:00 +0200 |
commit | 5b240665fbbabe5e7e47d4ee97080fe03cf03232 (patch) | |
tree | 07cc87c1c7ccd1daf0bc6589fff4c11709d4cc13 /g4f | |
parent | ~ | new providers test (diff) | |
download | gpt4free-5b240665fbbabe5e7e47d4ee97080fe03cf03232.tar gpt4free-5b240665fbbabe5e7e47d4ee97080fe03cf03232.tar.gz gpt4free-5b240665fbbabe5e7e47d4ee97080fe03cf03232.tar.bz2 gpt4free-5b240665fbbabe5e7e47d4ee97080fe03cf03232.tar.lz gpt4free-5b240665fbbabe5e7e47d4ee97080fe03cf03232.tar.xz gpt4free-5b240665fbbabe5e7e47d4ee97080fe03cf03232.tar.zst gpt4free-5b240665fbbabe5e7e47d4ee97080fe03cf03232.zip |
Diffstat (limited to 'g4f')
-rw-r--r-- | g4f/Provider/Geekgpt.py | 81 |
-rw-r--r-- | g4f/Provider/__init__.py | 5 |
2 files changed, 85 insertions, 1 deletion
diff --git a/g4f/Provider/Geekgpt.py b/g4f/Provider/Geekgpt.py
new file mode 100644
index 00000000..1a82757c
--- /dev/null
+++ b/g4f/Provider/Geekgpt.py
@@ -0,0 +1,81 @@
+from __future__ import annotations
+import requests, json
+
+from .base_provider import BaseProvider
+from ..typing import CreateResult, Messages
+from json import dumps
+
+
+class GeekGpt(BaseProvider):
+    url = 'https://chat.geekgpt.org'
+    supports_stream = True
+    working = True
+    supports_gpt_35_turbo = True
+    supports_gpt_4 = True
+
+    @classmethod
+    def create_completion(cls,
+        model: str,
+        messages: Messages,
+        stream: bool, **kwargs) -> CreateResult:
+
+        json_data = {
+            'messages': messages,
+            'model': model,
+            'temperature': kwargs.get('temperature', 0.9),
+            'presence_penalty': kwargs.get('presence_penalty', 0),
+            'top_p': kwargs.get('top_p', 1),
+            'frequency_penalty': kwargs.get('frequency_penalty', 0),
+            'stream': True
+        }
+
+        data = dumps(json_data, separators=(',', ':'))
+
+        headers = {
+            'authority': 'ai.fakeopen.com',
+            'accept': '*/*',
+            'accept-language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
+            'authorization': 'Bearer pk-this-is-a-real-free-pool-token-for-everyone',
+            'content-type': 'application/json',
+            'origin': 'https://chat.geekgpt.org',
+            'referer': 'https://chat.geekgpt.org/',
+            'sec-ch-ua': '"Chromium";v="118", "Google Chrome";v="118", "Not=A?Brand";v="99"',
+            'sec-ch-ua-mobile': '?0',
+            'sec-ch-ua-platform': '"macOS"',
+            'sec-fetch-dest': 'empty',
+            'sec-fetch-mode': 'cors',
+            'sec-fetch-site': 'cross-site',
+            'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/118.0.0.0 Safari/537.36',
+        }
+
+        response = requests.post("https://ai.fakeopen.com/v1/chat/completions",
+                                 headers=headers, data=data, stream=True)
+        response.raise_for_status()
+
+        for chunk in response.iter_lines():
+            if b'content' in chunk:
+                json_data = chunk.decode().replace("data: ", "")
+
+                if json_data == "[DONE]":
+                    break
+
+                try:
+                    content = json.loads(json_data)["choices"][0]["delta"].get("content")
+
+                except Exception as e:
+                    raise RuntimeError(f'error | {e} :', json_data)
+
+                if content:
+                    yield content
+
+    @classmethod
+    @property
+    def params(cls):
+        params = [
+            ('model', 'str'),
+            ('messages', 'list[dict[str, str]]'),
+            ('stream', 'bool'),
+            ('temperature', 'float'),
+        ]
+        param = ', '.join([': '.join(p) for p in params])
+        return f'g4f.provider.{cls.__name__} supports: ({param})'
diff --git a/g4f/Provider/__init__.py b/g4f/Provider/__init__.py
index ae6ca996..1dfa6a8d 100644
--- a/g4f/Provider/__init__.py
+++ b/g4f/Provider/__init__.py
@@ -33,6 +33,7 @@ from .Vercel import Vercel
 from .Ylokh import Ylokh
 from .You import You
 from .Yqcloud import Yqcloud
+from .Geekgpt import GeekGpt
 
 from .base_provider import BaseProvider, AsyncProvider, AsyncGeneratorProvider
 from .retry_provider import RetryProvider
@@ -103,6 +104,7 @@ class ProviderUtils:
         'Ylokh': Ylokh,
         'You': You,
         'Yqcloud': Yqcloud,
+        'Geekgpt': GeekGpt,
 
         'BaseProvider': BaseProvider,
         'AsyncProvider': AsyncProvider,
@@ -171,5 +173,6 @@ __all__ = [
     'Equing',
     'FastGpt',
     'Wuguokai',
-    'V50'
+    'V50',
+    'GeekGpt'
 ]
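For context, here is a minimal sketch of how the provider added by this commit could be called through g4f's public interface. This example is not part of the diff; the `g4f.ChatCompletion.create` call, the `"gpt-3.5-turbo"` model string, and the sample message are assumptions based on the library's usual usage pattern at the time.

```python
# Minimal usage sketch (assumed g4f API, not part of this commit):
# select the newly registered GeekGpt provider explicitly and stream the reply.
import g4f
from g4f.Provider import GeekGpt

response = g4f.ChatCompletion.create(
    model="gpt-3.5-turbo",   # GeekGpt advertises gpt-3.5-turbo and gpt-4 support
    provider=GeekGpt,        # the provider class added by this commit
    messages=[{"role": "user", "content": "Say hello"}],
    stream=True,             # create_completion yields content deltas one by one
)

for token in response:
    print(token, end="", flush=True)
```

With `stream=True`, the tokens printed above come straight from the `yield content` loop in `GeekGpt.create_completion`, which parses the `data:`-prefixed SSE chunks returned by the upstream endpoint.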