author      OmiiiDev <103533638+OmiiiDev@users.noreply.github.com>    2024-03-12 02:17:45 +0100
committer   GitHub <noreply@github.com>                               2024-03-12 02:17:45 +0100
commit      d6d75c28c86b5ce107f3d498e3b3b3a56b9e3d54 (patch)
tree        ffb05c56389ed46b1e52cb0fffd28c2d33719a7a
parent      Fix unit test with missing requirements (diff)
-rw-r--r--  g4f/Provider/Liaobots.py  57
1 file changed, 34 insertions(+), 23 deletions(-)
diff --git a/g4f/Provider/Liaobots.py b/g4f/Provider/Liaobots.py
index b5e7cbe7..b80e46ac 100644
--- a/g4f/Provider/Liaobots.py
+++ b/g4f/Provider/Liaobots.py
@@ -10,18 +10,6 @@ from .helper import get_connector
 from ..requests import raise_for_status
 
 models = {
-    "gpt-4": {
-        "id": "gpt-4",
-        "name": "GPT-4",
-        "maxLength": 24000,
-        "tokenLimit": 8000,
-    },
-    "gpt-4-0613": {
-        "id": "gpt-4-0613",
-        "name": "GPT-4",
-        "maxLength": 32000,
-        "tokenLimit": 8000,
-    },
     "gpt-3.5-turbo": {
         "id": "gpt-3.5-turbo",
         "name": "GPT-3.5-Turbo",
@@ -29,14 +17,8 @@ models = {
         "tokenLimit": 14000,
         "context": "16K",
     },
-    "gpt-3.5-turbo-16k": {
-        "id": "gpt-3.5-turbo-16k",
-        "name": "GPT-3.5-16k",
-        "maxLength": 48000,
-        "tokenLimit": 16000,
-    },
-    "gpt-4-1106-preview": {
-        "id": "gpt-4-1106-preview",
+    "gpt-4-turbo-preview": {
+        "id": "gpt-4-turbo-preview",
         "name": "GPT-4-Turbo",
         "maxLength": 260000,
         "tokenLimit": 126000,
@@ -49,6 +31,13 @@ models = {
         "tokenLimit": 31000,
         "context": "32K",
     },
+    "gpt-4-0613": {
+        "id": "gpt-4-0613",
+        "name": "GPT-4-0613",
+        "maxLength": 60000,
+        "tokenLimit": 15000,
+        "context": "16K",
+    },
     "gemini-pro": {
         "id": "gemini-pro",
         "name": "Gemini-Pro",
@@ -56,13 +45,34 @@ models = {
         "tokenLimit": 30000,
         "context": "32K",
     },
-    "claude-2": {
-        "id": "claude-2",
-        "name": "Claude-2-200k",
+    "claude-3-opus-20240229": {
+        "id": "claude-3-opus-20240229",
+        "name": "Claude-3-Opus",
         "maxLength": 800000,
         "tokenLimit": 200000,
         "context": "200K",
     },
+    "claude-3-sonnet-20240229": {
+        "id": "claude-3-sonnet-20240229",
+        "name": "Claude-3-Sonnet",
+        "maxLength": 800000,
+        "tokenLimit": 200000,
+        "context": "200K",
+    },
+    "claude-2.1": {
+        "id": "claude-2.1",
+        "name": "Claude-2.1-200k",
+        "maxLength": 800000,
+        "tokenLimit": 200000,
+        "context": "200K",
+    },
+    "claude-2.0": {
+        "id": "claude-2.0",
+        "name": "Claude-2.0-100k",
+        "maxLength": 400000,
+        "tokenLimit": 100000,
+        "context": "100K",
+    },
     "claude-instant-1": {
         "id": "claude-instant-1",
         "name": "Claude-instant-1",
@@ -72,6 +82,7 @@ models = {
     }
 }
 
+
 class Liaobots(AsyncGeneratorProvider, ProviderModelMixin):
     url = "https://liaobots.site"
     working = True
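
The change above only reshapes the provider's models dictionary: each entry is keyed by the model id sent to the backend and carries display and limit metadata (name, maxLength, tokenLimit, context). The following is a minimal, illustrative sketch of how such a mapping can be consumed to validate a requested model and read its limits; the two entries are copied from the diff, while DEFAULT_MODEL and pick_model are assumed names for this example and are not part of Liaobots.py.

# Illustrative sketch only: consuming a models mapping shaped like the one in
# this commit. DEFAULT_MODEL and pick_model are hypothetical names; the two
# entries below use the values shown in the diff above.
models = {
    "gpt-4-turbo-preview": {
        "id": "gpt-4-turbo-preview",
        "name": "GPT-4-Turbo",
        "maxLength": 260000,
        "tokenLimit": 126000,
    },
    "claude-3-opus-20240229": {
        "id": "claude-3-opus-20240229",
        "name": "Claude-3-Opus",
        "maxLength": 800000,
        "tokenLimit": 200000,
        "context": "200K",
    },
}

DEFAULT_MODEL = "gpt-4-turbo-preview"  # assumed fallback for this sketch


def pick_model(requested: str) -> dict:
    """Return the entry for the requested model id, falling back to the default."""
    return models.get(requested, models[DEFAULT_MODEL])


if __name__ == "__main__":
    entry = pick_model("claude-3-opus-20240229")
    print(entry["name"], entry["tokenLimit"])  # prints: Claude-3-Opus 200000

Keeping a single dictionary like this lets the provider expose its supported model list and per-model limits from one place, which is why the commit can swap model generations without touching the request logic.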