author     kqlio67 <kqlio67@users.noreply.github.com>  2024-09-12 16:55:39 +0200
committer  kqlio67 <kqlio67@users.noreply.github.com>  2024-09-12 16:55:39 +0200
commit     abefd7f36fb5b03e612e5b3e96ba1079c4d2f303 (patch)
tree       e94a18378d833e1107c2fbf231ce60b83464b788 /g4f
parent     feat(g4f/Provider/Bixin123.py): implement dynamic fingerprint generation (diff)
Diffstat (limited to 'g4f')
-rw-r--r--  g4f/Provider/HuggingChat.py  6
-rw-r--r--  g4f/Provider/HuggingFace.py  6
-rw-r--r--  g4f/models.py                2

3 files changed, 5 insertions, 9 deletions
diff --git a/g4f/Provider/HuggingChat.py b/g4f/Provider/HuggingChat.py
index 76c76a35..28e58768 100644
--- a/g4f/Provider/HuggingChat.py
+++ b/g4f/Provider/HuggingChat.py
@@ -14,8 +14,7 @@ class HuggingChat(AbstractProvider, ProviderModelMixin):
default_model = "meta-llama/Meta-Llama-3.1-70B-Instruct"
models = [
'meta-llama/Meta-Llama-3.1-70B-Instruct',
- 'meta-llama/Meta-Llama-3.1-405B-Instruct-FP8',
- 'CohereForAI/c4ai-command-r-plus',
+ 'CohereForAI/c4ai-command-r-plus-08-2024',
'mistralai/Mixtral-8x7B-Instruct-v0.1',
'NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO',
'01-ai/Yi-1.5-34B-Chat',
@@ -25,8 +24,7 @@ class HuggingChat(AbstractProvider, ProviderModelMixin):
     model_aliases = {
         "llama-3.1-70b": "meta-llama/Meta-Llama-3.1-70B-Instruct",
-        "llama-3.1-405b": "meta-llama/Meta-Llama-3.1-405B-Instruct-FP8",
-        "command-r-plus": "CohereForAI/c4ai-command-r-plus",
+        "command-r-plus": "CohereForAI/c4ai-command-r-plus-08-2024",
         "mixtral-8x7b": "mistralai/Mixtral-8x7B-Instruct-v0.1",
         "mixtral-8x7b": "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO",
         "yi-1.5-34b": "01-ai/Yi-1.5-34B-Chat",
diff --git a/g4f/Provider/HuggingFace.py b/g4f/Provider/HuggingFace.py
index 74957862..46cfcca3 100644
--- a/g4f/Provider/HuggingFace.py
+++ b/g4f/Provider/HuggingFace.py
@@ -17,8 +17,7 @@ class HuggingFace(AsyncGeneratorProvider, ProviderModelMixin):
default_model = "meta-llama/Meta-Llama-3.1-70B-Instruct"
models = [
'meta-llama/Meta-Llama-3.1-70B-Instruct',
- 'meta-llama/Meta-Llama-3.1-405B-Instruct-FP8',
- 'CohereForAI/c4ai-command-r-plus',
+ 'CohereForAI/c4ai-command-r-plus-08-2024',
'mistralai/Mixtral-8x7B-Instruct-v0.1',
'NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO',
'01-ai/Yi-1.5-34B-Chat',
@@ -28,8 +27,7 @@ class HuggingFace(AsyncGeneratorProvider, ProviderModelMixin):
     model_aliases = {
         "llama-3.1-70b": "meta-llama/Meta-Llama-3.1-70B-Instruct",
-        "llama-3.1-405b": "meta-llama/Meta-Llama-3.1-405B-Instruct-FP8",
-        "command-r-plus": "CohereForAI/c4ai-command-r-plus",
+        "command-r-plus": "CohereForAI/c4ai-command-r-plus-08-2024",
         "mixtral-8x7b": "mistralai/Mixtral-8x7B-Instruct-v0.1",
         "mixtral-8x7b": "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO",
         "yi-1.5-34b": "01-ai/Yi-1.5-34B-Chat",
diff --git a/g4f/models.py b/g4f/models.py
index a01df7da..2414719b 100644
--- a/g4f/models.py
+++ b/g4f/models.py
@@ -186,7 +186,7 @@ llama_3_1_70b = Model(
 llama_3_1_405b = Model(
     name = "llama-3.1-405b",
     base_provider = "Meta",
-    best_provider = IterListProvider([HuggingChat, Blackbox, HuggingFace])
+    best_provider = IterListProvider([Blackbox])
 )
 ### Mistral ###
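
Note: the aliases touched by this diff are the short model names exposed to g4f callers. Below is a minimal usage sketch, assuming g4f's OpenAI-style client interface (g4f.client.Client), which is not part of this commit; the prompt text is illustrative only. After this change, "command-r-plus" resolves to CohereForAI/c4ai-command-r-plus-08-2024 on the HuggingChat and HuggingFace providers, while "llama-3.1-405b" is routed through Blackbox only.

# Hypothetical usage sketch (not part of the commit): exercises the aliases
# remapped in the diff above via g4f's client API.
from g4f.client import Client

client = Client()

# "command-r-plus" now maps to CohereForAI/c4ai-command-r-plus-08-2024
# on the HuggingChat and HuggingFace providers.
response = client.chat.completions.create(
    model="command-r-plus",
    messages=[{"role": "user", "content": "Say hello in one sentence."}],
)
print(response.choices[0].message.content)

# "llama-3.1-405b" is now served only by the Blackbox provider (see g4f/models.py).
response = client.chat.completions.create(
    model="llama-3.1-405b",
    messages=[{"role": "user", "content": "Say hello in one sentence."}],
)
print(response.choices[0].message.content)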