diff options
-rw-r--r-- g4f/Provider/HuggingChat.py | 6 ++----
-rw-r--r-- g4f/Provider/HuggingFace.py | 6 ++----
-rw-r--r-- g4f/models.py               | 2 +-
3 files changed, 5 insertions(+), 9 deletions(-)
diff --git a/g4f/Provider/HuggingChat.py b/g4f/Provider/HuggingChat.py index 76c76a35..28e58768 100644 --- a/g4f/Provider/HuggingChat.py +++ b/g4f/Provider/HuggingChat.py @@ -14,8 +14,7 @@ class HuggingChat(AbstractProvider, ProviderModelMixin): default_model = "meta-llama/Meta-Llama-3.1-70B-Instruct" models = [ 'meta-llama/Meta-Llama-3.1-70B-Instruct', - 'meta-llama/Meta-Llama-3.1-405B-Instruct-FP8', - 'CohereForAI/c4ai-command-r-plus', + 'CohereForAI/c4ai-command-r-plus-08-2024', 'mistralai/Mixtral-8x7B-Instruct-v0.1', 'NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO', '01-ai/Yi-1.5-34B-Chat', @@ -25,8 +24,7 @@ class HuggingChat(AbstractProvider, ProviderModelMixin): model_aliases = { "llama-3.1-70b": "meta-llama/Meta-Llama-3.1-70B-Instruct", - "llama-3.1-405b": "meta-llama/Meta-Llama-3.1-405B-Instruct-FP8", - "command-r-plus": "CohereForAI/c4ai-command-r-plus", + "command-r-plus": "CohereForAI/c4ai-command-r-plus-08-2024", "mixtral-8x7b": "mistralai/Mixtral-8x7B-Instruct-v0.1", "mixtral-8x7b": "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO", "yi-1.5-34b": "01-ai/Yi-1.5-34B-Chat", diff --git a/g4f/Provider/HuggingFace.py b/g4f/Provider/HuggingFace.py index 74957862..46cfcca3 100644 --- a/g4f/Provider/HuggingFace.py +++ b/g4f/Provider/HuggingFace.py @@ -17,8 +17,7 @@ class HuggingFace(AsyncGeneratorProvider, ProviderModelMixin): default_model = "meta-llama/Meta-Llama-3.1-70B-Instruct" models = [ 'meta-llama/Meta-Llama-3.1-70B-Instruct', - 'meta-llama/Meta-Llama-3.1-405B-Instruct-FP8', - 'CohereForAI/c4ai-command-r-plus', + 'CohereForAI/c4ai-command-r-plus-08-2024', 'mistralai/Mixtral-8x7B-Instruct-v0.1', 'NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO', '01-ai/Yi-1.5-34B-Chat', @@ -28,8 +27,7 @@ class HuggingFace(AsyncGeneratorProvider, ProviderModelMixin): model_aliases = { "llama-3.1-70b": "meta-llama/Meta-Llama-3.1-70B-Instruct", - "llama-3.1-405b": "meta-llama/Meta-Llama-3.1-405B-Instruct-FP8", - "command-r-plus": "CohereForAI/c4ai-command-r-plus", + "command-r-plus": "CohereForAI/c4ai-command-r-plus-08-2024", "mixtral-8x7b": "mistralai/Mixtral-8x7B-Instruct-v0.1", "mixtral-8x7b": "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO", "yi-1.5-34b": "01-ai/Yi-1.5-34B-Chat", diff --git a/g4f/models.py b/g4f/models.py index a01df7da..2414719b 100644 --- a/g4f/models.py +++ b/g4f/models.py @@ -186,7 +186,7 @@ llama_3_1_70b = Model( llama_3_1_405b = Model( name = "llama-3.1-405b", base_provider = "Meta", - best_provider = IterListProvider([HuggingChat, Blackbox, HuggingFace]) + best_provider = IterListProvider([Blackbox]) ) ### Mistral ###