author     kqlio67 <kqlio67@users.noreply.github.com>  2024-09-11 15:59:02 +0200
committer  kqlio67 <kqlio67@users.noreply.github.com>  2024-09-11 15:59:02 +0200
commit     cd5b916789470ab49880b485f553ba9b30b06400 (patch)
tree       49465a10dc22725715f13081fa66fc476234bd92 /g4f
parent     feat(g4f/Provider/PerplexityLabs.py): add model aliases and update default model (diff)
Diffstat (limited to 'g4f')
-rw-r--r--  g4f/models.py  24
1 file changed, 11 insertions, 13 deletions
diff --git a/g4f/models.py b/g4f/models.py
index ddbeeddf..a01df7da 100644
--- a/g4f/models.py
+++ b/g4f/models.py
@@ -168,19 +168,19 @@ llama_3_8b = Model(
 llama_3_70b = Model(
     name = "llama-3-70b",
     base_provider = "Meta",
-    best_provider = IterListProvider([ReplicateHome, DeepInfra, PerplexityLabs, Replicate])
+    best_provider = IterListProvider([ReplicateHome, DeepInfra, Replicate])
 )
 
 llama_3_1_8b = Model(
     name = "llama-3.1-8b",
     base_provider = "Meta",
-    best_provider = IterListProvider([Blackbox])
+    best_provider = IterListProvider([Blackbox, PerplexityLabs])
 )
 
 llama_3_1_70b = Model(
     name = "llama-3.1-70b",
     base_provider = "Meta",
-    best_provider = IterListProvider([DDG, HuggingChat, FreeGpt, Blackbox, TeachAnything, Free2GPT, HuggingFace])
+    best_provider = IterListProvider([DDG, HuggingChat, FreeGpt, Blackbox, TeachAnything, Free2GPT, HuggingFace, PerplexityLabs])
 )
 
 llama_3_1_405b = Model(
@@ -404,13 +404,6 @@ westlake_7b_v2 = Model(
     best_provider = Snova
 )
 
-### CookinAI ###
-donutlm_v1 = Model(
-    name = 'donutlm-v1',
-    base_provider = 'CookinAI',
-    best_provider = Snova
-)
-
 ### DeepSeek ###
 deepseek = Model(
     name = 'deepseek',
@@ -491,6 +484,13 @@ dalle = Model(
 )
 
+dalle_2 = Model(
+    name = 'dalle-2',
+    base_provider = '',
+    best_provider = IterListProvider([Nexra])
+
+)
+
 dalle_mini = Model(
     name = 'dalle-mini',
     base_provider = '',
@@ -638,9 +638,6 @@ class ModelUtils:
         'westlake-7b-v2': westlake_7b_v2,
 
-        ### CookinAI ###
-        'donutlm-v1': donutlm_v1,
-
         ### DeepSeek ###
         'deepseek': deepseek,
@@ -669,6 +666,7 @@
         ### ###
         'dalle': dalle,
+        'dalle-2': dalle_2,
         'dalle-mini': dalle_mini,
         'emi': emi,
 }
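
Usage note (not part of the commit): the strings registered in ModelUtils.convert are the aliases callers pass as a model name, and the IterListProvider lists edited above are the provider fallback lists the library iterates over when a backend fails. A minimal sketch, assuming the g4f.client.Client API present in the library at this point; the prompt text is illustrative only:

    # Minimal sketch: resolve an alias touched by this commit and issue one request.
    from g4f.client import Client
    from g4f.models import ModelUtils

    # 'llama-3.1-8b' now maps to IterListProvider([Blackbox, PerplexityLabs]).
    print(ModelUtils.convert["llama-3.1-8b"].best_provider)

    client = Client()
    response = client.chat.completions.create(
        model="llama-3.1-8b",  # alias looked up in ModelUtils.convert
        messages=[{"role": "user", "content": "Say hello."}],
    )
    print(response.choices[0].message.content)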