From 06e5b01c39ae75074d40b28f444b5376da64c8ba Mon Sep 17 00:00:00 2001
From: baysonfox
Date: Thu, 1 Aug 2024 00:26:01 +0800
Subject: feat: GPT-4o-mini support for DDG provider

---
 g4f/Provider/DDG.py | 9 ++++++---
 1 file changed, 6 insertions(+), 3 deletions(-)

(limited to 'g4f')

diff --git a/g4f/Provider/DDG.py b/g4f/Provider/DDG.py
index 2aa78773..2b2879b6 100644
--- a/g4f/Provider/DDG.py
+++ b/g4f/Provider/DDG.py
@@ -18,9 +18,12 @@ class DDG(AsyncGeneratorProvider, ProviderModelMixin):
     supports_gpt_35_turbo = True
     supports_message_history = True
 
-    default_model = "gpt-3.5-turbo-0125"
-    models = ["gpt-3.5-turbo-0125", "claude-3-haiku-20240307", "meta-llama/Llama-3-70b-chat-hf", "mistralai/Mixtral-8x7B-Instruct-v0.1"]
+    # default_model = "gpt-3.5-turbo-0125"
+    default_model = "gpt-4o-mini"
+    models = ["gpt-4o-mini" ,"gpt-3.5-turbo-0125", "claude-3-haiku-20240307", "meta-llama/Llama-3-70b-chat-hf", "mistralai/Mixtral-8x7B-Instruct-v0.1"]
     model_aliases = {
+        "gpt-4": "gpt-4o-mini",
+        "gpt-4o": "gpt-4o-mini",
         "gpt-3.5-turbo": "gpt-3.5-turbo-0125",
         "claude-3-haiku": "claude-3-haiku-20240307",
         "llama-3-70b": "meta-llama/Llama-3-70b-chat-hf",
@@ -33,7 +36,7 @@ class DDG(AsyncGeneratorProvider, ProviderModelMixin):
     referer = base64.b64decode("aHR0cHM6Ly9kdWNrZHVja2dvLmNvbS8=").decode("utf-8")
     origin = base64.b64decode("aHR0cHM6Ly9kdWNrZHVja2dvLmNvbQ==").decode("utf-8")
 
-    user_agent = 'Mozilla/5.0 (Windows NT 10.0; rv:127.0) Gecko/20100101 Firefox/127.0'
+    user_agent = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/127.0.0.0 Safari/537.36'
     headers = {
         'User-Agent': user_agent,
         'Accept': 'text/event-stream',
--
cgit v1.2.3

From bc66fa4942cd6455dbb10188edc92f201cabcecc Mon Sep 17 00:00:00 2001
From: baysonfox
Date: Thu, 1 Aug 2024 00:35:21 +0800
Subject: feat(DDG): remove gpt-3.5-turbo model

---
 g4f/Provider/DDG.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

(limited to 'g4f')

diff --git a/g4f/Provider/DDG.py b/g4f/Provider/DDG.py
index 2b2879b6..91ccde32 100644
--- a/g4f/Provider/DDG.py
+++ b/g4f/Provider/DDG.py
@@ -20,11 +20,10 @@ class DDG(AsyncGeneratorProvider, ProviderModelMixin):
 
     # default_model = "gpt-3.5-turbo-0125"
     default_model = "gpt-4o-mini"
-    models = ["gpt-4o-mini" ,"gpt-3.5-turbo-0125", "claude-3-haiku-20240307", "meta-llama/Llama-3-70b-chat-hf", "mistralai/Mixtral-8x7B-Instruct-v0.1"]
+    models = ["gpt-4o-mini", "claude-3-haiku-20240307", "meta-llama/Llama-3-70b-chat-hf", "mistralai/Mixtral-8x7B-Instruct-v0.1"]
     model_aliases = {
         "gpt-4": "gpt-4o-mini",
         "gpt-4o": "gpt-4o-mini",
-        "gpt-3.5-turbo": "gpt-3.5-turbo-0125",
         "claude-3-haiku": "claude-3-haiku-20240307",
         "llama-3-70b": "meta-llama/Llama-3-70b-chat-hf",
         "mixtral-8x7B": "mistralai/Mixtral-8x7B-Instruct-v0.1"
--
cgit v1.2.3
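
Illustrative note (not part of the patch): the sketch below shows how a caller might exercise the new alias table after these two commits, where "gpt-4" and "gpt-4o" are expected to resolve to "gpt-4o-mini" and an omitted model falls back to `default_model`. It assumes this g4f build exposes `g4f.Provider.DDG` and the synchronous `g4f.ChatCompletion.create` entry point; the exact resolution behaviour lives in `ProviderModelMixin` and is not shown here.

```python
# Minimal usage sketch, assuming g4f.Provider.DDG is exported and
# g4f.ChatCompletion.create is available in this g4f version.
import g4f

response = g4f.ChatCompletion.create(
    model="gpt-4o",                      # alias, expected to map to "gpt-4o-mini"
    provider=g4f.Provider.DDG,
    messages=[{"role": "user", "content": "Say hello in one short sentence."}],
)
print(response)
```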