From 5fd118f3c9d3d3a932695a3b413d8926d8ad58c0 Mon Sep 17 00:00:00 2001
From: PD <56485898+pratham-darooka@users.noreply.github.com>
Date: Fri, 19 Apr 2024 12:57:33 +0530
Subject: Added Meta llama-3 support! (#1856)
* Added Meta llama-3 support!
Renamed llama2.py to llama.py so that one module can hold all of the Llama-family models.
* Updated the HuggingChat provider
* Update FlowGpt.py
---
g4f/gui/client/index.html | 1 +
g4f/gui/client/static/js/chat.v1.js | 2 +-
2 files changed, 2 insertions(+), 1 deletion(-)
(limited to 'g4f/gui/client')
diff --git a/g4f/gui/client/index.html b/g4f/gui/client/index.html
index a6c4909b..8668c21e 100644
--- a/g4f/gui/client/index.html
+++ b/g4f/gui/client/index.html
@@ -220,6 +220,7 @@
+
diff --git a/g4f/gui/client/static/js/chat.v1.js b/g4f/gui/client/static/js/chat.v1.js
index a17be16e..39027260 100644
--- a/g4f/gui/client/static/js/chat.v1.js
+++ b/g4f/gui/client/static/js/chat.v1.js
@@ -926,7 +926,7 @@ colorThemes.forEach((themeOption) => {
function count_tokens(model, text) {
if (model) {
if (window.llamaTokenizer)
- if (model.startsWith("llama2") || model.startsWith("codellama")) {
+ if (model.startsWith("llama") || model.startsWith("codellama")) {
return llamaTokenizer.encode(text).length;
}
if (window.mistralTokenizer)
--
cgit v1.2.3