Diffstat (limited to '')
 g4f/__init__.py              |  2 +-
 g4f/gui/client/js/chat.v2.js | 18 ++++++++++++------
 g4f/gui/server/backend.py    |  1 -
 g4f/models.py                |  4 ++--
 4 files changed, 15 insertions(+), 10 deletions(-)
diff --git a/g4f/__init__.py b/g4f/__init__.py
index 96b620d9..58cfc314 100644
--- a/g4f/__init__.py
+++ b/g4f/__init__.py
@@ -5,7 +5,7 @@ from .Provider import BaseProvider, RetryProvider
 from .typing import Messages, CreateResult, Union, List
 from .debug import logging
-version = '0.1.6.7'
+version = '0.1.6.8'
 version_check = True
 def check_pypi_version() -> None:
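
The version bump above is what check_pypi_version() compares against when warning about a newer release on PyPI. Below is a minimal, hypothetical sketch of such a check against the public PyPI JSON API; the helper name latest_pypi_version and the printed messages are illustrative, not the project's code.

```python
# Hypothetical sketch of a PyPI version check, not g4f's actual
# implementation: query the public JSON API and compare version strings.
import json
from urllib.request import urlopen

version = '0.1.6.8'  # mirrors the value set in g4f/__init__.py above

def latest_pypi_version(package: str = 'g4f') -> str:
    # https://pypi.org/pypi/<package>/json returns the release metadata
    with urlopen(f'https://pypi.org/pypi/{package}/json') as response:
        return json.load(response)['info']['version']

def check_pypi_version() -> None:
    try:
        latest = latest_pypi_version()
        if latest != version:
            print(f'New g4f version available: {latest} (installed: {version})')
    except Exception as error:
        print(f'Version check failed: {error}')
```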
diff --git a/g4f/gui/client/js/chat.v2.js b/g4f/gui/client/js/chat.v2.js
index 03c3a87a..5c1cbca8 100644
--- a/g4f/gui/client/js/chat.v2.js
+++ b/g4f/gui/client/js/chat.v2.js
@@ -597,15 +597,21 @@ observer.observe(message_input, { attributes: true });
 const load_models = async () => {
-    response = await fetch('/backend-api/v2/models')
-    models = await response.json()
+    models = localStorage.getItem('_models')
-    var MODELS_SELECT = document.getElementById('model');
+    if (models === null) {
+        response = await fetch('/backend-api/v2/models')
+        models = await response.json()
+        localStorage.setItem('_models', JSON.stringify(models))
+
+    } else {
+        models = JSON.parse(models)
+    }
-    for (model of models) {
+    let MODELS_SELECT = document.getElementById('model');
-        // Create new option elements
-        var model_info = document.createElement('option');
+    for (model of models) {
+        let model_info = document.createElement('option');
         model_info.value = model
         model_info.text = model
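
The JavaScript change above stops re-fetching /backend-api/v2/models on every page load: the first response is stored under the localStorage key '_models' and parsed from there afterwards. Note the diff adds no invalidation, so the cached list persists until that key is cleared. The same cache-on-first-use idea, sketched in Python for a hypothetical client of the endpoint; the cache file name and base URL are assumptions, not part of the project.

```python
# Cache-on-first-use, mirroring what the JS change does with localStorage.
import json
from pathlib import Path
from urllib.request import urlopen

CACHE_FILE = Path('_models.json')    # plays the role of localStorage['_models']
BASE_URL = 'http://localhost:8080'   # wherever the GUI backend is assumed to be served

def load_models() -> list:
    if CACHE_FILE.exists():          # cached copy: skip the network round trip
        return json.loads(CACHE_FILE.read_text())
    with urlopen(f'{BASE_URL}/backend-api/v2/models') as response:
        models = json.load(response)
    CACHE_FILE.write_text(json.dumps(models))  # persist for subsequent loads
    return models
```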
diff --git a/g4f/gui/server/backend.py b/g4f/gui/server/backend.py
index 714609f6..e2c445a5 100644
--- a/g4f/gui/server/backend.py
+++ b/g4f/gui/server/backend.py
@@ -27,7 +27,6 @@ class Backend_Api:
     def models(self):
         models = g4f._all_models
-        models.remove('oasst-sft-4-pythia-12b-epoch-3.5')
         return models
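
With the hard-coded removal of 'oasst-sft-4-pythia-12b-epoch-3.5' gone, the endpoint simply returns g4f._all_models. A standalone sketch of that shape, assuming a Flask app; the real Backend_Api wires its routes differently.

```python
# Standalone sketch of the models endpoint's shape, assuming Flask; not the
# project's actual Backend_Api route registration.
from flask import Flask, jsonify
import g4f

app = Flask(__name__)

@app.route('/backend-api/v2/models')
def models():
    # no entries are filtered out any more; return the full list
    return jsonify(g4f._all_models)
```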
diff --git a/g4f/models.py b/g4f/models.py
index b34297f5..af4958cc 100644
--- a/g4f/models.py
+++ b/g4f/models.py
@@ -168,12 +168,12 @@ code_davinci_002 = Model(
 gpt_35_turbo_16k = Model(
     name = 'gpt-3.5-turbo-16k',
     base_provider = 'openai',
-    best_provider = gpt_35_turbo.best_provider)
+    best_provider = gpt_35_long.best_provider)
 gpt_35_turbo_16k_0613 = Model(
     name = 'gpt-3.5-turbo-16k-0613',
     base_provider = 'openai',
-    best_provider = gpt_35_turbo.best_provider
+    best_provider = gpt_35_long.best_provider
 )
 gpt_35_turbo_0613 = Model(
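
The models.py change points the gpt-3.5-turbo-16k variants at gpt_35_long.best_provider instead of gpt_35_turbo.best_provider, so the 16k entries share the long-context provider selection. A self-contained sketch of that sharing pattern; the field names follow the diff, while the dataclass layout and the stand-in provider object are assumptions for illustration.

```python
# Sketch of the sharing pattern: several Model entries can point at the same
# best_provider object, so rerouting them is a one-line change per model.
from dataclasses import dataclass
from typing import Any

@dataclass
class Model:
    name: str
    base_provider: str
    best_provider: Any = None

# stand-in for the long-context provider bundle that gpt_35_long carries
long_context_provider = object()

gpt_35_long = Model(
    name = 'gpt-3.5-turbo-16k',
    base_provider = 'openai',
    best_provider = long_context_provider)

# the 16k variants now reuse gpt_35_long's provider rather than gpt_35_turbo's
gpt_35_turbo_16k = Model(
    name = 'gpt-3.5-turbo-16k',
    base_provider = 'openai',
    best_provider = gpt_35_long.best_provider)

assert gpt_35_turbo_16k.best_provider is gpt_35_long.best_provider
```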