Diffstat
-rw-r--r--  g4f/gui/client/js/chat.v1.js  95
1 file changed, 35 insertions(+), 60 deletions(-)
diff --git a/g4f/gui/client/js/chat.v1.js b/g4f/gui/client/js/chat.v1.js
index 5b7a0bf0..2844b73e 100644
--- a/g4f/gui/client/js/chat.v1.js
+++ b/g4f/gui/client/js/chat.v1.js
@@ -161,7 +161,7 @@ const ask_gpt = async (txtMsgs) => {
text += chunk;
- document.getElementById(`gpt_${window.token}`).innerHTML = markdown.render(text);
+ document.getElementById(`gpt_${window.token}`).innerHTML = markdown.render(text).replace("<a href=", '<a target="_blank" href=');
document.querySelectorAll(`code`).forEach((el) => {
hljs.highlightElement(el);
});
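
A note on the replace() call added above (and mirrored in the next hunk): with a string pattern, String.prototype.replace rewrites only the first "<a href=" occurrence in the rendered HTML, so only the first link in a message gets target="_blank". A global-regex variant, sketched below as a hypothetical helper that is not part of this commit, would cover every anchor:

// Hypothetical helper, not in the diff: add target="_blank" to every rendered
// anchor instead of only the first match.
const openLinksInNewTab = (html) =>
    html.replace(/<a href=/g, '<a target="_blank" href=');
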
@@ -308,7 +308,7 @@ const load_conversation = async (conversation_id) => {
</div>
<div class="content">
${item.role == "assistant"
- ? markdown.render(item.content)
+ ? markdown.render(item.content).replace("<a href=", '<a target="_blank" href=')
: item.content
}
</div>
@@ -529,7 +529,7 @@ window.onload = async () => {
load_settings_localstorage();
setTheme();
- conversations = 0;
+ let conversations = 0;
for (let i = 0; i < localStorage.length; i++) {
if (localStorage.key(i).startsWith("conversation:")) {
conversations += 1;
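
The change above declares the conversation counter with let instead of assigning to an implicit global; the surrounding loop counts localStorage keys prefixed with "conversation:". An equivalent count, given only as an illustrative sketch and not part of the commit:

// Sketch: count stored conversations by key prefix, matching the loop above.
const countConversations = () =>
    Object.keys(localStorage).filter((key) => key.startsWith("conversation:")).length;
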
@@ -548,7 +548,6 @@ window.onload = async () => {
}
}
- // await load_models();
await say_hello()
message_input.addEventListener(`keydown`, async (evt) => {
@@ -593,64 +592,40 @@ const observer = new MutationObserver((mutationsList) => {
observer.observe(message_input, { attributes: true });
-
-const load_models = async () => {
- // models = localStorage.getItem('_models')
-
- // if (models === null) {
- // response = await fetch('/backend-api/v2/models')
- // models = await response.json()
- // localStorage.setItem('_models', JSON.stringify(models))
-
- // } else {
- // models = JSON.parse(models)
- // }
-
- models = [
- "gpt-3.5-turbo",
- "gpt-3.5-turbo-0613",
- "gpt-3.5-turbo-16k",
- "gpt-3.5-turbo-16k-0613",
- "gpt-4",
- "gpt-4-0613",
- "gpt-4-32k",
- "gpt-4-32k-0613",
- "palm2",
- "palm",
- "google",
- "google-bard",
- "google-palm",
- "bard",
- "falcon-40b",
- "falcon-7b",
- "llama-13b",
- "command-nightly",
- "gpt-neox-20b",
- "santacoder",
- "bloom",
- "flan-t5-xxl",
- "code-davinci-002",
- "text-ada-001",
- "text-babbage-001",
- "text-curie-001",
- "text-davinci-002",
- "text-davinci-003",
- "llama70b-v2-chat",
- "llama13b-v2-chat",
- "llama7b-v2-chat",
- "oasst-sft-1-pythia-12b",
- "oasst-sft-4-pythia-12b-epoch-3.5",
- "command-light-nightly"
- ]
-
- let MODELS_SELECT = document.getElementById('model');
+(async () => {
+ response = await fetch('/backend-api/v2/models')
+ models = await response.json()
+ let select = document.getElementById('model');
+ select.textContent = '';
+
+ let auto = document.createElement('option');
+ auto.value = '';
+ auto.text = 'Model: Default';
+ select.appendChild(auto);
for (model of models) {
- let model_info = document.createElement('option');
- model_info.value = model
- model_info.text = model
+ let option = document.createElement('option');
+ option.value = option.text = model;
+ select.appendChild(option);
+ }
+})();
- MODELS_SELECT.appendChild(model_info);
+(async () => {
+ response = await fetch('/backend-api/v2/providers')
+ providers = await response.json()
+
+ let select = document.getElementById('provider');
+ select.textContent = '';
+
+ let auto = document.createElement('option');
+ auto.value = '';
+ auto.text = 'Provider: Auto';
+ select.appendChild(auto);
+
+ for (provider of providers) {
+ let option = document.createElement('option');
+ option.value = option.text = provider;
+ select.appendChild(option);
}
-}
\ No newline at end of file
+})();
\ No newline at end of file
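
The two new self-invoking async blocks follow the same pattern: fetch a JSON array of names from the backend, clear the <select>, add a default option, then append one option per entry. A single helper along these lines could serve both; the fillSelect name, the const declarations, and the try/catch are illustrative additions, not part of the commit:

// Hypothetical consolidation of the two IIFEs above; assumes, as they do,
// that both endpoints return a plain JSON array of strings.
const fillSelect = async (url, selectId, placeholderText) => {
    const select = document.getElementById(selectId);
    select.textContent = '';

    const placeholder = document.createElement('option');
    placeholder.value = '';
    placeholder.text = placeholderText;
    select.appendChild(placeholder);

    try {
        const response = await fetch(url);
        const items = await response.json();
        for (const item of items) {
            const option = document.createElement('option');
            option.value = option.text = item;
            select.appendChild(option);
        }
    } catch (err) {
        console.error(`could not load ${url}`, err);
    }
};

// Usage mirroring the committed IIFEs:
// fillSelect('/backend-api/v2/models', 'model', 'Model: Default');
// fillSelect('/backend-api/v2/providers', 'provider', 'Provider: Auto');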