From b8a3db526ce25932658131fcb96a3576f43672b1 Mon Sep 17 00:00:00 2001 From: hdsz25 Date: Sat, 28 Oct 2023 23:58:36 +0800 Subject: Update backend.py, index.html, requirements.txt (#1180) * Update backend.py Switch to the model received from the user's model selection in the web interface. * Update index.html Added Llama2 as a provider option and included the model selection for Llama2: llama2-70b, llama2-13b, llama2-7b * Update requirements.txt Add asgiref to enable async views in Flask for the API. "RuntimeError: Install Flask with the 'async' extra in order to use async views" --- g4f/gui/server/backend.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'g4f/gui/server/backend.py') diff --git a/g4f/gui/server/backend.py b/g4f/gui/server/backend.py index 304b9fc8..3d7bfedc 100644 --- a/g4f/gui/server/backend.py +++ b/g4f/gui/server/backend.py @@ -56,7 +56,7 @@ class Backend_Api: def stream(): yield from g4f.ChatCompletion.create( - model=g4f.models.gpt_35_long, + model=model, provider=get_provider(provider), messages=messages, stream=True, -- cgit v1.2.3