| author | Bagus Indrayana <bagusindrayanaindo@gmail.com> | 2023-08-17 15:30:52 +0200 |
|---|---|---|
| committer | Bagus Indrayana <bagusindrayanaindo@gmail.com> | 2023-08-17 15:30:52 +0200 |
| commit | 74ecdee78466104e57eb7488e682b564988fcd88 (patch) | |
| tree | bbc764ba3248e80f20dde55e5b382b3f3d382575 /interference/app.py | |
| parent | add proxy and remove stream (diff) | |
| parent | ~ (diff) | |
Diffstat (limited to 'interference/app.py')
-rw-r--r-- | interference/app.py | 126 |
1 files changed, 66 insertions, 60 deletions
diff --git a/interference/app.py b/interference/app.py
index eee67c57..30a9b129 100644
--- a/interference/app.py
+++ b/interference/app.py
@@ -1,86 +1,92 @@
-import os
-import time
 import json
 import random
+import string
+import time
+from typing import Any
 
-from g4f import Model, ChatCompletion, Provider
-from flask import Flask, request, Response
+from flask import Flask, request
 from flask_cors import CORS
+from g4f import ChatCompletion
+
 app = Flask(__name__)
 CORS(app)
 
-@app.route("/chat/completions", methods=['POST'])
+
+@app.route("/chat/completions", methods=["POST"])
 def chat_completions():
-    streaming = request.json.get('stream', False)
-    model = request.json.get('model', 'gpt-3.5-turbo')
-    messages = request.json.get('messages')
-
-    response = ChatCompletion.create(model=model, stream=streaming,
-                                     messages=messages)
-
-    if not streaming:
-        while 'curl_cffi.requests.errors.RequestsError' in response:
-            response = ChatCompletion.create(model=model, stream=streaming,
-                                             messages=messages)
+    model = request.get_json().get("model", "gpt-3.5-turbo")
+    stream = request.get_json().get("stream", False)
+    messages = request.get_json().get("messages")
 
-        completion_timestamp = int(time.time())
-        completion_id = ''.join(random.choices(
-            'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789', k=28))
+    response = ChatCompletion.create(model=model, stream=stream, messages=messages)
+    completion_id = "".join(random.choices(string.ascii_letters + string.digits, k=28))
+    completion_timestamp = int(time.time())
+
+    if not stream:
         return {
-            'id': 'chatcmpl-%s' % completion_id,
-            'object': 'chat.completion',
-            'created': completion_timestamp,
-            'model': model,
-            'usage': {
-                'prompt_tokens': None,
-                'completion_tokens': None,
-                'total_tokens': None
+            "id": f"chatcmpl-{completion_id}",
+            "object": "chat.completion",
+            "created": completion_timestamp,
+            "model": model,
+            "choices": [
+                {
+                    "index": 0,
+                    "message": {
+                        "role": "assistant",
+                        "content": response,
+                    },
+                    "finish_reason": "stop",
+                }
+            ],
+            "usage": {
+                "prompt_tokens": None,
+                "completion_tokens": None,
+                "total_tokens": None,
             },
-            'choices': [{
-                'message': {
-                    'role': 'assistant',
-                    'content': response
-                },
-                'finish_reason': 'stop',
-                'index': 0
-            }]
         }
 
-    def stream():
-        for token in response:
-            completion_timestamp = int(time.time())
-            completion_id = ''.join(random.choices(
-                'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789', k=28))
-
+    def streaming():
+        for chunk in response:
             completion_data = {
-                'id': f'chatcmpl-{completion_id}',
-                'object': 'chat.completion.chunk',
-                'created': completion_timestamp,
-                'model': 'gpt-3.5-turbo-0301',
-                'choices': [
+                "id": f"chatcmpl-{completion_id}",
+                "object": "chat.completion.chunk",
+                "created": completion_timestamp,
+                "model": model,
+                "choices": [
                     {
-                        'delta': {
-                            'content': token
+                        "index": 0,
+                        "delta": {
+                            "content": chunk,
                         },
-                        'index': 0,
-                        'finish_reason': None
+                        "finish_reason": None,
                     }
-                ]
+                ],
             }
 
-            yield 'data: %s\n\n' % json.dumps(completion_data, separators=(',' ':'))
+            content = json.dumps(completion_data, separators=(",", ":"))
+            yield f"data: {content}\n\n"
             time.sleep(0.1)
 
-    return app.response_class(stream(), mimetype='text/event-stream')
+        end_completion_data: dict[str, Any] = {
+            "id": f"chatcmpl-{completion_id}",
+            "object": "chat.completion.chunk",
+            "created": completion_timestamp,
+            "model": model,
+            "choices": [
+                {
+                    "index": 0,
+                    "delta": {},
+                    "finish_reason": "stop",
+                }
+            ],
+        }
+        content = json.dumps(end_completion_data, separators=(",", ":"))
+        yield f"data: {content}\n\n"
+    return app.response_class(streaming(), mimetype="text/event-stream")
 
 
-if __name__ == '__main__':
-    config = {
-        'host': '0.0.0.0',
-        'port': 1337,
-        'debug': True
-    }
-    app.run(**config)
\ No newline at end of file
+if __name__ == "__main__":
+    app.run(host="0.0.0.0", port=1337, debug=True)
\ No newline at end of file
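For reference, a minimal client sketch against the endpoint this commit rewrites. It is not part of the commit: it assumes the server from interference/app.py is running locally on port 1337 and uses the third-party requests library purely for illustration; the URL, payload fields, and SSE framing come from the diff above.

```python
import json
import requests

BASE_URL = "http://localhost:1337"  # assumed local address of the interference server

payload = {
    "model": "gpt-3.5-turbo",
    "stream": False,
    "messages": [{"role": "user", "content": "Say hello"}],
}

# Non-streaming: the server returns one chat.completion object.
resp = requests.post(f"{BASE_URL}/chat/completions", json=payload)
print(resp.json()["choices"][0]["message"]["content"])

# Streaming: the server emits Server-Sent Events ("data: {...}\n\n"),
# each carrying one chat.completion.chunk; the final chunk has an empty delta.
payload["stream"] = True
with requests.post(f"{BASE_URL}/chat/completions", json=payload, stream=True) as r:
    for line in r.iter_lines():
        if line and line.startswith(b"data: "):
            chunk = json.loads(line[len(b"data: "):])
            delta = chunk["choices"][0]["delta"]
            print(delta.get("content", ""), end="", flush=True)
```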