diff options
author | Heiner Lohaus <hlohaus@users.noreply.github.com> | 2024-01-14 15:04:37 +0100 |
---|---|---|
committer | Heiner Lohaus <hlohaus@users.noreply.github.com> | 2024-01-14 15:04:37 +0100 |
commit | 32252def150da94f12d1f3c07f977af6d8931402 (patch) | |
tree | 05a94b53b83461b8249de965e093b4fd3722e2d1 /g4f/gui/server/backend.py | |
parent | Refactor code with AI (diff) | |
download | gpt4free-32252def150da94f12d1f3c07f977af6d8931402.tar gpt4free-32252def150da94f12d1f3c07f977af6d8931402.tar.gz gpt4free-32252def150da94f12d1f3c07f977af6d8931402.tar.bz2 gpt4free-32252def150da94f12d1f3c07f977af6d8931402.tar.lz gpt4free-32252def150da94f12d1f3c07f977af6d8931402.tar.xz gpt4free-32252def150da94f12d1f3c07f977af6d8931402.tar.zst gpt4free-32252def150da94f12d1f3c07f977af6d8931402.zip |
Diffstat (limited to '')
-rw-r--r-- | g4f/gui/server/backend.py | 211 |
1 file changed, 149 insertions, 62 deletions
diff --git a/g4f/gui/server/backend.py b/g4f/gui/server/backend.py index 9d12bea5..4a5cafa8 100644 --- a/g4f/gui/server/backend.py +++ b/g4f/gui/server/backend.py @@ -1,6 +1,7 @@ import logging import json from flask import request, Flask +from typing import Generator from g4f import debug, version, models from g4f import _all_models, get_last_provider, ChatCompletion from g4f.image import is_allowed_extension, to_image @@ -11,60 +12,123 @@ from .internet import get_search_message debug.logging = True class Backend_Api: + """ + Handles various endpoints in a Flask application for backend operations. + + This class provides methods to interact with models, providers, and to handle + various functionalities like conversations, error handling, and version management. + + Attributes: + app (Flask): A Flask application instance. + routes (dict): A dictionary mapping API endpoints to their respective handlers. + """ def __init__(self, app: Flask) -> None: + """ + Initialize the backend API with the given Flask application. + + Args: + app (Flask): Flask application instance to attach routes to. 
+ """ self.app: Flask = app self.routes = { '/backend-api/v2/models': { - 'function': self.models, - 'methods' : ['GET'] + 'function': self.get_models, + 'methods': ['GET'] }, '/backend-api/v2/providers': { - 'function': self.providers, - 'methods' : ['GET'] + 'function': self.get_providers, + 'methods': ['GET'] }, '/backend-api/v2/version': { - 'function': self.version, - 'methods' : ['GET'] + 'function': self.get_version, + 'methods': ['GET'] }, '/backend-api/v2/conversation': { - 'function': self._conversation, + 'function': self.handle_conversation, 'methods': ['POST'] }, '/backend-api/v2/gen.set.summarize:title': { - 'function': self._gen_title, + 'function': self.generate_title, 'methods': ['POST'] }, '/backend-api/v2/error': { - 'function': self.error, + 'function': self.handle_error, 'methods': ['POST'] } } - def error(self): + def handle_error(self): + """ + Initialize the backend API with the given Flask application. + + Args: + app (Flask): Flask application instance to attach routes to. + """ print(request.json) - return 'ok', 200 - def models(self): + def get_models(self): + """ + Return a list of all models. + + Fetches and returns a list of all available models in the system. + + Returns: + List[str]: A list of model names. + """ return _all_models - def providers(self): - return [ - provider.__name__ for provider in __providers__ if provider.working - ] + def get_providers(self): + """ + Return a list of all working providers. + """ + return [provider.__name__ for provider in __providers__ if provider.working] - def version(self): + def get_version(self): + """ + Returns the current and latest version of the application. + + Returns: + dict: A dictionary containing the current and latest version. 
+ """ return { "version": version.utils.current_version, - "lastet_version": version.get_latest_version(), + "latest_version": version.get_latest_version(), } - def _gen_title(self): - return { - 'title': '' - } + def generate_title(self): + """ + Generates and returns a title based on the request data. + + Returns: + dict: A dictionary with the generated title. + """ + return {'title': ''} - def _conversation(self): + def handle_conversation(self): + """ + Handles conversation requests and streams responses back. + + Returns: + Response: A Flask response object for streaming. + """ + kwargs = self._prepare_conversation_kwargs() + + return self.app.response_class( + self._create_response_stream(kwargs), + mimetype='text/event-stream' + ) + + def _prepare_conversation_kwargs(self): + """ + Prepares arguments for chat completion based on the request data. + + Reads the request and prepares the necessary arguments for handling + a chat completion request. + + Returns: + dict: Arguments prepared for chat completion. 
+ """ kwargs = {} if 'image' in request.files: file = request.files['image'] @@ -87,47 +151,70 @@ class Backend_Api: messages[-1]["content"] = get_search_message(messages[-1]["content"]) model = json_data.get('model') model = model if model else models.default - provider = json_data.get('provider', '').replace('g4f.Provider.', '') - provider = provider if provider and provider != "Auto" else None patch = patch_provider if json_data.get('patch_provider') else None - def try_response(): - try: - first = True - for chunk in ChatCompletion.create( - model=model, - provider=provider, - messages=messages, - stream=True, - ignore_stream_and_auth=True, - patch_provider=patch, - **kwargs - ): - if first: - first = False - yield json.dumps({ - 'type' : 'provider', - 'provider': get_last_provider(True) - }) + "\n" - if isinstance(chunk, Exception): - logging.exception(chunk) - yield json.dumps({ - 'type' : 'message', - 'message': get_error_message(chunk), - }) + "\n" - else: - yield json.dumps({ - 'type' : 'content', - 'content': str(chunk), - }) + "\n" - except Exception as e: - logging.exception(e) - yield json.dumps({ - 'type' : 'error', - 'error': get_error_message(e) - }) - - return self.app.response_class(try_response(), mimetype='text/event-stream') + return { + "model": model, + "provider": provider, + "messages": messages, + "stream": True, + "ignore_stream_and_auth": True, + "patch_provider": patch, + **kwargs + } + + def _create_response_stream(self, kwargs) -> Generator[str, None, None]: + """ + Creates and returns a streaming response for the conversation. + + Args: + kwargs (dict): Arguments for creating the chat completion. + + Yields: + str: JSON formatted response chunks for the stream. + + Raises: + Exception: If an error occurs during the streaming process. 
+ """ + try: + first = True + for chunk in ChatCompletion.create(**kwargs): + if first: + first = False + yield self._format_json('provider', get_last_provider(True)) + if isinstance(chunk, Exception): + logging.exception(chunk) + yield self._format_json('message', get_error_message(chunk)) + else: + yield self._format_json('content', str(chunk)) + except Exception as e: + logging.exception(e) + yield self._format_json('error', get_error_message(e)) + + def _format_json(self, response_type: str, content) -> str: + """ + Formats and returns a JSON response. + + Args: + response_type (str): The type of the response. + content: The content to be included in the response. + + Returns: + str: A JSON formatted string. + """ + return json.dumps({ + 'type': response_type, + response_type: content + }) + "\n" def get_error_message(exception: Exception) -> str: + """ + Generates a formatted error message from an exception. + + Args: + exception (Exception): The exception to format. + + Returns: + str: A formatted error message string. + """ return f"{get_last_provider().__name__}: {type(exception).__name__}: {exception}"
\ No newline at end of file |