author     Heiner Lohaus <hlohaus@users.noreply.github.com>   2024-02-23 02:51:10 +0100
committer  Heiner Lohaus <hlohaus@users.noreply.github.com>   2024-02-23 02:51:10 +0100
commit     d733930a2b1876340039d90f19ece81fab0d078d (patch)
tree       208266ba04a3d4d056a85f20c104d324a1b625c8 /g4f
parent     Use new client in inter api (diff)
Diffstat (limited to 'g4f')

-rw-r--r--  g4f/api/__init__.py  10
-rw-r--r--  g4f/client.py         4
-rw-r--r--  g4f/stubs.py         10

3 files changed, 14 insertions, 10 deletions
diff --git a/g4f/api/__init__.py b/g4f/api/__init__.py
index 9033aafe..d1e8539f 100644
--- a/g4f/api/__init__.py
+++ b/g4f/api/__init__.py
@@ -6,7 +6,7 @@ import nest_asyncio
 from fastapi import FastAPI, Response, Request
 from fastapi.responses import StreamingResponse, RedirectResponse, HTMLResponse, JSONResponse
 from pydantic import BaseModel
-from typing import List
+from typing import List, Union
 
 import g4f
 import g4f.debug
@@ -16,12 +16,12 @@ from g4f.typing import Messages
 class ChatCompletionsConfig(BaseModel):
     messages: Messages
     model: str
-    provider: str | None
+    provider: Union[str, None]
     stream: bool = False
-    temperature: float | None
+    temperature: Union[float, None]
     max_tokens: int = None
-    stop: list[str] | str | None
-    access_token: str | None
+    stop: Union[list[str], str, None]
+    access_token: Union[str, None]
 
 class Api:
     def __init__(self, engine: g4f, debug: bool = True, sentry: bool = False,
diff --git a/g4f/client.py b/g4f/client.py
index b44a5230..023d53f6 100644
--- a/g4f/client.py
+++ b/g4f/client.py
@@ -17,7 +17,7 @@ from . import get_model_and_provider, get_last_provider
 
 ImageProvider = Union[BaseProvider, object]
 Proxies = Union[dict, str]
-IterResponse = Generator[ChatCompletion | ChatCompletionChunk, None, None]
+IterResponse = Generator[Union[ChatCompletion, ChatCompletionChunk], None, None]
 
 def read_json(text: str) -> dict:
     """
@@ -124,7 +124,7 @@ class Completions():
         stream: bool = False,
         response_format: dict = None,
         max_tokens: int = None,
-        stop: list[str] | str = None,
+        stop: Union[list[str], str] = None,
         **kwargs
     ) -> Union[ChatCompletion, Generator[ChatCompletionChunk]]:
         if max_tokens is not None:
diff --git a/g4f/stubs.py b/g4f/stubs.py
index b9934b8c..49cf8a88 100644
--- a/g4f/stubs.py
+++ b/g4f/stubs.py
@@ -1,6 +1,8 @@
 
 from __future__ import annotations
 
+from typing import Union
+
 class Model():
     ...
 
@@ -52,7 +54,7 @@ class ChatCompletionChunk(Model):
         }
 
 class ChatCompletionMessage(Model):
-    def __init__(self, content: str | None):
+    def __init__(self, content: Union[str, None]):
         self.role = "assistant"
         self.content = content
 
@@ -72,7 +74,9 @@ class ChatCompletionChoice(Model):
         }
 
 class ChatCompletionDelta(Model):
-    def __init__(self, content: str | None):
+    content: Union[str, None] = None
+
+    def __init__(self, content: Union[str, None]):
         if content is not None:
             self.content = content
 
@@ -80,7 +84,7 @@ class ChatCompletionDelta(Model):
         return self.__dict__
 
 class ChatCompletionDeltaChoice(Model):
-    def __init__(self, delta: ChatCompletionDelta, finish_reason: str | None):
+    def __init__(self, delta: ChatCompletionDelta, finish_reason: Union[str, None]):
         self.delta = delta
         self.finish_reason = finish_reason
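
The commit replaces the PEP 604 pipe syntax (str | None) with typing.Union throughout these modules, which appears to be aimed at keeping the code importable on Python versions older than 3.10, where the | operator between types fails when an annotation is evaluated at runtime (as happens for class-body annotations such as the pydantic model ChatCompletionsConfig). A minimal sketch of the difference, assuming the change targets Python 3.9; the class names below are illustrative and not part of the repository:

# Illustrative sketch, not part of the commit: why Union[...] instead of "|".
# Without "from __future__ import annotations", class-body annotations are
# evaluated when the class is created, so the pipe form raises before 3.10.
from typing import Union

class WorksOnPython39:
    provider: Union[str, None] = None          # typing.Union works on 3.7+
    stop: Union[list[str], str, None] = None   # list[str] itself needs 3.9+

# class FailsOnPython39:
#     provider: str | None = None  # TypeError: unsupported operand type(s) for |: 'type' and 'NoneType'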