Diffstat (limited to 'etc/examples')
-rw-r--r-- | etc/examples/api.py        | 19 |
-rw-r--r-- | etc/examples/ecosia.py     | 18 |
-rw-r--r-- | etc/examples/openaichat.py | 23 |
3 files changed, 60 insertions, 0 deletions
diff --git a/etc/examples/api.py b/etc/examples/api.py
new file mode 100644
index 00000000..d4d03a77
--- /dev/null
+++ b/etc/examples/api.py
@@ -0,0 +1,19 @@
+import requests
+import json
+url = "http://localhost:1337/v1/chat/completions"
+body = {
+    "model": "",
+    "provider": "MetaAI",
+    "stream": True,
+    "messages": [
+        {"role": "assistant", "content": "What can you do? Who are you?"}
+    ]
+}
+lines = requests.post(url, json=body, stream=True).iter_lines()
+for line in lines:
+    if line.startswith(b"data: "):
+        try:
+            print(json.loads(line[6:]).get("choices", [{"delta": {}}])[0]["delta"].get("content", ""), end="")
+        except json.JSONDecodeError:
+            pass
+print()
\ No newline at end of file
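
For reference, a minimal non-streaming sketch of the same request follows. It assumes the local interference API on port 1337 also accepts "stream": False and returns an OpenAI-style JSON body with choices[0].message.content; that response shape is an assumption, not something this diff demonstrates.

# Sketch only: assumes the endpoint accepts "stream": False and answers with
# an OpenAI-style body ({"choices": [{"message": {"content": ...}}]}).
import requests

url = "http://localhost:1337/v1/chat/completions"
body = {
    "model": "",
    "provider": "MetaAI",
    "stream": False,  # assumption: non-streaming is supported
    "messages": [
        {"role": "user", "content": "What can you do? Who are you?"}
    ]
}
response = requests.post(url, json=body)
response.raise_for_status()
print(response.json()["choices"][0]["message"]["content"])
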
diff --git a/etc/examples/ecosia.py b/etc/examples/ecosia.py
new file mode 100644
index 00000000..5a2ae520
--- /dev/null
+++ b/etc/examples/ecosia.py
@@ -0,0 +1,18 @@
+import asyncio
+import g4f
+from g4f.client import AsyncClient
+
+async def main():
+    client = AsyncClient(
+        provider=g4f.Provider.Ecosia,
+    )
+    async for chunk in client.chat.completions.create(
+        [{"role": "user", "content": "happy dogs on work. write some lines"}],
+        g4f.models.default,
+        stream=True,
+        green=True,
+    ):
+        print(chunk.choices[0].delta.content or "", end="")
+    print(f"\nwith {chunk.model}")
+
+asyncio.run(main())
\ No newline at end of file
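
The same call can also be written without streaming. The sketch below assumes the AsyncClient accepts messages=/model= keyword arguments and returns an OpenAI-style ChatCompletion object when stream is not requested; both points are assumptions about the client API rather than something shown in the diff.

# Sketch under assumptions: keyword arguments for messages/model and a
# ChatCompletion-style return value when streaming is not requested.
import asyncio
import g4f
from g4f.client import AsyncClient

async def main():
    client = AsyncClient(provider=g4f.Provider.Ecosia)
    response = await client.chat.completions.create(
        messages=[{"role": "user", "content": "happy dogs on work. write some lines"}],
        model=g4f.models.default,
        green=True,  # Ecosia-specific flag, taken from the example above
    )
    print(response.choices[0].message.content)

asyncio.run(main())
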
diff --git a/etc/examples/openaichat.py b/etc/examples/openaichat.py
new file mode 100644
index 00000000..291daa2c
--- /dev/null
+++ b/etc/examples/openaichat.py
@@ -0,0 +1,23 @@
+from g4f.client import Client
+from g4f.Provider import OpenaiChat, RetryProvider
+
+# compatible countries: https://pastebin.com/UK0gT9cn
+client = Client(
+    proxies = {
+        'http': 'http://username:password@host:port', # MUST BE WORKING OPENAI COUNTRY PROXY ex: USA
+        'https': 'http://username:password@host:port' # MUST BE WORKING OPENAI COUNTRY PROXY ex: USA
+    },
+    provider = RetryProvider([OpenaiChat],
+                             single_provider_retry=True, max_retries=5)
+)
+
+messages = [
+    {'role': 'user', 'content': 'Hello'}
+]
+
+response = client.chat.completions.create(model='gpt-3.5-turbo',
+                                           messages=messages,
+                                           stream=True)
+
+for message in response:
+    print(message.choices[0].delta.content or "")
\ No newline at end of file
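
A non-streaming variant of the same RetryProvider setup would look roughly like the sketch below. Accessing choices[0].message.content assumes g4f's Client mirrors the OpenAI response object for non-streamed completions, and the proxy URLs remain placeholders to be replaced with a working proxy in a supported country.

# Sketch only: same RetryProvider configuration, without streaming.
# choices[0].message.content assumes an OpenAI-compatible response object.
from g4f.client import Client
from g4f.Provider import OpenaiChat, RetryProvider

client = Client(
    proxies={
        'http': 'http://username:password@host:port',   # placeholder proxy, as in the example above
        'https': 'http://username:password@host:port',  # placeholder proxy, as in the example above
    },
    provider=RetryProvider([OpenaiChat], single_provider_retry=True, max_retries=5),
)

response = client.chat.completions.create(
    model='gpt-3.5-turbo',
    messages=[{'role': 'user', 'content': 'Hello'}],
)
print(response.choices[0].message.content)
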