author | Bagus Indrayana <bagusindrayanaindo@gmail.com> | 2023-07-12 01:49:23 +0200 |
---|---|---|
committer | Bagus Indrayana <bagusindrayanaindo@gmail.com> | 2023-07-12 01:49:23 +0200 |
commit | 6fb45515400488b32ac970bbd9f6f51023259b9f (patch) | |
tree | fb624601b90655fc9950f4c3cf1edb197c594664 /g4f | |
parent | Merge pull request #741 from bagusindrayana/aiservice (diff) | |
download | gpt4free-6fb45515400488b32ac970bbd9f6f51023259b9f.tar gpt4free-6fb45515400488b32ac970bbd9f6f51023259b9f.tar.gz gpt4free-6fb45515400488b32ac970bbd9f6f51023259b9f.tar.bz2 gpt4free-6fb45515400488b32ac970bbd9f6f51023259b9f.tar.lz gpt4free-6fb45515400488b32ac970bbd9f6f51023259b9f.tar.xz gpt4free-6fb45515400488b32ac970bbd9f6f51023259b9f.tar.zst gpt4free-6fb45515400488b32ac970bbd9f6f51023259b9f.zip |
Diffstat (limited to 'g4f')
-rw-r--r-- | g4f/Provider/Providers/AiService.py | 40 |
-rw-r--r-- | g4f/Provider/Providers/BingHuan.py | 27 |
-rw-r--r-- | g4f/Provider/Providers/Wewordle.py | 73 |
-rw-r--r-- | g4f/Provider/Providers/helpers/binghuan.py | 221 |
-rw-r--r-- | g4f/Provider/__init__.py | 3 |
5 files changed, 364 insertions, 0 deletions
diff --git a/g4f/Provider/Providers/AiService.py b/g4f/Provider/Providers/AiService.py
new file mode 100644
index 00000000..8d475118
--- /dev/null
+++ b/g4f/Provider/Providers/AiService.py
@@ -0,0 +1,40 @@
+import os,sys
+import requests
+from ...typing import get_type_hints
+
+url = "https://aiservice.vercel.app/api/chat/answer"
+model = ['gpt-3.5-turbo']
+supports_stream = False
+needs_auth = False
+
+
+def _create_completion(model: str, messages: list, stream: bool, **kwargs):
+    base = ''
+    for message in messages:
+        base += '%s: %s\n' % (message['role'], message['content'])
+    base += 'assistant:'
+
+    headers = {
+        "accept": "*/*",
+        "content-type": "text/plain;charset=UTF-8",
+        "sec-fetch-dest": "empty",
+        "sec-fetch-mode": "cors",
+        "sec-fetch-site": "same-origin",
+        "Referer": "https://aiservice.vercel.app/chat",
+    }
+    data = {
+        "input": base
+    }
+    response = requests.post(url, headers=headers, json=data)
+    if response.status_code == 200:
+        _json = response.json()
+        yield _json['data']
+    else:
+        print(f"Error Occurred::{response.status_code}")
+        return None
+
+
+
+params = f'g4f.Providers.{os.path.basename(__file__)[:-3]} supports: ' + \
+    '(%s)' % ', '.join(
+        [f"{name}: {get_type_hints(_create_completion)[name].__name__}" for name in _create_completion.__code__.co_varnames[:_create_completion.__code__.co_argcount]])
\ No newline at end of file
diff --git a/g4f/Provider/Providers/BingHuan.py b/g4f/Provider/Providers/BingHuan.py
new file mode 100644
index 00000000..7344a342
--- /dev/null
+++ b/g4f/Provider/Providers/BingHuan.py
@@ -0,0 +1,27 @@
+import os,sys
+import json
+import subprocess
+from ...typing import sha256, Dict, get_type_hints
+
+url = 'https://b.ai-huan.xyz'
+model = ['gpt-3.5-turbo', 'gpt-4']
+supports_stream = True
+needs_auth = False
+
+def _create_completion(model: str, messages: list, stream: bool, **kwargs):
+    path = os.path.dirname(os.path.realpath(__file__))
+    config = json.dumps({
+        'messages': messages,
+        'model': model}, separators=(',', ':'))
+    cmd = ['python', f'{path}/helpers/binghuan.py', config]
+
+    p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+
+    for line in iter(p.stdout.readline, b''):
+        yield line.decode('cp1252')
+
+
+
+params = f'g4f.Providers.{os.path.basename(__file__)[:-3]} supports: ' + \
+    '(%s)' % ', '.join(
+        [f"{name}: {get_type_hints(_create_completion)[name].__name__}" for name in _create_completion.__code__.co_varnames[:_create_completion.__code__.co_argcount]])
\ No newline at end of file
diff --git a/g4f/Provider/Providers/Wewordle.py b/g4f/Provider/Providers/Wewordle.py
new file mode 100644
index 00000000..95966fbd
--- /dev/null
+++ b/g4f/Provider/Providers/Wewordle.py
@@ -0,0 +1,73 @@
+import os,sys
+import requests
+import json
+import random
+import time
+import string
+from ...typing import sha256, Dict, get_type_hints
+
+url = "https://wewordle.org/gptapi/v1/android/turbo"
+model = ['gpt-3.5-turbo']
+supports_stream = False
+needs_auth = False
+
+
+def _create_completion(model: str, messages: list, stream: bool, **kwargs):
+    base = ''
+    for message in messages:
+        base += '%s: %s\n' % (message['role'], message['content'])
+    base += 'assistant:'
+    # randomize user id and app id
+    _user_id = ''.join(random.choices(f'{string.ascii_lowercase}{string.digits}', k=16))
+    _app_id = ''.join(random.choices(f'{string.ascii_lowercase}{string.digits}', k=31))
+    # make current date with format utc
+    _request_date = time.strftime("%Y-%m-%dT%H:%M:%S.000Z", time.gmtime())
+    headers = {
+        'accept': '*/*',
+        'pragma': 'no-cache',
+        'Content-Type': 'application/json',
+        'Connection':'keep-alive'
+        # user agent android client
+        # 'User-Agent': 'Dalvik/2.1.0 (Linux; U; Android 10; SM-G975F Build/QP1A.190711.020)',
+
+    }
+    data = {
+        "user": _user_id,
+        "messages": [
+            {"role": "user", "content": base}
+        ],
+        "subscriber": {
+            "originalPurchaseDate": None,
+            "originalApplicationVersion": None,
+            "allPurchaseDatesMillis": {},
+            "entitlements": {
+                "active": {},
+                "all": {}
+            },
+            "allPurchaseDates": {},
+            "allExpirationDatesMillis": {},
+            "allExpirationDates": {},
+            "originalAppUserId": f"$RCAnonymousID:{_app_id}",
+            "latestExpirationDate": None,
+            "requestDate": _request_date,
+            "latestExpirationDateMillis": None,
+            "nonSubscriptionTransactions": [],
+            "originalPurchaseDateMillis": None,
+            "managementURL": None,
+            "allPurchasedProductIdentifiers": [],
+            "firstSeen": _request_date,
+            "activeSubscriptions": []
+        }
+    }
+    response = requests.post(url, headers=headers, data=json.dumps(data))
+    if response.status_code == 200:
+        _json = response.json()
+        if 'message' in _json:
+            yield _json['message']['content']
+    else:
+        print(f"Error Occurred::{response.status_code}")
+        return None
+
+params = f'g4f.Providers.{os.path.basename(__file__)[:-3]} supports: ' + \
+    '(%s)' % ', '.join(
+        [f"{name}: {get_type_hints(_create_completion)[name].__name__}" for name in _create_completion.__code__.co_varnames[:_create_completion.__code__.co_argcount]])
\ No newline at end of file
diff --git a/g4f/Provider/Providers/helpers/binghuan.py b/g4f/Provider/Providers/helpers/binghuan.py
new file mode 100644
index 00000000..203bbe45
--- /dev/null
+++ b/g4f/Provider/Providers/helpers/binghuan.py
@@ -0,0 +1,221 @@
+# Original Code From : https://gitler.moe/g4f/gpt4free
+# https://gitler.moe/g4f/gpt4free/src/branch/main/g4f/Provider/Providers/helpers/bing.py
+import sys
+import ssl
+import uuid
+import json
+import time
+import random
+import asyncio
+import certifi
+# import requests
+from curl_cffi import requests
+import websockets
+import browser_cookie3
+
+config = json.loads(sys.argv[1])
+
+ssl_context = ssl.create_default_context()
+ssl_context.load_verify_locations(certifi.where())
+
+
+
+conversationstyles = {
+    'gpt-4': [ #'precise'
+        "nlu_direct_response_filter",
+        "deepleo",
+        "disable_emoji_spoken_text",
+        "responsible_ai_policy_235",
+        "enablemm",
+        "h3precise",
+        "rcsprtsalwlst",
+        "dv3sugg",
+        "autosave",
+        "clgalileo",
+        "gencontentv3"
+    ],
+    'balanced': [
+        "nlu_direct_response_filter",
+        "deepleo",
+        "disable_emoji_spoken_text",
+        "responsible_ai_policy_235",
+        "enablemm",
+        "harmonyv3",
+        "rcsprtsalwlst",
+        "dv3sugg",
+        "autosave"
+    ],
+    'gpt-3.5-turbo': [ #'precise'
+        "nlu_direct_response_filter",
+        "deepleo",
+        "disable_emoji_spoken_text",
+        "responsible_ai_policy_235",
+        "enablemm",
+        "h3imaginative",
+        "rcsprtsalwlst",
+        "dv3sugg",
+        "autosave",
+        "gencontentv3"
+    ]
+}
+
+def format(msg: dict) -> str:
+    return json.dumps(msg) + '\x1e'
+
+def get_token():
+    return
+
+    try:
+        cookies = {c.name: c.value for c in browser_cookie3.edge(domain_name='bing.com')}
+        return cookies['_U']
+    except:
+        print('Error: could not find bing _U cookie in edge browser.')
+        exit(1)
+
+class AsyncCompletion:
+    async def create(
+        prompt     : str = None,
+        optionSets : list = None,
+        token      : str = None): # No auth required anymore
+
+        create = None
+        for _ in range(5):
+            try:
+                create = requests.get('https://b.ai-huan.xyz/turing/conversation/create',
+                    headers = {
+                        'host': 'b.ai-huan.xyz',
+                        'accept-encoding': 'gzip, deflate, br',
+                        'connection': 'keep-alive',
+                        'authority': 'b.ai-huan.xyz',
+                        'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7',
+                        'accept-language': 'en-US,en;q=0.9',
+                        'cache-control': 'max-age=0',
+                        'sec-ch-ua': '"Chromium";v="110", "Not A(Brand";v="24", "Microsoft Edge";v="110"',
+                        'sec-ch-ua-arch': '"x86"',
+                        'sec-ch-ua-bitness': '"64"',
+                        'sec-ch-ua-full-version': '"110.0.1587.69"',
+                        'sec-ch-ua-full-version-list': '"Chromium";v="110.0.5481.192", "Not A(Brand";v="24.0.0.0", "Microsoft Edge";v="110.0.1587.69"',
+                        'sec-ch-ua-mobile': '?0',
+                        'sec-ch-ua-model': '""',
+                        'sec-ch-ua-platform': '"Windows"',
+                        'sec-ch-ua-platform-version': '"15.0.0"',
+                        'sec-fetch-dest': 'document',
+                        'sec-fetch-mode': 'navigate',
+                        'sec-fetch-site': 'none',
+                        'sec-fetch-user': '?1',
+                        'upgrade-insecure-requests': '1',
+                        'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.0.0 Safari/537.36 Edg/110.0.1587.69',
+                        'x-edge-shopping-flag': '1',
+                        'x-forwarded-for': f'13.{random.randint(104, 107)}.{random.randint(0, 255)}.{random.randint(0, 255)}'
+                    }
+                )
+
+                conversationId = create.json()['conversationId']
+                clientId = create.json()['clientId']
+                conversationSignature = create.json()['conversationSignature']
+
+            except Exception as e:
+                time.sleep(0.5)
+                continue
+
+        if create == None: raise Exception('Failed to create conversation.')
+
+        wss: websockets.WebSocketClientProtocol or None = None
+
+        wss = await websockets.connect('wss://sydney.vcanbb.chat/sydney/ChatHub', max_size = None, ssl = ssl_context,
+            extra_headers = {
+                'accept': 'application/json',
+                'accept-language': 'en-US,en;q=0.9',
+                'content-type': 'application/json',
+                'sec-ch-ua': '"Not_A Brand";v="99", Microsoft Edge";v="110", "Chromium";v="110"',
+                'sec-ch-ua-arch': '"x86"',
+                'sec-ch-ua-bitness': '"64"',
+                'sec-ch-ua-full-version': '"109.0.1518.78"',
+                'sec-ch-ua-full-version-list': '"Chromium";v="110.0.5481.192", "Not A(Brand";v="24.0.0.0", "Microsoft Edge";v="110.0.1587.69"',
+                'sec-ch-ua-mobile': '?0',
+                'sec-ch-ua-model': "",
+                'sec-ch-ua-platform': '"Windows"',
+                'sec-ch-ua-platform-version': '"15.0.0"',
+                'sec-fetch-dest': 'empty',
+                'sec-fetch-mode': 'cors',
+                'sec-fetch-site': 'same-origin',
+                'x-ms-client-request-id': str(uuid.uuid4()),
+                'x-ms-useragent': 'azsdk-js-api-client-factory/1.0.0-beta.1 core-rest-pipeline/1.10.0 OS/Win32',
+                'Referer': 'https://b.ai-huan.xyz/search?q=Bing+AI&showconv=1&FORM=hpcodx',
+                'Referrer-Policy': 'origin-when-cross-origin',
+                'x-forwarded-for': f'13.{random.randint(104, 107)}.{random.randint(0, 255)}.{random.randint(0, 255)}'
+            }
+        )
+
+        await wss.send(format({'protocol': 'json', 'version': 1}))
+        await wss.recv()
+
+        struct = {
+            'arguments': [
+                {
+                    'source': 'cib',
+                    'optionsSets': optionSets,
+                    'isStartOfSession': True,
+                    'message': {
+                        'author': 'user',
+                        'inputMethod': 'Keyboard',
+                        'text': prompt,
+                        'messageType': 'Chat'
+                    },
+                    'conversationSignature': conversationSignature,
+                    'participant': {
+                        'id': clientId
+                    },
+                    'conversationId': conversationId
+                }
+            ],
+            'invocationId': '0',
+            'target': 'chat',
+            'type': 4
+        }
+
+        await wss.send(format(struct))
+
+        base_string = ''
+
+        final = False
+        while not final:
+            objects = str(await wss.recv()).split('\x1e')
+            for obj in objects:
+                if obj is None or obj == '':
+                    continue
+
+                response = json.loads(obj)
+                #print(response, flush=True, end='')
+                if response.get('type') == 1 and response['arguments'][0].get('messages',):
+                    response_text = response['arguments'][0]['messages'][0]['adaptiveCards'][0]['body'][0].get('text')
+
+                    yield (response_text.replace(base_string, ''))
+                    base_string = response_text
+
+                elif response.get('type') == 2:
+                    final = True
+
+        await wss.close()
+
+# i thing bing realy donset understand multi message (based on prompt template)
+def convert(messages):
+    context = ""
+    for message in messages:
+        context += "[%s](#message)\n%s\n\n" % (message['role'],
+            message['content'])
+    return context
+
+async def run(optionSets, messages):
+    prompt = messages[-1]['content']
+    if(len(messages) > 1):
+        prompt = convert(messages)
+    async for value in AsyncCompletion.create(prompt=prompt, optionSets=optionSets):
+        try:
+            print(value, flush=True, end='')
+        except UnicodeEncodeError as e:
+            # emoji encoding problem
+            print(value.encode('utf-8'), flush=True, end='')
+
+optionSet = conversationstyles[config['model']]
+asyncio.run(run(optionSet, config['messages']))
\ No newline at end of file
diff --git a/g4f/Provider/__init__.py b/g4f/Provider/__init__.py
index 3a86291d..e8fbf617 100644
--- a/g4f/Provider/__init__.py
+++ b/g4f/Provider/__init__.py
@@ -19,6 +19,9 @@ from .Providers import (
     EasyChat,
     Acytoo,
     DFEHub,
+    AiService,
+    BingHuan,
+    Wewordle
 )
 
 Palm = Bard
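
Each new provider follows the same module contract as the existing ones: it declares `url`, `model`, `supports_stream`, and `needs_auth`, and implements a `_create_completion(model, messages, stream, **kwargs)` generator; registering the names in `g4f/Provider/__init__.py` is what makes them selectable. The sketch below shows how they might be used from client code. It is only a sketch: it assumes the `g4f.ChatCompletion.create(..., provider=...)` entry point the project exposed around the time of this commit, and exact behaviour may differ between versions.

```python
# Minimal usage sketch (not part of this commit). Assumes the
# g4f.ChatCompletion.create() interface and the g4f.Provider namespace
# as they existed around mid-2023.
import g4f

messages = [{"role": "user", "content": "Hello, who are you?"}]

# AiService and Wewordle declare supports_stream = False,
# so the reply is returned in one piece.
reply = g4f.ChatCompletion.create(
    model="gpt-3.5-turbo",
    provider=g4f.Provider.AiService,
    messages=messages,
)
print(reply)

# BingHuan declares supports_stream = True; its _create_completion
# yields the helper subprocess's output line by line.
for chunk in g4f.ChatCompletion.create(
    model="gpt-4",
    provider=g4f.Provider.BingHuan,
    messages=messages,
    stream=True,
):
    print(chunk, end="")
```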