Skip to content

Commit

Permalink
🐛 3.6.7: Fix openai.py
Browse files Browse the repository at this point in the history
  • Loading branch information
AirportR committed Feb 13, 2024
1 parent 3d2f7ef commit f3dd5a3
Show file tree
Hide file tree
Showing 3 changed files with 42 additions and 151 deletions.
38 changes: 21 additions & 17 deletions addons/builtin/openai.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ async def fetch_openai(collector, session: aiohttp.ClientSession, proxy=None):
:param proxy:
:return:
"""
resp1 = await session.get('https://api.openai.com/compliance/cookie_requirements', headers={
h1 = {
'authority': 'api.openai.com',
'accept': '*/*',
'accept-language': 'zh-CN,zh;q=0.9',
Expand All @@ -44,10 +44,8 @@ async def fetch_openai(collector, session: aiohttp.ClientSession, proxy=None):
'sec-fetch-site': 'same-site',
'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) ' +
'Chrome/119.0.0.0 Safari/537.36 Edg/119.0.0.0'
}, proxy=proxy, timeout=5)
if resp1.status != 200:
return True
resp2 = await session.get('https://ios.chat.openai.com/', headers={
}
h2 = {
'authority': 'ios.chat.openai.com',
'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/' +
'signed-exchange;v=b3;q=0.7',
Expand All @@ -62,7 +60,11 @@ async def fetch_openai(collector, session: aiohttp.ClientSession, proxy=None):
'upgrade-insecure-requests': '1',
'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) ' +
'Chrome/119.0.0.0 Safari/537.36 Edg/119.0.0.0'
}, proxy=proxy, timeout=5)
}
resp1 = await session.get('https://api.openai.com/compliance/cookie_requirements', headers=h1,
proxy=proxy, timeout=5)
resp2 = await session.get('https://ios.chat.openai.com/', headers=h2,
proxy=proxy, timeout=5)
# 获取响应的文本内容
text1 = await resp1.text()
text2 = await resp2.text()
Expand Down Expand Up @@ -114,17 +116,19 @@ def get_openai_info(ReCleaner):


async def demo():
class FakeColl:
def __init__(self):
self.info = {}
self.data = self.info

fakecl = FakeColl()

session = aiohttp.ClientSession()
await fetch_openai(fakecl, session, proxy='http://127.0.0.1:11112')
print(get_openai_info(fakecl))
await session.close()
# class FakeColl:
# def __init__(self):
# self.info = {}
# self.data = self.info
#
# fakecl = FakeColl()
#
# session = aiohttp.ClientSession()
# await fetch_openai(fakecl, session, proxy='http://127.0.0.1:11112')
# print(get_openai_info(fakecl))
# await session.close()
from utils import script_demo
await script_demo(fetch_openai, proxy='http://127.0.0.1:11112')

if __name__ == "__main__":
asyncio.run(demo())
21 changes: 19 additions & 2 deletions utils/__init__.py
Original file line number Diff line number Diff line change
@@ -1,11 +1,11 @@
import aiohttp

from utils.cron import *
from typing import Callable, Any
from typing import Callable, Any, Union, Coroutine

__version__ = "3.6.7" # 项目版本号
__all__ = ["cron_delete_message", "cron_edit_message", "message_delete_queue", "message_edit_queue", "__version__",
"retry"]
"retry", "script_demo"]


def default_breakfunc(ret_val: bool) -> bool:
Expand Down Expand Up @@ -37,3 +37,20 @@ async def inner(*args, **kwargs):
return inner

return wrapper


async def script_demo(script_func: Union[Callable, Coroutine], *arg, **kwargs):
    """
    Run a collector-style script against a throwaway collector, for manual testing.

    :param script_func: either an async function taking (collector, session, *arg, **kwargs),
                        or an already-created coroutine object that is awaited as-is.
    :param arg: extra positional arguments forwarded to script_func.
    :param kwargs: extra keyword arguments forwarded to script_func.
    """
    class FakeColl:
        # Minimal stand-in for the real collector: scripts write results into
        # .info; .data is kept as an alias because some scripts use that name.
        def __init__(self):
            self.info = {}
            self.data = self.info

    fakecl = FakeColl()
    session = aiohttp.ClientSession()
    try:
        if asyncio.iscoroutine(script_func):
            # A coroutine object already carries its own arguments; just await it.
            await script_func
        else:
            await script_func(fakecl, session, *arg, **kwargs)
        print(fakecl.info)
    finally:
        # BUG FIX: previously the session leaked when script_func raised.
        await session.close()
134 changes: 2 additions & 132 deletions utils/collector.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@
import asyncio
import ssl
import time
from typing import List
from asyncio import coroutine
from typing import List, Callable, Union, Coroutine
from urllib.parse import quote

import aiohttp
Expand Down Expand Up @@ -505,136 +506,5 @@ async def start(self, host: str, port: int, proxy=None):
return self.info


async def delay(session: aiohttp.ClientSession, proxyname, testurl, hostname, port, timeout):
    """
    Query the clash external-controller REST API for a single proxy's delay.

    :return: the measured delay reported by the API, or -1 on non-200 status
             or connection failure.
    """
    url = 'http://{}:{}/proxies/{}/delay?timeout={}&url={}'.format(hostname, port, proxyname, timeout, testurl)
    try:
        # BUG FIX: the try block must cover session.get() itself —
        # ClientConnectorError is raised while establishing the connection,
        # not while reading the response, so the old inner try never caught it.
        async with session.get(url) as r:
            if r.status == 200:
                text = await r.json()
                return text['delay']
            logger.info(proxyname + ":" + str(await r.json()) + str(r.status))
            return -1
    except ClientConnectorError as c:
        logger.warning("连接失败:", c)
        return -1


async def delay_providers(providername, hostname='127.0.0.1', port=11230, session: aiohttp.ClientSession = None):
    """
    Trigger a healthcheck for a proxy provider and collect the latest per-node delays.

    NOTE(review): like the original, this closes the session on every return
    path — including a caller-supplied one; callers must not reuse it afterwards.

    :return: list of delays on success, 0 on HTTP error or connection failure.
    """
    healthcheckurl = 'http://{}:{}/providers/proxies/{}/healthcheck'.format(hostname, port, providername)
    url = 'http://{}:{}/providers/proxies/{}/'.format(hostname, port, providername)
    if session is None:
        session = aiohttp.ClientSession()
    try:
        await session.get(healthcheckurl)
        async with session.get(url) as r:
            if r.status != 200:
                logger.warning("延迟测试出错:" + str(r.status))
                return 0
            text = await r.json()
            # Each node's 'history' ends with its most recent delay sample.
            return [n['history'].pop()['delay'] for n in text['proxies']]
    except ClientConnectorError as c:
        logger.warning("连接失败:", c)
        return 0
    finally:
        # BUG FIX: a single finally guarantees the session is closed on every
        # path; the original leaked it on unexpected exceptions (e.g. JSON
        # decode errors) and duplicated close() three times.
        await session.close()


async def batch_delay(proxyname: list, session: aiohttp.ClientSession = None,
                      testurl=config.getGstatic(),
                      hostname='127.0.0.1', port=11230, timeout='5000'):
    """
    Batch delay test; only for subscriptions without providers.

    :param timeout: per-proxy test timeout (ms, as a string for the API)
    :param port: external-controller port
    :param hostname: external-controller host
    :param testurl: URL to test against
    :param session: an aiohttp session; one is created (and closed) if omitted
    :param proxyname: list of proxy names to test
    :return: list of delays (one per proxy), or None on unexpected failure
    """
    async def _measure(sess: aiohttp.ClientSession):
        # Fan out one delay() task per proxy and run them concurrently.
        tasks = [asyncio.create_task(
            delay(sess, name, testurl=testurl, hostname=hostname, port=port, timeout=timeout))
            for name in proxyname]
        return await asyncio.gather(*tasks)

    try:
        # The two original branches were identical except for session ownership;
        # deduplicated into _measure above.
        if session is None:
            async with aiohttp.ClientSession() as session:
                return await _measure(session)
        return await _measure(session)
    except Exception as e:
        logger.error(e)
        return None


async def delay_https(session: aiohttp.ClientSession, proxy=None, testurl=config.getGstatic(),
                      timeout=10):
    """
    Measure one HTTP(S) round-trip time through an optional proxy.

    :param session: aiohttp session to issue the request with
    :param proxy: optional proxy URL
    :param testurl: URL to probe
    :param timeout: request timeout in seconds
    :return: elapsed time in seconds on status 200/204, 0 on any other status
             or on error (callers treat 0 as "failed sample").
    """
    _headers2 = {'User-Agent': 'clash'}
    try:
        start = time.time()
        async with session.get(url=testurl, proxy=proxy, headers=_headers2,
                               timeout=timeout) as r:
            # NOTE(review): the original had a dead `if r.status == 502: pass`
            # branch (dual-stack handshake failure); 502 falls through to the
            # generic failure return below, same as before.
            if r.status in (200, 204):
                return time.time() - start
            return 0
    except Exception as e:
        logger.error(str(e))
        return 0


async def delay_https_task(session: aiohttp.ClientSession = None, collector=None, proxy=None, times=5):
    """
    Run delay_https `times` times concurrently and average the successful samples.

    :param session: aiohttp session; one is created (and closed) if omitted
    :param collector: when given, the result is also stored into
                      collector.info['HTTP(S)延迟']
    :param proxy: optional proxy URL forwarded to delay_https
    :param times: number of concurrent probes
    :return: average HTTP(S) delay in whole milliseconds (0 if every probe failed)
    """
    async def _run(sess: aiohttp.ClientSession):
        tasks = [asyncio.create_task(delay_https(session=sess, proxy=proxy)) for _ in range(times)]
        results = await asyncio.gather(*tasks)
        ok = [r for r in results if r != 0]  # drop failed probes so they don't skew the mean
        avg = sum(ok) / len(ok) if ok else 0
        # Same rounding as the original "%.0fms" string round-trip, kept for
        # exact behavioral compatibility (banker's rounding on .5).
        http_delay = int("%.0f" % (avg * 1000))
        if collector is not None:
            collector.info['HTTP(S)延迟'] = http_delay
        return http_delay

    # The two original branches were identical except for session ownership;
    # deduplicated into _run above.
    if session is None:
        async with aiohttp.ClientSession() as session:
            return await _run(session)
    return await _run(session)


if __name__ == "__main__":
pass

0 comments on commit f3dd5a3

Please sign in to comment.