import argparse
import asyncio
import json
import time

import aiohttp
async def req_url_aio(url, idx, sem):
    """Issue a single GET request and return (idx, {"s": status, "t": elapsed seconds})."""
    status = 0
    starttime = time.time()
    async with sem:  # limit the number of requests in flight
        try:
            async with aiohttp.ClientSession() as session:
                async with session.get(url) as resp:
                    status = resp.status
                    await resp.text()  # drain the response body
        except aiohttp.ClientError:
            pass  # keep status 0 so one failed request does not abort the whole run
    return (idx, {"s": status, "t": time.time() - starttime})
class TestUrl_AIO:
    """Fire request_num GET requests at url with at most concurrency_num in flight."""

    def __init__(self, url, concurrency_num, request_num):
        self.url = url
        self.concurrency_num = concurrency_num
        self.request_num = request_num

    async def begin(self):
        sem = asyncio.Semaphore(self.concurrency_num)
        return_dict = {}
        task_to_req = [asyncio.create_task(req_url_aio(self.url, reqidx, sem))
                       for reqidx in range(self.request_num)]
        for f in asyncio.as_completed(task_to_req):
            ret = await f
            return_dict[ret[0]] = ret[1]  # index -> {"s": status, "t": elapsed}
        return return_dict
async def main():
    parser = argparse.ArgumentParser(description='simple aiohttp-based URL benchmark')
    parser.add_argument('-n', metavar='concurrency number', type=int, nargs=1,
                        help='number of concurrent requests', required=True)
    parser.add_argument('-m', metavar='requests number', type=int, nargs=1,
                        help='total number of requests to send', required=True)
    parser.add_argument('-url', metavar='request url', type=str, nargs=1,
                        help='URL to request', required=True)
    parser.add_argument('-w', metavar='file name', type=str, nargs=1,
                        help='save the per-request results to a JSON file', required=False)
    args = parser.parse_args()
    test = TestUrl_AIO(args.url[0], args.n[0], args.m[0])
    starttime = time.time()
    ret = await test.begin()
    data = ret.copy()
    p = [v["t"] for v in data.values()]
    avg = sum(p) / len(p)  # mean latency over all requests, not a hard-coded 100
    print(f'avg time:{avg}')
    p.sort()
    print(f'95% time:{p[int(len(p) * 0.95) - 1]}')  # 95th-percentile latency
    print(time.time() - starttime)
    if args.w:
        with open(args.w[0], 'w') as fp:
            json.dump(data, fp)
if __name__ == "__main__":
    asyncio.run(main())
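A note on usage and one design trade-off: assuming the script is saved as, say, aio_bench.py (the file name is illustrative), a typical run would be python aio_bench.py -url http://127.0.0.1:8080/ -n 10 -m 100. Also, the script opens a new aiohttp.ClientSession for every request, so each timed sample includes TCP (and possibly TLS) connection setup. Below is a minimal sketch, not part of the original script, of a variant that shares one session across all requests; the names req_url_shared and run_shared and the localhost URL are illustrative.

import asyncio
import time

import aiohttp


async def req_url_shared(session, url, idx, sem):
    # Issue one GET through a shared session; return (idx, {"s": status, "t": elapsed}).
    status = 0
    starttime = time.time()
    async with sem:
        try:
            async with session.get(url) as resp:
                status = resp.status
                await resp.text()
        except aiohttp.ClientError:
            pass  # keep status 0 for failed requests
    return (idx, {"s": status, "t": time.time() - starttime})


async def run_shared(url, concurrency_num, request_num):
    # One ClientSession reused by every request; connections are pooled by aiohttp.
    sem = asyncio.Semaphore(concurrency_num)
    async with aiohttp.ClientSession() as session:
        tasks = [asyncio.create_task(req_url_shared(session, url, i, sem))
                 for i in range(request_num)]
        results = dict(await asyncio.gather(*tasks))
    return results


if __name__ == "__main__":
    print(asyncio.run(run_shared("http://127.0.0.1:8080/", 10, 100)))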