#!/usr/bin/env python
# -*- encoding: utf-8 -*-
import argparse
import multiprocessing
import urllib2
import time
import logging
import conf
def performance_test(url, concurrency, max_request):
    """Spawn worker processes, run the test, return (average, 95th percentile)."""
    manager = multiprocessing.Manager()
    request_num = manager.Value("d", 0)   # shared request counter
    resp_time = manager.list()            # shared list of response times
    process_pool = []
    # start one worker process per unit of concurrency
    for num in xrange(0, concurrency):
        p = multiprocessing.Process(target=request_process,
                                    args=(url, request_num, max_request, resp_time))
        process_pool.append(p)
    for p in process_pool:
        p.start()
    for p in process_pool:
        p.join()
    print len(resp_time)
    resp_time_info = list(resp_time)      # copy out of the manager proxy before sorting
    resp_time_info.sort()
    average = sum(resp_time_info) / len(resp_time_info)
    resp_95 = resp_time_info[int(0.95 * len(resp_time_info))]
    return average, resp_95
def request_process(url, request_num, max_request, resp_time):
    """Request the url until the shared request counter reaches max_request."""
    error_num = 0
    while request_num.value <= max_request:
        request_num.value += 1   # note: += on a manager Value is not atomic across processes
        start_time = time.time()
        try:
            req = urllib2.Request(url)
            resp = urllib2.urlopen(req, timeout=conf.TIMEOUT)
            if resp.code != 200:
                logging.warn("get url %s error, resp_code: %s" % (url, resp.code))
                error_num += 1
                request_num.value -= 1   # failed request does not count toward the total
            else:
                logging.debug("get url %s success" % url)
                error_num = 0
                resp_time.append(time.time() - start_time)
        except Exception as err:
            logging.warn("get url %s error: %s" % (url, err))
            error_num += 1
            request_num.value -= 1
        if error_num > 3:
            logging.error("too many consecutive errors, aborting this worker")
            break
def get_argparse():
    """Parse command-line arguments."""
    parser = argparse.ArgumentParser(
        description='Performance test tool. '
                    'Example: python performance_test.py -c 10 -n 100 -u http://www.baidu.com')
    parser.add_argument('-c', '--concurrency', dest='concurrency', type=int,
                        default=conf.CONCORRENCY,
                        help='Number of multiple requests to make at a time')
    parser.add_argument('-n', '--requests', dest='requests', type=int,
                        default=conf.MAX_REQUESTS,
                        help='Number of requests to perform')
    parser.add_argument('-u', '--url', dest='urls', type=str, required=True,
                        help='url to request')
    args = parser.parse_args()
    return args
if __name__ == "__main__":
    args = get_argparse()
    concurrency = args.concurrency
    max_request = args.requests
    urls = args.urls
    print performance_test(urls, concurrency, max_request)
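
The script reads its defaults from a separate conf module (conf.TIMEOUT, conf.CONCORRENCY, conf.MAX_REQUESTS) that is not shown above. A minimal sketch of that file, assuming only the three names the script actually references and using illustrative values, could look like this:

# conf.py -- illustrative values only; the constant names come from
# performance_test.py above, the numbers are assumptions
TIMEOUT = 10          # per-request timeout in seconds, passed to urllib2.urlopen
CONCORRENCY = 10      # default worker-process count for -c/--concurrency
MAX_REQUESTS = 100    # default total request count for -n/--requests

With such a conf.py next to the script, python performance_test.py -c 10 -n 100 -u http://www.baidu.com starts 10 worker processes, issues roughly 100 requests in total (the shared counter is not locked, so the count is approximate), prints the number of collected samples, and then prints a tuple of the average and 95th-percentile response times in seconds.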