def _split_runs(argv):
    """Split *argv* into option groups at each '@' separator token.

    Consecutive '@' tokens produce no empty groups — only non-empty
    groups are returned.
    """
    runs = []
    current = []
    for token in argv:
        if token == '@':
            if current:
                runs.append(current)
                current = []
        else:
            current.append(token)
    if current:
        runs.append(current)
    return runs


def main():
    """Entry point: run one training, or several chained runs.

    Without an '@' token on the command line, parse options normally and
    execute a single training run.  With '@' separators, the first group
    supplies global options (data dirs, distributed rank) that are copied
    into every subsequent group, and one Trainer instance is reused
    across all runs.
    """
    argv = sys.argv[1:]
    if '@' not in argv:
        # Single execution: options come straight from the command line.
        opts = opt.parse_command_line()
        trainer = Trainer(opts)
        trainer.run()
    else:
        runs = _split_runs(argv)
        # The first group carries the options shared by every run.
        global_opts = opt.parse_command_line(runs[0])
        trainer = None
        for run in runs[1:]:
            print(run)
            opts = opt.parse_command_line(run)
            # Propagate shared data locations and, when set, the
            # distributed rank into each per-run option set.
            opts.train_dir = global_opts.train_dir
            opts.val_dir = global_opts.val_dir
            if global_opts.local_rank is not None:
                opts.local_rank = global_opts.local_rank
            if trainer is None:
                trainer = Trainer(opts)
            else:
                # Reuse the existing trainer (and its state) for the
                # follow-up runs; only the options are swapped.
                trainer.options = opts
            trainer.run()
def main():
    """Start the crawl master, seeded with the local root URL."""
    from frontier import Frontier
    from options import parse_command_line

    parse_command_line()

    # Seed list: (url, priority) pairs handed to the frontier.
    seeds = [
        ('http://localhost/', 1),
    ]
    Master(Frontier(seeds)).start()
    IOLoop.instance().start()
def main():
    """Start a crawl worker that prints every fetched response."""
    from fetcher import HTTPFetcher
    from robot import Robot, ResponseHandler
    from options import parse_command_line
    parse_command_line()

    class TestHandler(ResponseHandler):
        """Handler matched against every URL; dumps the response."""

        def on_get(self):
            # print() with a single argument prints identically under
            # Python 2 (parenthesized expression) and Python 3.
            print(self.response)

    rb = Robot(HTTPFetcher(), [
        ('/.*', TestHandler),
    ])
    Worker(rb).start()
    IOLoop.instance().start()
# Command-line / config-file options for the benchmark tool.
define('use_curl', type=bool, default=False,
       help='use pycurl as AsyncHTTPClient backend')
define('multi_processes', type=int, default=-1,
       help='run as multi-processes, 0 for cpu count')
define('max_clients', type=int, default=10,
       help='max concurrent clients')
define('time_len', type=int, default=60,
       help='time length of the test')
define('timeout', type=float, default=1.0,
       help='request timeout')
define('follow_redirects', type=bool, default=True,
       help='request follow redirects')
define('validate_cert', type=bool, default=True,
       help='request validate cert')
define('urls_file', help='request urls file')
define('url_template',
       help='request url template, if not "", build with values in urls_file')
define('log_info_format', help='info log format')
define('log_warning_format', help='warning log format')
define('log_error_format', help='error log format')

parse_config_file(setup_settings())
requests = parse_command_line()

# Select the HTTP client backend once, then configure Tornado with it.
if options.use_curl:
    _backend = 'tornado.curl_httpclient.CurlAsyncHTTPClient'
else:
    _backend = 'torbench.simple_httpclient.SimpleAsyncHTTPClient'
httpclient.AsyncHTTPClient.configure(_backend)


class BenchClient(object):
    # NOTE(review): the class appears truncated in this chunk; only the
    # opening of __init__ is visible here.

    def __init__(self, requests, timeout, max_clients, time_len=60):
        assert requests
        # Private IOLoop so the bench run does not share the global loop.
        self._io_loop = ioloop.IOLoop()
os.path.join(self.options.out, 'weight.{}.pth'.format(self.team))) else: torch.save(model.state_dict(), os.path.join(self.options.out, 'weight.pth')) def run(self): self.setup_distributed() self.setup_model() self.setup_dataset() self.setup_optimizer() try: os.makedirs(self.options.out) except OSError: pass self.dummy_communication() self.train() def accuracy_count_match(out, target): pred = out.max(1).indices return pred.eq(target).cpu().sum() if __name__ == '__main__': opts = opt.parse_command_line() trainer = Trainer(opts) trainer.run()
rcParams['savefig.dpi'] = options.dpi except ImportError: plt = None from utils import setup_settings from options import define, options, parse_command_line, parse_config_file define('multi_processes', type=int, default=-1, help='benched as multi-processes') define('max_clients', type=int, default=10, help='max concurrent clients') define('dpi', type=int, default=72, help='figure DPI') define('figure_file', default='log.png', help='figure file') parse_config_file(setup_settings()) log_files = parse_command_line() REGEX = re.compile(r'\[(?P<level>[IWE]) (?P<date>\d+) (?P<time>[\d:\.]+)' r' (?P<file>\w+):(?P<lineno>\d+)\]' r' cost:\[(?P<cost>[\d\.]+)\]' r' code:\[(?P<code>\d+)\]' r' url:\[(?P<url>\S+)\]' r'( error:\[(?P<error>[^\]]+)\])?') LEVEL_MAP = { 'I': 'info', 'W': 'warning', 'E': 'error', }
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Created on 2013-04-26. Yeolar <*****@*****.**>
#

from zmq.eventloop.ioloop import IOLoop

import fetcher
import robot
import worker
from options import parse_command_line

parse_command_line()


class AHandler(robot.ResponseHandler):
    """Response handler that surfaces errors and prints GET responses."""

    def on_error(self):
        # Re-raise fetch errors so the robot's error path handles them.
        if self.response.error:
            raise robot.OnResponseError(self.response.code,
                                        self.response.error)

    def on_get(self):
        # print() with a single argument prints identically under
        # Python 2 (parenthesized expression) and Python 3.
        print(self.response)


def main():
    http_fetcher = fetcher.HTTPFetcher()
    rb = robot.Robot(http_fetcher, [
        ('/.*', AHandler),
    ])
except ImportError: plt = None from utils import setup_settings from options import define, options, parse_command_line, parse_config_file define('multi_processes', type=int, default=-1, help='benched as multi-processes') define('max_clients', type=int, default=10, help='max concurrent clients') define('dpi', type=int, default=72, help='figure DPI') define('figure_file', default='log.png', help='figure file') parse_config_file(setup_settings()) log_files = parse_command_line() REGEX = re.compile(r'\[(?P<level>[IWE]) (?P<date>\d+) (?P<time>[\d:\.]+)' r' (?P<file>\w+):(?P<lineno>\d+)\]' r' cost:\[(?P<cost>[\d\.]+)\]' r' code:\[(?P<code>\d+)\]' r' url:\[(?P<url>\S+)\]' r'( error:\[(?P<error>[^\]]+)\])?') LEVEL_MAP = { 'I': 'info', 'W': 'warning', 'E': 'error', }
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Created on 2013-04-26. Yeolar <*****@*****.**> # from zmq.eventloop.ioloop import IOLoop import fetcher import robot import worker from options import parse_command_line parse_command_line() class AHandler(robot.ResponseHandler): def on_error(self): if self.response.error: raise robot.OnResponseError(self.response.code, self.response.error) def on_get(self): print self.response def main(): http_fetcher = fetcher.HTTPFetcher() rb = robot.Robot(http_fetcher, [ ('/.*', AHandler),