def get(self, shard=None, contest_id=None):
    """Render the resource-usage page for one shard or for all shards.

    shard (string|None): shard index to show, or None/"all" for all.
    contest_id (string|None): if given, load that contest for display.
    """
    if contest_id is not None:
        self.contest = self.safe_get_item(Contest, contest_id)
        contest_address = "/%s" % contest_id
    else:
        contest_address = ""

    # No explicit shard means "show every shard".
    if shard is None:
        shard = "all"

    self.r_params = self.render_params()
    num_shards = get_service_shards("ResourceService")
    self.r_params["resource_shards"] = num_shards
    self.r_params["resource_addresses"] = {}

    if shard == "all":
        for idx in range(num_shards):
            coord = ServiceCoord("ResourceService", idx)
            self.r_params["resource_addresses"][idx] = \
                get_service_address(coord).ip
    else:
        shard = int(shard)
        try:
            resolved = get_service_address(
                ServiceCoord("ResourceService", shard))
        except KeyError:
            # Unknown shard: fall back to the listing page.
            self.redirect("/resourceslist%s" % contest_address)
            return
        self.r_params["resource_addresses"][shard] = resolved.ip

    self.render("resources.html", **self.r_params)
def test_success(self):
    """Both configured shards resolve to their expected addresses."""
    expected = [
        (0, Address("0.0.0.0", 0)),
        (1, Address("0.0.0.1", 1)),
    ]
    for shard, addr in expected:
        self.assertEqual(
            get_service_address(ServiceCoord("Service", shard)), addr)
def __init__(self, shard=0, listen_on_address=None):
    """Initialize the service: coordinates, logging and RPC server.

    shard (int): the shard of this service.
    listen_on_address (string|None): if given, bind the RPC server to
        this address (keeping the configured port) instead of the
        address from the configuration.

    raise (ConfigError): when the service's address is not configured.
    """
    # Make Ctrl-C trigger an orderly shutdown.
    signal.signal(signal.SIGINT, lambda unused_x, unused_y: self.exit())

    self.name = self.__class__.__name__
    self.shard = shard
    self._my_coord = ServiceCoord(self.name, self.shard)

    # Dictionaries of (to be) connected RemoteServiceClients.
    self.remote_services = {}

    self.initialize_logging()

    # We setup the listening address for services which want to
    # connect with us.
    try:
        configured = get_service_address(self._my_coord)
    except KeyError:
        raise ConfigError("Unable to find address for service %r. "
                          "Is it specified in core_services in cms.conf?" %
                          (self._my_coord,))

    logger.info("--- %s %s %s", self.name, listen_on_address, configured)
    bind_to = (configured if listen_on_address is None
               else Address(listen_on_address, configured.port))
    self.rpc_server = StreamServer(bind_to, self._connection_handler)
    self.backdoor = None
def __init__(self, service, remote_service_coord=None, address=None):
    """Create a communication channel to a remote service.

    service (Service): the local service.
    remote_service_coord (ServiceCoord): the description of the
        remote service to connect to.
    address (Address): alternatively, the address to connect to
        (used when accepting a connection).

    raise (ValueError): if neither remote_service_coord nor address
        is given.

    """
    if address is None and remote_service_coord is None:
        # A bare `raise` here (the previous code) has no active
        # exception to re-raise and itself crashes with an unrelated
        # error; report the misuse explicitly instead.
        raise ValueError(
            "Either remote_service_coord or address must be given.")
    asynchat.async_chat.__init__(self)

    # service is the local service connecting to the remote one.
    self.service = service

    if address is None:
        self.remote_service_coord = remote_service_coord
        # May raise KeyError if the coords are not configured.
        self.address = get_service_address(remote_service_coord)
    else:
        self.remote_service_coord = ""
        self.address = address

    self.connected = False
    self.data = []
def get(self, contest_id=None):
    """Render the resource-service listing page.

    contest_id (string|None): if given, load that contest for display.
    """
    if contest_id is not None:
        self.contest = self.safe_get_item(Contest, contest_id)

    self.r_params = self.render_params()
    shard_count = get_service_shards("ResourceService")
    # Map each ResourceService shard to the IP it is configured on.
    self.r_params["resource_addresses"] = dict(
        (shard, get_service_address(
            ServiceCoord("ResourceService", shard)).ip)
        for shard in range(shard_count))
    self.render("resourceslist.html", **self.r_params)
def get(self, contest_id=None):
    """Show the list of ResourceService shards with their addresses.

    contest_id (string|None): if given, load that contest for display.
    """
    if contest_id is not None:
        self.contest = self.safe_get_item(Contest, contest_id)

    self.r_params = self.render_params()
    addresses = {}
    for shard in range(get_service_shards("ResourceService")):
        coord = ServiceCoord("ResourceService", shard)
        addresses[shard] = get_service_address(coord).ip
    self.r_params["resource_addresses"] = addresses
    self.render("resourceslist.html", **self.r_params)
def get(self, contest_id=None):
    """Render the resource-usage page for every ResourceService shard.

    contest_id (string|None): if given, load that contest for display.
    """
    if contest_id is not None:
        self.contest = self.safe_get_item(Contest, contest_id)

    r_params = self.render_params()
    shard_count = get_service_shards("ResourceService")
    r_params["resource_shards"] = shard_count

    # NOTE: `xrange` marks this as Python 2 code; kept as-is.
    addresses = {}
    for shard in xrange(shard_count):
        coord = ServiceCoord("ResourceService", shard)
        addresses[shard] = get_service_address(coord).ip
    r_params["resource_addresses"] = addresses

    self.render("resources.html", **r_params)
def __init__(self, remote_service_coord, auto_retry=None):
    """Build a client that issues RPCs to one remote service.

    remote_service_coord (ServiceCoord): name and shard of the target
        service.
    auto_retry (float|None): reconnection interval in seconds after a
        lost connection, or None to disable automatic reconnection.

    raise (KeyError): if the coordinates are not in the configuration.
    """
    target = get_service_address(remote_service_coord)
    super(RemoteServiceClient, self).__init__(target)
    self.remote_service_coord = remote_service_coord
    # In-flight requests by id, and the slots for their results.
    self.pending_outgoing_requests = {}
    self.pending_outgoing_requests_results = {}
    self.auto_retry = auto_retry
def __init__(self, remote_service_coord, auto_retry=None):
    """Create a caller for the remote service at the given coords.

    remote_service_coord (ServiceCoord): the name/shard pair
        identifying the service to send RPC requests to.
    auto_retry (float|None): if given, seconds to wait between
        reconnection attempts when the connection drops; if None,
        never reconnect automatically.
    """
    remote_address = get_service_address(remote_service_coord)
    super(RemoteServiceClient, self).__init__(remote_address)
    self.remote_service_coord = remote_service_coord
    # Bookkeeping for requests that have been sent but not answered.
    self.pending_outgoing_requests = dict()
    self.pending_outgoing_requests_results = dict()
    self.auto_retry = auto_retry
def __init__(self, shard=0):
    """Initialize the service and its RPC server.

    shard (int): the shard of this service.

    Exits the process (status 1) when the service's address cannot be
    found in the configuration.
    """
    # Make Ctrl-C trigger an orderly shutdown.
    signal.signal(signal.SIGINT, lambda unused_x, unused_y: self.exit())

    self.name = self.__class__.__name__
    self.shard = shard
    self._my_coord = ServiceCoord(self.name, self.shard)

    # Dictionaries of (to be) connected RemoteServiceClients.
    self.remote_services = {}

    self.initialize_logging()

    # Resolve the address other services use to reach us; a missing
    # entry in the configuration is fatal.
    try:
        own_address = get_service_address(self._my_coord)
    except KeyError:
        logger.critical("Couldn't find %r in the configuration.",
                        self._my_coord)
        sys.exit(1)

    self.rpc_server = StreamServer(own_address, self._connection_handler)
    self.backdoor = None
def __init__(self, shard=0, custom_logger=None):
    """Set up the (asyncore-based) service skeleton.

    shard (int): which shard of this service we are.
    custom_logger: logger object to install instead of a fresh
        Logger instance.

    NOTE(review): this rebinds the module-level `logger`, so
    constructing a service swaps the logger for every user of this
    module.
    """
    signal.signal(signal.SIGINT, lambda unused_x, unused_y: self.exit())

    global logger
    if custom_logger is None:
        logger = Logger()
    else:
        logger = custom_logger

    self.shard = shard

    # Stores the function to call periodically. It is to be
    # managed with heapq. Format: (next_timeout, period, function,
    # plus)
    self._timeouts = []
    # If we want to exit the main loop
    self._exit = False
    # The return values of the rpc calls executed in a different
    # thread. With the corresponding lock to acquire before
    # interfering with _threaded_responses. Format: list of
    # parameters for send_reply.
    self._threaded_responses = []
    self._threaded_responses_lock = threading.Lock()
    # Dictionaries of (to be) connected RemoteService, and
    # dictionaries of callback functions that are going to be
    # called when the remote service becomes online.
    self.remote_services = {}
    self.on_remote_service_connected = {}

    self._my_coord = ServiceCoord(self.__class__.__name__, self.shard)

    # We setup the listening address for services which want to
    # connect with us.  A service missing from the configuration is
    # tolerated here: it simply does not listen.
    try:
        address = get_service_address(self._my_coord)
    except KeyError:
        address = None
    if address is not None:
        self.server = ListeningSocket(self, address)
def __init__(self, shard=0):
    """Initialize the service: coordinates, logging and RPC server.

    shard (int): the shard of this service.

    raise (ConfigError): when the service's address is not configured.
    """
    # Make Ctrl-C trigger an orderly shutdown.
    signal.signal(signal.SIGINT, lambda unused_x, unused_y: self.exit())

    self.name = self.__class__.__name__
    self.shard = shard
    self._my_coord = ServiceCoord(self.name, self.shard)

    # Dictionaries of (to be) connected RemoteServiceClients.
    self.remote_services = {}

    self.initialize_logging()

    # A service must be listed in the configuration to be reachable.
    try:
        own_address = get_service_address(self._my_coord)
    except KeyError:
        raise ConfigError("Unable to find address for service %r. "
                          "Is it specified in core_services in cms.conf?" %
                          (self._my_coord,))

    self.rpc_server = StreamServer(own_address, self._connection_handler)
    self.backdoor = None
def main():
    """Run the stress test: one RandomActor thread per selected user.

    Harvests users/tasks from the contest, spawns the actor threads
    against the contest web server, and on Ctrl-C tears them down and
    prints aggregated request statistics.

    NOTE(review): Python-2-only code (`print >>`, `dict.iteritems`,
    list-returning `dict.items`).
    """
    parser = optparse.OptionParser(usage="usage: %prog [options]")
    parser.add_option("-c", "--contest", help="contest ID to export",
                      dest="contest_id", action="store", type="int",
                      default=None)
    parser.add_option("-n", "--actor-num",
                      help="the number of actors to spawn",
                      dest="actor_num", action="store", type="int",
                      default=None)
    parser.add_option("-s", "--sort-actors",
                      help="sort usernames alphabetically "
                      "instead of randomizing before slicing them",
                      action="store_true", default=False,
                      dest="sort_actors")
    parser.add_option("-u", "--base-url",
                      help="base URL for placing HTTP requests",
                      action="store", default=None, dest="base_url")
    parser.add_option("-S", "--submissions-path",
                      help="base path for submission to send",
                      action="store", default=None,
                      dest="submissions_path")
    options = parser.parse_args()[0]

    users, tasks = harvest_contest_data(options.contest_id)

    # Keep only actor_num users (sorted or randomly sampled).
    if options.actor_num is not None:
        user_items = users.items()
        if options.sort_actors:
            user_items.sort()
        else:
            random.shuffle(user_items)
        users = dict(user_items[:options.actor_num])

    # If the base URL is not specified, we try to guess it; anyway,
    # the guess code isn't very smart...
    if options.base_url is not None:
        base_url = options.base_url
    else:
        base_url = "http://%s:%d/" % \
            (get_service_address(ServiceCoord('ContestWebServer', 0))[0],
             config.contest_listen_port[0])

    actors = [
        RandomActor(
            username, data['password'], DEFAULT_METRICS, tasks,
            log=RequestLog(log_dir=os.path.join('./test_logs', username)),
            base_url=base_url,
            submissions_path=options.submissions_path)
        for username, data in users.iteritems()
    ]
    for actor in actors:
        actor.start()

    # Sleep until the user interrupts us, then ask actors to stop.
    try:
        while True:
            time.sleep(1)
    except KeyboardInterrupt:
        print >> sys.stderr, "Taking down actors"
        for actor in actors:
            actor.die = True

    # Turn on some memory profiling
    #from meliae import scanner
    #print "Dumping"
    #scanner.dump_all_objects('objects.json')
    #print "Dump finished"

    # Join all actors; the for-else fires once the for completes
    # without break, so the outer while performs exactly one pass.
    finished = False
    while not finished:
        for actor in actors:
            actor.join()
        else:
            finished = True

    print >> sys.stderr, "Test finished"

    # Merge every actor's log and print aggregate statistics.
    great_log = RequestLog()
    for actor in actors:
        great_log.merge(actor.log)
    great_log.print_stats()
def main():
    """Stress-test entry point (optparse variant with prepare/read).

    With --prepare-path, only dumps the contest's users and tasks to a
    file and returns.  Otherwise it loads contest data (from the
    database, or from a file written by a previous --prepare-path
    run), optionally slices the user set, starts one RandomActor per
    user and, upon Ctrl-C, stops them and prints aggregate stats.

    NOTE(review): mixes Python 2 idioms (`iteritems`, list-returning
    `items`) with the print function.
    """
    parser = optparse.OptionParser(usage="usage: %prog [options]")
    parser.add_option("-c", "--contest", help="contest ID to export",
                      dest="contest_id", action="store", type="int",
                      default=None)
    parser.add_option("-n", "--actor-num",
                      help="the number of actors to spawn",
                      dest="actor_num", action="store", type="int",
                      default=None)
    parser.add_option("-s", "--sort-actors",
                      help="sort usernames alphabetically "
                      "instead of randomizing before slicing them",
                      action="store_true", default=False,
                      dest="sort_actors")
    parser.add_option("-u", "--base-url",
                      help="base URL for placing HTTP requests",
                      action="store", default=None, dest="base_url")
    parser.add_option("-S", "--submissions-path",
                      help="base path for submission to send",
                      action="store", default=None,
                      dest="submissions_path")
    parser.add_option("-p", "--prepare-path",
                      help="file to put contest info to",
                      action="store", default=None, dest="prepare_path")
    parser.add_option("-r", "--read-from",
                      help="file to read contest info from",
                      action="store", default=None, dest="read_from")
    options = parser.parse_args()[0]

    # If prepare_path is specified we only need to save some useful
    # contest data and exit.
    if options.prepare_path is not None:
        users, tasks = harvest_contest_data(options.contest_id)
        contest_data = dict()
        contest_data['users'] = users
        contest_data['tasks'] = tasks
        with io.open(options.prepare_path, "wt", encoding="utf-8") as file_:
            file_.write("%s" % contest_data)
        return

    users = []
    tasks = []

    # If read_from is not specified, read contest data from database
    # if it is specified - read contest data from the file
    if options.read_from is None:
        users, tasks = harvest_contest_data(options.contest_id)
    else:
        with io.open(options.read_from, "rt", encoding="utf-8") as file_:
            contest_data = ast.literal_eval(file_.read())
            users = contest_data['users']
            tasks = contest_data['tasks']

    # Keep only actor_num users (sorted or randomly sampled).
    if options.actor_num is not None:
        user_items = users.items()
        if options.sort_actors:
            user_items.sort()
        else:
            random.shuffle(user_items)
        users = dict(user_items[:options.actor_num])

    # If the base URL is not specified, we try to guess it; anyway,
    # the guess code isn't very smart...
    if options.base_url is not None:
        base_url = options.base_url
    else:
        base_url = "http://%s:%d/" % \
            (get_service_address(ServiceCoord('ContestWebServer', 0))[0],
             config.contest_listen_port[0])

    actors = [RandomActor(username, data['password'], DEFAULT_METRICS,
                          tasks,
                          log=RequestLog(log_dir=os.path.join('./test_logs',
                                                              username)),
                          base_url=base_url,
                          submissions_path=options.submissions_path)
              for username, data in users.iteritems()]
    for actor in actors:
        actor.start()

    # Sleep until the user interrupts us, then ask actors to stop.
    try:
        while True:
            time.sleep(1)
    except KeyboardInterrupt:
        print("Taking down actors", file=sys.stderr)
        for actor in actors:
            actor.die = True

    # Uncomment to turn on some memory profiling.
    # from meliae import scanner
    # print("Dumping")
    # scanner.dump_all_objects('objects.json')
    # print("Dump finished")

    # Join all actors; the for-else fires once the for completes
    # without break, so the outer while performs exactly one pass.
    finished = False
    while not finished:
        for actor in actors:
            actor.join()
        else:
            finished = True

    print("Test finished", file=sys.stderr)

    # Merge every actor's log and print aggregate statistics.
    great_log = RequestLog()
    for actor in actors:
        great_log.merge(actor.log)
    great_log.print_stats()
def test_service_not_present(self):
    """Looking up an unconfigured service must raise KeyError."""
    bogus = ServiceCoord("ServiceNotPresent", 0)
    with self.assertRaises(KeyError):
        get_service_address(bogus)
def main():
    """Stress-test entry point (argparse variant).

    With --prepare-path, only dumps the contest's users and tasks to a
    file and returns.  Otherwise it loads contest data (from the
    database, or from a file written by a previous --prepare-path
    run), optionally slices the user set, starts one actor thread per
    user (RandomActor, or SubmitActor with --only-submit) and, upon
    Ctrl-C, stops them and prints aggregate request statistics.
    """
    parser = argparse.ArgumentParser(description="Stress tester for CMS")
    parser.add_argument("-c", "--contest-id", action="store", type=int,
                        required=True,
                        help="ID of the contest to test against")
    parser.add_argument("-n", "--actor-num", action="store", type=int,
                        help="the number of actors to spawn")
    parser.add_argument(
        "-s", "--sort-actors", action="store_true",
        help="sort usernames alphabetically before slicing them")
    parser.add_argument("-u", "--base-url", action="store",
                        type=utf8_decoder,
                        help="base contest URL for placing HTTP requests "
                        "(without trailing slash)")
    parser.add_argument("-S", "--submissions-path", action="store",
                        type=utf8_decoder,
                        help="base path for submission to send")
    parser.add_argument("-p", "--prepare-path", action="store",
                        type=utf8_decoder,
                        help="file to put contest info to")
    parser.add_argument("-r", "--read-from", action="store",
                        type=utf8_decoder,
                        help="file to read contest info from")
    parser.add_argument("-t", "--time-coeff", action="store", type=float,
                        default=10.0,
                        help="average wait between actions")
    parser.add_argument("-o", "--only-submit", action="store_true",
                        help="whether the actor only submits solutions")
    args = parser.parse_args()

    # If prepare_path is specified we only need to save some useful
    # contest data and exit.
    if args.prepare_path is not None:
        users, tasks = harvest_contest_data(args.contest_id)
        contest_data = dict()
        contest_data['users'] = users
        contest_data['tasks'] = tasks
        with io.open(args.prepare_path, "wt", encoding="utf-8") as file_:
            file_.write("%s" % contest_data)
        return

    # Basic sanity checks on the remaining options.
    assert args.time_coeff > 0.0
    assert not (args.only_submit and len(args.submissions_path) == 0)

    users = []
    tasks = []

    # If read_from is not specified, read contest data from database
    # if it is specified - read contest data from the file
    if args.read_from is None:
        users, tasks = harvest_contest_data(args.contest_id)
    else:
        with io.open(args.read_from, "rt", encoding="utf-8") as file_:
            contest_data = ast.literal_eval(file_.read())
        users = contest_data['users']
        tasks = contest_data['tasks']

    if len(users) == 0:
        print("No viable users, terminating.")
        return

    # Keep only actor_num users (sorted or randomly sampled).
    if args.actor_num is not None:
        user_items = list(iteritems(users))
        if args.sort_actors:
            user_items.sort()
        else:
            random.shuffle(user_items)
        users = dict(user_items[:args.actor_num])

    # If the base URL is not specified, we try to guess it; anyway,
    # the guess code isn't very smart...
    if args.base_url is not None:
        base_url = args.base_url
    else:
        base_url = "http://%s:%d/" % \
            (get_service_address(ServiceCoord('ContestWebServer', 0))[0],
             config.contest_listen_port[0])

    metrics = DEFAULT_METRICS
    metrics["time_coeff"] = args.time_coeff
    actor_class = RandomActor
    if args.only_submit:
        actor_class = SubmitActor

    actors = [
        actor_class(
            username, data['password'], metrics, tasks,
            log=RequestLog(log_dir=os.path.join('./test_logs', username)),
            base_url=base_url,
            submissions_path=args.submissions_path)
        for username, data in iteritems(users)
    ]
    for actor in actors:
        actor.start()

    # Sleep until the user interrupts us, then ask actors to stop.
    try:
        while True:
            time.sleep(1)
    except KeyboardInterrupt:
        print("Taking down actors", file=sys.stderr)
        for actor in actors:
            actor.die = True

    # Uncomment to turn on some memory profiling.
    # from meliae import scanner
    # print("Dumping")
    # scanner.dump_all_objects('objects.json')
    # print("Dump finished")

    for actor in actors:
        actor.join()

    print("Test finished", file=sys.stderr)

    # Merge every actor's log and print aggregate statistics.
    great_log = RequestLog()
    for actor in actors:
        great_log.merge(actor.log)
    great_log.print_stats()
def main():
    """Stress the contest web server with one RandomActor per user.

    Harvests users/tasks from the contest, spawns the actor threads,
    and on Ctrl-C tears them down and prints the merged request log.

    NOTE(review): Python-2-only code (`print >>`, `dict.iteritems`,
    list-returning `dict.items`).
    """
    parser = optparse.OptionParser(usage="usage: %prog [options]")
    parser.add_option("-c", "--contest", help="contest ID to export",
                      dest="contest_id", action="store", type="int",
                      default=None)
    parser.add_option("-n", "--actor-num",
                      help="the number of actors to spawn",
                      dest="actor_num", action="store", type="int",
                      default=None)
    parser.add_option("-s", "--sort-actors",
                      help="sort usernames alphabetically "
                      "instead of randomizing before slicing them",
                      action="store_true", default=False,
                      dest="sort_actors")
    parser.add_option("-u", "--base-url",
                      help="base URL for placing HTTP requests",
                      action="store", default=None, dest="base_url")
    parser.add_option("-S", "--submissions-path",
                      help="base path for submission to send",
                      action="store", default=None,
                      dest="submissions_path")
    options = parser.parse_args()[0]

    users, tasks = harvest_contest_data(options.contest_id)

    # Keep only actor_num users (sorted or randomly sampled).
    if options.actor_num is not None:
        user_items = users.items()
        if options.sort_actors:
            user_items.sort()
        else:
            random.shuffle(user_items)
        users = dict(user_items[:options.actor_num])

    # If the base URL is not specified, we try to guess it; anyway,
    # the guess code isn't very smart...
    if options.base_url is not None:
        base_url = options.base_url
    else:
        base_url = "http://%s:%d/" % \
            (get_service_address(ServiceCoord('ContestWebServer', 0))[0],
             config.contest_listen_port[0])

    actors = [RandomActor(username, data['password'], DEFAULT_METRICS,
                          tasks,
                          log=RequestLog(log_dir=os.path.join('./test_logs',
                                                              username)),
                          base_url=base_url,
                          submissions_path=options.submissions_path)
              for username, data in users.iteritems()]
    for actor in actors:
        actor.start()

    # Sleep until the user interrupts us, then ask actors to stop.
    try:
        while True:
            time.sleep(1)
    except KeyboardInterrupt:
        print >> sys.stderr, "Taking down actors"
        for actor in actors:
            actor.die = True

    # Turn on some memory profiling
    #from meliae import scanner
    #print "Dumping"
    #scanner.dump_all_objects('objects.json')
    #print "Dump finished"

    # Join all actors; the for-else fires once the for completes
    # without break, so the outer while performs exactly one pass.
    finished = False
    while not finished:
        for actor in actors:
            actor.join()
        else:
            finished = True

    print >> sys.stderr, "Test finished"

    # Merge every actor's log and print aggregate statistics.
    great_log = RequestLog()
    for actor in actors:
        great_log.merge(actor.log)
    great_log.print_stats()
def main():
    """Stress-test entry point (optparse, prepare/read, print function).

    With --prepare-path, only dumps the contest's users and tasks to a
    file and returns.  Otherwise it loads contest data (from the
    database, or from a previously prepared file), optionally slices
    the user set, starts one RandomActor per user and, upon Ctrl-C,
    stops them and prints aggregate request statistics.

    NOTE(review): still relies on Python 2 `iteritems`/list-returning
    `items` despite using the print function.
    """
    parser = optparse.OptionParser(usage="usage: %prog [options]")
    parser.add_option("-c", "--contest", help="contest ID to export",
                      dest="contest_id", action="store", type="int",
                      default=None)
    parser.add_option("-n", "--actor-num",
                      help="the number of actors to spawn",
                      dest="actor_num", action="store", type="int",
                      default=None)
    parser.add_option("-s", "--sort-actors",
                      help="sort usernames alphabetically "
                      "instead of randomizing before slicing them",
                      action="store_true", default=False,
                      dest="sort_actors")
    parser.add_option("-u", "--base-url",
                      help="base URL for placing HTTP requests",
                      action="store", default=None, dest="base_url")
    parser.add_option("-S", "--submissions-path",
                      help="base path for submission to send",
                      action="store", default=None,
                      dest="submissions_path")
    parser.add_option("-p", "--prepare-path",
                      help="file to put contest info to",
                      action="store", default=None, dest="prepare_path")
    parser.add_option("-r", "--read-from",
                      help="file to read contest info from",
                      action="store", default=None, dest="read_from")
    options = parser.parse_args()[0]

    # If prepare_path is specified we only need to save some useful
    # contest data and exit.
    if options.prepare_path is not None:
        users, tasks = harvest_contest_data(options.contest_id)
        contest_data = dict()
        contest_data['users'] = users
        contest_data['tasks'] = tasks
        with io.open(options.prepare_path, "wt", encoding="utf-8") as file_:
            file_.write("%s" % contest_data)
        return

    users = []
    tasks = []

    # If read_from is not specified, read contest data from database
    # if it is specified - read contest data from the file
    if options.read_from is None:
        users, tasks = harvest_contest_data(options.contest_id)
    else:
        with io.open(options.read_from, "rt", encoding="utf-8") as file_:
            contest_data = ast.literal_eval(file_.read())
            users = contest_data['users']
            tasks = contest_data['tasks']

    # Keep only actor_num users (sorted or randomly sampled).
    if options.actor_num is not None:
        user_items = users.items()
        if options.sort_actors:
            user_items.sort()
        else:
            random.shuffle(user_items)
        users = dict(user_items[:options.actor_num])

    # If the base URL is not specified, we try to guess it; anyway,
    # the guess code isn't very smart...
    if options.base_url is not None:
        base_url = options.base_url
    else:
        base_url = "http://%s:%d/" % \
            (get_service_address(ServiceCoord('ContestWebServer', 0))[0],
             config.contest_listen_port[0])

    actors = [
        RandomActor(
            username, data['password'], DEFAULT_METRICS, tasks,
            log=RequestLog(log_dir=os.path.join('./test_logs', username)),
            base_url=base_url,
            submissions_path=options.submissions_path)
        for username, data in users.iteritems()
    ]
    for actor in actors:
        actor.start()

    # Sleep until the user interrupts us, then ask actors to stop.
    try:
        while True:
            time.sleep(1)
    except KeyboardInterrupt:
        print("Taking down actors", file=sys.stderr)
        for actor in actors:
            actor.die = True

    # Uncomment to turn on some memory profiling.
    # from meliae import scanner
    # print("Dumping")
    # scanner.dump_all_objects('objects.json')
    # print("Dump finished")

    # Join all actors; the for-else fires once the for completes
    # without break, so the outer while performs exactly one pass.
    finished = False
    while not finished:
        for actor in actors:
            actor.join()
        else:
            finished = True

    print("Test finished", file=sys.stderr)

    # Merge every actor's log and print aggregate statistics.
    great_log = RequestLog()
    for actor in actors:
        great_log.merge(actor.log)
    great_log.print_stats()
def main():
    """Stress-test a CMS contest with a swarm of simulated users.

    In --prepare-path mode only dumps users/tasks to a file and exits.
    Otherwise it loads contest data (from the database or a previously
    prepared file), picks the requested number of users, starts one
    actor thread per user (SubmitActor with --only-submit, RandomActor
    otherwise), and on Ctrl-C stops them and prints aggregate request
    statistics.
    """
    parser = argparse.ArgumentParser(description="Stress tester for CMS")
    parser.add_argument(
        "-c", "--contest-id", action="store", type=int, required=True,
        help="ID of the contest to test against")
    parser.add_argument(
        "-n", "--actor-num", action="store", type=int,
        help="the number of actors to spawn")
    parser.add_argument(
        "-s", "--sort-actors", action="store_true",
        help="sort usernames alphabetically before slicing them")
    parser.add_argument(
        "-u", "--base-url", action="store", type=utf8_decoder,
        help="base contest URL for placing HTTP requests "
             "(without trailing slash)")
    parser.add_argument(
        "-S", "--submissions-path", action="store", type=utf8_decoder,
        help="base path for submission to send")
    parser.add_argument(
        "-p", "--prepare-path", action="store", type=utf8_decoder,
        help="file to put contest info to")
    parser.add_argument(
        "-r", "--read-from", action="store", type=utf8_decoder,
        help="file to read contest info from")
    parser.add_argument(
        "-t", "--time-coeff", action="store", type=float, default=10.0,
        help="average wait between actions")
    parser.add_argument(
        "-o", "--only-submit", action="store_true",
        help="whether the actor only submits solutions")
    args = parser.parse_args()

    # Prepare mode: dump the contest snapshot to a file and stop.
    if args.prepare_path is not None:
        users, tasks = harvest_contest_data(args.contest_id)
        snapshot = dict()
        snapshot['users'] = users
        snapshot['tasks'] = tasks
        with io.open(args.prepare_path, "wt", encoding="utf-8") as stream:
            stream.write("%s" % snapshot)
        return

    assert args.time_coeff > 0.0
    assert not (args.only_submit and len(args.submissions_path) == 0)

    # Load contest data from the database or from a prepared file.
    if args.read_from is None:
        users, tasks = harvest_contest_data(args.contest_id)
    else:
        with io.open(args.read_from, "rt", encoding="utf-8") as stream:
            snapshot = ast.literal_eval(stream.read())
        users = snapshot['users']
        tasks = snapshot['tasks']

    if len(users) == 0:
        print("No viable users, terminating.")
        return

    # Keep only actor_num users (sorted, or randomly sampled).
    if args.actor_num is not None:
        selected = list(iteritems(users))
        if args.sort_actors:
            selected.sort()
        else:
            random.shuffle(selected)
        users = dict(selected[:args.actor_num])

    # Guess the base URL when it was not given explicitly.
    if args.base_url is not None:
        base_url = args.base_url
    else:
        cws_ip = get_service_address(
            ServiceCoord('ContestWebServer', 0))[0]
        base_url = "http://%s:%d/" % (cws_ip,
                                      config.contest_listen_port[0])

    metrics = DEFAULT_METRICS
    metrics["time_coeff"] = args.time_coeff
    actor_class = SubmitActor if args.only_submit else RandomActor

    actors = [actor_class(username, data['password'], metrics, tasks,
                          log=RequestLog(
                              log_dir=os.path.join('./test_logs',
                                                   username)),
                          base_url=base_url,
                          submissions_path=args.submissions_path)
              for username, data in iteritems(users)]
    for actor in actors:
        actor.start()

    # Idle until interrupted, then ask every actor to stop.
    try:
        while True:
            time.sleep(1)
    except KeyboardInterrupt:
        print("Taking down actors", file=sys.stderr)
        for actor in actors:
            actor.die = True

    # Uncomment to turn on some memory profiling.
    # from meliae import scanner
    # print("Dumping")
    # scanner.dump_all_objects('objects.json')
    # print("Dump finished")

    for actor in actors:
        actor.join()

    print("Test finished", file=sys.stderr)

    # Merge every actor's log and print the aggregate statistics.
    merged_log = RequestLog()
    for actor in actors:
        merged_log.merge(actor.log)
    merged_log.print_stats()