def start_simulation(self, widget):
    N = []
    t_zero = time.time()
    for i in range(0, len(self.Nodes)):
        N.append([[0, 0], 0])
    M = Map(self.Nodes, self.Connectors)
    S = Simulation(10, 2, self.Nodes, self.Connectors)
    print("Nodes: " + str(self.Nodes))
    print("Connectors: " + str(self.Connectors))
    self.Vehicles = utils.generate_vehicles(self.Connectors)
    print("Vehicles: " + str(self.Vehicles))
    M.disp_map()
    while True:
        time.sleep(0.05)
        [N, self.Vehicles] = S.simulate_vehicles(self.Vehicles, N)
        M.disp_vehicles(self.Vehicles, N)
        #M.disp_infections(self.Vehicles, N)
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                M.close_window()
                #DB.save(x)
                self.info.set_text("Runtime: " + str(time.time() - t_zero) + "s")
                print("Runtime: " + str(time.time() - t_zero) + "s")
                return 0
def state_dict(self):
    state_dict = Map()
    state_dict.G = self.G.state_dict()
    if self.D is not None:
        state_dict.D = self.D.state_dict()
    return state_dict.toDict()
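# The snippets in this collection lean on a small `Map` utility. Below is a
# hypothetical minimal sketch of the attribute-style dict that call sites such
# as `Map(code=200, json=[])`, `state_dict.G = ...` and `.toDict()` imply; it
# is an assumption, not the project's actual implementation. Note that several
# snippets (maze robot, traffic map, grid test) use unrelated classes that
# merely share the name `Map`.
class Map(dict):
    """dict whose keys can also be read and written as attributes."""

    def __getattr__(self, name):
        # missing keys read as None, matching checks like
        # `if self.data[url].lock is None` elsewhere in this collection
        return self.get(name)

    def __setattr__(self, name, value):
        self[name] = value

    def toDict(self):
        """Return a plain dict copy (nested Maps stay as-is in this sketch)."""
        return dict(self)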
def __init__(self, maze_dim):
    '''
    Use the initialization function to set up attributes that your robot
    will use to learn and navigate the maze. Some initial attributes are
    provided based on common information, including the size of the maze
    the robot is placed in.
    '''
    self.location = [0, 0]
    self.heading = 'up'
    self.maze_dim = maze_dim
    # the robot's map of the maze;
    # the robot's goal in the mapping stage is to fill every 0 in the map
    # with a value describing the walls on each of the 4 sides of the square
    self.map = Map(maze_dim)
    # are we in mapping mode?
    self.mapping = True
    self.reached_goal = False
    # is the previous square open?
    self.prev_open = 0
    # heuristic function we use to weight possible moves in mapping
    self.heuristic = AStar(self)
    logging.debug("Using heuristic {}".format(self.heuristic))
    # step counter
    self.step = 0
    # maximum steps used to map
    self.STEP_LIMIT = 900
    self.moves = []
def __init__(self, *args, **kwds):
    BaseHeuristic.__init__(self, *args, **kwds)
    # g-values for A*
    self.g_values = Map(self.robot.maze_dim)
    self.g_values.set([0, 0], 1)
    # parameters used later in simulation
    self.reset = {}
    self.TURN_PENALTY = 2
    self.reverse = False
def addTask(self, targetClass, time_interval, run_on_global_pause=False,
            time_limit_pause=0, pause_for=0, time_limit_disable=0, disabled=False):
    taskdef = Map(
        time_interval=time_interval,
        target=targetClass(),
        run_on_global_pause=run_on_global_pause,
        time_limit_pause=time_limit_pause,
        pause_for=pause_for,
        time_limit_disable=time_limit_disable,
        last_run_time=0,
        last_run_duration=0,
        result=True,
        disabled=disabled,
        run_after=0)
    taskdef.name = taskdef.target.__class__.__name__
    self.tasks.append(taskdef)
    return taskdef
def __init__(self, config, writerDef):
    super(InfluxDBWriter, self).__init__(config, writerDef)

    # read params
    self.idb_params = Map(db_url=self.param("connect.url", None),
                          db_user=self.param("connect.user", None),
                          db_pass=self.param("connect.pass", None),
                          db_name=self.param("connect.dbname", None))

    # check the db was defined
    if self.idb_params.db_url is None or self.idb_params.db_name is None:
        raise Exception(
            "Invalid DB connection details (db_url or db_name is missing).")

    # parse url to get host and port
    m = re.search(r"http://([a-zA-Z0-9\.]+):([0-9]+)/?", self.idb_params.db_url)
    if not m:
        raise Exception("The DB url %s is invalid." % self.idb_params.db_url)
    self.idb_params.host = m.group(1)
    self.idb_params.port = m.group(2)

    # create the influxdb client
    self.client = InfluxDBClient(self.idb_params.host, self.idb_params.port,
                                 self.idb_params.db_user, self.idb_params.db_pass,
                                 self.idb_params.db_name)

    Msg.info1_msg(
        "DB connection details are: host=%s,port=%s,user=%s,pass=xxxxx,dbname=%s"
        % (self.idb_params.host, self.idb_params.port,
           self.idb_params.db_user, self.idb_params.db_name))
def serverlist(self):
    # default tcp port
    tcp_port = self.config.value("common.umcrunner.http.tcp-port", 1989)

    # create dict object
    server_list = {}
    server_binding = self.config.value("common.umcrunner.http.server-binding")
    if server_binding is not None:
        for hostname in server_binding.split(","):
            hostname = hostname.strip()
            try:
                df = Map(
                    hostname=hostname,
                    address=socket.gethostbyname(hostname),
                    tcp_port=tcp_port,
                    enabled=True,
                    # TODO: check that address is on this host = better "me"
                    me=(hostname.lower() == socket.gethostname().lower()))
                server_list[hostname] = df
            except Exception as e:
                if "Name or service not known" not in str(e):
                    Msg.err_msg(
                        "Error occurred when obtaining configuration for "
                        "server's hostname '%s': %s!" % (hostname, e))
    # // server_binding
    return server_list
def do_GET(self):
    # umcrunner stats
    if self.process_cluster_request(
            "get", "/stats/hosts/{hostname}", True,
            GlobalContext.params.logstats_interval, False,
            lambda params: Map(
                code=200,
                json=[GlobalContext.umcrunner_stats.to_json()])) is not None:
        return

    # umc stats
    if self.process_cluster_request(
            "get", "/stats/hosts/{hostname}/umc/{umc}", True,
            GlobalContext.params.logstats_interval, False,
            self.callback_umcdef_content) is not None:
        return

    # umc error log
    if self.process_cluster_request(
            "get", "/logs/error/hosts/{hostname}/umc/{umc}", False,
            GlobalContext.params.logstats_interval, False,
            self.callback_umc_errorlog) is not None:
        return

    # messages log stream (server-sent events)
    if self.process_cluster_request(
            "get", "/logs/stream/hosts/{hostname}", False, 0, True,
            self.callback_logstream) is not None:
        return

    # others are not found
    self.send_response(404)
def get_connector_spec(self, widget):
    NCM = Map(self.Nodes, self.Connectors)
    #NCM.Connectors.append(self.newConnector)
    while True:
        NCM.Conn.append(self.newConnector)
        NCM.disp_map()
        NCM.Conn.pop(-1)
        pos = NCM.get_pos()
        if not NCM.Open:
            del NCM
            return 0
        for node in self.Nodes:
            if utils.dist(pos, node[2]) < NCM.hitbox:
                if len(self.newConnector[1]) < 1:
                    self.newConnector[1].append(self.Nodes.index(node))
                elif pos != self.newConnector[1][-1]:
                    self.newConnector[1].append(self.Nodes.index(node))
def _get_batch(self, bucket_id, batch_ids):
    if bucket_id is None:
        raw = [self.data[i] for i in batch_ids]
    else:
        raw = [self.data[bucket_id][i] for i in batch_ids]
    max_l = max(map(len, raw)) + 1

    input_ids = list()
    target_ids = list()
    for d in raw:
        input_ids.append(pad([SOS_ID] + d, max_l))
        target_ids.append(pad(d + [EOS_ID], max_l))
    input_ids = np.asarray(input_ids)
    target_ids = np.asarray(target_ids)

    inputs = Map(input_ids=input_ids, target_ids=target_ids)
    batch = Map(inputs=inputs)
    return batch
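# `pad`, `SOS_ID` and `EOS_ID` are used above but not defined in this snippet.
# A minimal sketch of what `pad` presumably does (an assumption, not the
# original helper): right-pad a token-id list with a PAD id so that all rows
# stack into one rectangular numpy array.
PAD_ID = 0  # hypothetical value; the real constant may differ

def pad(ids, length, pad_id=PAD_ID):
    """Return `ids` extended with `pad_id` up to `length` items."""
    return ids + [pad_id] * (length - len(ids))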
def load_state_dict(self, state_dict):
    state_dict = Map(**state_dict)
    if isinstance(self.model.G, (DataParallel, DistributedDataParallel)):
        self.model.G.module.load_state_dict(state_dict.G)
        if self.model.D is not None:
            self.model.D.module.load_state_dict(state_dict.D)
    else:
        self.model.G.load_state_dict(state_dict.G)
        if self.model.D is not None:
            self.model.D.load_state_dict(state_dict.D)
def create_data(self, url, content, created_time, age):
    if self.data.get(url) is None:
        self.data[url] = Map()
    self.data[url].content = content
    self.data[url].created_time = created_time
    self.data[url].age = age
    if self.data[url].lock is None:
        self.data[url].lock = RLock()
    return self.data[url]
class UmcWriter(object):
    def __init__(self, config, writerDef):
        self.config = config
        self.writerDef = writerDef
        self.writer_id = writerDef["writer-id"]

        # read common writer's params
        base_key = "common.umcpush.writer-params"
        self.params = Map(
            delay_writes=self.config.value(base_key + ".delay-writes", 0.2),
            delay_runs=self.config.value(base_key + ".delay-runs", 10),
            connection_retry_count=self.config.value(
                base_key + ".connection-retry-count", 5),
            connection_retry_interval=self.config.value(
                base_key + ".connection-retry-interval", 10),
            write_interval=self.config.value(base_key + ".write-interval", 0),
        )

        # base key for this writer's configuration
        #self.base_key="common.umcpush.{writer_id}.".format(writer_id=self.writer_id)

        # update any value that may be overridden in this writer's specific parameters
        wparams = self.param("writer-params")
        if wparams is not None:
            for k, v in wparams.items():
                k = k.replace("-", "_")
                if self.params.get(k) is not None:
                    # update only params that exist in common params
                    self.params[k] = v
                else:
                    # this param may be used in a child's configuration
                    pass
    # // init

    # def param_key(self, param_name):
    #     return self.base_key + param_name

    def param(self, param_name, default=None):
        return self.config.value_element(self.writerDef, param_name, default)

    def read_umcdef(self, umc_id, umcconf):
        writers = self.config.value_element(umcconf, "writers", [])
        for writer in writers:
            if writer["writer-id"] == self.writer_id:
                return Map(enabled=self.config.value_element(
                    writer, "enabled", True), writerDef=writer)
        # writer definition for this umc instance has not been found
        return Map(enabled=False, writerDef=None)
class State(BaseObject):
    idCounter = 0

    def __init__(self):
        self.id = State.idCounter
        State.idCounter += 1
        self.map = Map()
        self.isFinal = False

    def __str__(self):
        stateName = "S" + str(self.id)
        if self.isFinal:
            stateName = "{%s}" % stateName
        return stateName

    def addNextState(self, symbol, state):
        self.map.add(symbol, state)

    def getNextStates(self, symbol):
        if symbol in self.map:
            return self.map[symbol]
        else:
            return None

    def getNextState(self, symbol):
        if symbol in self.map:
            return self.map[symbol][0]
        return None

    def removeNextStates(self, symbol):
        del self.map[symbol]

    def replaceState(self, oldState, newState):
        replacementCount = 0
        for key in self.map.keys():
            states = self.map[key]
            for index, state in enumerate(states):
                if state == oldState:
                    states[index] = newState
                    replacementCount += 1
        return replacementCount

    def isDead(self):
        if self.isFinal or len(self.map) == 0:
            return False
        for key in self.map.keys():
            states = self.map[key]
            for state in states:
                if state == self:
                    return False
        return True

    def getAllKeys(self):
        return self.map.keys()

    @staticmethod
    def resetIdCounter():
        State.idCounter = 0
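# A short usage sketch for the State class above (assumes `Map.add(symbol,
# state)` appends `state` under the `symbol` key, which is how getNextStates
# reads it back; `BaseObject` is taken as given from the original project):
s0 = State()
s1 = State()
s1.isFinal = True
s0.addNextState("a", s0)  # 'a' loops on s0
s0.addNextState("b", s1)  # 'b' moves to the final state
print("%s --b--> %s" % (s0, s0.getNextState("b")))  # S0 --b--> {S1}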
def read_umcdef(self, umc_id, umcconf):
    # tags and fields cols of this umc definition
    tcols = [x.strip() for x in
             self.config.value_element(umcconf, "reader.tags").split(',')
             if x != '']
    fcols = [x.strip() for x in
             self.config.value_element(umcconf, "reader.fields").split(',')
             if x != '']

    # combine with common tags and fields cols
    tcols.extend(x for x in [y.strip() for y in self.params.common_tags]
                 if x != '' and x not in tcols and '!' + x not in tcols)
    fcols.extend(x for x in [y.strip() for y in self.params.common_fields]
                 if x != '' and x not in fcols and '!' + x not in fcols)

    # remove all commented-out fields and tags
    tcols = [x for x in tcols if not x.startswith('!')]
    fcols = [x for x in fcols if not x.startswith('!')]

    # read and check time field and its format
    timeformat = self.config.value_element(umcconf, "reader.timeformat",
                                           self.params.default_timeformat)
    try:
        if timeformat not in ['_unix_', '_time_s_', '_time_ms_']:
            strftime(timeformat, gmtime())
    except Exception as e:
        raise Exception("The time format '%s' is invalid for umc '%s': %s!"
                        % (timeformat, umc_id, e))

    timefield = self.config.value_element(umcconf, "reader.timefield",
                                          self.params.default_timefield)
    tzfield = self.config.value_element(umcconf, "reader.tzfield", None)
    filter = self.config.value_element(umcconf, "reader.filter", None)

    # transformation expressions
    transform = self.config.value_element(umcconf, "reader.transform", None)

    return Map(tcols=tcols, fcols=fcols, timeformat=timeformat,
               timefield=timefield, tzfield=tzfield, filter=filter,
               transform=transform)
def state_dict(self):
    state_dict = Map()
    if isinstance(self.model.G, (DataParallel, DistributedDataParallel)):
        state_dict.G = self.model.G.module.state_dict()
        if self.model.D is not None:
            state_dict.D = self.model.D.module.state_dict()
    else:
        state_dict.G = self.model.G.state_dict()
        if self.model.D is not None:
            state_dict.D = self.model.D.state_dict()
    return state_dict.toDict()
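# A minimal checkpoint round trip through the two trainer methods above.
# Hedged sketch: `trainer` and the file name are illustrative assumptions.
import torch

def save_checkpoint(trainer, path="checkpoint.pth"):
    # state_dict() already unwraps (Distributed)DataParallel modules, so the
    # saved file is independent of the device layout used for training
    torch.save(trainer.state_dict(), path)

def load_checkpoint(trainer, path="checkpoint.pth"):
    trainer.load_state_dict(torch.load(path, map_location="cpu"))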
def __init__(self, config, writer_id):
    self.config = config

    # read common reader's params
    base_key = "common.umcpush.reader-params"
    self.params = Map(
        max_batchsize_rows=self.config.value(base_key + ".max-batchsize-rows", 50),
        max_batchsize_files=self.config.value(base_key + ".max-batchsize-files", 300),
        log_file_group=self.config.value(base_key + ".log-file-group", 1),
        common_tags=self.config.value(base_key + ".common-tags").split(','),
        common_fields=self.config.value(base_key + ".common-fields").split(','),
        default_timefield=self.config.value(base_key + ".default-timefield", "datetime"),
        default_timeformat=self.config.value(base_key + ".default-timeformat",
                                             "%Y-%m-%d %H:%M:%S"),
        tzoffset=utils.float_ex(self.config.value(base_key + ".tzoffset", 0), 0))

    # update any value that may be overridden in the writer's specific parameters
    writers = config.value("common.umcpush.writers")
    for writer in writers:
        if writer["writer-id"] == writer_id:
            rparams = writer["reader-params"]
            if rparams is not None:
                for k, v in rparams.items():
                    k = k.replace("-", "_")
                    if self.params.get(k):
                        self.params[k] = v
                    else:
                        Msg.warn_msg("The reader param %s is invalid in %s"
                                     % (k, writer_id))
def is_node_ok(self, node):
    if len(node[1][0]) < 1 or len(node[1][1]) < 1:
        return "Node size not correct"
    if node[0] == '':
        return "Fill the name field"
    for n in self.Nodes:
        if n[0] == node[0]:
            return "Name already used"
        if node[2] != -1:
            M = Map(self.Nodes, self.Connectors)
            if utils.dist(n[2], node[2]) < M.hitbox:
                return "Node too close to a peer"
            del M
        else:
            return "Choose a position for the node"
    return True
def callback_umc_errorlog(self, params):
    content = Map(code=200, json=[])
    for ud in GlobalContext.umcdefs:
        ud.lock.acquire()
        try:
            if ud.umc_instanceid.startswith(params.params.umc):
                errorlog = "%s/%s.error.out" % (
                    get_umc_instance_log_dir(ud.umc_instanceid, GlobalContext),
                    ud.umc_instanceid)
                if os.path.exists(errorlog):
                    content.json.append(json.dumps({
                        "umc_instanceid": ud.umc_instanceid,
                        "rows": tail(errorlog, 10)
                    }))
            # // if umc id
        finally:
            ud.lock.release()
    return content
def __init__(self, config):
    self.config = config
    self.params = Map(
        http_enabled=self.config.value("common.umcrunner.http.enabled", True),
        tcp_port=self.config.value("common.umcrunner.http.tcp-port", 1989),
        log_file_groups=self.config.value("common.umcrunner.log-file-groups", "-"),
        run_interval=self.config.value("common.umcrunner.run-interval", 10),
        prcstats_interval=self.config.value("common.umcrunner.prcstats-interval", 5),
        logstats_interval=self.config.value("common.umcrunner.logstats-interval", 60),
        logstats_max_duration=self.config.value(
            "common.umcrunner.logstats-max-duration", 2),
        orphans_interval=self.config.value("common.umcrunner.orphans-interval", 5),
        maxproc_interval=self.config.value("common.umcrunner.maxproc-interval", 5),
        maxzombies_interval=self.config.value(
            "common.umcrunner.maxzombies-interval", 5),
        loop_interval=self.config.value("common.umcrunner.loop-interval", 10),
        max_processes=self.config.value("common.umcrunner.max-processes", 200),
        retc_history=self.config.value("common.umcrunner.returncodes-history", 10),
        proxy_timeout_connect=self.config.value(
            "common.umcrunner.proxy-timeout-connect", 0.5),
        proxy_timeout_read=self.config.value("common.umcrunner.proxy-timeout-read", 5),
        proxy_run_threads=self.config.value(
            "common.umcrunner.proxy-run-threads", True),
        min_starting_time=self.config.value("common.umcrunner.min-starting-time", 60),
        run_after_failure=self.config.value("common.umcrunner.run-after-failure", 60),
        oserror_max_attempts=self.config.value(
            "common.umcrunner.oserror-max-attempts", 5),
        oserror_wait_time=self.config.value("common.umcrunner.oserror-wait-time", 60))
def __init__(self, config, writerDef):
    super(OMCWriter, self).__init__(config, writerDef)

    # read params
    self.omc_params = Map(
        base_url=self.param("connect.base-url"),
        data_url=self.param("connect.data-url"),
        proxies=self.param("connect.proxies"),
        user=self.param("connect.user"),
        upass=self.param("connect.pass", ""),
        connect_timeout=self.param("connect.connect-timeout", 5),
        read_timeout=self.param("connect.read-timeout", 10),
        omc_inprogress_timeout=self.param("connect.omc-inprogress-timeout", 120))

    # print params
    Msg.info2_msg("OMC Writer parameters: %s" % self.omc_params)

    # check the db was defined
    if self.omc_params.data_url is None:
        raise Exception("Invalid connection details (data_url is missing).")
def read_umcdefs(self, reader, writer):
    allinstances = self.get_umc_instances()
    umcdefs = {}
    for instance in allinstances:
        umc_id = instance["umc-id"]
        umcdef = Map(umcid=umc_id, enabled=False, writer=None,
                     reader=None, instance=instance)
        umcdef.enabled = self.value_element(instance, "enabled", False)
        umcdef.writer = writer.read_umcdef(umc_id, instance)
        umcdef.reader = reader.read_umcdef(umc_id, instance)
        Msg.info1_msg("Definition retrieved for umc %s" % umc_id)

        if not umcdef.enabled:
            Msg.info1_msg("umc id %s is disabled by configuration, "
                          "no datapoints will be read." % umc_id)
        elif umcdef.writer is None or umcdef.reader is None:
            Msg.info2_msg("umc id %s does not have reader or writer definitions "
                          "and it will be disabled." % umc_id)
            umcdef.enabled = False

        # disable if the writer is not enabled
        if not umcdef.writer.enabled:
            Msg.info2_msg("umc id %s is disabled as its writer is disabled. "
                          "No data will be read for this umc id." % umc_id)
            umcdef.enabled = False

        if umcdefs.get(umc_id) is not None:
            Msg.err_msg("There is a duplicate umc instance with id '%s' "
                        "in the configuration file!" % umc_id)
        else:
            umcdefs[umc_id] = umcdef
    # // for
    return umcdefs
def run_task(self, GlobalContext, tdef):
    if GlobalContext.umcdefs is not None:
        for ud in GlobalContext.umcdefs:
            if ud.enabled:
                ud.lock.acquire()
                try:
                    log_stats = Map(backlog_total=0, errorlog_mtime=0,
                                    errorlog_size=0, errorlog_tail=[])
                    log_dir = get_umc_instance_log_dir(ud.umc_instanceid, GlobalContext)
                    if os.path.isdir(log_dir):
                        for file in [os.path.basename(f.path) for f in scandir(log_dir)]:
                            # match the log file waiting to be consumed;
                            # there is a maximum of 9 groups (1-9)
                            m1 = re.match(r"^{umc_instanceid}_[0-9\-]+\.log\.([1-9])$".format(
                                umc_instanceid=ud.umc_instanceid), file)
                            if m1:
                                fg_key = "backlog_group_%s" % m1.group(1)
                                if log_stats.get(fg_key) is None:
                                    log_stats[fg_key] = 1
                                else:
                                    log_stats[fg_key] += 1
                                log_stats.backlog_total += 1
                            # // if match log file

                            # match the error log
                            m2 = re.match(r"^{umc_instanceid}(_[0-9\-]+)?\.error\.out$".format(
                                umc_instanceid=ud.umc_instanceid), file)
                            if m2:
                                stat = os.stat(log_dir + "/" + file)
                                log_stats.errorlog_size = stat.st_size
                                if log_stats.errorlog_size > 0:
                                    log_stats.errorlog_mtime = stat.st_mtime
                                else:
                                    log_stats.errorlog_mtime = 0
                                # the below takes too much time to finish, better not run this
                                #log_stats.errorlog_tail=utils.tail(log_dir + "/" + file, 10)
                            # // if match error log
                        # // for
                    else:
                        Msg.warn_msg("Directory %s does not exist!" % log_dir)

                    # update log stats
                    ud.log_stats = log_stats
                finally:
                    ud.lock.release()
            # // if enabled
        # // for
    # // if
    return True
def callback_umcdef_content(self, params):
    content = Map(code=200, json=[])
    for ud in GlobalContext.umcdefs:
        ud.lock.acquire()
        try:
            if not ud.get("errorlog"):
                sl_def = GlobalContext.server_list.get(socket.gethostname())
                if (sl_def is not None and sl_def.address is not None
                        and sl_def.tcp_port is not None and sl_def.me):
                    ud["link_errorlog"] = (
                        "/logs/error/hosts/{hostname}/umc/{umc_instance}".format(
                            hostname=ud.hostname,
                            umc_instance=ud.umc_instanceid))
            if params.params.umc == 'all' or ud.umc_instanceid.startswith(
                    params.params.umc):
                content.json.append(
                    ud.to_json(CustomEncoder, exclude=['proc', 'options', 'lock']))
        finally:
            ud.lock.release()
    return content
def clear(self):
    self.values = Map()
    self.weights = Map()
'''
@date 2015/11/05 11:57:00
@brief pyweb context information interface
'''
import Cookie, urlparse, urllib

from utils import Map, ThreadMap

__all__ = ['config', 'context', 'header', 'cookie', 'setCookie']

config = Map()
config.__doc__ = '''
A configuration object for various aspects of pyweb
'''

context = ThreadMap()
context.__doc__ = '''
A `Map` object containing various information about the request:

`server` (k,v) : A dictionary containing the standard server environment variables.
`host`
`remote_ip`
`method`
`path`
`query`
`headers`
'''
def _generate_mismatches_pairs(self):
    """Generate all mismatch pairs."""
    for i, name in enumerate(os.listdir(self.data_dir)):
        if name == ".DS_Store":
            continue
        remaining = os.listdir(self.data_dir)
        remaining = [f_n for f_n in remaining if f_n != ".DS_Store"]
        # del remaining[i]  # deletes the file from the list, so that it is not chosen again
        other_dir = random.choice(remaining)
        with open(self.pairs_filepath, "a") as f:
            for i in range(3):
                file1 = random.choice(os.listdir(self.data_dir + name))
                file2 = random.choice(os.listdir(self.data_dir + other_dir))
                f.write(name + "\t" + file1 + '\t' + other_dir + "\t" + file2 + '\n')
            f.write("\n")


if __name__ == '__main__':
    print('load all config')
    model_config = Map(yaml.safe_load(open('config/face_base.yaml')))
    cfg = _merge_a_into_b(model_config, get_cfg(), get_cfg(), [])
    data_dir = cfg.DATASETS.FOLDER + 'test/'
    pairs_filepath = cfg.DATASETS.FOLDER + "pairs.txt"
    img_ext = ".png"
    generatePairs = GeneratePairs(data_dir, pairs_filepath, img_ext)
    generatePairs.generate()
# -*- coding: utf-8 -*-
"""
Created on Tue Mar 08 18:27:40 2016

random waypoint

@author: zimu
"""
from utils import Map


def RWP(cmap):
    return


if __name__ == "__main__":
    m = Map(10, 10, 10, 1)
    m.printMap()
    m.printBuilding()
    m.printCitizens()
def get_node_pos(self, widget):
    NNM = Map(self.Nodes, self.Connectors)
    NNM.disp_map()
    self.newNode[2] = NNM.get_pos()
    NNM.close_window()
def test_map_size_is_correct():
    length = 10
    size = length * length
    m = Map(length, length)
    assert_equals(m.number_of_fields(), size)
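# The test above targets a grid-style Map exposing number_of_fields(), distinct
# from the attribute-dict Map sketched earlier. A hypothetical minimal class
# that would satisfy the assertion (the project's real class is named Map):
class GridMap(object):
    def __init__(self, width, height):
        self.width = width
        self.height = height

    def number_of_fields(self):
        return self.width * self.height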
def msg(self, msg, code=200):
    return Map(code=code, json=["{ \"msg\" : \"%s\" }" % msg])
def run_task(self, GlobalContext, tdef):
    umc_counts = Map(count=0, enabled=0, disabled=0, running=0, waiting=0,
                     num_children=0, rss=0, cpu=0, cpu_s=0, runs=0, errors=0,
                     last_errortime=0, backlog_total=0)
    if GlobalContext.umcdefs is not None:
        for ud in GlobalContext.umcdefs:
            ud.lock.acquire()
            try:
                umc_counts.count += 1
                if ud.enabled:
                    umc_counts.enabled += 1
                else:
                    umc_counts.disabled += 1
                umc_counts.errors += ud.num_errors
                umc_counts.runs += ud.num_runs

                # update last error time from the error log if it was sooner
                if ud.log_stats is not None and ud.log_stats.errorlog_mtime > ud.lasterror_time:
                    ud.lasterror_time = ud.log_stats.errorlog_mtime
                if ud.lasterror_time > umc_counts.last_errortime:
                    umc_counts.last_errortime = ud.lasterror_time

                if time.time() < ud.start_after:
                    umc_counts.waiting += 1
                umc_counts.backlog_total += (
                    ud.log_stats.backlog_total
                    if ud.get("log_stats") and ud.get("log_stats").get("backlog_total")
                    else 0)

                # umc instance statistics
                stats = {}

                # process info
                p = {}
                try:
                    if ud.proc is not None:
                        umc_counts.running += 1
                        p["top_pid"] = ud.proc.pid
                        #p["uptime"] = time.time() - ud.proc.create_time()
                        p["uptime"] = time.time() - ud.last_started_time
                        p["cmdline"] = ud.proc.cmdline()
                        kids = ud.proc.children(True)
                        rss = 0.0
                        cpu = 0
                        for k in kids:
                            d = k.as_dict(attrs=['cpu_times', 'memory_info'])
                            cpu = cpu + d["cpu_times"].user
                            rss = rss + d["memory_info"].rss
                        p["rss"] = float(rss / 1024 / 1024)  # in MB
                        p["cpu"] = cpu
                        p["cpu_s"] = cpu / p["uptime"]
                        p["num_chproc"] = len(kids)
                        umc_counts.rss += p["rss"]
                        umc_counts.cpu += p["cpu"]
                        umc_counts.cpu_s += p["cpu_s"]
                        umc_counts.num_children += p["num_chproc"]
                    # // end if
                except Exception as e:
                    Msg.warn_msg("Error occurred when retrieving process info: %s" % str(e))

                stats["p"] = p
                ud.stats = stats
            finally:
                ud.lock.release()
        # // for

    # umcrunner stats
    proc = psutil.Process()
    d = proc.as_dict(attrs=['cpu_times', 'memory_info'])
    uptime = time.time() - proc.create_time()
    hostname = socket.gethostname()
    GlobalContext.umcrunner_stats = Map(
        pid=proc.pid,
        hostname=hostname,
        uptime=uptime,
        cpu=d["cpu_times"].user,
        cpu_s=d["cpu_times"].user / uptime,
        rss=float(d["memory_info"].rss / 1024 / 1024),
        threads=proc.num_threads(),
        umc_counts=umc_counts,
        link_umcinstances="/stats/hosts/{hostname}/umc/all".format(hostname=hostname))
    return True
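# Standalone sketch of the psutil pattern used above: sum user CPU time and
# RSS over a process and all of its descendants. Only psutil is assumed to be
# installed; the function name is illustrative, not from the source.
import psutil

def process_tree_usage(pid):
    """Return (user_cpu_seconds, rss_megabytes) for pid and its descendants."""
    root = psutil.Process(pid)
    cpu, rss = 0.0, 0
    for proc in [root] + root.children(recursive=True):
        try:
            d = proc.as_dict(attrs=['cpu_times', 'memory_info'])
            cpu += d['cpu_times'].user
            rss += d['memory_info'].rss
        except psutil.NoSuchProcess:
            pass  # the process exited while we were iterating
    return cpu, rss / 1024.0 / 1024.0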
def process_cluster_request(self, method, path_def, allow_all, cache_maxage,
                            is_stream, get_content):
    # get_path_params(path_def, self.path)
    params = PathDef(path_def).params(self.path)

    # path must be a valid path and the hostname param must exist in it
    if params is None or params.params.hostname is None:
        return None

    # get a list of servers this should be proxied to;
    # if there is more than one, proxy to them all, otherwise run locally
    # or redirect via the client
    server_list = self.get_server_list(params)

    if len(server_list) > 1 and allow_all:
        # hostname is "all": forward to the individual umcrunner servers,
        # unless this request has been proxied already
        if self.headers.get("Via") is None:
            # acquire a lock on this path to prevent other threads from doing the same
            cache.acquire_lock(self.path)
            try:
                # check if in cache
                content = cache.get(self.path)
                if content is None:  # not in cache
                    # proxy to all umcrunner hosts including "me" (this one)
                    Msg.info2_msg("Sending %d proxy requests." % len(server_list))
                    start_t = time.time()
                    prqs = []
                    for server_def in server_list:
                        prqs.append(ProxyRequest(
                            method,
                            'http://{address}:{tcp_port}{fw_path}'.format(
                                address=server_def.address,
                                tcp_port=server_def.tcp_port,
                                fw_path=params.replace(
                                    params, Map(hostname=server_def["hostname"]))),
                            GlobalContext.params.proxy_run_threads))
                        prqs[-1].send_request()

                    # wait for all responses
                    for x in prqs:
                        x.wait_for_response()

                    # get all "valid" responses
                    resp = [r for r in prqs if r.response is not None]
                    Msg.info2_msg("Data from %d proxy requests retrieved in %.2f seconds."
                                  % (len(resp), time.time() - start_t))

                    # add the result to the cache; the result from individual
                    # servers should always be a json array
                    content = Map(content="[%s]" % ",".join(
                        [r.response.text.strip()[1:-1] for r in resp
                         if r.response.text.strip() != "[]"]))
                    if cache_maxage > 0:
                        cache.create_data(self.path, content.content,
                                          time.time(), cache_maxage)
                # // if not in cache
                else:
                    Msg.info2_msg("Serving request for %s from cache." % self.path)

                # send back the response
                self.send(200, {"Content-Type": "application/json"}, content.content)
            finally:
                cache.release_lock(self.path)
            return True
        # // if not via
        else:
            Msg.warn_msg("A request to %s can only come from a client, not a proxy! (%s)"
                         % (self.path, self.headers.get("Via")))
            self.send(400, None,
                      "Request to the resource that comes via a proxy is not allowed!")
            return False
    # // if multiple hostnames
    elif len(server_list) == 1:
        # params.params.hostname should be a valid hostname
        server_def = server_list[0]
        if not server_def.me:
            # the host should be a known host; redirect the request onto it
            # rather than acting as a proxy
            location_url = "http://{address}:{tcp_port}{fw_path}".format(
                address=server_def.address,
                tcp_port=server_def.tcp_port,
                fw_path=params.replace(params, Map(hostname=server_def["hostname"])))
            Msg.info2_msg("Redirecting the request to '%s'" % location_url)
            self.send(308, {"Location": location_url}, "")
            return
        else:
            if not is_stream:
                content = get_content(params)
                if content is not None:
                    self.send(content.code, {"Content-Type": "application/json"},
                              "[%s]" % ",".join(content.json))
                else:
                    # should not happen really
                    self.send(500, None, "")
                return True
            else:
                get_content(params)
                return True
    # // if one hostname only
    else:
        self.send(404, None, "The host '%s' cannot be found or is not allowed!"
                  % params.params.hostname)
        return False
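# Hypothetical sketch of the path templating that PathDef appears to provide
# above: compile "/stats/hosts/{hostname}" into a regex, extract named params
# from a concrete path, and substitute values back in. This is an assumption,
# not the project's actual PathDef implementation.
import re

class PathTemplate(object):
    def __init__(self, template):
        self.template = template
        pattern = re.sub(r"\{(\w+)\}", r"(?P<\1>[^/]+)", template)
        self.regex = re.compile("^%s$" % pattern)

    def params(self, path):
        m = self.regex.match(path)
        return m.groupdict() if m else None  # e.g. {'hostname': 'web1'}

    def replace(self, values):
        return self.template.format(**values)

# PathTemplate("/stats/hosts/{hostname}").replace({"hostname": "web1"})
# -> "/stats/hosts/web1"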