def __init__(self, config=None):
    """Initialise the config manager from a module, module name, dict,
    stringified dict or json file base name (see class docstring).

    :param config: dict, module, module name, "stringed" dict or json
        file base name (no '.json' extension)
    """
    import ast  # local import: only needed to parse "stringed" dicts safely

    self.logger = logging.getLogger('server_app.configmodule.ConfigJSON')
    # Safe defaults so attribute lookups below never fail, whichever
    # branch runs (the original left _module/_default unbound on some paths).
    self._module = None
    self._default = self.conf = {}
    if config:
        # self._module is set only when config resolves to a module
        # exposing a DEFAULT attribute.
        self._module = self._getmodule(config)
    else:
        config = "config"
    if self._module:
        self._default = self._module.DEFAULT
        self._confjson = self._module.__name__ + '.json'
        self.conf = DotDict(self._getconfig())
    elif isinstance(config, str):
        if path.isfile(config + '.json'):
            self._confjson = config + ".json"
        else:
            # BUGFIX: the original stored the *string* itself; a
            # "stringed" dict must be parsed back into a dict.
            # ast.literal_eval replaces eval() (arbitrary-code risk).
            try:
                parsed = ast.literal_eval(config)
            except (ValueError, SyntaxError):
                parsed = None
            if isinstance(parsed, dict):
                self._default = self.conf = parsed
    elif isinstance(config, dict):
        # BUGFIX: the original ran eval(str(config)); copying the dict
        # directly is equivalent for literal dicts and never executes code.
        self._default = self.conf = dict(config)
    self._struct = {}
    self._struct = self._getstructure(self._default)
    self.keys = self._default.keys()
    self.save()
    self.logger.debug('__init__')
def load_runtime(self, inf):
    """Refresh the runtime SNMP counters for one port record.

    The previous sample (if any) is kept under ``inf["rti"]["old"]`` and
    the freshly polled values go under ``inf["rti"]["new"]``.
    """
    runtime_oids = self.oid.get("runtime", [])
    if not runtime_oids:
        return
    previous = inf.get("rti", DotDict()).get("new", DotDict())
    current = DotDict()
    current["time"] = time.time()
    rti = DotDict()
    rti["old"] = previous
    rti["new"] = current
    # SNMP connection parameters are stashed on the port record itself.
    host = inf["__loopback__"]
    comm = inf["__snmp_comm__"]
    vern = inf["__snmp_vern__"]
    suffix = "." + str(inf.ifIndex)
    for name in runtime_oids:
        try:
            _, _, value = sop.get(host, comm, vern, soid.get(name) + suffix)
        except:
            traceback.print_exc()
        else:
            current[name] = value
    inf["rti"] = rti
def __init__(self, environ):
    """Wrap a WSGI environ, splitting headers and parsing cookies."""
    self.environ = environ  #: The WSGI environment
    # Fresh dot-accessible containers: FastCGI+HTTP headers, HTTP-only
    # headers, and the WSGI objects respectively.
    for attr in ('headers', 'http_headers', 'wsgi'):
        setattr(self, attr, DotDict())
    self.__parse(environ)
    self.__body = None
    self.cookies = self.__parse_cookies(environ)  #: Parsed cookies
def idic():
    """Decode the JSON request body into a DotDict.

    :return: DotDict of the parsed body; empty DotDict on a missing body
        or malformed JSON.
    """
    try:
        payload = request.body.read() or "{}"
        # json.loads is the idiomatic spelling of JSONDecoder().decode.
        return DotDict(**json.loads(payload))
    except Exception:
        # Broad by design: any bad input yields an empty dict, but no
        # longer swallows SystemExit/KeyboardInterrupt like the bare
        # except did.
        traceback.print_exc()
        return DotDict()
def PtInPolygon(location, *polygons):
    """Return True if ``location`` falls inside any of ``polygons``.

    method: ray casting (crossing number) — a horizontal ray from the
    point crosses the polygon boundary an odd number of times iff the
    point is inside.

    graphical representation:
              ----------
             /          \\
            /            \\
            \\   p------/------
             \\        /
              ----------

    :param location: mapping with cLat/cLon in 1/3600000-degree units
    :param polygons: dicts with 'points' (lat/lon vertices) and 'name'
    """
    point = DotDict(location)
    point.lat = point.cLat / 3600000.0
    point.lon = point.cLon / 3600000.0
    is_in = False
    for polygon in polygons:
        polygon_pts = polygon['points']
        polygon_name = polygon['name']
        # side num of polygon (closed implicitly via the % wrap below)
        sides = len(polygon_pts)
        # num of intersection points; odd => point inside this polygon
        nCross = 0
        for i in range(sides):
            p1 = DotDict(polygon_pts[i])
            p2 = DotDict(polygon_pts[(i + 1) % sides])
            if p1.lat == p2.lat:
                # horizontal edge can never be crossed by a horizontal ray
                continue
            if point.lat < min(p1.lat, p2.lat):
                # cross point is on the extension line of p1p2
                continue
            if point.lat >= max(p1.lat, p2.lat):
                # ditto; >= makes a shared vertex count exactly once
                continue
            # longitude where the ray meets edge p1p2
            lon = ((point.lat - p1.lat) * (p2.lon - p1.lon)
                   / (p2.lat - p1.lat) + p1.lon)
            if lon > point.lon:
                # crossing is to the right of the point
                nCross += 1
        inside_this = nCross % 2 == 1
        # BUGFIX: log the result for *this* polygon. The original tested
        # the accumulated is_in, so a hit on an earlier polygon made all
        # later polygons log "is in" too.
        if inside_this:
            logging.info("Point: %s is in polygon: %s", point, polygon_name)
        else:
            logging.info("Point: %s is out polygon: %s", point, polygon_name)
        is_in = is_in or inside_this
    return is_in
def load_static(self):
    '''Collector information according to router's information'''
    host, comm, vern = self.host, self.comm, self.vern
    static_oids = self.oid.get("static", [])
    for line in sop.walk(host, comm, vern, soid.ipAdEntIfIndex):
        ipaddr, _, ifindex = sop.splitline(line, soid.ipAdEntIfIndex)
        if not ipaddr:
            continue
        # XXX: an ifindex number may repeat across different routers
        record = DotDict()
        record["__loopback__"] = host
        record["__snmp_comm__"] = comm
        record["__snmp_vern__"] = vern
        record["__ipaddr__"] = ipaddr
        self.ports[ifindex] = record
        # Poll every configured static OID for this port, best effort.
        for name in static_oids:
            full_oid = soid.get(name) + "." + str(ifindex)
            try:
                _, _, value = sop.get(host, comm, vern, full_oid)
            except:
                traceback.print_exc()
            else:
                record[name] = value
def read_dataset_splits(splits=Config.SPLITS, reader=read_question_only_data, **kwargs):
    """Load every requested split via ``reader`` and return them keyed by
    split name in a DotDict."""
    return DotDict({split: reader(split, **kwargs) for split in splits})
def ms_flow_get_flow(calldic=None):
    '''
    The request:
        {
            "args": {
                "router_uid": int(xxx) |
                "vlink_uid": int(xxx) |
            },
            "request": "ms_flow_get_flow",
            "trans_id": 1464768264,
            "ts": "20160601160424"
        }

    response:
        {
            "result": {
                "flows": [
                    {
                        "bps": 23400,
                        "dst": "1.2.3.0",           // srcAddr
                        "src": "219.141.189.200",   // dstAddr
                        "uid": "xxx"                // vlink_uid
                    },
                    {
                        "bps": 43287,
                        "dst": "1.2.3.1",
                        "src": "219.141.189.201",
                        "uid": "f_1"
                    },
                    {
                        "bps": 63174,
                        "dst": "1.2.3.2",
                        "src": "219.141.189.202",
                        "uid": "f_2"
                    }
                ]
            },
            "err_code": 0,
            "msg": "Demo response",
            "response": "ms_flow_get_flow",
            "trans_id": 1464768264,
            "ts": "20160601160424"
        }
    '''
    # Fall back to decoding the current HTTP request body.
    calldic = calldic or idic()
    varprt(calldic)
    # Response skeleton echoing trans_id/ts from the request.
    respdic = odic(calldic)
    respdic.result = DotDict()
    # NOTE(review): the docstring advertises "router_uid" but the code
    # reads equip_uid; presumably a missing key yields a falsy value from
    # DotDict rather than raising KeyError -- confirm both points.
    vlink_uid = calldic.args.vlink_uid
    equip_uid = calldic.args.equip_uid
    if vlink_uid:
        respdic.result.flows = get_vlink_flow(vlink_uid)
    elif equip_uid:
        respdic.result.flows = get_equip_flow(equip_uid)
    # When neither uid is present the response carries an empty result.
    return json.dumps(respdic)
def _construct_adjacency_graph(self):
    """Build self.graph as DFS-ready vertex records from self.importers,
    then wire each vertex's adjacency list from its dependent imports."""
    for idx, imp in enumerate(self.importers):
        node = DotDict({
            'id': idx,
            'Adj': [],
            'p': None,
            'color': "WHITE",
            'd': 0,
            'f': 0,
            'importer': imp
        })
        self.graph.append(node)
    # Second pass: every vertex now exists, so fill in the adjacencies.
    for node in self.graph:
        deps = node.importer.dependent_imports  # should be a dictionary
        if not deps:
            continue
        # Deterministic iteration, so the resulting graph is too.
        for field, importer in deps.items():
            match = next(
                (v for v in self.graph if v.importer == importer), None)
            assert match is not None
            node.Adj.append(self.graph[match.id])
def odic(indic):
    """Build the skeleton response dict echoing the request identifiers."""
    response = DotDict()
    response.response = indic.request
    response.trans_id = indic.trans_id
    response.ts = time.strftime("%Y%m%d%H%M%S")
    response.result = DotDict()
    response.err_code = 0
    response.msg = None
    return response
def sample(self):
    """Samples all counters being tracked by this meter, returning a
    DotDict object with all of the sampled values organized by namespace.
    """
    snapshot = DotDict()
    # Each counter maps to a zero-argument callable producing its value.
    for counter, read in self._counters.items():
        snapshot[counter.name] = read()
    return snapshot
def __init__(self, status, headers=None, body=None, *args, **kwargs):
    '''
    Initialize the HTTP Error.

    :param status: The status line to send the WSGI server
    :param headers: Headers to apply to the error
    :param body: Optional response body
    '''
    # BUGFIX: the original used a mutable default (headers={}), which is
    # shared across every call; None is the safe sentinel.
    base_headers = DotDict(_default_error_headers)
    base_headers.update(headers or {})
    #: Stores any extra positional arguments; not actually used as of now
    self.args = args
    #: Stores any keyword arguments; these are passed to the template when rendered
    self.kwargs = kwargs
    self.setup_response(status, base_headers, body)
def describe(self):
    """Returns the description of all counters being tracked by this
    meter. The returned object is a DotDict object with all of the
    descriptions organized by counter namespace.
    """
    if self._description is None:
        # Lazily build the description map on first use, then cache it.
        described = DotDict()
        for counter in self._counters:
            described[counter.name] = counter.description
        self._description = described
    return self._description
def do_urls(env, data_element, release='stable'):
    """Fetch one app URL and assert it serves the expected app bundle.

    Checks, in order: HTTP 200, a script tag referencing the right app
    path, Akamai staging headers matching *env*, and that the forwarded
    URL equals the app's netstorage path. Records a content hash into
    the module-level ``output_data``.

    :param env: 'stage' or production -- controls the staging-header check
    :param data_element: (appname, path) pair
    :param release: 'stable' or 'beta' release channel
    """
    appname, path = data_element
    url = Utils.getUrl(path, release)
    r = requests.get(url, headers=headers)
    assert r.status_code == 200, 'Expected status code of GET {} to be 200 got {}'.format(
        url, r.status_code)
    found = False
    tree = html.fromstring(r.content)
    if 'App.js' in r.text:
        # Starter app: locate the App.js script tag and verify its src
        # points at this app's id.
        for thing in tree.xpath('//script[@type="text/javascript"]'):
            if 'src' in thing.attrib:
                src = thing.attrib['src']
                if 'App.js' in src:
                    expected = '/apps/' + appname
                    assert expected in src, "unexpected app id at {}\n{}\n{}".format(
                        url, r.text, r.headers)
                    found = True
    else:
        # catches apps that are not the starter app
        if release == 'beta':
            assert '<script type="text/javascript" src="/beta/apps/{}'.format(
                appname) in r.text
        else:
            assert '<script type="text/javascript" src="/apps/{}'.format(
                appname) in r.text
        found = True
    assert found, 'did not find a valid app js reference in HTML on GET {}\n{}'.format(
        url, r.text)
    # Staging header must be present on stage and absent elsewhere.
    if env == 'stage':
        assert 'X-Akamai-Staging' in r.headers, 'expected to see staging header in {}\n{}'.format(
            r.headers, url)
    else:
        assert 'X-Akamai-Staging' not in r.headers, 'expected to not see staging header in {}\n{}'.format(
            r.headers, url)
    assert 'X-Akamai-Session-Info' in r.headers
    # The edge must forward to this app's netstorage location.
    AKA_PM_FWD_URL = Utils.extractNamedInfoHeaderValue(
        r.headers['X-Akamai-Session-Info'], 'AKA_PM_FWD_URL')
    net_storage_path = Utils.getNetStoragePath(appname, release)
    assert AKA_PM_FWD_URL == net_storage_path, 'expected AKA_PM_FWD_URL ({}) to match the netstorage path ({}) for the GET {}'.format(
        AKA_PM_FWD_URL, net_storage_path, url)
    # Record a per-environment content hash for later cross-env diffing.
    if path not in output_data:
        output_data[path] = DotDict({'url': url})
    output_data[path][env + '_hash'] = hashlib.md5(
        r.text.encode('utf-8')).hexdigest()
def __parse_cookies(self, environ):
    """Split the HTTP_COOKIE header into a DotDict of name -> value."""
    jar = DotDict()
    if 'HTTP_COOKIE' in environ:
        for chunk in environ['HTTP_COOKIE'].split('; '):
            pieces = chunk.strip().split('=', 1)
            # Ignore malformed fragments with no '=' separator.
            if len(pieces) == 2:
                jar[pieces[0]] = pieces[1]
    return jar
def load_routers(ips):
    """Reset the router registries, then (re)load one router per loopback.

    :param ips: iterable of loopback IP strings
    """
    global g_routers, g_port_fr_ipaddr, g_port_fr_ifindex
    g_routers = {}
    g_port_fr_ipaddr.clr()
    g_port_fr_ifindex.clr()
    for loopback in ips:
        # Hard-coded SNMP community/version defaults for every router.
        router = DotDict()
        router.ip_str = loopback
        router.community = "ctbri"
        router.vern = "2c"
        router.name = "Equip@%s" % loopback
        router.uid = loopback
        router.vendor = "FIXME"
        load_router(router)
def _check_for_mismatches(self, known_keys):
    """check for bad options from value sources

    :param known_keys: set of fully qualified option names ('x.y.z')
    :raises exc.NotAnOptionError: in strict mode, for any unknown key;
        otherwise a warning is issued instead
    """
    for a_value_source in self.values_source_list:
        try:
            if a_value_source.always_ignore_mismatches:
                continue
        except AttributeError:
            # ok, this values source doesn't have the concept of
            # always ignoring mismatches; we won't tolerate mismatches
            pass
        # make a set of all the keys from a value source in the form
        # of strings like this: 'x.y.z'
        value_source_mapping = a_value_source.get_values(self, True)
        value_source_keys_set = set(
            DotDict(value_source_mapping).keys_breadth_first()
        )
        # make a set of the keys that didn't match any of the known
        # keys in the requirements
        unmatched_keys = value_source_keys_set.difference(known_keys)
        # some of the unmatched keys may actually be ok because they were
        # used during acquisition.
        # remove keys of the form 'y.z' if they match a known key of the
        # form 'x.y.z'
        for key in unmatched_keys.copy():
            # IDIOM: any() replaces reduce(lambda x, y: x or y, ...);
            # it short-circuits and, unlike reduce(), tolerates an empty
            # known_keys set instead of raising TypeError.
            if any(known_key.endswith(key) for known_key in known_keys):
                unmatched_keys.remove(key)
        # anything left in the unmatched_key set is a badly formed key.
        # issue a warning
        if unmatched_keys:
            if self.option_definitions.admin.strict.default:
                # raise hell...
                if len(unmatched_keys) > 1:
                    raise exc.NotAnOptionError(
                        "%s are not valid Options" % unmatched_keys)
                elif len(unmatched_keys) == 1:
                    raise exc.NotAnOptionError(
                        "%s is not a valid Option" % unmatched_keys.pop())
            else:
                warnings.warn(
                    'Invalid options: %s' % ', '.join(unmatched_keys))
def _construct_adjacency_graph(self):
    """Build self.graph (DFS-ready vertex records) from self.importers,
    wire each vertex's adjacencies from its dependent imports, and flag
    many-to-many relationships."""
    for i, vertex in enumerate(self.importers):
        #: TODO: Replace DotDict with a Node class
        self.graph.append(
            DotDict({
                'id': i,
                'Adj': [],           # adjacency list, filled in 2nd pass
                'p': None,           # DFS predecessor
                'color': "WHITE",    # DFS visitation state
                'd': 0,              # discovery time
                'f': 0,              # finish time
                'importer': vertex,
                #: Discloses whether this vertex in the graph is m2m to any others (will be set by vertices with
                #: dependent imports back to it)
                'is_m2m': False
            }))
    #: Once this for loop is complete, circle back and add to the Adjacencies
    for vertex in self.graph:
        #: Should return a dictionary
        neighbors = vertex.importer.dependent_imports
        if not neighbors:
            continue
        for field, importer in neighbors.items(
        ):  #: This is deterministic and therefore the results are.
            inner_vertex = next(
                (x for x in self.graph if x.importer == importer), None)
            assert inner_vertex is not None
            vertex.Adj.append(self.graph[inner_vertex.id])
            #: Tell the vertex that it is many-2-many with respect to itself
            #: TODO: use this from the helpers file if it's still necesary
            # NOTE(review): this sets is_m2m on the *neighbor* vertex
            # (inner_vertex), not on `vertex` itself -- confirm intent.
            if self.is_many_to_many(field, vertex.importer.model):
                self.graph[inner_vertex.id].is_m2m = True
def getequip(uid):
    """Fetch one equip document by uid as a DotDict.

    :return: DotDict of the document, or {} when absent or on db error.
    """
    try:
        doc = db.equips.find_one({"uid": uid})
        # BUGFIX: find_one returns None on a miss; DotDict(None) raised,
        # so every miss produced a noisy traceback via the bare except.
        return DotDict(doc) if doc else {}
    except Exception:
        traceback.print_exc()
        return {}
class ConfigJSON:
    """Simple config manager using json

    The constructor can take one ``config`` parameter. ``config`` can
    refer to a module or a module name. If this is the case, it should
    contain at least a ``DEFAULT`` config dict.

    :param config: A dict, a module, a module name, a "stringed" dict or
        a json file name (without the leading dot or '.json' extension)
    :type config: dict, string

    :Example:

    >>>from configmodule import ConfigJSON
    >>>d = {'param1': 1, 'param2': 2}
    >>>s = str(d)
    >>>c1 = ConfigJSON(d)
    >>>c2 = ConfigJSON('config')  # works with config.py or .config.json
    >>>import config
    >>>c3 = ConfigJSON(config)
    >>>c4 = ConfigJSON(s)
    """
    # NOTE(review): these class-level defaults are commented out, yet
    # __init__ below reads self._module / self._default on paths that
    # never assign them -- confirm whether they should be restored.
    # conf = {}
    # _confjson = ''
    # _module = None
    # _default = {}

    class IterConf(dict):
        """Structure storing information about iterables in the config:
        container type, length, and the element types."""

        def __init__(self, data):
            super().__init__(self)
            self['type'] = type(data)
            self['len'] = len(data)
            # Rebuild the container type holding the element types,
            # e.g. [1, 'a'] -> [int, str].
            self['subtypes'] = type(data)(type(x) for x in data)

    def __init__(self, config=None):
        self.logger = logging.getLogger('server_app.configmodule.ConfigJSON')
        if config:
            # self._module is set only with a DEFAULT attribute
            self._module = self._getmodule(config)
        else:
            # NOTE(review): self._module is never assigned on this path;
            # the lookup just below relies on the commented-out class
            # attribute -- confirm.
            config = "config"
        if self._module:
            self._default = self._module.DEFAULT
            self._confjson = self._module.__name__ + '.json'
            self.conf = DotDict(self._getconfig())
        elif isinstance(config, str):
            if path.isfile(config + '.json'):
                self._confjson = config + ".json"
            # NOTE(review): eval() of an arbitrary string is unsafe, and
            # this branch stores the *string* while the dict branch below
            # evals -- the two assignments look swapped; confirm.
            elif isinstance(eval(config), dict):
                self._default = self.conf = config
        elif isinstance(config, dict):
            self._default = self.conf = eval(str(config))
        else:
            self._default = self.conf = {}
        # Seed _struct first: _getstructure reads self._struct.
        self._struct = {}
        self._struct = self._getstructure(self._default)
        self.keys = self._default.keys()
        self.save()
        self.logger.debug('__init__')

    def __del__(self):
        # Persist any in-memory changes when the manager is collected.
        self.logger.debug("__del__")
        self.save()

    def _getmodule(self, module):
        """Check config module validity"""
        self.logger.debug('_getmodule')
        if ismodule(module):
            conf_module = module
        elif util.find_spec(module):
            conf_module = import_module(module)
        # NOTE(review): conf_module is unbound when neither branch runs
        # (non-module argument with no importable spec) -- confirm.
        if not hasattr(conf_module, 'DEFAULT'):
            conf_module = None
        return conf_module

    def _getstructure(self, *args, **kwargs):
        """Return the structure of the config dictionary"""
        self.logger.debug('_getstructure')
        struct = self._struct if self._struct else {}
        for arg in args:
            self.logger.debug(f'in _getstructure: {arg}')
            for (k, v) in arg.items():
                # Dicts recurse; non-string iterables are summarised by
                # IterConf; anything else is reduced to its type.
                if isinstance(v, dict):
                    struct[k] = self._getstructure(v)
                elif not isinstance(v, str) and hasattr(v, '__iter__'):
                    struct[k] = self.IterConf(v)
                else:
                    struct[k] = type(v)
        if kwargs:
            # Same classification for keyword-provided entries.
            for (k, v) in kwargs.items():
                if isinstance(v, dict):
                    struct[k] = self._getstructure(v)
                elif not isinstance(v, str) and hasattr(v, '__iter__'):
                    struct[k] = self.IterConf(v)
                else:
                    struct[k] = type(v)
        return struct

    def _getconfig(self):
        """Get the actual configuration"""
        self.logger.debug('_getconfig')
        try:
            with open(self._confjson) as conf:
                # An empty file falls back to the defaults.
                if stat(self._confjson).st_size:
                    self.conf = json.load(conf)
                else:
                    self.conf = self._default
        except (FileNotFoundError, TypeError):
            # Missing (or unset) json path: create it and use defaults.
            with open(self._confjson, 'w') as conf:
                self.conf = self._default
        for k in self.conf.keys():
            try:
                self.logger.debug(f'in configmodule, try: {k}')
                # Invalid values are replaced by the module defaults.
                self._module._isvalid(self.conf, k)
                self.conf[k] = self.conf[k]
            except TypeError:
                self.logger.debug(f'in configmodule, error: {k}')
                self.conf[k] = self._default[k]
        return self.conf

    def save(self):
        """Write back new parameters"""
        self.logger.debug('save')
        with open(self._confjson, 'w') as conf:
            json.dump(self.conf, conf, indent=4)
def getvlink(uid):
    """Fetch one vlink document by uid as a DotDict.

    :return: DotDict of the document, or {} when absent or on db error.
    """
    try:
        doc = db.vlinks.find_one({"uid": uid})
        # BUGFIX: find_one returns None on a miss; DotDict(None) raised,
        # so every miss produced a noisy traceback via the bare except.
        return DotDict(doc) if doc else {}
    except Exception:
        traceback.print_exc()
        return {}
on a normal use-case are available from directly within the
:mod:`frame.controller.Controller`.
'''

import sys
from _app import app
from _routes import routes
from _config import config
from _logger import logger
from controller import Controller
from errors import *
from dotdict import DotDict
from pkg_resources import iter_entry_points

# Convenience aliases for the two supported server entry points.
start_http = app.start_http
start_fcgi = app.start_fcgi

# Module registry
modules = DotDict()


# Load a module by name. Can specify keyword arguments that will be sent off to the
# module's entry point, if it's setup for them. :)
def load_module(name, *args, **kwargs):
    # Find the matching 'frame.modules' entry point(s) and initialise them.
    for entry_point in (i for i in iter_entry_points('frame.modules')
                        if i.name == name):
        init = entry_point.load()
        result = init(app, *args, **kwargs)
        # Register whatever init returned; fall back to its module object
        # when the initializer returns nothing.
        modules[entry_point.name] = result if result else sys.modules[
            init.__module__]
# Default framework configuration; applications override entries as needed.
config = DotDict({
    # Core application behaviour.
    'application': {
        'name': 'Frame (%s)' % __app_name,
        'strip_trailing_slash': True,
        'dispatcher': 'routes',
        'debug': False,
    },
    # Template discovery and rendering options.
    'templates': {
        'directory': 'templates',
        'loaders': [PackageLoader('frame', 'templates')],
        'globals': {},
        'filters': {},
        'environment': {},
        'extension': '.html'
    },
    # Request pre-processing pipeline (applied in order).
    'preprocessors': [
        'handle_query_string',
        'form_url_decoder',
        'form_json_decoder',
        'form_multipart_decoder',
    ],
    # Response post-processing pipeline (applied in order).
    'postprocessors': [
        'handle_head_request',
        'add_last_modified',
        'add_date'
    ],
    'hooks': [],
    'init_hooks': [],
    'timezone': 'America/Los_Angeles',
    # Logger driver selection plus per-driver options.
    'logger': {
        'driver': 'stdout',
        'production': {
            'facility': 'user',
            'out': sys.stdout,
            'err': sys.stderr
        },
        'stdout': {
            'out': sys.stdout,
            'err': sys.stderr
        },
        'null': {}
    },
    # URL prefix -> filesystem path map for static assets.
    'static_map': {
        '/static': 'static',
        '/static/frame': os.path.join(__frame_path, 'static')
    },
    'frame': {
        'path': __frame_path,
        'version': '0.2a',
    },
    'http_server': {
        'num_workers': 10,
    },
    # Headers attached to every response by default.
    'response': {
        'default_headers': {
            'Content-Type': 'text/html',
            'Server': 'Frame/0.2a',
        }
    },
    # JSON serialisation hook used by the jsonify helper.
    'jsonify': {
        'encoder': json.JSONEncoder
    }
})
def getport(uid):
    """Fetch one port document by uid as a DotDict.

    :return: DotDict of the document, or {} when absent or on db error.
    """
    try:
        doc = db.ports.find_one({"uid": uid})
        # BUGFIX: find_one returns None on a miss; DotDict(None) raised,
        # so every miss produced a noisy traceback via the bare except.
        return DotDict(doc) if doc else {}
    except Exception:
        traceback.print_exc()
        return {}
# SNMP OID shortcuts, keyed by MIB object name.
oid = DotDict({
    # IP-MIB ipAddrTable: per-interface IP address information.
    "ipAdEntAddr": ".1.3.6.1.2.1.4.20.1.1",
    "ipAdEntIfIndex": ".1.3.6.1.2.1.4.20.1.2",
    "ipAdEntNetMask": ".1.3.6.1.2.1.4.20.1.3",
    "ipAdEntBcastAddr": ".1.3.6.1.2.1.4.20.1.4",
    "ipAdEntReasmMaxSize": ".1.3.6.1.2.1.4.20.1.5",

    # IF-MIB ifTable: basic per-interface status and 32-bit counters.
    "ifIndex": ".1.3.6.1.2.1.2.2.1.1",
    "ifDescr": ".1.3.6.1.2.1.2.2.1.2",
    "ifType": ".1.3.6.1.2.1.2.2.1.3",
    "ifMtu": ".1.3.6.1.2.1.2.2.1.4",
    "ifSpeed": ".1.3.6.1.2.1.2.2.1.5",
    "ifPhysAddress": ".1.3.6.1.2.1.2.2.1.6",
    "ifAdminStatus": ".1.3.6.1.2.1.2.2.1.7",
    "ifOperStatus": ".1.3.6.1.2.1.2.2.1.8",
    "ifLastChange": ".1.3.6.1.2.1.2.2.1.9",
    "ifInOctets": ".1.3.6.1.2.1.2.2.1.10",
    "ifInUcastPkts": ".1.3.6.1.2.1.2.2.1.11",
    "ifInNUcastPkts": ".1.3.6.1.2.1.2.2.1.12",
    "ifInDiscards": ".1.3.6.1.2.1.2.2.1.13",
    "ifInErrors": ".1.3.6.1.2.1.2.2.1.14",
    "ifInUnknownProtos": ".1.3.6.1.2.1.2.2.1.15",
    "ifOutOctets": ".1.3.6.1.2.1.2.2.1.16",
    "ifOutUcastPkts": ".1.3.6.1.2.1.2.2.1.17",
    "ifOutNUcastPkts": ".1.3.6.1.2.1.2.2.1.18",
    "ifOutDiscards": ".1.3.6.1.2.1.2.2.1.19",
    "ifOutErrors": ".1.3.6.1.2.1.2.2.1.20",
    "ifOutQLen": ".1.3.6.1.2.1.2.2.1.21",
    "ifSpecific": ".1.3.6.1.2.1.2.2.1.22",

    # IF-MIB ifXTable: extended attributes and 64-bit (HC) counters.
    "ifName": ".1.3.6.1.2.1.31.1.1.1.1",
    "ifInMulticastPkts": ".1.3.6.1.2.1.31.1.1.1.2",
    "ifInBroadcastPkts": ".1.3.6.1.2.1.31.1.1.1.3",
    "ifOutMulticastPkts": ".1.3.6.1.2.1.31.1.1.1.4",
    "ifOutBroadcastPkts": ".1.3.6.1.2.1.31.1.1.1.5",
    "ifHCInOctets": ".1.3.6.1.2.1.31.1.1.1.6",
    "ifHCInUcastPkts": ".1.3.6.1.2.1.31.1.1.1.7",
    "ifHCInMulticastPkts": ".1.3.6.1.2.1.31.1.1.1.8",
    "ifHCInBroadcastPkts": ".1.3.6.1.2.1.31.1.1.1.9",
    "ifHCOutOctets": ".1.3.6.1.2.1.31.1.1.1.10",
    "ifHCOutUcastPkts": ".1.3.6.1.2.1.31.1.1.1.11",
    "ifHCOutMulticastPkts": ".1.3.6.1.2.1.31.1.1.1.12",
    "ifHCOutBroadcastPkts": ".1.3.6.1.2.1.31.1.1.1.13",
    "ifLinkUpDownTrapEnable": ".1.3.6.1.2.1.31.1.1.1.14",
    "ifHighSpeed": ".1.3.6.1.2.1.31.1.1.1.15",
    "ifPromiscuousMode": ".1.3.6.1.2.1.31.1.1.1.16",
    "ifConnectorPresent": ".1.3.6.1.2.1.31.1.1.1.17",
    "ifAlias": ".1.3.6.1.2.1.31.1.1.1.18",
    "ifCounterDiscontinuityTime": ".1.3.6.1.2.1.31.1.1.1.19",

    # HUAWEI-MPLS-EXTEND-MIB
    "hwMplsTunnelStatisticsTunnelIndex": ".1.3.6.1.4.1.2011.5.25.121.1.14.1.1",
    "hwMplsTunnelStatisticsIngressLSRId": ".1.3.6.1.4.1.2011.5.25.121.1.14.1.2",
    "hwMplsTunnelStatisticsEgressLSRId": ".1.3.6.1.4.1.2011.5.25.121.1.14.1.3",
    "hwMplsTunnelStatisticsHCInOctets": ".1.3.6.1.4.1.2011.5.25.121.1.14.1.4",
    "hwMplsTunnelStatisticsHCOutOctets": ".1.3.6.1.4.1.2011.5.25.121.1.14.1.5",
})
def rec2dic(routerip, rec):
    """Convert one raw NetFlow record into a flow dict.

    :param routerip: address the NetFlow packet came from (the exporting
        port's address, not necessarily the router loopback)
    :param rec: parsed NetFlow record exposing per-field attributes
    :return: (dic, None) on success; (None, reason) when the record is
        skipped or an error occurs
    """
    try:
        # Reassemble dotted-quad addresses from their four byte fields.
        src_addr = "%d.%d.%d.%d" % (rec.src_addr_a, rec.src_addr_b,
                                    rec.src_addr_c, rec.src_addr_d)
        nexthop = "%d.%d.%d.%d" % (rec.nexthop_a, rec.nexthop_b,
                                   rec.nexthop_c, rec.nexthop_d)
        dst_addr = "%d.%d.%d.%d" % (rec.dst_addr_a, rec.dst_addr_b,
                                    rec.dst_addr_c, rec.dst_addr_d)
        in_if = rec.in_if
        out_if = rec.out_if
        packets = rec.packets
        octets = rec.octets
        first = rec.first
        last = rec.last
        src_port = rec.src_port
        dst_port = rec.dst_port
        # tcp_flags = rec.tcp_flags
        # ip_proto = rec.ip_proto
        # tos = rec.tos
        # src_as = rec.src_as
        # dst_as = rec.dst_as
        # src_mask = rec.src_mask
        # dst_mask = rec.dst_mask

        # XXX: Skip if nextHop is 0.0.0.0
        if nexthop == "0.0.0.0":
            klog.w("Skip when nextHop is 0.0.0.0: (%s)" % str(rec))
            return None, "nextHop is 0.0.0.0"

        # XXX: Inf is the port send netflow package
        # routerip is not must the loopback address
        inf = spi.scget("", "ipaddr", routerip)
        if not inf:
            klog.e("ERR: Not found:", routerip)
            return None, "Loopback not exists"
        loopback = inf["__loopback__"]

        # NOTE(review): nested attribute assignments (dic.loopback.cur)
        # rely on DotDict auto-creating intermediate levels -- confirm.
        dic = DotDict()
        dic.loopback.cur = loopback
        #
        # Flow Info
        #
        dic.addr.src = src_addr
        dic.addr.dst = dst_addr
        dic.port.nexthop = nexthop
        tmp = spi.scget("", "ipaddr", nexthop)
        if tmp:
            dic.loopback.nxt = tmp["__loopback__"]
        else:
            dic.loopback.nxt = "<%s: NotFound>" % (nexthop)
            klog.e("NotFound: nexthop: ", nexthop)
            return None, "nexthop not exists"
        dic["bytes"] = octets
        # Map the in/out ifIndex values to port addresses via the cache;
        # fall back to a placeholder string when the port is unknown.
        tmp = spi.scget(loopback, "ifindex", out_if)
        if tmp:
            dic.port.output = tmp["__ipaddr__"]
        else:
            dic.port.output = "<%s@%s>" % (out_if, loopback)
            klog.e("NotFound: %s@%s" % (out_if, loopback))
        tmp = spi.scget(loopback, "ifindex", in_if)
        if tmp:
            dic.port.input = tmp["__ipaddr__"]
        else:
            dic.port.input = "<%s@%s>" % (in_if, loopback)
            klog.e("NotFound: %s@%s" % (in_if, loopback))
        dic["_id"] = "{loopback.cur}::{addr.src}_to_{addr.dst}".format(
            **dic)
        # NOTE(review): first/last appear to be millisecond timestamps
        # (the bps formula multiplies by 1000) -- confirm the units.
        diff = last - first
        bps = 8.0 * int(dic["bytes"]) * conf.SAMPLE_RATE / diff * 1000
        dic["bps"] = int(bps)
        dic.time.last = last
        dic.time.first = first
        dic.time.log = time.time()
        return dic, None
    except:
        klog.e(traceback.format_exc())
        klog.e("Except rec:", varfmt(rec, color=True))
        return None, "Except: %s" % traceback.format_exc()
# NOTE(review): the statements below continue odic(indic) -- its `def`
# line sits immediately above this span (cf. the identical helper
# elsewhere in the project); they build the response skeleton.
    odic = DotDict()
    odic.response = indic.request
    odic.trans_id = indic.trans_id
    odic.ts = time.strftime("%Y%m%d%H%M%S")
    odic.result = DotDict()
    odic.err_code = 0
    odic.msg = None
    return odic


# request_name <> func
g_cmdmap = DotDict()


def cmd_default(calldic=None):
    """Fallback handler for request names with no registered command."""
    return "Bad request '%s'" % calldic.request


g_cmdmap.default = cmd_default


@post("/link/links")
def docmd_ms_link_links():
    """HTTP entry point: decode the JSON body and dispatch on its
    "request" field via g_cmdmap."""
    calldic = idic()
    klog.d(varfmt(calldic, "calldic"))
    # NOTE(review): the fallback value is the *string* "default", which
    # is not callable -- presumably g_cmdmap.get resolves it to the
    # registered cmd_default entry; confirm DotDict.get semantics.
    return g_cmdmap.get(calldic.request, "default")(calldic)
# Verbose logging so training progress is visible on the console.
logging.basicConfig(level=logging.INFO)

import mxnet as mx
from mxnet import nd
import mxnet.gluon as gluon
from mxutils import get_shape
from pointcnn import PointCNN, get_indices, get_xforms, augment, custom_metric, get_loss_sym
from dotdict import DotDict
import h5py
import collections
import data_utils

########################### Settings ###############################
# Training configuration for the PointCNN classifier.
setting = DotDict()
setting.num_class = 10        # number of output classes
setting.sample_num = 160      # points sampled per cloud
setting.batch_size = 32
setting.num_epochs = 2048
setting.jitter = 0.01         # training-time point jitter
setting.jitter_val = 0.01     # validation-time point jitter
# Rotation augmentation ranges: presumably [x, y, z, distribution] with
# 'g' gaussian / 'u' uniform -- confirm against get_xforms.
setting.rotation_range = [0, math.pi / 18, 0, 'g']
setting.rotation_range_val = [0, 0, 0, 'u']
setting.order = 'rxyz'        # rotation axis order
def _fmt_byte_sizes(count):
    """Render *count* in B/K/M/G units for the debug fields."""
    b = sc.tos(count, "b", False, 3)
    k = sc.tos(count, "k", True, 3)
    m = sc.tos(count, "m", True, 3)
    g = sc.tos(count, "g", True, 3)
    return "%sB or %sK or %sM or %sG Bytes" % (b, k, m, g)


def netusage(asc=True):
    """Compute per-port bandwidth utilization across all known routers.

    Utilization is the byte delta between the last two runtime samples
    divided by the elapsed time and the port's nominal bandwidth.

    :param asc: sort ascending by utilization when True, else descending
    :return: list of DotDict records (ports with incomplete samples are
        skipped, best effort)
    """
    out = []
    for r in g_routers.values():
        for p in r.ports.values():
            klog.d(varfmt(p, "NetUsage", True))
            try:
                d = DotDict()
                ipaddr = p.get("__ipaddr__")
                dbport = db.ports.find_one({"ip_str": ipaddr})
                if not dbport:
                    klog.e("Port (%s) not found" % ipaddr)
                    if not conf.NETUSE_DEBUG:
                        continue
                else:
                    d.port_uid = dbport.get("uid")
                    d.if_name = dbport.get("if_name")
                    d["__obj_db__"] = dbport
                # Byte delta between the two runtime SNMP samples.
                new = int(p.rti.new.ifHCOutOctets)
                old = int(p.rti.old.ifHCOutOctets)
                diff_bytes = new - old
                diff_seconds = p.rti.new.time - p.rti.old.time
                # ifHighSpeed is in Mbit/s; convert to bytes/s.
                bw_in_bytes = int(p.ifHighSpeed) * 1000000 / 8
                d.utilization = 100.0 * diff_bytes / diff_seconds / bw_in_bytes
                d.__diff_seconds = diff_seconds
                d.__diff_size = _fmt_byte_sizes(diff_bytes)
                d.__bandwidth = _fmt_byte_sizes(bw_in_bytes)
                d.ip = p.__ipaddr__
                d.loopback = p.__loopback__
                # Manual port naming overrides, when configured.
                setp = g_setport_fr_ip.get(d.ip)
                if setp:
                    d.port_uid = setp.uid
                    d.if_name = setp.if_name
                out.append(d)
            except Exception:
                # Best effort: ports lacking old/new samples are skipped.
                continue
    # BUGFIX: Python 3's sorted() no longer accepts a cmp function; a key
    # function gives the same ordering on both Python 2 and 3.
    return sorted(out, key=lambda x: x.utilization, reverse=not asc)
# Default framework configuration; applications override entries as needed.
config = DotDict({
    # Core application behaviour.
    'application': {
        'name': 'Frame (%s)' % __app_name,
        'strip_trailing_slash': True,
        'dispatcher': 'routes',
        'debug': False,
    },
    # Template discovery and rendering options.
    'templates': {
        'directory': 'templates',
        'loaders': [PackageLoader('frame', 'templates')],
        'globals': {},
        'filters': {},
        'environment': {},
        'extension': '.html'
    },
    # Request pre-processing pipeline (applied in order).
    'preprocessors': [
        'handle_query_string',
        'form_url_decoder',
        'form_json_decoder',
        'form_multipart_decoder',
    ],
    # Response post-processing pipeline (applied in order).
    'postprocessors': ['handle_head_request', 'add_last_modified', 'add_date'],
    'hooks': [],
    'init_hooks': [],
    'timezone': 'America/Los_Angeles',
    # Logger driver selection plus per-driver options.
    'logger': {
        'driver': 'stdout',
        'production': {
            'facility': 'user',
            'out': sys.stdout,
            'err': sys.stderr
        },
        'stdout': {
            'out': sys.stdout,
            'err': sys.stderr
        },
        'null': {}
    },
    # URL prefix -> filesystem path map for static assets.
    'static_map': {
        '/static': 'static',
        '/static/frame': os.path.join(__frame_path, 'static')
    },
    'frame': {
        'path': __frame_path,
        'version': '0.2a',
    },
    'http_server': {
        'num_workers': 10,
    },
    # Headers attached to every response by default.
    'response': {
        'default_headers': {
            'Content-Type': 'text/html',
            'Server': 'Frame/0.2a',
        }
    },
    # JSON serialisation hook used by the jsonify helper.
    'jsonify': {
        'encoder': json.JSONEncoder
    }
})
def load_static(self, host, comm, vern):
    '''Collector information according to router's information

    Walks the router's IP address table, collects static OID values for
    every administratively-up port, caches the records and mirrors them
    into the devs collection.

    :param host: router loopback address
    :param comm: SNMP community string
    :param vern: SNMP version string
    '''
    downports = []
    lines = sop.walk(host, comm, vern, oid_ipAdEntIfIndex)
    for line in lines:
        port_ipaddr, _, port_index = sop.splitline(line, oid_ipAdEntIfIndex)
        if not port_ipaddr:
            continue
        # Skip administratively down ports (ifAdminStatus 1 == up).
        oid_ifAdminStatus = oid_ifAdminStatusBase + str(port_index)
        _, _, state = sop.get(host, comm, vern, oid_ifAdminStatus)
        # NOTE(review): if sop.get returns the value as a string, the
        # integer comparison below is always true -- confirm.
        if state != 1:
            downports.append(port_ipaddr)
            continue
        #
        # Get PortInfo
        #
        # XXX: ifindex number maybe same across different routers
        hashkey = "%s#%s" % (host, port_index)
        inf = self.portInfos.get(hashkey)
        if not inf:
            inf = DotDict()
            self.portInfos[hashkey] = inf
            klog.d("New portInfo: host:%s, ipaddr:%s" % (host, port_ipaddr))
        else:
            klog.d("Found portInfo: host:%s, ipaddr:%s" % (host, port_ipaddr))
        inf["__loopback__"] = host
        inf["__snmp_comm__"] = comm
        inf["__snmp_vern__"] = vern
        inf["__ipaddr__"] = port_ipaddr
        # Poll every configured static OID for this port index.
        for oid_base, name in self.oid_static.items():
            oid = oid_base + "." + str(port_index)
            _, _, value = sop.get(host, comm, vern, oid)
            inf[name] = value
        #
        # Shortcuts
        #
        # self.save_ipaddr(port_ipaddr, inf)
        self.save_ifindex(host, inf["ifIndex"], inf)
        # Save to db (upsert keyed on "host#ifindex")
        db.devs.replace_one({"_id": hashkey}, dict(inf), True)
        #
        # Mark that this router has collected the static information
        #
        hashkeys = self.readyRouters.get(host, set())
        hashkeys.add(hashkey)
        self.readyRouters[host] = hashkeys
    if downports:
        klog.e("(%s) DownPorts: %s" % (host, downports))
from __future__ import print_function
from dotdict import DotDict

# Hyper-parameter bundle shared across the training scripts.
hp = DotDict()

# Training hyper parameters
hp.train_epoch_size = 30000
hp.eval_epoch_size = 3000
hp.num_epoch = 20
hp.learning_rate = 0.04
hp.momentum = 0.9
hp.bn_mom = 0.9          # batch-norm momentum
hp.workspace = 512       # MXNet workspace size (MB)
hp.loss_type = "warpctc"  # ["warpctc" "ctc"]

hp.batch_size = 1024
hp.num_classes = 5990  # 0 as blank, 1~xxxx as labels
hp.img_width = 280
hp.img_height = 32

# LSTM hyper parameters
hp.num_hidden = 100
hp.num_lstm_layer = 2
hp.seq_length = 35   # presumably the CTC output length -- confirm
hp.num_label = 10    # presumably the max label length -- confirm
hp.dropout = 0.5