def get_deps(self):
    full_dep_list = tree()
    deps = self.json.get('dependencies', None)
    if deps:
        for key in deps.keys():
            full_dep_list['%s@%s' % (key, deps[key])] = tree()
    return full_dep_list
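# Every snippet in this section relies on a tree() autovivification
# helper whose definition is not shown here. A minimal sketch, assuming
# the conventional recursive-defaultdict idiom (an assumption, not the
# project's confirmed implementation):
from collections import defaultdict

def tree():
    # Accessing a missing key creates a fresh nested tree, so
    # deps['a']['b']['c'] works without any prior setup.
    return defaultdict(tree)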
def test_parse_npm_shrinkwrap_json(self):
    mani = Manifest(relative_path('shrinkwrap_json', 'npm-shrinkwrap.json'))
    expected = tree()
    buffer_str = u'buffer===4.4.0===https://registry.npmjs.org/buffer/-/buffer-4.4.0.tgz'
    base64_js_str = u'base64-js===1.0.2===https://registry.npmjs.org/base64-js/-/base64-js-1.0.2.tgz'
    ieee754_str = u'ieee754===1.1.6===https://registry.npmjs.org/ieee754/-/ieee754-1.1.6.tgz'
    isarray_str = u'isarray===1.0.0===https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz'
    expected[buffer_str][base64_js_str] = tree()
    expected[buffer_str][ieee754_str] = tree()
    expected[buffer_str][isarray_str] = tree()
    self.assertEqual(mani.deps, expected)
def append_dependencies_to_tree(deps, dep_json):
    for key in dep_json.keys():
        value = dep_json.get(key, None)
        sub_deps = tree()
        version = value.get('version', None)
        resolved_url = value.get('resolved', None)
        sub_dep_json = value.get('dependencies', {})
        if not resolved_url:
            resolved_url = Manifest.calculate_resolved_url(key, version)
        if type(version) not in [str, unicode]:
            raise RuntimeError(
                'Invalid value %s for %s.version, expected str' % (version, key))
        if type(sub_dep_json) is not dict:
            raise RuntimeError(
                'Invalid value %s for %s.dependencies, expected dict' % (sub_dep_json, key))
        if resolved_url[0:4] == 'git+':
            # Get commit sha from URL string
            version = resolved_url.split('#')[-1]
        deps['%s===%s===%s' % (key, version, resolved_url)] = sub_deps
        Manifest.append_dependencies_to_tree(sub_deps, sub_dep_json)
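# Manifest.calculate_resolved_url is called above but not defined in
# this section. A minimal sketch consistent with the registry URLs the
# tests in this section expect (an assumption about the real helper):
def calculate_resolved_url(name, version):
    # Default npm registry tarball layout: <name>/-/<name>-<version>.tgz
    return 'https://registry.npmjs.org/%s/-/%s-%s.tgz' % (name, name, version)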
def build_dependency_tree(module_path):
    """
    Given the path of a node module, calculate a dependency tree.
    Each node in the tree represents a dependency and contains the
    name and version of that dependency.

    This should be run on a "fully materialized" module path, i.e. a
    path containing an npm module which has been "installed" and
    contains its dependencies on disk in descendant node_modules dirs.
    """
    dep_map = {}
    dep_map[module_path] = tree()

    def get_deps_tree(check_path):
        if check_path == '/':
            raise RuntimeError('Unable to locate dep tree')
        if dep_map.get(check_path, None) is not None:
            return dep_map[check_path]
        return get_deps_tree(os.path.dirname(check_path))

    for root, _, _ in os.walk(module_path):
        deps = get_deps_tree(root)
        try:
            stat = Npm.stat_module(root)
            new_deps = deps['%s@%s' % stat]
            dep_map[root] = new_deps
        except IOError:
            Log.verbose('%s is not a valid Node module. Skipping' % root)
    return dep_map[module_path]
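# Usage sketch for build_dependency_tree; the path and the resulting
# name@version keys below are invented for illustration and assume
# Npm.stat_module returns a (name, version) tuple:
#   /tmp/pkg/node_modules/a/node_modules/b
# walks to a nested tree along the lines of {'a@1.0.0': {'b@2.0.0': {}}}.
dep_tree = build_dependency_tree('/tmp/pkg')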
def build_dependency_tree(json):
    deps = tree()
    Manifest.append_dependencies_to_tree(deps, json.get('dependencies', {}))
    Manifest.append_dependencies_to_tree(deps, json.get('devDependencies', {}))
    return deps
def __init__(self, json_metro):
    self.json = json_metro
    self.transfer = util.tree()
    for item in json_metro['stations']:
        if 'transitions' in item:
            for trans in item['transitions']:
                # Store transfer time in seconds.
                self.transfer[item['id']][trans['to']] = trans['minutes'] * 60
def end(self):
    self.rasp_variants = util.tree()
    for mr_id in self.tmp_rasp_variants:
        for day_week in self.tmp_rasp_variants[mr_id]:
            # Keep only the variant with the latest start date.
            rv_startdate = sorted(self.tmp_rasp_variants[mr_id][day_week].keys())[-1]
            (srv_id, rv_id) = sorted(self.tmp_rasp_variants[mr_id][day_week][rv_startdate].keys())[-1]
            self.rasp_variants[mr_id][(srv_id, rv_id)] = \
                self.tmp_rasp_variants[mr_id][day_week][rv_startdate][(srv_id, rv_id)]
def __init__(self, stations, json_schedule):
    self.stations = stations
    self.json_schedule = util.tree()
    for item in json_schedule:
        self.json_schedule[item['mr_id']][item['direction']] = {
            'mask': item['mask'],
            'schedule': item['schedule'],
        }
def __init__(self, stations, json_metro):
    self.stations = stations
    self.time = util.tree()
    self.json = json_metro
    self.marshes_json = {}
    for line in json_metro['lines']:
        self.marshes_json[line['id']] = {
            'name': line['ru']['name'],
            'color': line['Color'],
            'subtype': line['subtype'],
        }
        for item in line['stations']:
            # Travel time between stations, in seconds.
            self.time[line['id']][item['from']][item['to']] = item['minutes'] * 60
def test_append_dependencies_to_tree_dicts_invalid_version_json(self):
    dep_json = {'buffer': {'version': False}}
    with self.assertRaises(RuntimeError) as context:
        Manifest.append_dependencies_to_tree(tree(), dep_json)
def synchro(self, file_descriptor):
    self.process_marshes()
    self.process_marshvariants()
    for mr_id in self.marshes:
        self.route = util.tree()
        for mv_id in self.marsh_variants[mr_id]:
            self.process_racecard(mr_id, mv_id)
            self.process_racecoord(mv_id)
        self.form_file(mr_id, file_descriptor)
    self.stations.form_file(file_descriptor)
def expand_graphite_wildcard_metric_name(conn, name, cache_ttl=60):
    """
    :type conn: pyKairosDB.KairosDBConnection
    :param conn: the connection to the database

    :type name: string
    :param name: the graphite-like name which can include ".*." to provide wildcard expansion

    :type cache_ttl: int
    :param cache_ttl: how often to update the cache from KairosDB, in seconds

    :rtype: list
    :return: a list of unicode strings. Each unicode string contains an expanded metric name.

    KairosDB doesn't currently support wildcards, so get all metric
    names and expand them. Currently only ".*." or "*." or ".*"
    expansions are supported; substring expansions aren't supported at
    this time. Graphite-web uses fnmatch or something similar, so
    perhaps this should provide a list and re-use the same
    functionality.

    This function caches the created tree for cache_ttl seconds and
    refreshes it when the cache has aged beyond the cache_ttl.
    """
    if "*" not in name:
        return [u'{0}'.format(name)]
    if "." in name:
        name_list = [u'{0}'.format(n) for n in name.split(".")]
    else:
        name_list = [name]
    ts = expand_graphite_wildcard_metric_name.cache_timestamp
    cache_tree = expand_graphite_wildcard_metric_name.cache_tree
    if ts == 0 or (time.time() - ts > cache_ttl):
        all_metric_name_list = metadata.get_all_metric_names(conn)
        cache_tree = tree()
        _make_graphite_name_cache(cache_tree, all_metric_name_list)
        expand_graphite_wildcard_metric_name.cache_tree = cache_tree
        expand_graphite_wildcard_metric_name.cache_timestamp = time.time()
    if name == "*":  # special case for the root of the tree
        return cache_tree.keys()
    if '*' in name and '.' not in name:
        return [ctk for ctk in cache_tree.keys() if fnmatch.fnmatch(ctk, name)]
    expanded_name_list = util.metric_name_wildcard_expansion(cache_tree, name_list)
    return_list = [".".join(en) for en in expanded_name_list]
    return list(set(return_list))
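# _make_graphite_name_cache is called above but not defined in this
# section. A minimal sketch, assuming it simply indexes each dotted
# metric name into the autovivifying cache tree:
def _make_graphite_name_cache(cache_tree, metric_name_list):
    # "a.b.c" becomes cache_tree['a']['b']['c'], giving the wildcard
    # expansion code one tree level per name component.
    for metric_name in metric_name_list:
        node = cache_tree
        for part in metric_name.split('.'):
            node = node[part]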
def test_append_dependencies_to_tree_dicts(self):
    dep_json = {
        'buffer': {
            'version': '4.0.0',
            'dependencies': {
                'bar': {
                    'version': '0.0.1',
                    'dependencies': {
                        'biz': {'version': '0.0.2'},
                        'fiz': {
                            'version': '9.0.0',
                            'from': 'git+http://github.com/fiz',
                            'resolved': 'git+http://github.com/fiz#abc123'
                        }
                    },
                }
            }
        },
        'io': {'version': '0.0.1'}
    }
    deps = tree()
    Manifest.append_dependencies_to_tree(deps, dep_json)
    expected = tree()
    buffer_str = 'buffer===4.0.0===https://registry.npmjs.org/buffer/-/buffer-4.0.0.tgz'
    bar_str = 'bar===0.0.1===https://registry.npmjs.org/bar/-/bar-0.0.1.tgz'
    biz_str = 'biz===0.0.2===https://registry.npmjs.org/biz/-/biz-0.0.2.tgz'
    io_str = 'io===0.0.1===https://registry.npmjs.org/io/-/io-0.0.1.tgz'
    fiz_str = 'fiz===abc123===git+http://github.com/fiz#abc123'
    expected[buffer_str][bar_str][biz_str] = tree()
    expected[buffer_str][bar_str][fiz_str] = tree()
    expected[io_str] = tree()
    self.assertEqual(deps, expected)
def end(self):
    self.rasp_time = util.tree()
    for direction in self.race_card:
        self.rasp_time[direction] = []
        for item in self.race_card[direction]:
            self.rasp_time[direction].append({'st_id': item['st_id'], 'time': []})
    for direction in self.tmp_rasp_time:
        for gr_id in sorted(self.tmp_rasp_time[direction].keys()):
            for rt_racenum in sorted(self.tmp_rasp_time[direction][gr_id].keys()):
                race = self.tmp_rasp_time[direction][gr_id][rt_racenum]
                for index_st, rt_orderby in enumerate(sorted(race.keys())):
                    rt_time = race[rt_orderby]['time']
                    st_id = race[rt_orderby]['st_id']
                    self.find_best_place(direction, index_st, st_id, rt_time)
    for direction in self.rasp_time:
        for item in self.rasp_time[direction]:
            item['time'] = sorted(item['time'])
def return_single_df_data(
    index: str,
    df: pd.DataFrame,
    file_schema: list,
):
    filter_result = tree()
    prefix, storage_class = index.split('+')
    t_node_list = [f'{prefix}-{storage_class}']
    storage_size = df['Size'].sum()
    storage_num = df['Size'].count()
    filter_result = add_tree(
        filter_result,
        t_node_list + [STORAGE_SIZE],
        storage_size,
    )
    filter_result = add_tree(
        filter_result,
        t_node_list + [STORAGE_NUMBER],
        storage_num,
    )
    if 'IsDeleteMarker' in file_schema:
        # Delete-marker statistics.
        delete_marker_num = df[df['IsDeleteMarker'] == True]['Size'].count()
        filter_result = add_tree(
            filter_result,
            t_node_list + [DEL_MARKER_NUMBER],
            delete_marker_num,
        )
    if 'IsLatest' in file_schema:
        # Historical (non-latest) version statistics.
        hist_df = df[df['IsLatest'] == False]
        hist_ver_num = hist_df['Size'].count()
        hist_ver_size = hist_df['Size'].sum()
        filter_result = add_tree(
            filter_result,
            t_node_list + [HIST_VER_NUMBER],
            hist_ver_num,
        )
        filter_result = add_tree(
            filter_result,
            t_node_list + [HIST_VER_SIZE],
            hist_ver_size,
        )
    return filter_result
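# add_tree is used above but not defined in this section. A minimal
# sketch, assuming it stores a value at the leaf of the given key path
# (an assumption, not the project's confirmed helper):
def add_tree(result, node_list, value):
    # Walk the key path, autovivifying intermediate nodes, then set
    # the value on the final key and hand the tree back.
    node = result
    for key in node_list[:-1]:
        node = node[key]
    node[node_list[-1]] = value
    return result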
def clean(self, data):
    try:  # FIXME
        data = super().clean(data)
    except ValidationError as e:
        if 'comma-separated' in str(e):
            pass
        else:
            raise
    paths = tree()  # FIXME implementation
    for statistic in data:
        statistic, measure = statistic.split(':', 1)
        if '(' in measure:
            dimensions = re.search(r'\(([A-Z0-9_,:|]+)\)', measure)
            measure = measure.split('(')[0]
            if dimensions:
                for dimension in dimensions.group(1).split(','):
                    if ':' in dimension:
                        dimension, value = dimension.split(':')
                        paths[statistic][measure][dimension] = value.split('|')
                    else:
                        # Autovivify the dimension with no values.
                        paths[statistic][measure][dimension]
            else:
                # Autovivify the measure with no dimensions.
                paths[statistic][measure]
    # Sort everything for unique identification.
    paths = {
        skey: {
            mkey: {d: sorted(v) for d, v in sorted(m.items())}
            for mkey, m in sorted(s.items())
        }
        for skey, s in sorted(paths.items())
    }
    return paths
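# Illustration of the parse above; the statistic, measure, and
# dimension names are invented for this example:
#   clean(['Requests:TotalReqs(COUNTRY:US|DE,OS)'])
# returns
#   {'Requests': {'TotalReqs': {'COUNTRY': ['DE', 'US'], 'OS': []}}}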
def end(self):
    self.marsh_variants = util.tree()
    for mr_id in self.tmp_marsh_variants:
        mv_startdate = sorted(self.tmp_marsh_variants[mr_id].keys())[-1]
        mv_id = sorted(self.tmp_marsh_variants[mr_id][mv_startdate].keys())[-1]
        self.marsh_variants[mr_id][mv_id] = self.tmp_marsh_variants[mr_id][mv_startdate][mv_id]
def __init__(self):
    self.tmp_race_coord = util.tree()
def test_append_dependencies_to_tree_dicts_invalid_dep_json(self):
    dep_json = {'buffer': {'version': '0.0.1', 'dependencies': False}}
    with self.assertRaises(RuntimeError) as context:
        Manifest.append_dependencies_to_tree(tree(), dep_json)
def end(self):
    self.race_coord = util.tree()
    for direction in self.tmp_race_coord:
        self.race_coord[direction] = [
            self.tmp_race_coord[direction][k]
            for k in sorted(self.tmp_race_coord[direction].keys())
        ]
def __init__(self, srv_id, rv_id, race_card, logger=None):
    self.tmp_rasp_time = util.tree()
    self.srv_id = srv_id
    self.rv_id = rv_id
    self.race_card = race_card
    self.logger = logger
# Initialize the cache attributes on the function object.
expand_graphite_wildcard_metric_name.cache_tree = tree()
expand_graphite_wildcard_metric_name.cache_timestamp = 0

def leaf_or_branch(conn, name):
    """
    :type conn: pyKairosDB.KairosDBConnection
    :param conn: the connection to the database

    :type name: string
    :param name: The metric name or part of a name that we're checking for

    :rtype: str
    :return: Either the string "leaf" or "branch"

    Graphite wants to know if a name is a "leaf" or a "branch" in the
    metric name hierarchy.
    """
conf = util.Configuration('synchro.conf')
parser = argparse.ArgumentParser(description='Report on the state of routes and telemetry.')
parser.add_argument("--host-redis", dest='host_redis', help='Redis host name, default localhost', default='localhost')
parser.add_argument("--port-redis", dest='port_redis', help='Redis port number, default 6379', type=int, default=6379)
parser.add_argument("--db-redis", dest='db_redis', help='Redis database number, default 0', type=int, default=0)
parser.add_argument("--host-es", dest='host_es', help='Elasticsearch host name, default localhost', default='localhost')
parser.add_argument("--port-es", dest='port_es', help='Elasticsearch port number, default 9200', type=int, default=9200)
parser.add_argument("--only", dest='region', help='Make a report only for the specified region', choices=conf.sections())
args = parser.parse_args()

logger = logging.getLogger('elasticsearch')
logger.addHandler(logging.NullHandler())

route_count = util.tree()
redis_client = redis.StrictRedis(host=args.host_redis, port=args.port_redis, db=args.db_redis)
es_client = Elasticsearch([{'host': args.host_es, 'port': args.port_es}])

if args.region:
    conf.set_section(args.region)
    regions = [{'name': conf.section, 'index': conf.get('name-index'), 'group_code': conf.getint('group-code')}]
    query_telemetry = 'telemetry:tn:%d:*' % conf.getint('group-code')
    query_prediction = 'prediction:tn:route:%d:*' % conf.getint('group-code')
else:
    regions = []
    for region in conf.sections():
        regions.append({
            'name': region,
            'index': conf.conf.get(region, 'name-index'),
            'group_code': conf.conf.getint(region, 'group-code'),
        })
    query_telemetry = 'telemetry:tn:*'
    query_prediction = 'prediction:tn:route:*'
def __init__(self, current_time, redis_client=None, url=None):
    self.tmp_marsh_variants = util.tree()
    self.current_time = current_time
    self.redis_client = redis_client
    self.url = url
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
'''This script prints a report of the changes over a reporting period, by default the past day.'''
import sys
import redis
import argparse
import time
import datetime
import util

util.conf_io()

report = util.tree()

parser = argparse.ArgumentParser(description='Report on changes to the state of routes and stations.')
parser.add_argument("--host-redis", dest='host_redis', help='Redis host name, default localhost', default='localhost')
parser.add_argument("--port-redis", dest='port_redis', help='Redis port number, default 6379', type=int, default=6379)
parser.add_argument("--db-redis", dest='db_redis', help='Redis database number, default 0', type=int, default=0)
parser.add_argument("--start", dest='start',
                    help='Beginning of the reporting period in the format "2013-12-31 12:55:00", '
                         'default is the end of the reporting period minus one day',
                    default='')
parser.add_argument("--finish", dest='finish',
                    help='End of the reporting period in the format "2013-12-31 12:55:00", default is the current time',
                    default=time.strftime("%Y-%m-%d %H:%M:%S"))
args = parser.parse_args()

def sort_routes(item):
    item_split = item.strip().split(' ')
    name = item_split[0].replace('*', '')
    complex_id_split = item_split[-1].split('/')
    mr_id = int(complex_id_split[-2])
    direction = int(complex_id_split[-1])
    return (name, mr_id, direction)