def export_all_tracks(summary, detail, output_file):
    """Combine a summary JSON file into a detail JSON file and export the result.

    The detail file is loaded first; the summary file is then merged into it
    conservatively (detail values win on conflict) before export.
    """
    with open(detail, 'r') as detail_fp:
        combined = json.load(detail_fp)
    with open(summary, 'r') as summary_fp:
        summary_data = json.load(summary_fp)
    # Merge mutates `combined` in place; detail entries take precedence.
    conservative_merger.merge(combined, summary_data)
    export_activity(parse_activity_data(combined), output_file)
def update_default_rules(self):
    """Combine application and global security group ingress rules.

    Returns the merged ingress rules with self-references resolved.
    """
    import copy

    app_ingress = self.properties['security_group']['ingress']
    # deepmerge-style mergers mutate their first argument in place.  Merge
    # into a deep copy so the module-level DEFAULT_SECURITYGROUP_RULES is
    # not polluted with one application's rules across calls.
    defaults = copy.deepcopy(DEFAULT_SECURITYGROUP_RULES)
    ingress = conservative_merger.merge(defaults, app_ingress)
    resolved_ingress = self.resolve_self_references(ingress)
    # NOTE(review): logs the pre-resolution rules, as the original did.
    self.log.info('Updated default rules:\n%s', ingress)
    return resolved_ingress
def get_openapi(
    *,
    title: str,
    version: str,
    openapi_version: str = "3.0.2",
    description: Optional[str] = None,
    routes: Sequence[BaseRoute],
    tags: Optional[List[Dict[str, Any]]] = None,
    servers: Optional[List[Dict[str, Union[str, Any]]]] = None,
) -> Dict:
    """Build an OpenAPI document for the given routes.

    Collects per-route path items, security schemes and model definitions,
    appends the shared ``geojson`` schemas, and returns the encoded
    ``OpenAPI`` document as a plain dict.
    """
    info = {"title": title, "version": version}
    if description:
        info["description"] = description
    output: Dict[str, Any] = {"openapi": openapi_version, "info": info}
    if servers:
        output["servers"] = servers
    components: Dict[str, Dict] = {}
    paths: Dict[str, Dict] = {}
    flat_models = get_flat_models_from_routes(routes)
    # type: ignore comments: mypy errors until enum schemas are released
    model_name_map = get_model_name_map(flat_models)  # type: ignore
    definitions = get_model_definitions(
        flat_models=flat_models, model_name_map=model_name_map  # type: ignore
    )
    for route in routes:
        if not isinstance(route, routing.APIRoute):
            continue
        result = get_openapi_path(route=route, model_name_map=model_name_map)
        if not result:
            continue
        path, security_schemes, path_definitions = result
        if path:
            # Conservative merge instead of dict.update: operations already
            # registered for this path format win over later duplicates.
            old_path = paths.get(route.path_format, {})
            paths[route.path_format] = conservative_merger.merge(old_path, path)
        if security_schemes:
            components.setdefault("securitySchemes", {}).update(security_schemes)
        if path_definitions:
            definitions.update(path_definitions)
    if definitions:
        # Fold in the shared GeoJSON schemas and emit schemas sorted by name
        # for a stable document.
        definitions.update(geojson)
        components["schemas"] = {k: definitions[k] for k in sorted(definitions)}
    if components:
        output["components"] = components
    output["paths"] = paths
    if tags:
        output["tags"] = tags
    return jsonable_encoder(OpenAPI(**output), by_alias=True, exclude_none=True)
def convert_track_from_json(summary, detail, output_file):
    """Merge the summary dict into the detail dict and export the activity.

    ``detail`` is mutated in place by the merge; detail values win on
    conflict.
    """
    conservative_merger.merge(detail, summary)
    activity = parse_activity_data(detail)
    export_activity(activity, output_file)
def test_use_existing(base, nxt, expected):
    """Conservatively merging *nxt* into *base* must yield *expected*."""
    merged = conservative_merger.merge(base, nxt)
    assert merged == expected
def parseconfig(cfg):
    """Parse a YAML configuration string/stream into the server config dict.

    Raises ``Exception`` when the mandatory ``files`` entry is missing or
    empty.
    """
    import yaml

    # safe_load: plain yaml.load without a Loader is deprecated and can
    # execute arbitrary Python via YAML tags on untrusted input.
    raw_config = yaml.safe_load(cfg)
    port, addr = utils.parseaddr(raw_config.get('bind', 'localhost:8080'))
    config = {
        'port': port,
        'addr': addr,
        'debug': raw_config.get('debug', False),
        'commands': raw_config.get('commands', ['tail', 'grep', 'awk']),
        'allow-transfers': raw_config.get('allow-transfers', False),
        'follow-names': raw_config.get('follow-names', False),
        'relative-root': raw_config.get('relative-root', '/'),
        'http-auth': raw_config.get('http-auth', False),
        'users': raw_config.get('users', {}),
        'wrap-lines': raw_config.get('wrap-lines', True),
        'tail-lines': raw_config.get('tail-lines', 10),
        'extra-files-dir': raw_config.get('extra-files-dir', '/etc/tailon/files.d/'),
    }

    # Collect extra 'files' entries from drop-in fragments, guarding against
    # the (common) case where the default directory does not exist.
    extra_files = []
    if os.path.isdir(config['extra-files-dir']):
        for name in os.listdir(config['extra-files-dir']):
            if name.endswith(('.yaml', '.yml')):
                full_path = os.path.join(config['extra-files-dir'], name)
                with open(full_path, 'r') as f:
                    _config = yaml.safe_load(f.read())
                # Fragments without a 'files' key previously raised
                # TypeError from extend(None).
                extra_files.extend(_config.get('files') or [])

    if 'files' not in raw_config or not len(raw_config['files']):
        raise Exception('missing or empty "files" config entry')

    files = config['files'] = collections.OrderedDict()
    files['__ungrouped__'] = []

    def helper(el, group='__ungrouped__', indict=False):
        # Walk the 'files' entries: dict entries are named groups (at most
        # one level deep), everything else is a glob pattern.
        for paths_or_group in el:
            if isinstance(paths_or_group, dict):
                if indict:
                    raise RuntimeError(
                        'more than two sub-levels under "files"')
                group_name, j = list(paths_or_group.items())[0]
                helper(j, group_name, True)
                continue
            for path in glob.glob(paths_or_group):
                if not os.access(path, os.R_OK):
                    log.info('skipping unreadable file: %r', path)
                    continue
                d = files.setdefault(group, [])
                d.append(path)

    raw_config['files'] = conservative_merger.merge(raw_config['files'],
                                                    extra_files)
    helper(raw_config['files'])
    return config
def parseconfig(cfg):
    """Parse a YAML configuration string/stream into the server config dict.

    Supports grouped file entries, per-group logins, and (for groups named
    by an IP address) listing files over the network via ``file_utils``.
    Raises ``Exception`` when the mandatory ``files`` entry is missing or
    empty.
    """
    import yaml

    raw_config = yaml.safe_load(cfg)
    print('raw-config: {}'.format(raw_config))
    port, addr = utils.parseaddr(raw_config.get('bind', 'localhost:8080'))
    config = {
        'port': port,
        'addr': addr,
        'debug': raw_config.get('debug', False),
        'log-file': raw_config.get('log-file'),
        'commands': raw_config.get('commands', ['ctail', 'tail', 'grep', 'awk']),
        'allow-transfers': raw_config.get('allow-transfers', False),
        'follow-names': raw_config.get('follow-names', False),
        'relative-root': raw_config.get('relative-root', '/'),
        'http-auth': raw_config.get('http-auth', False),
        'users': raw_config.get('users', {}),
        'wrap-lines': raw_config.get('wrap-lines', True),
        'tail-lines': raw_config.get('tail-lines', 10),
        'extra-files-dir': raw_config.get('extra-files-dir', '/etc/tailon/files.d/'),
    }

    # Collect extra 'files' entries from drop-in fragments, if the
    # directory exists.
    extra_files = []
    if os.path.isdir(config['extra-files-dir']):
        for file in os.listdir(config['extra-files-dir']):
            if file.endswith('.yaml') or file.endswith('.yml'):
                full_path = os.path.join(config['extra-files-dir'], file)
                with open(full_path, 'r') as f:
                    # safe_load: bare yaml.load without a Loader is
                    # deprecated and unsafe on untrusted fragments.
                    _config = yaml.safe_load(f.read())
                    # Fragments without a 'files' key previously raised
                    # TypeError from extend(None).
                    extra_files.extend(_config.get('files') or [])

    if 'files' not in raw_config or not len(raw_config['files']):
        raise Exception('missing or empty "files" config entry')

    files = config['files'] = collections.OrderedDict()
    files['__ungrouped__'] = []

    def helper(el, group='__ungrouped__', indict=False):
        '''
        Convert directories to lists of files and populate the 'files'
        dictionary with the '__ungrouped__' file list or a 'group' file
        list.
        '''
        # file_utils is module-global so a login established for a group
        # is visible to recursive invocations that list its paths.
        global file_utils
        for paths_or_group in el:
            if isinstance(paths_or_group, dict):  # group
                if indict:
                    raise RuntimeError(
                        'more than two sub-levels under "files"')
                group_name, j = list(paths_or_group.items())[0]
                log.info('group: {}, j: {}'.format(group, j))
                if isinstance(j, dict):
                    user = j.get('user')
                    password = j.get('password')
                    if user and password:
                        config.setdefault('logins', {})
                        config['logins'][group_name] = (user, password)
                        file_utils = utils.FileUtils(login=(user, password),
                                                     use_directory_cache=True)
                    helper(j.get('files', []), group_name, True)
                else:
                    helper(j, group_name, True)
                continue
            elif utils.is_ipaddress(group):  # ip address group
                for path in file_utils.listdir_netpath(group, paths_or_group,
                                                       files_only=True):
                    d = files.setdefault(group, [])
                    if paths_or_group not in d and paths_or_group.endswith('/'):
                        d.append(paths_or_group)  # add directory path
                    log.info('adding: {}'.format(path))
                    d.append(path)
            else:  # no group - it's a pathname
                for path in glob.glob(paths_or_group):
                    if not os.access(path, os.R_OK):
                        log.info('skipping unreadable file: %r', path)
                        continue
                    d = files.setdefault(group, [])
                    d.append(path)

    raw_config['files'] = conservative_merger.merge(raw_config['files'],
                                                    extra_files)
    helper(raw_config['files'])
    return config