def main(argv=None):
    """CLI entry point: read YAML (from a path or stdin) and pretty-print it.

    Args:
        argv: Optional argument list for testing; ``None`` means use
            ``sys.argv[1:]`` (argparse's own default).
    """
    import argparse
    parser = argparse.ArgumentParser(
        description='Process and dump prettified YAML to stdout.')
    parser.add_argument('path', nargs='?', metavar='path',
        help='Path to YAML to read (default: use stdin).')
    parser.add_argument(
        '-w', '--width', type=int, metavar='chars',
        help='Max line width hint to pass to pyyaml for the dump.'
            ' Only used to format scalars and collections (e.g. lists).')
    # Pass argv straight through: argparse already falls back to
    # sys.argv[1:] when given None. The old `argv or sys.argv[1:]`
    # silently ignored an explicitly-passed empty list.
    opts = parser.parse_args(argv)

    if opts.path:
        # Context manager guarantees the file is closed even if parsing fails.
        with open(opts.path) as src:
            data = yaml.safe_load(src)
    else:
        # Read from stdin without closing it — the old try/finally closed
        # the process-wide sys.stdin stream.
        data = yaml.safe_load(sys.stdin)

    pyaml_kwargs = dict()
    if opts.width:
        pyaml_kwargs['width'] = opts.width
    pyaml.pprint(data, **pyaml_kwargs)
def main():
    """Pretty-print the parsed config, then list every flattened key/value pair."""
    import pyaml

    config = parse_args()

    # Full nested dump first, preserving the config's own key order.
    pyaml.pprint(config, sort_dicts=False)
    print('-' * 80)

    # Then one line per leaf: dotted key path, a tab, and the value.
    for key_path, value in flatten_dict(config):
        dotted = '.'.join(key_path)
        print(f'{dotted}:\t{value}')
def main(argv=None):
    """CLI entry point: read YAML (from a path or stdin) and pretty-print it.

    Args:
        argv: Optional argument list for testing; ``None`` means use
            ``sys.argv[1:]`` (argparse's own default).
    """
    import argparse
    parser = argparse.ArgumentParser(
        description='Process and dump prettified YAML to stdout.')
    parser.add_argument('path', nargs='?', metavar='path',
        help='Path to YAML to read (default: use stdin).')
    # Pass argv straight through: argparse already falls back to
    # sys.argv[1:] when given None. The old `argv or sys.argv[1:]`
    # silently ignored an explicitly-passed empty list.
    opts = parser.parse_args(argv)

    if opts.path:
        # Context manager guarantees the file is closed even if parsing fails.
        with open(opts.path) as src:
            data = yaml.safe_load(src)
    else:
        # Read from stdin without closing it — the old try/finally closed
        # the process-wide sys.stdin stream.
        data = yaml.safe_load(sys.stdin)

    pyaml.pprint(data)
def main(argv=None):
    """CLI entry point: read YAML (from a path or stdin) and pretty-print it.

    Args:
        argv: Optional argument list for testing; ``None`` means use
            ``sys.argv[1:]`` (argparse's own default).
    """
    import argparse
    parser = argparse.ArgumentParser(
        description='Process and dump prettified YAML to stdout.')
    parser.add_argument('path', nargs='?', metavar='path',
        help='Path to YAML to read (default: use stdin).')
    parser.add_argument('-w', '--width', type=int, metavar='chars',
        help='Max line width hint to pass to pyyaml for the dump.'
            ' Only used to format scalars and collections (e.g. lists).')
    # Pass argv straight through: argparse already falls back to
    # sys.argv[1:] when given None. The old `argv or sys.argv[1:]`
    # silently ignored an explicitly-passed empty list.
    opts = parser.parse_args(argv)

    if opts.path:
        # Context manager guarantees the file is closed even if parsing fails.
        with open(opts.path) as src:
            data = yaml.safe_load(src)
    else:
        # Read from stdin without closing it — the old try/finally closed
        # the process-wide sys.stdin stream.
        data = yaml.safe_load(sys.stdin)

    pyaml_kwargs = dict()
    if opts.width:
        pyaml_kwargs['width'] = opts.width
    pyaml.pprint(data, **pyaml_kwargs)
# NOTE(review): this span starts mid-function — `session`, `ex`, `sort_dict`,
# `set_seeds`, `Saver`, `SummaryWriter`, `Path`, `pyaml` and `torch` are all
# bound outside the visible chunk.

# Normalise the data spec: a colon-separated string becomes a list of parts;
# anything else is rejected as malformed.
if isinstance(session['data']['val'], str):
    session['data']['val'] = session['data']['val'].split(':')
else:
    raise ValueError(f'Invalid data specification: {session["data"]}')
ex['history'].append(session)

# Print config so far
# Presumably sort_dict reorders keys in place to match the given list, for
# readable dump output — confirm against sort_dict's definition.
sort_dict(ex, [
    'name', 'tags', 'fullname', 'comment', 'completed_epochs', 'samples',
    'data', 'model', 'optimizer', 'loss', 'history'
])
sort_dict(session, [
    'completed_epochs', 'samples', 'max_epochs', 'batch_size', 'seed',
    'cpus', 'device', 'status', 'datetime_started', 'datetime_completed',
    'data', 'logs', 'checkpoint', 'metric', 'misc', 'git', 'cuda'
])
pyaml.pprint(ex, safe=True, sort_dicts=False, force_embed=True, width=200)
# endregion

# region Building phase
# Random seeds (set them after the random run id is generated)
set_seeds(session['seed'])

# Log/checkpoint directory is <logs folder>/<experiment fullname>.
saver = Saver(Path(session['logs']['folder']).joinpath(ex['fullname']))
logger = SummaryWriter(saver.base_folder)

# Model and optimizer
def load_optimizer(config: Mapping, model: torch.nn.Module) -> torch.optim.Optimizer:
    """Build an optimizer for `model` from a config mapping.

    NOTE(review): the definition looks truncated at the end of this chunk —
    `special_keys` is declared but not yet used within the visible lines.
    """
    special_keys = {'fn'}
    # The 'fn' key names the optimizer constructor; it is mandatory.
    if 'fn' not in config:
        raise ValueError('Optimizer function not specified')
def pp(data):
    """Convert *data* via its own ``convert_to`` hook, then pretty-print it.

    Returns whatever ``pyaml.pprint`` returns.
    """
    # NOTE(review): passing `data` to its own convert_to looks suspicious —
    # confirm the intended argument against convert_to's signature.
    converted = data.convert_to(data)
    return pyaml.pprint(converted)
# NOTE(review): this span starts mid-script — `experiment`, `session`,
# `sort_dict`, `set_seeds`, `import_`, `Path`, `pyaml` and `torch` are bound
# outside the visible chunk.

# Resolve checkpoint paths to absolute POSIX strings so they stay valid
# regardless of the current working directory.
experiment.model.state_dict = Path(
    experiment.model.state_dict).expanduser().resolve().as_posix()
if 'state_dict' in experiment.optimizer:
    experiment.optimizer.state_dict = Path(
        experiment.optimizer.state_dict).expanduser().resolve().as_posix()

# Presumably sort_dict reorders keys in place for readable dump output —
# confirm against sort_dict's definition.
sort_dict(
    experiment,
    ['name', 'tags', 'epoch', 'samples', 'model', 'optimizer', 'sessions'])
sort_dict(session, [
    'epochs', 'batch_size', 'losses', 'seed', 'cpus', 'device', 'samples',
    'status', 'datetime_started', 'datetime_completed', 'data', 'log',
    'checkpoint', 'git', 'gpus'
])
experiment.sessions.append(session)
pyaml.pprint(experiment, sort_dicts=False, width=200)
# Drop the local alias; the session now lives in experiment.sessions.
del session
# endregion

# region Building phase
# Seeds (set them after the random run id is generated)
set_seeds(experiment.session.seed)

# Model: instantiate the constructor named by `fn` with the configured
# args/kwargs, optionally restore weights, and move to the target device.
model: torch.nn.Module = import_(experiment.model.fn)(
    *experiment.model.args, **experiment.model.kwargs)
if 'state_dict' in experiment.model:
    model.load_state_dict(torch.load(experiment.model.state_dict))
model.to(experiment.session.device)

# Optimizer
# NOTE(review): this span starts mid-file — `main_filename`, `loc_filename`,
# `get_synonyms`, `get_data`, `error`, `warnings` and `pyaml` are bound
# outside the visible chunk.

# Data-file names are all derived from a common prefix.
obj_filename = main_filename + 'Objects Data.yaml'
act_filename = main_filename + 'Actors Data.yaml'
cmd_filename = main_filename + 'Commands.yaml'
player_filename = main_filename + 'Player.yaml'
randoms_filename = main_filename + 'Random Dicts.yaml'

# Silence warnings about re-used YAML anchors in the data files.
warnings.simplefilter("ignore", error.ReusedAnchorWarning)

if __name__ == '__main__':
    get_synonyms()
    exit()
    # NOTE(review): everything below this exit() is unreachable — presumably
    # a temporary debugging shortcut; confirm before relying on this path.
    location_data = get_data(loc_filename)
    objects_data = get_data(obj_filename)
    pyaml.pprint(location_data, sort_dicts=False)
    pyaml.pprint(objects_data, sort_dicts=False)
    # player = Player()
    # print(player)
    #
    # location = Location(locations_data, player)
    #location.narrate()
    """
    objects=[x for x in data[6].split(',') if len(x) > 1], # turns all items in data[6] into a list split by comma, only if word > 1 char
    verbs=[x for x in data[7].split(',') if len(x) > 1],
    negative_verbs=[x for x in data[8].split(',') if len(x) > 1],
    actors=[x for x in data[9].split(',') if len(x) > 1]) #narrative=loc_nar[int(data[1])].strip('\n'), # get index of readlines list from location number, remove newline char
    """
# Dump the environment variables of every running container as a
# docker-compose-style YAML document on stdout.
import docker, pyaml

client = docker.from_env(version="auto")
containers = client.containers.list()

yml = {'version': '2', 'services': {}}
for container in containers:
    # Each container managed by compose carries its service name as a label.
    service_name = container.attrs['Config']['Labels'][
        'com.docker.compose.service']
    # Split each KEY=VALUE entry on the first '=' only, since values may
    # themselves contain '='. (Replaces a manual chunks[0]/chunks[1] loop
    # and an unused `envs = {}` local.)
    environment = dict(
        env.split('=', 1) for env in container.attrs['Config']['Env'])
    yml['services'][service_name] = {'environment': environment}

pyaml.pprint(yml)
# Checks (some missing, others redundant) if model.fn is None: raise ValueError('Model constructor function not defined') if model.state_dict is None: raise ValueError(f'Model state dict is required to predict') if len(data) == 0: raise ValueError(f'No data to predict') if options.cpus < 0: raise ValueError(f'Invalid number of cpus: {options.cpus}') if options.output.exists() and not options.output.is_dir(): raise ValueError(f'Invalid output path {options.output}') pyaml.pprint({ 'model': model, 'options': options, 'data': data }, sort_dicts=False, width=200) # endregion # region Building phase # Model net: torch.nn.Module = import_(model.fn)(*model.args, **model.kwargs) net.load_state_dict(torch.load(model.state_dict)) net.to(options.device) # Output folder options.output.mkdir(parents=True, exist_ok=True) # endregion