def test_get_contents(self):
    """File content is readable both via a plain path and a file:// URL."""
    expected = 'file contents'
    plain = Path(self.file_r, 'fr')
    self.assertEqual(expected, plain.get_content())
    via_url = Path('file://' + self.tmpdir + '/' + self.file_r, 'fr')
    self.assertEqual(expected, via_url.get_content())
def test_class_hidden_methods(self):
    """__call__, __str__ and __repr__ of Path behave as expected."""
    path = Path(self.file_rw, 'frw')
    absolute = os.path.join(self.tmpdir, self.file_rw)
    # __call__(False) yields the relative path; True (the default) the absolute one.
    self.assertEqual(path(False), self.file_rw)
    self.assertEqual(path(True), absolute)
    self.assertEqual(path(), absolute)
    self.assertEqual(str(path), self.file_rw)
    self.assertTrue(repr(path).startswith('Path_frw('))
def test_init(self):
    """Copy-constructing a Path preserves its attributes; invalid types raise."""
    original = Path(self.file_rw, 'frw')
    clone = Path(original)
    for attr in ('cwd', 'abs_path', 'rel_path', 'is_url'):
        self.assertEqual(getattr(original, attr), getattr(clone, attr))
    with self.assertRaises(TypeError):
        Path(True)
def test_tilde_home(self):
    """Tilde paths keep '~' in their string form but expand on call."""
    with unittest.mock.patch.dict(os.environ, {'HOME': self.tmpdir}):
        home_dir = Path('~', 'dr')
        home_file = Path('~/' + self.file_rw, 'frw')
        # str() preserves the unexpanded form given by the user.
        self.assertEqual(str(home_dir), '~')
        self.assertEqual(str(home_file), '~/' + self.file_rw)
        # __call__ resolves '~' against $HOME.
        self.assertEqual(home_dir(), self.tmpdir)
        self.assertEqual(home_file(), os.path.join(self.tmpdir, self.file_rw))
def test_urls(self):
    """URL paths in 'ur' mode read bodies of existing URLs and reject 404s."""
    existing = 'http://example.com/existing-url'
    existing_body = 'url contents'
    nonexisting = 'http://example.com/non-existing-url'
    # Mock both the HEAD used for validation and the GET used for content.
    responses.add(responses.GET, existing, body=existing_body, status=200)
    responses.add(responses.HEAD, existing, status=200)
    responses.add(responses.HEAD, nonexisting, status=404)
    url_path = Path(existing, mode='ur')
    self.assertEqual(existing_body, url_path.get_content())
    with self.assertRaises(TypeError):
        Path(nonexisting, mode='ur')
def test_file_access_mode(self):
    """File modes succeed on matching permissions and raise TypeError otherwise."""
    accepted = [
        (self.file_rw, 'frw'),
        (self.file_r, 'fr'),
        (self.file_, 'f'),
        (self.dir_file_rx, 'fr'),
    ]
    for target, mode in accepted:
        Path(target, mode)
    rejected = [
        (self.file_rw, 'fx'),
        (self.file_r, 'fw'),
        (self.file_, 'fr'),
        (self.dir_file_rx, 'fw'),
        (self.dir_rx, 'fr'),
        ('file_ne', 'fr'),
    ]
    for target, mode in rejected:
        with self.assertRaises(TypeError):
            Path(target, mode)
def test_create_mode(self):
    """Create ('c') modes accept creatable targets and reject impossible ones."""
    # Existing or creatable targets must be accepted.
    Path(self.file_rw, 'fcrw')
    Path(os.path.join(self.tmpdir, 'file_c'), 'fc')
    Path(self.dir_rwx, 'dcrwx')
    Path(os.path.join(self.tmpdir, 'dir_c'), 'dc')
    # Non-writable parent, kind mismatch, or missing intermediate dir must fail.
    rejected = [
        (os.path.join(self.dir_rx, 'file_c'), 'fc'),
        (os.path.join(self.dir_rx, 'dir_c'), 'dc'),
        (self.file_rw, 'dc'),
        (self.dir_rwx, 'fc'),
        (os.path.join(self.dir_rwx, 'ne', 'file_c'), 'fc'),
    ]
    for target, mode in rejected:
        with self.assertRaises(TypeError):
            Path(target, mode)
def test_default_path_unregistered_type(self):
    """A skip_check Path default dumps and renders in help correctly."""
    parser = ArgumentParser()
    default_path = Path('test', mode='drw', skip_check=True)
    parser.add_argument('--path',
                        type=path_type('drw', skip_check=True),
                        default=default_path)
    cfg = parser.parse_args([])
    self.assertEqual('path: test\n', parser.dump(cfg))
    help_buffer = StringIO()
    parser.print_help(help_buffer)
    self.assertIn('(type: Path_drw_skip_check, default: test)',
                  help_buffer.getvalue())
def test_dir_access_mode(self):
    """Directory modes accept matching permissions and raise on mismatches."""
    for target, mode in [(self.dir_rwx, 'drwx'),
                         (self.dir_rx, 'drx'),
                         (self.dir_x, 'dx')]:
        Path(target, mode)
    for target, mode in [(self.dir_rx, 'dw'),
                         (self.dir_x, 'dr'),
                         (self.file_r, 'dr')]:
        with self.assertRaises(TypeError):
            Path(target, mode)
def state_dict_prop(self, state_dict: dict):
    """Replaces the current state dictionary with the one given.

    Args:
        state_dict: State dictionary to set, or a path to a checkpoint file.
    """
    if state_dict is None:
        return
    if isinstance(state_dict, (str, Path)):
        # A path was given: resolve it and load the checkpoint it points to.
        checkpoint = Path(state_dict, mode=get_config_read_mode(), cwd=self.cfg.cwd)
        state_dict = torch.load(checkpoint())
    if not isinstance(state_dict, dict):
        raise ValueError('Expected state_dict to be a dictionary.')
    self.load_state_dict(state_dict)
def test_fsspec(self):
    """fsspec ('s') mode paths validate chained protocols and read/write content."""

    def make_zip(archive_path, member_path):
        # ZipFile used as a context manager guarantees the archive is closed.
        with zipfile.ZipFile(archive_path, 'w', zipfile.ZIP_DEFLATED) as zf:
            zf.write(member_path)

    existing = 'existing.txt'
    existing_body = 'existing content'
    nonexisting = 'non-existing.txt'
    with open(existing, 'w') as f:
        f.write(existing_body)
    zip1_path = 'file1.zip'
    zip2_path = 'file2.zip'
    make_zip(zip1_path, existing)
    make_zip(zip2_path, existing)
    os.chmod(zip2_path, 0)  # make the second archive unreadable

    path = Path('zip://' + existing + '::file://' + zip1_path, mode='sr')
    self.assertEqual(existing_body, path.get_content())

    # Missing member, unreadable archive, wrong kind ('d'), unknown protocol.
    with self.assertRaises(TypeError):
        Path('zip://' + nonexisting + '::file://' + zip1_path, mode='sr')
    with self.assertRaises(TypeError):
        Path('zip://' + existing + '::file://' + zip2_path, mode='sr')
    with self.assertRaises(ValueError):
        Path('zip://' + existing + '::file://' + zip1_path, mode='ds')
    with self.assertRaises(TypeError):
        Path('unsupported://' + existing, mode='sr')

    # Round-trip through the in-memory filesystem.
    fsspec = import_fsspec('test_fsspec')
    nonexisting = 'nonexisting.txt'
    path = Path('memory://' + nonexisting, mode='sw')
    with fsspec.open(path(), 'w') as f:
        f.write(existing_body)
    self.assertEqual(existing_body, path.get_content())
def main(config_string):
    """Parses the configuration string and starts the exercise session.

    Args:
        config_string: Configuration content in the format accepted by
            ``ArgumentParser.parse_string``.
    """
    logger.debug("Parsing arguments...")
    parser = ArgumentParser()
    parser.add_argument('--cfg', action=ActionConfigFile)
    parser.add_argument('--lang',
                        choices=['eng', 'nl'],
                        nargs='?',
                        default='nl',
                        help='The language you want to hear.')
    parser.add_argument(
        '--duration_breath',
        type=int,
        default=0,
        help='Number of seconds that Tessa takes a breath before announcing the next exercise.')
    parser.add_argument('--max_times',
                        type=int,
                        default=3,
                        help='The number of exercises per day.')
    parser.add_argument(
        '--warn_before',
        type=int,
        default=6,
        help='Seconds, before exercise starts, you get a warning.')
    parser.add_argument('--duration_exercise',
                        action=ActionOperators(expr=('>', 15)),
                        default=20,
                        help='Seconds, duration of exercise.')
    parser.add_argument('--music.dir',
                        action=ActionPath(mode='dr'),
                        default=Path(os.path.join(
                            str(libPath.home()), 'Music/iTunes/iTunes Media/Music'),
                            mode='dr'),
                        help='The root folder of your music files.')
    parser.add_argument(
        '--music.vol',
        type=float,
        default=0.19,
        # Fixed: the parenthesis in this help text was never closed.
        help='The volume of the music (exponential scale, where 0 is silent, 1 is normal, up to 255 for very loud).')
    parser.add_argument(
        '--finished_work_at',
        type=int,
        default=19,
        help='When are you finished with work and want to stop exercises.')
    parser.add_argument(
        '--min_pause',
        type=int,
        default=0,
        help='Minutes, minimum how long your break between exercises takes.')
    parser.add_argument(
        '--max_pause',
        type=int,
        default=0,
        help='Minutes, maximum how long your break between exercises takes.')
    parser.add_argument(
        '--silent',
        action=ActionYesNo,
        default=False,
        # Fixed typo: "Wether" -> "Whether".
        help='Whether you want to run Tessa in silent mode. Music is played anyway.')
    parser.add_argument(
        '--skip_intro',
        action=ActionYesNo,
        default=True,
        help='If you want to restart Tessa and skip the intro.')
    args = parser.parse_string(config_string)
    speech = Speech(args)
    music = Music(args)
    start(args, speech, music)
class ModuleArchitecture:
    """Class for instantiating ModuleArchitecture objects."""

    # Class-level defaults; overwritten per instance by load_architecture().
    path = None
    jsonnet = None
    architecture = None
    propagators = 'default'
    blocks = None
    topological_predecessors = None

    @staticmethod
    def get_config_parser():
        """Returns a ModuleArchitecture configuration parser."""
        parser = ArgumentParser(error_handler=None,
                                description=ModuleArchitecture.__doc__,
                                version=__version__)
        parser.add_argument('--cfg',
                            action=ActionConfigFile,
                            help='Path to a configuration file.')

        # loading options #
        group_load = parser.add_argument_group('Loading related options')
        group_load.add_argument(
            '--validate',
            default=True,
            type=bool,
            help='Whether to validate architecture against narchi schema.')
        group_load.add_argument(
            '--propagate',
            default=True,
            type=bool,
            help='Whether to propagate shapes in architecture.')
        group_load.add_argument(
            '--propagated',
            default=False,
            type=bool,
            help='Whether architecture has already been propagated.')
        group_load.add_argument('--propagators',
                                help='Overrides default propagators.')
        group_load.add_argument(
            '--ext_vars',
            action=ActionJsonnetExtVars(),
            help='External variables required to load jsonnet.')
        group_load.add_argument(
            '--cwd',
            help='Current working directory to load inner referenced files. Default None uses '
            'directory of main architecture file.')
        group_load.add_argument('--parent_id',
                                default='',
                                help='Identifier of parent module.')

        # output options #
        group_out = parser.add_argument_group('Output related options')
        group_out.add_argument('--overwrite',
                               default=False,
                               type=bool,
                               help='Whether to overwrite existing files.')
        group_out.add_argument('--outdir',
                               default='.',
                               action=ActionPath(mode='dw'),
                               help='Directory where to write output files.')
        group_out.add_argument(
            '--save_json',
            default=False,
            type=bool,
            help='Whether to write the architecture (up to the last successful step: jsonnet load, '
            'schema validation, parsing) in json format to the output directory.'
        )
        return parser

    def __init__(
        self,
        architecture: Union[str, Path] = None,
        cfg: Union[str, dict, Namespace] = None,
        parser: ArgumentParser = None,
    ):
        """Initializer for ModuleArchitecture class.

        Args:
            architecture: Path to a jsonnet architecture file.
            cfg: Path to config file or config object.
            parser: Parser object in case it is an extension of get_config_parser().
        """
        if parser is None:
            parser = self.get_config_parser()
        self.parser = parser
        self.apply_config(cfg)
        if architecture is not None:
            self.load_architecture(architecture)

    def apply_config(self, cfg: Union[str, dict, Namespace]):
        """Applies a configuration to the ModuleArchitecture instance.

        Args:
            cfg: Path to config file or config object.

        Raises:
            ValueError: If cfg is of an unsupported type.
        """
        if cfg is None:
            self.cfg = self.parser.get_defaults()
        elif isinstance(cfg, (str, Path)):
            # A path was given: remember where the config came from and parse it.
            self.cfg_file = cfg
            self.cfg = self.parser.parse_path(cfg)
        elif isinstance(cfg, Namespace):
            self.parser.check_config(cfg)
            self.cfg = cfg
        elif isinstance(cfg, dict):
            cfg = dict(cfg)  # shallow copy so the caller's dict is not mutated by pop()
            # A propagators dict bypasses the parser and is stored directly.
            if 'propagators' in cfg and isinstance(cfg['propagators'], dict):
                self.propagators = cfg.pop('propagators')
            if not hasattr(self, 'cfg'):
                self.cfg = self.parser.parse_object(cfg)
            else:
                # Subsequent calls merge on top of the existing config.
                self.cfg = self.parser.parse_object(cfg,
                                                    cfg_base=self.cfg,
                                                    defaults=False)
        else:
            raise ValueError(f'Unexpected configuration object: {cfg}')

        # Resolve the 'default' sentinel to the standard propagators dict.
        if self.propagators == 'default':
            self.propagators = import_object('narchi.blocks.propagators')

    def load_architecture(self, architecture: Optional[Union[str, Path]]):
        """Loads an architecture file.

        Args:
            architecture: Path to a jsonnet architecture file.

        Raises:
            ValueError: If the architecture is not a path/namespace, or if
                inputs/outputs share ids with blocks.
        """
        # Reset any state from a previous load.
        self.path = None
        self.jsonnet = None
        self.architecture = None
        self.blocks = None
        self.topological_predecessors = None

        ## Initialize with given ModuleArchitecture ##
        if isinstance(architecture, ModuleArchitecture):
            self.path = architecture.path
            self.jsonnet = architecture.jsonnet
            self.blocks = architecture.blocks
            self.topological_predecessors = architecture.topological_predecessors
            self.cfg.propagated = architecture.cfg.propagated
            architecture = architecture.architecture

        ## Load jsonnet file or snippet ##
        if isinstance(architecture, (str, Path)):
            self.path = Path(architecture,
                             mode=get_config_read_mode(),
                             cwd=self.cfg.cwd)
            # Inner file references are resolved relative to the architecture file.
            self.cfg.cwd = os.path.dirname(self.path())
            self.jsonnet = self.path.get_content()
            architecture = ActionJsonnet(schema=None).parse(
                self.path, ext_vars=self.cfg.ext_vars)
            if not hasattr(architecture, '_id'):
                # Fall back to the file name (without extension) as the module id.
                architecture._id = os.path.splitext(
                    os.path.basename(self.path()))[0]
        if not isinstance(architecture, Namespace):
            raise ValueError(
                f'{type(self).__name__} expected architecture to be either a path or a namespace.'
            )
        self.architecture = architecture

        ## Validate prior to propagation ##
        self.validate()

        ## Check inputs and outputs independent of blocks ##
        isect_ids = set(b._id for b in architecture.blocks).intersection(
            b._id for b in architecture.inputs + architecture.outputs)
        if isect_ids:
            raise ValueError(
                f'{type(self).__name__} inputs/outputs not allowed to be blocks {isect_ids}.'
            )

        ## Create dictionary of blocks ##
        if not self.blocks and all(
                hasattr(architecture, x) for x in ['inputs', 'outputs', 'blocks']):
            if self.cfg.parent_id:
                architecture._id = self.cfg.parent_id
                add_ids_prefix(architecture,
                               architecture.inputs + architecture.outputs,
                               skip_io=False)
            self.blocks = get_blocks_dict(architecture.inputs +
                                          architecture.blocks)

        ## Propagate shapes ##
        if self.cfg.propagate:
            if not self.cfg.propagated:
                self.propagate()
            elif self.topological_predecessors is None:
                # Already propagated: only the topological ordering is needed.
                self.topological_predecessors = parse_graph(
                    architecture.inputs, architecture)

    def validate(self):
        """Validates the architecture against the narchi or propagated schema."""
        if not self.cfg.validate:
            return
        try:
            if self.cfg.propagated:
                propagated_validator.validate(
                    namespace_to_dict(self.architecture))
            else:
                narchi_validator.validate(namespace_to_dict(self.architecture))
        except Exception as ex:
            # Dump the failing architecture (if save_json enabled) to aid debugging.
            self.write_json_outdir()
            source = 'Propagated' if self.cfg.propagated else 'Pre-propagated'
            # Re-raise as the same exception type, with validation context prepended.
            raise type(
                ex
            )(f'{source} architecture failed to validate against schema :: {ex}'
              ) from ex

    def propagate(self):
        """Propagates the shapes of the neural network module architecture.

        Raises:
            RuntimeError: If the architecture was already propagated.
            ValueError: If the graph ordering or output shapes are inconsistent.
        """
        if self.cfg.propagated:
            raise RuntimeError(
                f'Not possible to propagate an already propagated {type(self).__name__}.'
            )

        architecture = self.architecture

        ## Parse graph getting node mapping in topological order ##
        topological_predecessors = parse_graph(architecture.inputs,
                                               architecture)
        output_ids = {b._id for b in architecture.outputs}
        # The topologically last node must be one of the declared outputs.
        if next(reversed(topological_predecessors)) not in output_ids:
            raise ValueError(
                f'In module[id={architecture._id}] expected one of output nodes {output_ids} to be the last in the graph.'
            )

        ## Propagate shapes for the architecture blocks ##
        try:
            propagate_shapes(self.blocks,
                             topological_predecessors,
                             propagators=self.propagators,
                             ext_vars=self.cfg.ext_vars,
                             cwd=self.cfg.cwd,
                             skip_ids=output_ids)
        except Exception as ex:
            # Dump the partial result (if save_json enabled) before propagating the error.
            self.write_json_outdir()
            raise ex

        for output_block in architecture.outputs:
            ## Get pre-output blocks ##
            pre_output_block_id = next(
                v[0] for k, v in topological_predecessors.items()
                if k == output_block._id)
            try:
                pre_output_block = next(b for b in architecture.blocks
                                        if b._id == pre_output_block_id)
            except StopIteration as ex:
                block_ids = {b._id for b in architecture.blocks}
                raise ValueError(
                    f'In module[id={architecture._id}] pre-output block[id={pre_output_block_id}] not found among ids={block_ids}.'
                ) from ex
            ## Automatic output dimensions ##
            # auto_tag dimensions take their value from the pre-output block's shape.
            for dim, val in enumerate(output_block._shape):
                if val == auto_tag:
                    output_block._shape[dim] = get_shape(
                        'out', pre_output_block)[dim]
            ## Check that output shape agrees ##
            if not shapes_agree(pre_output_block, output_block):
                self.write_json_outdir()
                raise ValueError(
                    f'In module[id={architecture._id}] pre-output block[id={pre_output_block._id}] and output '
                    f'shape do not agree: {pre_output_block._shape.out} vs. {output_block._shape}.'
                )

        ## Update properties ##
        self.topological_predecessors = topological_predecessors
        self.cfg.propagated = True

        ## Set propagated shape ##
        in_shape = architecture.inputs[0]._shape
        out_shape = architecture.outputs[0]._shape
        architecture._shape = create_shape(in_shape, out_shape)

        ## Validate result ##
        self.validate()

        ## Write json file if requested ##
        self.write_json_outdir()

    def write_json(self, json_path):
        """Writes the current state of the architecture in json format to the given path.

        Args:
            json_path: Destination path, either a string or a callable Path object.
        """
        with open(json_path if isinstance(json_path, str) else json_path(),
                  'w') as f:
            architecture = namespace_to_dict(self.architecture)
            f.write(
                json.dumps(architecture,
                           indent=2,
                           sort_keys=True,
                           ensure_ascii=False))

    def _check_overwrite(self, path):
        """Raises IOError if overwrite not set and path already exists."""
        if not self.cfg.overwrite and os.path.isfile(path):
            raise IOError(f'Refusing to overwrite existing file: {path}')

    def write_json_outdir(self):
        """Writes the current state of the architecture to the configured output directory."""
        # No-op unless save_json is enabled, an outdir is set and an architecture was loaded.
        if not self.cfg.save_json or self.cfg.outdir is None or not hasattr(
                self, 'architecture'):
            return
        outdir = self.cfg.outdir if isinstance(self.cfg.outdir,
                                               str) else self.cfg.outdir()
        out_path = os.path.join(outdir, f'{self.architecture._id}.json')
        self._check_overwrite(out_path)
        self.write_json(out_path)
def test_complement_modes(self):
    """Uppercase (negated) mode letters must fail on paths matching the check."""
    cases = [
        (self.file_rw, 'fW'),
        (self.file_rw, 'fR'),
        (self.dir_rwx, 'dX'),
        (self.file_rw, 'F'),
        (self.dir_rwx, 'D'),
    ]
    for target, mode in cases:
        with self.assertRaises(TypeError):
            Path(target, mode)
def test_cwd(self):
    """A Path rebuilt with an explicit cwd keeps that same cwd."""
    explicit_cwd = os.path.join(self.tmpdir, 'dir_x')
    path = Path('file_rx', mode='fr', cwd=explicit_cwd)
    rebuilt = Path('file_rx', mode='fr', cwd=path.cwd)
    self.assertEqual(path.cwd, rebuilt.cwd)
def test_invalid_modes(self):
    """Malformed mode strings raise ValueError regardless of the path."""
    bad_modes = [True, '≠', 'fd']
    if url_support:
        bad_modes.append('du')
    for mode in bad_modes:
        with self.assertRaises(ValueError):
            Path(self.file_rw, mode)
def load_architecture(self, architecture: Optional[Union[str, Path]]):
    """Loads an architecture file.

    Args:
        architecture: Path to a jsonnet architecture file.

    Raises:
        ValueError: If the architecture is not a path/namespace, or if
            inputs/outputs share ids with blocks.
    """
    # Reset any state from a previous load.
    self.path = None
    self.jsonnet = None
    self.architecture = None
    self.blocks = None
    self.topological_predecessors = None

    ## Initialize with given ModuleArchitecture ##
    if isinstance(architecture, ModuleArchitecture):
        self.path = architecture.path
        self.jsonnet = architecture.jsonnet
        self.blocks = architecture.blocks
        self.topological_predecessors = architecture.topological_predecessors
        self.cfg.propagated = architecture.cfg.propagated
        architecture = architecture.architecture

    ## Load jsonnet file or snippet ##
    if isinstance(architecture, (str, Path)):
        self.path = Path(architecture,
                         mode=get_config_read_mode(),
                         cwd=self.cfg.cwd)
        # Inner file references are resolved relative to the architecture file.
        self.cfg.cwd = os.path.dirname(self.path())
        self.jsonnet = self.path.get_content()
        architecture = ActionJsonnet(schema=None).parse(
            self.path, ext_vars=self.cfg.ext_vars)
        if not hasattr(architecture, '_id'):
            # Fall back to the file name (without extension) as the module id.
            architecture._id = os.path.splitext(
                os.path.basename(self.path()))[0]
    if not isinstance(architecture, Namespace):
        raise ValueError(
            f'{type(self).__name__} expected architecture to be either a path or a namespace.'
        )
    self.architecture = architecture

    ## Validate prior to propagation ##
    self.validate()

    ## Check inputs and outputs independent of blocks ##
    isect_ids = set(b._id for b in architecture.blocks).intersection(
        b._id for b in architecture.inputs + architecture.outputs)
    if isect_ids:
        raise ValueError(
            f'{type(self).__name__} inputs/outputs not allowed to be blocks {isect_ids}.'
        )

    ## Create dictionary of blocks ##
    if not self.blocks and all(
            hasattr(architecture, x) for x in ['inputs', 'outputs', 'blocks']):
        if self.cfg.parent_id:
            architecture._id = self.cfg.parent_id
            add_ids_prefix(architecture,
                           architecture.inputs + architecture.outputs,
                           skip_io=False)
        self.blocks = get_blocks_dict(architecture.inputs +
                                      architecture.blocks)

    ## Propagate shapes ##
    if self.cfg.propagate:
        if not self.cfg.propagated:
            self.propagate()
        elif self.topological_predecessors is None:
            # Already propagated: only the topological ordering is needed.
            self.topological_predecessors = parse_graph(
                architecture.inputs, architecture)