def handle_not_installed():
    """Build a filesystem-only node for a subdataset that is not installed.

    NOTE(review): this top-level copy duplicates the nested helper inside
    ``_traverse_handle_subds`` and depends on names from that enclosing
    scope (``subds``, ``subds_path``, ``rootds``) — called at module level
    it would raise ``NameError``.  Presumably dead code left over from a
    refactor; confirm and remove.
    """
    # for now just traverse as fs
    lgr.warning("%s is either not installed or lacks meta-data", subds)
    # extract plain filesystem metadata rooted at the parent dataset's path
    subfs = fs_extract(subds_path, rootds, basepath=rootds.path)
    # but add a custom type that it is a not installed subds
    subfs['type'] = 'uninitialized'
    # we need to kick it out from 'children'
    # TODO: this is inefficient and cruel -- "ignored" should be made
    # smarted to ignore submodules for the repo
    #if fs['nodes']:
    #    fs['nodes'] = [c for c in fs['nodes'] if c['path'] != subds_rpath]
    return subfs
def _traverse_handle_subds(
        subds_rpath, rootds, recurse_datasets, recurse_directories, json):
    """Produce the traversal node for a subdataset of ``rootds``.

    Depending on installation state and the ``recurse_datasets`` flag the
    node is obtained by recursing into the subdataset, by loading metadata
    previously cached on disk, or by falling back to a plain filesystem
    extraction marked as 'uninitialized'.

    Parameters
    ----------
    subds_rpath : str
        Path of the subdataset relative to ``rootds``.
    rootds : Dataset
        The parent (super) dataset.
    recurse_datasets : bool
        Whether to descend into installed subdatasets.
    recurse_directories : bool
        Passed through to ``ds_traverse`` when recursing.
    json
        Passed through to ``ds_traverse`` when recursing.

    Returns
    -------
    dict
        Metadata node for the subdataset (children stripped).
    """
    subds_path = opj(rootds.path, subds_rpath)
    subds = Dataset(subds_path)
    subds_json = metadata_locator(path='.', ds_path=subds_path)

    def handle_not_installed():
        # Fall back to plain filesystem traversal of the mount point.
        lgr.warning("%s is either not installed or lacks meta-data", subds)
        subfs = fs_extract(subds_path, rootds, basepath=rootds.path)
        # Tag the node so consumers can tell it is not an installed subds.
        subfs['type'] = 'uninitialized'
        # TODO: this is inefficient and cruel -- "ignored" should be made
        # smarter to ignore submodules for the repo, so the node would not
        # have to be kicked out from 'children' downstream
        return subfs

    # Guard: nothing on disk to inspect beyond the mount point itself.
    if not subds.is_installed():
        return handle_not_installed()

    if recurse_datasets:
        # Descend into the subdataset and use its freshly computed node.
        subfs = ds_traverse(subds,
                            json=json,
                            recurse_datasets=recurse_datasets,
                            recurse_directories=recurse_directories,
                            parent=rootds)
        subfs.pop('nodes', None)
        return subfs

    # Not recursing: try the metadata cached by an earlier traversal.
    lgr.info(subds.path)
    if not exists(subds_json):
        # Installed but never traversed -- same fallback as not installed.
        lgr.warning("%s is installed but no meta-data yet", subds)
        return handle_not_installed()

    with open(subds_json) as fobj:
        subfs = js.load(fobj)
    subfs.pop('nodes', None)      # drop children; caller wants one node
    subfs['path'] = subds_rpath   # rebase path to be relative to rootds
    return subfs
def _traverse_handle_subds(subds_rpath, rootds, recurse_datasets,
                           recurse_directories, json):
    """Produce the traversal node for a subdataset of ``rootds``.

    The node is obtained by recursing into the subdataset, by loading
    web metadata previously cached on disk, or by falling back to a plain
    filesystem extraction marked as 'uninitialized'.

    NOTE(review): this definition is an exact duplicate of the preceding
    ``_traverse_handle_subds`` (which it silently shadows) -- presumably a
    merge/paste artifact; confirm and delete one copy.
    """
    subds_path = opj(rootds.path, subds_rpath)
    subds = Dataset(subds_path)
    # location of metadata cached by a previous traversal, if any
    subds_json = metadata_locator(path='.', ds_path=subds_path)

    def handle_not_installed():
        # for now just traverse as fs
        lgr.warning("%s is either not installed or lacks meta-data", subds)
        subfs = fs_extract(subds_path, rootds, basepath=rootds.path)
        # but add a custom type that it is a not installed subds
        subfs['type'] = 'uninitialized'
        # we need to kick it out from 'children'
        # TODO: this is inefficient and cruel -- "ignored" should be made
        # smarted to ignore submodules for the repo
        #if fs['nodes']:
        #    fs['nodes'] = [c for c in fs['nodes'] if c['path'] != subds_rpath]
        return subfs

    if not subds.is_installed():
        subfs = handle_not_installed()
    elif recurse_datasets:
        # recurse into the installed subdataset and strip its children
        subfs = ds_traverse(subds,
                            json=json,
                            recurse_datasets=recurse_datasets,
                            recurse_directories=recurse_directories,
                            parent=rootds)
        subfs.pop('nodes', None)
        #size_list.append(subfs['size'])
    # else just pick the data from metadata_file of each subdataset
    else:
        subfs = None
        lgr.info(subds.path)
        if exists(subds_json):
            with open(subds_json) as data_file:
                subfs = js.load(data_file)
            subfs.pop('nodes', None)  # remove children
            subfs['path'] = subds_rpath  # reassign the path
            #size_list.append(subfs['size'])
        else:
            # the same drill as if not installed
            lgr.warning("%s is installed but no meta-data yet", subds)
            subfs = handle_not_installed()

    # add URL field

    return subfs