def test_multiple_absolute(self):
    """
    Test a directory hierarchy with absolute paths
    """
    # build the absolute input path for every test file
    absolute_inputs = [
        os.path.join(self.working_dir, relpath)
        for relpath in self.file_contents.keys()
    ]

    with cd(self.working_dir):
        # encrypt everything into a single archive
        encrypt(
            inputfiles=absolute_inputs,
            outputfile='sesame.encrypted',
            keys=[self.key],
        )

        # remove the originals so decryption must recreate them
        for relpath in self.file_contents.keys():
            delete_path(relpath)

        # decrypt the archive back into the working directory
        decrypt(
            inputfile='sesame.encrypted',
            keys=[self.key],
            output_dir=os.getcwd(),  # default in argparse
        )

        for relpath, expected in self.file_contents.items():
            # files reappear on their absolute path with the leading
            # separator stripped off
            extracted = os.path.join(self.working_dir, relpath)[1:]

            # verify decrypted contents at the extracted location
            with open(extracted, 'r') as handle:
                assert expected == handle.read()
def test_multiple_relative(self):
    """
    Test a directory hierarchy with relative paths
    """
    with cd(self.working_dir):
        # encrypt everything into a single archive
        encrypt(
            inputfiles=self.file_contents.keys(),
            outputfile='sesame.encrypted',
            keys=[self.key],
        )

        # remove the originals so decryption must recreate them
        for relpath in self.file_contents.keys():
            delete_path(relpath)

        # decrypt the archive back into the working directory
        decrypt(
            inputfile='sesame.encrypted',
            keys=[self.key],
            output_dir=os.getcwd(),  # default in argparse
        )

        for relpath, expected in self.file_contents.items():
            # each file must reappear at its original relative path
            assert os.path.exists(relpath)

            # verify decrypted contents
            with open(relpath, 'r') as handle:
                assert expected == handle.read()
def main(args):
    """Entry point: reset output directories, record the run config,
    seed all RNGs, then dispatch to train() or test().

    Expects ``args`` (argparse.Namespace) to provide: log_dir, save_dir,
    seed, gpus, mode.
    """
    # start from clean log/save directories
    utils.delete_path(args.log_dir)
    utils.delete_path(args.save_dir)
    utils.ensure_path(args.save_dir)
    utils.ensure_path(args.log_dir)

    # record the full run configuration for reproducibility
    utils.write_dict(vars(args), os.path.join(args.save_dir, 'arguments.csv'))

    # BUGFIX: restrict visible GPUs BEFORE any CUDA call — the original
    # assigned this after torch.cuda.manual_seed_all(), which initializes
    # the CUDA context and makes CUDA_VISIBLE_DEVICES a no-op
    os.environ['CUDA_VISIBLE_DEVICES'] = args.gpus

    # seed every RNG so runs are repeatable
    torch.manual_seed(args.seed)
    cudnn.benchmark = True
    torch.cuda.manual_seed_all(args.seed)

    if args.mode == 'train':
        train(args)
    elif args.mode == 'test':
        test(args)
def __call__(self): data = self.load(self.entity.open()) # self.data represents the product of previous layers if self.data: data = utils.deepmerge(self.data, data) # Now apply any rules from config config = self.config if config: section = config.get(self.section) if section: dels = section.get('deletes', []) if self.prefix: namespace = data[self.prefix] else: namespace = data for key in dels: utils.delete_path(key, namespace) self.data = data if not self.target_file.parent.exists(): self.target_file.parent.makedirs_p() self.dump(data) return data
def del_gallery(list_of_gallery, local=False):
    """Delete all galleries in the list recursively.

    Removes each gallery's DB rows (series, chapters, tag mappings and
    hashes) and its cached profile image. When ``local`` is True, the
    on-disk files (archive, or chapter dirs plus gallery dir) are deleted
    first; a failed disk deletion skips the DB cleanup for that gallery.
    """
    assert isinstance(list_of_gallery, list), "Please provide a valid list of galleries to delete"
    for gallery in list_of_gallery:
        # encode once: a log-safe title that won't raise on exotic characters
        log_title = gallery.title.encode('utf-8', 'ignore')
        if local:
            if gallery.is_archive:
                s = delete_path(gallery.path)
            else:
                for chap in gallery.chapters:
                    path = gallery.chapters[chap]
                    s = delete_path(path)
                    if not s:
                        # log and carry on with the remaining chapters
                        # (the original's trailing `continue` was a no-op)
                        log_e('Failed to delete chapter {}:{}, {}'.format(chap, gallery.id, log_title))
                s = delete_path(gallery.path)
            if not s:
                # disk deletion failed: keep the DB records intact
                log_e('Failed to delete gallery:{}, {}'.format(gallery.id, log_title))
                continue
        if gallery.profile != os.path.abspath(gui_constants.NO_IMAGE_PATH):
            # best-effort removal of the cached profile image
            try:
                os.remove(gallery.profile)
            except FileNotFoundError:
                pass
        executing = [["DELETE FROM series WHERE series_id=?", (gallery.id,)]]
        CommandQueue.put(executing)
        # drain the result so the command/result queues stay in sync
        # (the original bound it to `c` only to `del` it immediately)
        ResultQueue.get()
        ChapterDB.del_all_chapters(gallery.id)
        TagDB.del_gallery_mapping(gallery.id)
        HashDB.del_gallery_hashes(gallery.id)
        log_i('Successfully deleted: {}'.format(log_title))
        gui_constants.NOTIF_BAR.add_text('Successfully deleted: {}'.format(gallery.title))