def __health_check(self):
    to_delete = list()
    if self.__pre_processor is None:
        health_state = WordsDataSet.__health_state.format("")
    else:
        health_state = WordsDataSet.__health_state.format(
            f"_{self.__pre_processor.name}")
    health_path = os.path.join(self.__root_dir, health_state)
    if is_file(health_path):
        with open(health_path, 'r') as fp:
            to_delete = json_read(fp)
    else:
        for idx, word_meta in enumerate(self.__words):
            try:
                img, word = self[idx]
                stripped = right_strip(list(map(int, word)), 1)
                self.__check_word_length(stripped)
            except (cv2.error, ValueError):
                logger.error(f"Corrupted file at index: {idx}")
                to_delete.append(idx)
        logger.debug(f"Write corrupted indices to '{health_path}'")
        with open(health_path, 'w') as fp:
            json_write(to_delete, fp)
    logger.info(
        f"WordsDataSet - Health Check: {len(to_delete)} indices={to_delete} not readable."
    )
    self.__save_delete_indices(to_delete)
def _auth(self):
    """
    Authenticate Amigo on GCP, fetching the credentials and saving them
    to a local file that can be used for service discovery.
    """
    # Set the credentials to be used by amigo
    local_cred_file = util.get_value(self.config, "local_cred_file")
    try:
        self.auth = Storage(local_cred_file)
        if util.is_file(util.get_value(self.config, "key_file")):
            creds = GoogleCredentials.get_application_default()
            self.auth.put(creds)
            return True
    except IOError:
        util.print_to_stderr(
            "Cannot open {0} to write, ensure you are running as root.".format(
                local_cred_file))
    except ApplicationDefaultCredentialsError:
        util.print_to_stderr("Cannot authenticate to GCP.")
    return False
def image_size(path_to_image: str) -> tuple:
    if not util.is_file(path_to_image):
        pfcs(f'image file e[{path_to_image}] does not exist!')
        return None
    with Image.open(path_to_image) as image:
        return image.size
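# Usage sketch for image_size (hypothetical path; assumes PIL's Image and the
# util/pfcs helpers used above are importable in this module). Image.size is
# a (width, height) tuple:
size = image_size('/tmp/example.jpg')
if size is not None:
    width, height = size
    print(f'image is {width}x{height}')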
def move_file(source_file, destination, create_dirs=False,
              new_filename=None, debug_print=True):
    "Custom file move method using mv command in the background"
    source_file = path_to_str(source_file)
    destination = path_to_str(destination)
    if not util.is_file(source_file):
        if debug_print:
            print(f'source {CSTR(source_file, "orange")} does not exist!')
        return False
    if not util.is_dir(destination) and create_dirs:
        os.makedirs(destination)
        if debug_print:
            print(f'move_file: created dir {CSTR(destination, "lblue")}')
    elif not util.is_dir(destination) and not create_dirs:
        if debug_print:
            print(f'destination {CSTR(destination, "red")} does not exist!')
        return False
    if debug_print:
        print(f'moving {CSTR(source_file, "lblue")}')
    if new_filename:
        command = f'mv "{source_file}" "{destination}/{new_filename}"'
        if debug_print:
            print(f'destination {CSTR(f"{destination}/{new_filename}", "lblue")}')
    else:
        command = f'mv "{source_file}" "{destination}"'
        if debug_print:
            print(f'destination {CSTR(destination, "lblue")}')
    if local_command(command, hide_output=True, print_info=False):
        if debug_print:
            print(CSTR('done!', 'lgreen'))
        return True
    if debug_print:
        print(CSTR('move failed!', 'red'))
    return False
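# Usage sketch for move_file (hypothetical paths; relies on the CSTR and
# local_command helpers the function above assumes). Creates the destination
# directory if missing and renames the file while moving it:
if move_file('/tmp/episode.mkv', '/data/tv/show', create_dirs=True,
             new_filename='S01E01.mkv'):
    print('moved and renamed successfully')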
def __availability_check(self):
    to_delete = []
    for idx, word_meta in enumerate(self.__words):
        path = word_meta.path(self.__root_dir)
        if not is_file(path):
            logger.warning("File not found: %s", path)
            to_delete.append(idx)
    self.__save_delete_indices(to_delete)
def imdb_from_nfo(show_name: str):
    'return the imdb-id from a tvshow.nfo, or None if unavailable'
    if not util.is_dir(show_name):
        show_name = os.path.join(SHOW_DIR, show_name)
        if not util.is_dir(show_name):
            return None
    nfo_file = os.path.join(show_name, 'tvshow.nfo')
    if not util.is_file(nfo_file):
        return None
    return util.parse_imdbid_from_file(nfo_file)
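# Usage sketch for imdb_from_nfo (hypothetical show name; assumes SHOW_DIR is
# configured and the show directory contains a tvshow.nfo):
imdb_id = imdb_from_nfo('Some Show')
if imdb_id is None:
    print('no imdb-id available for this show')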
def rename_file(source_file, destination):
    "Custom file move/rename method using mv command in the background"
    source_file = path_to_str(source_file)
    destination = path_to_str(destination)
    if not util.is_file(source_file):
        print(f'source {CSTR(source_file, "orange")} does not exist!')
        return False
    command = f'mv "{source_file}" "{destination}"'
    return local_command(command, hide_output=True, print_info=False)
def deserialize(bits):
    assert_type(bits, BitArray, 'deserialize')
    s = bits.tobytes()
    assert_type(s, bytes, 'deserialize')  # tobytes() yields bytes
    # print('serialized payload from twitter: %s bytes -> %s bytes'
    #       % (len(bits) / 8.0, len(s)))
    print('serialized payload from twitter: %s bytes' % len(s))
    x = bson.loads(s)
    if not is_file(x) and not is_dir(x):
        raise ArgumentError('FATAL: bad type "%s"' % x['type'])
    return x
def _match_subdir(orig_path, sub_str: str):
    if not util.is_dir(orig_path):
        return ""
    for item in os.listdir(orig_path):
        if util.is_file(os.path.join(orig_path, item)):
            continue
        if is_ds_special_dir(item):
            continue
        if sub_str.lower() in item.lower():
            return item  # TODO: handle several matches
    return ""
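# Usage sketch for _match_subdir (hypothetical path; returns the first
# subdirectory whose name contains the substring, case-insensitively, or ""):
season_dir = _match_subdir('/data/tv/Some Show', 'season 1')
if season_dir:
    print(f'matched subdir: {season_dir}')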
def image_resize(path_to_image: str, width: int, height: int,
                 new_file_name=None) -> bool:
    if not util.is_file(path_to_image):
        pfcs(f'image file e[{path_to_image}] does not exist!')
        return False
    old_img_path = Path(path_to_image)
    if not new_file_name:
        new_file_name = old_img_path.name.replace(
            old_img_path.suffix, "") + "_resized" + old_img_path.suffix
        resized_img_path = Path(old_img_path.parent) / new_file_name
    elif isinstance(new_file_name, str):
        resized_img_path = Path(old_img_path.parent) / new_file_name
    elif isinstance(new_file_name, Path):
        resized_img_path = new_file_name
    else:
        pfcs(f'cannot determine output filename of '
             f'resized version of e[{path_to_image}]!')
        return False
    if util.is_file(resized_img_path):
        pfcs(f'output filename e[{resized_img_path}] already exists!')
        return False
    with Image.open(old_img_path) as image:
        resized_image = image.copy()
        resized_image.thumbnail((width, height))
        resized_image.save(resized_img_path)
    return True
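# Usage sketch for image_resize (hypothetical file names). Note that PIL's
# thumbnail() preserves aspect ratio, so the result fits within 320x240
# rather than being forced to that exact size:
if image_resize('/tmp/example.jpg', 320, 240, new_file_name='example_small.jpg'):
    print('resized copy written')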
def ls(self):
    parser = argparse.ArgumentParser(
        description="List files in the specified directory")
    parser.add_argument("dir", nargs="?", default=self.curdir.get_dir())
    args = parser.parse_args(self.args[1:])
    args.dir = self.__normalize_dir(args.dir)
    try:
        for f in self.dropbox.files_list_folder(args.dir).entries:
            if util.is_file(f):
                print(coloured(f.name, 'white'))
            else:
                print(coloured(f.name, 'yellow'))
    except dropbox.exceptions.ApiError:
        print_error("sink ls: no such directory")
def __call__(self, meta, img_directory):
    root = os.path.dirname(img_directory)
    basename = os.path.basename(img_directory)
    deslant_path = meta.path(
        os.path.join(root, self.name.lower() + "_" + basename))
    if not is_file(deslant_path):
        make_directories_for_file(deslant_path)
        original_path = meta.path(img_directory)
        image = cv2.imread(original_path)
        image = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
        image = self.__transform(image)
        cv2.imwrite(deslant_path, image)
    result = cv2.imread(deslant_path)
    return result
def unpack(payload, tweet_id, downloader, concealer,
           name_override=None, recur=False):
    assert_type(payload, dict, 'unpack')
    if is_file(payload):
        data, name, perms = payload['data'], payload['name'], int(payload['perms'])
        if name_override:
            name = name_override
        fatal_if_exists(name, 'file')
        print('unpacked "%s" from tweet_id %s' % (name, tweet_id))
        write_file(name, data)
        print('permissions: %s' % perms)
        chmod(name, perms)
        print('')
    elif is_dir(payload):
        ids, name, perms = payload['ids'], payload['name'], int(payload['perms'])
        if name_override:
            name = name_override
        fatal_if_exists(name, 'directory')
        print('unpacked "%s" with child tweet_ids %s' % (name, ids))
        write_dir(name)
        print('permissions: %s' % perms)
        chmod(name, perms)
        print('')
        if recur:
            chdir(name)
            # distinct loop variable so the tweet_id parameter is not shadowed
            for child_id in ids:
                child = downloader(child_id)
                child = concealer.reveal(child)
                child = deserialize(child)
                unpack(child, child_id, downloader, concealer,
                       name_override=None, recur=recur)
            chdir('..')
def extract(compressed_file: 'full path', destination, create_dirs=True,
            overwrite=True):
    "Extract files with fancy color output"
    compressed_file = path_to_str(compressed_file)
    destination = path_to_str(destination)
    if not util.is_file(compressed_file):
        print(f'compressed_file {CSTR(compressed_file, "orange")} does not exist!')
        return False
    if not util.is_dir(destination):
        if create_dirs:
            os.makedirs(destination)
            print(f'extract: created dir {CSTR(destination, "lblue")}')
        else:
            print(f'extract: destination {CSTR(destination, "orange")} does not exist!')
            return False
    # just support rar for now
    file_name = util.filename_of_path(compressed_file)
    print(f'extracting {CSTR(file_name, "lblue")}')
    print(f'destination {CSTR(destination, "lblue")}')
    overwrite_arg = '-o+' if overwrite else ''
    command = shlex.split(
        f'unrar e {overwrite_arg} "{compressed_file}" "{destination}"')
    process = subprocess.Popen(command, stdout=subprocess.PIPE)
    while process.poll() is None:
        byte_line = process.stdout.readline()
        line = byte_line.decode()
        if '%' in line:
            percentage_done = util.parse_percent(line)
            print(f'\r{file_name} {CSTR(percentage_done, "lgreen")}', end='')
    print()
    if process.returncode == 0:
        print(CSTR('done!', 'lgreen'))
        return True
    print(CSTR('extract failed!', 'red'))
    return False
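# Usage sketch for extract (hypothetical paths; requires the unrar binary on
# PATH, since the function above shells out to it):
if extract('/downloads/archive.rar', '/data/unpacked', create_dirs=True):
    print('archive extracted')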
def test_is_file():
    s = '\n'
    s += 'a.txt = ' + str(util.is_file('a.txt')) + '\n'
    s += 'b.txt = ' + str(util.is_file('b.txt')) + '\n'
    return s
parser.add_argument('checkpoint_file', nargs='?')
parser.add_argument('--topk', action='store', default=3, type=int,
                    help='top K most likely classes')
parser.add_argument('--category_names', action='store',
                    help='Mapping of categories to real names')
parser.add_argument('--gpu', action='store_true', default=False,
                    help='Use gpu for prediction')
args = parser.parse_args()

image_path = args.image_path
checkpoint_file = args.checkpoint_file
topk = args.topk
category_names = args.category_names
gpu = args.gpu

# parser.error() exits on its own, so no raise is needed
if image_path is None:
    parser.error("Image file path was not specified")
if checkpoint_file is None:
    parser.error("Model checkpoint file was not specified")
if not util.is_file(checkpoint_file) or not checkpoint_file.endswith(".pth"):
    parser.error("Make sure your checkpoint file exists and has a .pth extension")

model = util.load_model(checkpoint_file)
probs, labels = util.predict(image_path, model, topk, gpu)

if category_names is not None:
    cat_to_names = util.get_category_names(category_names)
    util.show_flower_labels(probs, labels, cat_to_names)
else:
    util.show_flower_labels(probs, labels)
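# Example invocation of the prediction script above (hypothetical file names;
# assumes the script is saved as predict.py and that image_path is a
# positional argument declared before the checkpoint_file line shown here):
#
#   python predict.py flower.jpg checkpoint.pth --topk 5 \
#       --category_names cat_to_name.json --gpu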