def _run(self, in_dir, out_dir, convert_config):
    """Process each input/output pair, converting files that need it.

    Skips files whose name fails ``check_input_name`` and files modified
    within the last 5 minutes (presumably still being written — TODO confirm).

    :param in_dir: directory of source files
    :param out_dir: directory for converted output
    :param convert_config: configuration passed through to ``self.process``
    :return: tuple of (converted_count, total_files, error_count)
    """
    total_files = 0
    count = 0
    error_count = 0
    to_process = list(get_input_output(in_dir, out_dir, self.working_dir))
    for input_file, output_file, temp_file in to_process:
        total_files += 1
        try:
            status = self.db.get(input_file, output_file)
            if status.should_process():
                m_time = datetime.fromtimestamp(os.path.getmtime(input_file))
                if not self.check_input_name(input_file):
                    logger.warning('Invalid name, skipping: {}'.format(input_file))
                elif m_time > (datetime.now() - timedelta(minutes=5)):
                    logger.debug('File too recently modified, skipping: {}'.format(input_file))
                else:
                    logger.info('Starting {}'.format(input_file))
                    if self.process(in_dir, input_file, output_file,
                                    temp_file, status, convert_config):
                        count += 1
                    # Persist status even on a failed conversion so the
                    # attempt is recorded in the database.
                    self.db.save(status)
            else:
                logger.debug('Not processing: {}'.format(input_file))
        except Exception:
            # Was a bare `except:`, which also swallowed SystemExit and
            # KeyboardInterrupt; narrowed so the process stays interruptible.
            logger.exception('Exception while processing {}'.format(input_file))
            error_count += 1
    return count, total_files, error_count
def subexecute(self, ns):
    """Execute the convert command from parsed namespace ``ns``.

    Directory input requires the --bulk flag (converts every file into
    the output directory); file input converts a single file, refusing
    to overwrite an existing output unless --overwrite was given.
    """
    import os
    from media_management_scripts.convert import convert_config_from_ns
    input_to_cmd = ns['input']
    output = ns['output']
    overwrite = ns['overwrite']
    bulk = ns['bulk']
    config = convert_config_from_ns(ns)
    if os.path.isdir(input_to_cmd):
        if bulk:
            os.makedirs(output, exist_ok=True)
            files = list(get_input_output(input_to_cmd, output))
            self._bulk(files,
                       lambda i, o: _bulk_convert(i, o, config, overwrite),
                       ['Input', 'Output'])
        else:
            # Fixed typo in the message: "direction" -> "directory"
            print('Cowardly refusing to convert a directory without --bulk flag')
    elif not overwrite and os.path.exists(output):
        print('Cowardly refusing to overwrite existing file: {}'.format(output))
    else:
        convert_with_config(input_to_cmd, output, config,
                            print_output=True, overwrite=overwrite)
def get_combinable_files(input_dir, output_dir, forced_language=None, lower_case=False):
    """Group video files with their subtitle files for combining.

    Files are grouped by path-without-extension. For subtitle files, a
    trailing language suffix (e.g. ``movie.en.srt``) is stripped from the
    grouping key and used as the subtitle language, unless
    ``forced_language`` is given.

    :param input_dir: directory of source files
    :param output_dir: directory for output files
    :param forced_language: when set, used as the language for all subtitles
    :param lower_case: lowercase the basename before the language search
    :yield: (video_file, srt_file, language, output_file) per group, where
        output_file has its extension replaced with '.mkv'
    """
    files = {}
    for f, o in get_input_output(input_dir, output_dir, filter=_filter):
        filename = os.path.basename(f)
        if lower_case:
            filename = filename.lower()
        no_ext, ext = os.path.splitext(f)
        lang = forced_language
        if not lang and ext in subtitle_exts:
            # Subtitle file: look for a language code before the extension
            m = LANG_PATTERN.search(filename)
            if m:
                no_ext, lang = os.path.splitext(no_ext)
                lang = lang[1:]  # drop the leading '.'
        group = files.setdefault(no_ext, [])
        group.append((f, ext, lang, o))
    for key in sorted(files.keys()):
        video_file = None
        output_file = None
        srt_file = None
        language = None
        for file, ext, lang, o in files[key]:
            if ext in subtitle_exts:
                srt_file = file
                language = lang
            else:
                video_file = file
                output_file = o
        # Swap only the extension. The previous
        # `output_file.replace(ext, '.mkv')` used the leaked loop variable
        # `ext` (which could be the subtitle's extension when the subtitle
        # sorted after the video, leaving the name unchanged) and replaced
        # every occurrence of that text anywhere in the path.
        output_file = os.path.splitext(output_file)[0] + '.mkv'
        yield video_file, srt_file, language, output_file
def _get_io(self, input_to_cmd, output_dir):
    """Yield (input, output) pairs with each output's extension set to .srt."""
    from media_management_scripts.support.files import get_input_output
    pairs = get_input_output(input_to_cmd, output_dir, filter=self._filter)
    for src, dest in pairs:
        base, _ = os.path.splitext(dest)
        yield src, base + '.srt'
def subexecute(self, ns):
    """Print a table comparing source files with their converted outputs.

    Each row shows the source file's codec, resolution and bitrate; when
    the destination file exists, the same three fields for it, otherwise
    blanks.
    """
    from media_management_scripts.utils import create_metadata_extractor
    from media_management_scripts.support.files import get_input_output
    from media_management_scripts.support.formatting import bitrate_to_str
    src_dir = ns['source']
    dst_dir = ns['destination']
    # Removed: dst_files = list(list_files(dst_dir)) — computed but never
    # used, wasting a full directory walk.
    meta_db = ns.get('db', None)
    table = []
    extractor = create_metadata_extractor(meta_db)
    for src_file, dst_file in get_input_output(src_dir, dst_dir):
        row = []
        src_meta = extractor.extract(src_file)
        src_video = src_meta.video_streams[0]
        row.append(os.path.basename(src_file))
        row.append(src_video.codec)
        row.append('{}x{}'.format(src_video.width, src_video.height))
        row.append(bitrate_to_str(src_meta.bit_rate))
        if os.path.exists(dst_file):
            dst_meta = extractor.extract(dst_file)
            dst_video = dst_meta.video_streams[0]
            row.append(dst_video.codec)
            row.append('{}x{}'.format(dst_video.width, dst_video.height))
            row.append(bitrate_to_str(dst_meta.bit_rate))
        else:
            row.extend(['', '', ''])
        table.append(tuple(row))
    columns = [
        'Source', 'Src Codec', 'Src Resolution', 'Src Bitrate',
        'Dest Codec', 'Dest Resolution', 'Dest Bitrate'
    ]
    self._bulk_print(table, columns)
def main(input_dir, output_dir, config):
    """Convert every file from input_dir into output_dir using ``config``.

    Sweeps the file list repeatedly: as long as at least one conversion
    succeeded during a pass, another pass is made, so transient failures
    get retried. Stops after the first pass with no successful convert.
    """
    files = list(get_input_output(input_dir, output_dir))
    logger.info('{} files to process'.format(len(files)))
    did_process = True
    while did_process:
        did_process = False
        for input_file, output_file in files:
            # An existing output marks the file as already converted.
            if not os.path.exists(output_file):
                try:
                    logger.info('Starting convert of {} -> {}'.format(
                        input_file, output_file))
                    create_dirs(output_file)
                    ret = convert_with_config(input_file, output_file, config)
                    if ret == 0:
                        did_process = True
                    else:
                        logger.error(
                            'Nonzero return code from ffmpeg: {}'.format(ret))
                except Exception:
                    # Dropped the unused `as e` binding; logger.exception
                    # already records the active traceback.
                    logger.exception('Exception during convert')
def do_compare(input, output):
    """Compare original files against their transcoded counterparts.

    Prints a table of per-file sizes and size percentages plus totals and
    an average, then lists outputs larger than their source and files not
    converted. An output under 15% of the original size is treated as a
    failed/partial conversion and reported as not converted.

    :param input: directory of original files
    :param output: directory of transcoded files
    """
    count = 0
    sum_percent = 0
    bigger = []
    not_converted = []
    table = [['File', 'Original', 'Transcoded', 'Percent']]
    total_i = 0
    total_o = 0
    for i, o in get_input_output(input, output):
        name = os.path.basename(i)
        if os.path.exists(o):
            i_size = os.path.getsize(i)
            o_size = os.path.getsize(o)
            percent = o_size / float(i_size) * 100
            if percent < 15:
                # Fixed format spec: '{:2f}' meant width 2 with the default
                # 6 decimal places; '{:.2f}' (two decimals) was intended,
                # matching the other percent outputs below.
                not_converted.append('{} (Too small, {:.2f}%)'.format(
                    name, percent))
            elif o_size > i_size:
                bigger.append((name, o, i_size, o_size, percent))
            else:
                count += 1
                total_i += i_size
                total_o += o_size
                sum_percent += percent
                table.append([
                    name,
                    sizeof_fmt(i_size),
                    sizeof_fmt(o_size),
                    _f_percent(percent)
                ])
        else:
            not_converted.append(name)
    if count > 0:
        table.append(['', '', '', ''])
        per = total_o / float(total_i) * 100
        table.append([
            'Total', sizeof_fmt(total_i), sizeof_fmt(total_o), _f_percent(per)
        ])
        avg = sum_percent / count
        table.append(['Average', '', '', _f_percent(avg)])
    t = Texttable(max_width=0)
    t.set_deco(Texttable.VLINES | Texttable.HEADER)
    t.set_cols_align(['l', 'r', 'r', 'r'])
    t.add_rows(table)
    print(t.draw())
    print('{} Larger than original'.format(len(bigger)))
    for i, o, i_size, o_size, percent in bigger:
        print('{}: {} -> {} ({:.2f}%)'.format(i, sizeof_fmt(i_size),
                                              sizeof_fmt(o_size), percent))
    if len(not_converted) > 0:
        print('Not Converted:')
        for i in not_converted:
            print(' {}'.format(i))
def get_existing_success(self, in_dir, out_dir):
    """Yield inputs whose output exists and whose DB status shows both the
    backup and the convert step succeeded."""
    for src, dest in get_input_output(in_dir, out_dir):
        if not os.path.exists(dest):
            continue
        record = self.db.get(src, dest)
        if record.backup and record.convert:
            yield src