def test_PickleableObject(self):
    """Round-trip a MyPoint through pickle files, covering store_path and create_if_error."""
    point = MyPoint(name='My point', x=6, y=-3)
    point.write('test.pkl')
    loaded = MyPoint.read('test.pkl', store_path=True)
    self.dict_equal(
        loaded.__dict__,
        {'y': -3, 'x': 6, '_pickle_path': 'test.pkl', 'name': 'My point'})
    loaded.write()
    loaded.write('test2.pkl')
    os.remove('test.pkl')
    os.remove('test2.pkl')
    # write() without an argument uses the stored path, not the last explicit one
    loaded.write()
    self.false(os.path.exists('test2.pkl'))
    self.equal(loaded._pickle_path, 'test.pkl')
    os.remove('test.pkl')
    # store_path=True makes the new path the remembered one
    loaded.write('test2.pkl', store_path=True)
    self.false(os.path.exists('test.pkl'))
    self.equal(loaded._pickle_path, 'test2.pkl')
    del loaded._pickle_path
    with self.raises(ValueError):
        loaded.write()
    os.remove('test2.pkl')
    remove('test3.pkl')
    # create_if_error builds a default instance when the file is missing
    fallback = MyPoint.read(
        'test3.pkl', store_path=True, create_if_error=True,
        name='Default point', x=3, y=-6)
    self.dict_equal(
        fallback.__dict__,
        {'x': 3, 'y': -6, '_pickle_path': 'test3.pkl', 'name': 'Default point'})
    os.remove('test3.pkl')
    with self.raises(IOError):
        MyPoint.read('test3.pkl')
def download_ext_multi(
        resources,
        chunk_size=1024 * 1024,
        progress_callback=console.progress_bar,
        progress_stream=sys.stdout,
        progress_template='\r[{counter} of {total}] [{done}{todo}] {name}'):
    """
    Download resources, showing a progress bar by default.

    Each element should be a `dict` with the url, path and name keys.
    Any extra item is passed to :func:`iter_download_to_file` as extra keyword arguments.
    """
    for counter, resource in enumerate(sorted(resources, key=lambda r: r['name']), 1):
        kwargs, start_time = resource.copy(), time.time()
        url, path, name = kwargs.pop('url'), kwargs.pop('path'), kwargs.pop('name')
        # Pre-fill the static fields; {done}/{todo} are left for the progress callback
        callback = functools.partial(
            progress_callback,
            stream=progress_stream,
            template=progress_template.format(
                counter=counter,
                done='{done}',
                name=name,
                todo='{todo}',
                total=len(resources)))
        if not os.path.exists(path):
            makedirs(os.path.dirname(path))
        try:
            for returned in iter_download_to_file(
                url, path, chunk_size=chunk_size, force=False, **kwargs
            ):
                callback(start_time, returned[0], returned[1])
        except BaseException:
            # Was a bare `except:` (PEP 8 violation). BaseException keeps the original
            # semantics: remove the partial file on *any* interruption, then re-raise.
            remove(path)
            raise
        callback(start_time, 1, 1)
        progress_stream.write(os.linesep)
def test_encode(self):
    """Encoding succeeds with valid copy options, fails with a bad option or missing input."""
    statuses = list(self.ffmpeg.encode(
        Media('small.mp4'), Media('ff_output.mp4', '-c:a copy -c:v copy')))
    self.true(remove('ff_output.mp4'))
    self.equal(statuses[-1].state, EncodeState.SUCCESS)

    statuses = list(self.ffmpeg.encode(
        Media('small.mp4'), Media('ff_output.mp4', 'crazy_option')))
    self.false(remove('ff_output.mp4'))
    self.equal(statuses[-1].state, EncodeState.FAILURE)

    statuses = list(self.ffmpeg.encode(
        [Media('missing.mp4')], Media('ff_output.mp4', '-c:a copy -c:v copy')))
    self.false(remove('ff_output.mp4'))
    self.equal(statuses[-1].state, EncodeState.FAILURE)
def test_kill_process_handle_missing(self):
    """A failing encoder must still produce (and let us clean) the partial output file."""
    encoder = RaiseFFmpeg()
    with self.raises(ValueError):
        list(encoder.encode('small.mp4', 'ff_output.mp4', '-c:a copy -c:v copy'))
    self.true(remove('ff_output.mp4'))
def run(self):  # pylint:disable=no-self-use
    """Regenerate the API .rst files with sphinx-apidoc, then build the HTML documentation."""
    from pytoolbox import filesystem

    project = Path(__file__).resolve().parent
    source = project / 'docs' / 'source'

    # Drop restructured files left over from a previous generation
    for stale in filesystem.find_recursive(source, r'^pytoolbox.*\.rst$', regex=True):
        filesystem.remove(stale)

    subprocess.run(
        [
            'sphinx-apidoc', '--force', '--module-first', '--separate',
            '-o', source, project / 'pytoolbox'
        ],
        check=True)

    # Force a full rebuild of the HTML output
    filesystem.remove(project / 'docs' / 'build' / 'html', recursive=True)
    subprocess.run(['make', 'html'], cwd=project / 'docs', check=True)
def test_encode(self):
    """Check the encoder's final state for success, bad-option and missing-source cases."""
    cases = (
        (Media('small.mp4'), '-c:a copy -c:v copy', True, EncodeState.SUCCESS),
        (Media('small.mp4'), 'crazy_option', False, EncodeState.FAILURE),
        ([Media('missing.mp4')], '-c:a copy -c:v copy', False, EncodeState.FAILURE),
    )
    for source, options, created, state in cases:
        results = list(self.ffmpeg.encode(source, Media('ff_output.mp4', options)))
        # The output file exists (and is removed) only when the encode succeeded
        check = self.true if created else self.false
        check(remove('ff_output.mp4'))
        self.equal(results[-1].state, state)
def download_ext_multi(
        resources, chunk_size=1024 * 1024, progress_callback=console.progress_bar,
        progress_stream=sys.stdout,
        progress_template='\r[{counter} of {total}] [{done}{todo}] {name}'):
    """
    Download resources, showing a progress bar by default.

    Each element should be a `dict` with the url, path and name keys.
    Any extra item is passed to :func:`iter_download_to_file` as extra keyword arguments.
    """
    total = len(resources)
    for counter, resource in enumerate(sorted(resources, key=lambda r: r['name']), 1):
        options = resource.copy()
        start_time = time.time()
        url, path, name = options.pop('url'), options.pop('path'), options.pop('name')
        # Bind the static fields now; {done}/{todo} stay as placeholders for the callback
        report = functools.partial(
            progress_callback,
            stream=progress_stream,
            template=progress_template.format(
                counter=counter, done='{done}', name=name, todo='{todo}', total=total))
        if not os.path.exists(path):
            filesystem.makedirs(os.path.dirname(path))
        try:
            for chunk_info in iter_download_to_file(
                    url, path, chunk_size=chunk_size, force=False, **options):
                report(start_time, chunk_info[0], chunk_info[1])
        except Exception:
            # Never leave a partially downloaded file behind
            filesystem.remove(path)
            raise
        report(start_time, 1, 1)
        progress_stream.write(os.linesep)
def test_ffmpeg_encode(static_ffmpeg, small_mp4, tmp_path):
    """End-to-end encode: success, invalid-option failure, and missing-input failure."""
    encoder = static_ffmpeg()
    target = tmp_path / 'output.mp4'

    statuses = list(encoder.encode(
        ffmpeg.Media(small_mp4), ffmpeg.Media(target, '-c:a copy -c:v copy')))
    assert filesystem.remove(target) is True
    assert statuses[-1].state == ffmpeg.EncodeState.SUCCESS

    statuses = list(encoder.encode(
        ffmpeg.Media(small_mp4), ffmpeg.Media(target, 'crazy_option')))
    assert filesystem.remove(target) is False
    assert statuses[-1].state == ffmpeg.EncodeState.FAILURE

    statuses = list(encoder.encode(
        [ffmpeg.Media('missing.mp4')], ffmpeg.Media(target, '-c:a copy -c:v copy')))
    assert filesystem.remove(target) is False
    assert statuses[-1].state == ffmpeg.EncodeState.FAILURE
def test_PickleableObject(self):
    """Exercise write/read round-trips of a pickleable point, including path bookkeeping."""
    MyPoint(name='My point', x=6, y=-3).write('test.pkl')
    p2 = MyPoint.read('test.pkl', store_path=True)
    self.dict_equal(
        p2.__dict__,
        {'y': -3, 'x': 6, '_pickle_path': 'test.pkl', 'name': 'My point'})
    p2.write()
    p2.write('test2.pkl')
    os.remove('test.pkl')
    os.remove('test2.pkl')
    # Implicit write targets the stored path ('test.pkl'), not the last explicit one
    p2.write()
    self.false(os.path.exists('test2.pkl'))
    self.equal(p2._pickle_path, 'test.pkl')
    os.remove('test.pkl')
    # store_path=True rebinds the stored path to 'test2.pkl'
    p2.write('test2.pkl', store_path=True)
    self.false(os.path.exists('test.pkl'))
    self.equal(p2._pickle_path, 'test2.pkl')
    del p2._pickle_path
    with self.raises(ValueError):
        p2.write()
    os.remove('test2.pkl')
    remove('test3.pkl')
    # Missing file + create_if_error: a default instance is created and written
    p3 = MyPoint.read(
        'test3.pkl', store_path=True, create_if_error=True,
        name='Default point', x=3, y=-6)
    self.dict_equal(
        p3.__dict__,
        {'x': 3, 'y': -6, '_pickle_path': 'test3.pkl', 'name': 'Default point'})
    os.remove('test3.pkl')
    with self.raises(IOError):
        MyPoint.read('test3.pkl')
def test_pickleable_object():
    """Pickle round-trips with stored paths, defaults on error, and missing-file errors."""
    MyPoint(name='My point', x=6, y=-3).write('test.pkl')
    restored = MyPoint.read('test.pkl', store_path=True)
    assert restored.__dict__ == {
        'y': -3, 'x': 6, '_pickle_path': 'test.pkl', 'name': 'My point'}
    restored.write()
    restored.write('test2.pkl')
    os.remove('test.pkl')
    os.remove('test2.pkl')
    # write() with no argument falls back to the stored pickle path
    restored.write()
    assert os.path.exists('test2.pkl') is False
    assert restored._pickle_path == 'test.pkl'  # pylint:disable=protected-access
    os.remove('test.pkl')
    restored.write('test2.pkl', store_path=True)
    assert os.path.exists('test.pkl') is False
    assert restored._pickle_path == 'test2.pkl'  # pylint:disable=protected-access
    del restored._pickle_path
    with pytest.raises(ValueError):
        restored.write()
    os.remove('test2.pkl')
    filesystem.remove('test3.pkl')
    # Missing file + create_if_error: a default instance is produced
    default = MyPoint.read(
        'test3.pkl', store_path=True, create_if_error=True,
        name='Default point', x=3, y=-6)
    assert default.__dict__ == {
        'x': 3, 'y': -6, '_pickle_path': 'test3.pkl', 'name': 'Default point'
    }
    os.remove('test3.pkl')
    with pytest.raises(IOError):
        MyPoint.read('test3.pkl')
def process_role(role, roles):
    """Split one Ansible role out of the library repository and push it to its own repo."""
    directory = ROLES_TARGET_DIRECTORY / f'ansible-role-{role}'
    if not directory.exists():
        print('Create role', role)
        try:
            rsync(LIBRARY_DIRECTORY, directory, destination_is_dir=True)
            os.chdir(directory)
            # Strip the other roles' paths from the whole git history
            unwanted = PATHS + [f'roles/{other}' for other in roles - {role}]
            print('\tFiltering')
            subprocess.check_output([
                'git', 'filter-branch', '--force', '--index-filter',
                f"git rm --cached --ignore-unmatch -r {' '.join(unwanted)}",
                '--prune-empty', '--tag-name-filter', 'cat', '--', '--all'
            ])
            # Promote the role's own content to the repository root
            for path in (Path('roles') / role).glob('*'):
                print('\tMove directory', path.name)
                subprocess.check_output(['git', 'mv', path, path.name])
            subprocess.check_output(['git', 'clean', '-f', '-d'])
            print('\tGenerate README')
            filesystem.from_template(
                README_TEMPLATE, 'README.md',
                values={'has_meta': Path('meta').exists(), 'role': role},
                jinja2=True)
            subprocess.check_output(['git', 'add', 'README.md'])
            subprocess.check_output(['git', 'commit', '-m', MESSAGE])
            print('\tJob done!')
        except Exception:
            # Do not leave a half-built role directory behind
            filesystem.remove(directory, recursive=True)
            raise
    print('Push role', role)
    os.chdir(directory)
    url = create_github_repo(role)
    subprocess.check_call(['git', 'remote', 'remove', 'origin'])
    subprocess.check_call(['git', 'remote', 'add', 'origin', url])
    subprocess.check_call(['git', 'push', '--all'])
def test_ffmpeg_kill_process_handle_missing(static_ffmpeg, small_mp4, tmp_path):
    """An exception raised while finalizing statistics must still leave the output removable."""

    class SomeError(Exception):
        pass

    class RaiseEncodeStatistics(static_ffmpeg.statistics_class):

        @staticmethod
        def end(returncode):
            raise SomeError('This is the error.')

    encoder = static_ffmpeg()
    encoder.statistics_class = RaiseEncodeStatistics
    destination = tmp_path / 'out.mp4'
    with pytest.raises(SomeError):
        list(encoder.encode(small_mp4, destination, out_options='-c:a copy -c:v copy'))
    assert filesystem.remove(destination) is True
def _handle_transfer(s3, transfer, simulate, log_it):
    """Synchronize one configured directory to S3; statistics are emitted via log_it."""
    name = transfer['name']
    log_it(1, 'info', 'Handling transfer', transfer=name)
    bucket = transfer['bucket']
    delete = transfer['delete']
    directory = transfer['directory']
    prefix = transfer['prefix'].format(host_fqdn=socket.getfqdn())
    processed_bytes = processed_count = skipped_bytes = skipped_count = 0
    for source_path in filesystem.find_recursive(
        directory, transfer['patterns'], unix_wildcards=False
    ):
        target_path = os.path.join(prefix, os.path.relpath(source_path, directory))
        target_obj = aws.s3.load_object_meta(s3, bucket, target_path, fail=False)
        with open(source_path, 'rb') as source_file:
            # Compare source and target MD5s to decide whether an upload is required
            source_size = filesystem.get_size(source_path)
            target_size = None if target_obj is None else target_obj['ContentLength']
            target_md5 = None if target_obj is None else target_obj['ETag'].strip('"')
            source_md5 = crypto.checksum(
                source_path, is_path=True, algorithm='md5', chunk_size=1024 * 1024)
            changed = source_md5 != target_md5
            log_it(
                2, 'info', 'File', transfer=name, changed=changed,
                source_md5=source_md5, source_path=source_path, source_size=source_size,
                target_md5=target_md5, target_path=target_path, target_size=target_size)
            if changed:
                processed_bytes += source_size
                processed_count += 1
            else:
                skipped_bytes += source_size
                skipped_count += 1
            if not simulate:
                # Fix: only upload when the content actually changed — the MD5
                # comparison previously had no effect on the upload itself.
                if changed:
                    aws.s3.write_object(s3, bucket, target_path, source_file)
                # Local copy may be removed either way: the object exists remotely.
                if delete:
                    filesystem.remove(source_path)
    log_it(
        1, 'info', 'Summary', transfer=name,
        processed_bytes=processed_bytes, processed_count=processed_count,
        skipped_bytes=skipped_bytes, skipped_count=skipped_count)


def main():
    """Archive configured directories to S3, driven by a YAML configuration file."""
    signal.signal(signal.SIGINT, lambda *args: sys.exit(0))
    log = setup_log()

    parser = argparse.ArgumentParser(epilog='Archive stuff on S3.')
    parser.add_argument('--config', action=FullPaths, required=True, type=is_file)
    parser.add_argument('--simulate', action='store_true')
    parser.add_argument('--verbosity', choices=(0, 1, 2), default=0, type=int)
    args = parser.parse_args()

    def log_it(verbosity, level, message, **extra):
        # Only emit entries at or below the configured verbosity
        if args.verbosity >= verbosity:
            extra['level'] = LEVEL_MAP[level]
            getattr(log, level)(message, extra=extra)

    s3 = boto3.client('s3')
    log_it(1, 'info', 'Process started')
    try:
        with open(args.config) as config_file:
            # Fix: yaml.load without a Loader is unsafe and raises on PyYAML >= 6
            config = yaml.safe_load(config_file)
        if config['enabled']:
            log_it(1, 'info', 'Its time to transfer!')
            if args.simulate:
                log_it(1, 'warning', 'Simulation mode enabled')
            for transfer in config['transfers']:
                _handle_transfer(s3, transfer, args.simulate, log_it)
        else:
            log.warning('Process is disabled')
    except Exception as e:  # top-level boundary: log and continue to shutdown
        log.exception(e)
    finally:
        log_it(1, 'info', 'Process ended')
def test_kill_process_handle_missing(self):
    """Encoding failures must still leave a removable partial output file."""
    with self.raises(ValueError):
        statuses = RaiseFFmpeg().encode('small.mp4', 'ff_output.mp4', '-c:a copy -c:v copy')
        list(statuses)
    self.true(remove('ff_output.mp4'))