def generate_from_template(docx_template, out_dir, namelines_list, contexts_list):
    """Render one docx/pdf/png document per (nameline, context) pair.

    For each pair, renders `docx_template` with `context`, saves it under
    `out_dir`/docx/, converts it to pdf via headless LibreOffice, and
    (best-effort) renders a png preview with wand/ImageMagick.

    Args:
        docx_template: path to the docx template file.
        out_dir: directory containing the docx/, pdf/ and png/ subdirectories.
        namelines_list: names used (slugified) as output file basenames.
        contexts_list: template contexts, parallel to `namelines_list`.
    """
    for nameline, context in tqdm(list(zip(namelines_list, contexts_list)), disable=False):
        nameline = slugify(nameline)
        doc = DocxTemplate(docx_template)
        with cd(out_dir):
            # substitute our data into the docx-format template
            doc.render(context)
            doc.save('docx/%s.docx' % nameline)
            # convert docx to pdf
            # for this command to work, libreoffice must be closed(!)
            # https://stackoverflow.com/a/43907693
            subprocess.call(
                ['libreoffice', '--headless', '--convert-to', 'pdf',
                 '--outdir', './pdf', './docx/%s.docx' % nameline],
                stdout=subprocess.DEVNULL)
            # create png previews
            # only if wand is installed and does not crash
            # (e.g. due to missing libmagickwand-dev ghostscript)
            with funcy.suppress(ImportError):
                from wand.image import Image
                from wand.color import Color
                from wand.exceptions import WandError
                with funcy.suppress(WandError):
                    with Image(filename='pdf/%s.pdf' % nameline,
                               resolution=300,
                               background=Color('white')) as img:
                        img.save(filename='png/%s.png' % nameline)
def project(self):
    """Return the active Ballet project.

    Resolution order:
    1. the explicitly configured `ballet_yml_path`,
    2. the notebook server's directory,
    3. the current working directory.

    Raises:
        ConfigurationError: if no project can be located.
    """
    if self.ballet_yml_path:
        return Project.from_path(self.ballet_yml_path)

    notebook_dir = NotebookApp.instance().notebook_dir
    try:
        return Project.from_path(notebook_dir)
    except Exception:
        pass

    try:
        return Project.from_cwd()
    except Exception:
        pass

    raise ConfigurationError('Could not detect Ballet project')
def pull_to(self, path, to_info):
    """
    Pull the corresponding file or directory specified by `path`
    and checkout it into `to_info`. It works with files tracked by Git
    and DVC, and also local files outside the repository.
    """
    out = None
    path_info = PathInfo(self.root_dir) / path

    # Resolve `path` to a DVC-tracked output, if it is one.
    with suppress(OutputNotFoundError):
        (out, ) = self.find_outs_by_path(fspath(path_info), strict=False)

    try:
        if out and out.use_cache:
            # DVC-tracked and cached: fetch through the cache machinery.
            self._pull_cached(out, path_info, to_info)
            return

        # Check if it is handled by Git (it can't have an absolute path)
        if os.path.isabs(path):
            raise FileNotFoundError

        fs_copy(fspath(path_info), fspath(to_info))
    except FileNotFoundError:
        raise PathMissingError(path, self.url)
def is_authenticated(self):
    """Report whether this client is authenticated.

    Authentication is probed lazily: accessing `self.username` acts as
    the check, and any failure leaves the cached flag untouched.
    """
    if not self._is_authenticated:
        try:
            self.username  # raises if the credentials are not valid
            self._is_authenticated = True
        except Exception:
            pass
    return self._is_authenticated
def s3_success():
    """S3 upload-success callback: record the uploaded video and start
    transcoding.

    Reads the uploader's form fields `key`, `name` and `bucket`.
    """
    # defensively prevent duplicates
    # if s3_success is called multiple times for same upload, ignore
    if db.session.query(FileMapper) \
            .filter_by(s3_upload_video_key=request.form.get('key')).first():
        return {}

    video = Video(
        user_id=current_user.id,
        # initial title is the uploaded filename without its extension
        title=request.form.get('name').split('.')[0],
        uploaded_at=dt.datetime.utcnow(),
    )
    video.file_mapper = FileMapper(
        s3_upload_bucket=request.form.get('bucket'),
        s3_upload_video_key=request.form.get('key'),
    )
    db.session.add(video)
    db.session.commit()

    # start the transcoding job
    start_transcoder_job.delay(video.id)

    # if the bucket policy were public, we wouldn't need this
    with suppress(Exception):
        s3_make_public(
            video.file_mapper.s3_upload_bucket,
            video.file_mapper.s3_upload_video_key
        )

    return jsonify(
        video_id=video.id,
    )
def publish_to_ethereum(video_id):
    """ Publish the last uploaded video """
    video = db.session.query(Video).filter_by(
        id=video_id,
        user_id=current_user.id,
    ).first()
    if not video:
        return redirect(url_for('.publish_list_uploads'))
    if video.published_at:
        # already published; just show it
        return redirect(f'/v/{video.id}')

    # special case - if its their first video, publish it for free
    # NOTE(review): this counts *all* of the user's videos, including the
    # (already uploaded) one being published — confirm `num_videos == 0`
    # below is reachable, or whether the intent was to count published ones.
    num_videos = db.session.query(Video).filter_by(
        user_id=current_user.id,
    ).count()

    # best-effort: fall back to the null address if the user has no wallet
    # (the `.first()` may return None, making the attribute access raise)
    eth_address = null_address
    with suppress(Exception):
        wallet = db.session.query(Wallet).filter_by(
            user_id=current_user.id,
        ).order_by(desc(Wallet.created_at)).first()
        eth_address = wallet.default_address

    if num_videos == 0:
        video.published_at = dt.datetime.utcnow()
        video.eth_address = eth_address
        db.session.add(video)
        db.session.commit()
        return redirect(f'/v/{video.id}')

    return render_template(
        'publish-to-ethereum.html',
        video=video,
    )
def prepare_gene_plot(analysis, mygene_sym):
    """Return the path to the cached PNG plot for a gene.

    The plot is rendered on first request and reused afterwards.
    """
    plot_path = '{}/{}/{}.png'.format(PLOTS_DIR, analysis.id, mygene_sym)
    if not os.path.exists(plot_path):
        try:
            os.makedirs(os.path.dirname(plot_path))
        except OSError:
            pass  # directory already present
        write_gene_plot(plot_path, mygene_sym, analysis.fold_changes.frame)
    return plot_path
def dump_error(name, files):
    """Persist a set of error artifacts under <BASE_DIR>/_errors/<name>/.

    Args:
        name: subdirectory name identifying this error occurrence.
        files: mapping of filename -> text content to write.
    """
    path = os.path.join(settings.BASE_DIR, '_errors', name)
    # exist_ok only tolerates a pre-existing directory; the previous blanket
    # suppress(OSError) also masked real failures (permissions, bad path),
    # which then resurfaced as a confusing open() error below.
    os.makedirs(path, exist_ok=True)
    for filename, data in files.items():
        with open(os.path.join(path, filename), 'w') as f:
            f.write(data)
def create_all_records(process_big=True):
    """Yield a record for every known dataset, skipping any that fail.

    Args:
        process_big: when False, the largest datasets are excluded.
    """
    dataset_ids = ex.get_datasets()['dataset'].tolist()
    big_datasets = [
        '124_153_svhn_cropped',
        '31_urbansound',
        'bone_image_classification',
        'bone_image_collection',
    ]
    if not process_big:
        dataset_ids = [
            dataset_id
            for dataset_id in dataset_ids
            if dataset_id not in big_datasets
        ]
    for dataset_id in tqdm(dataset_ids):
        try:
            yield create_record(dataset_id)
        except Exception:
            continue  # best-effort: skip datasets that error out
def check():
    """Serve the most recently generated check image.

    Returns a plain-text message when no rendered checks exist yet.
    """
    rendered = glob.glob(os.path.abspath('resources/*.png'))
    try:
        # max() raises ValueError on an empty sequence
        newest = max(rendered, key=os.path.getctime)
        return send_file(newest)
    except ValueError:
        return 'No checks genereated yet. Yikes!'
def kiosk_ip(printer_ip):
    """Kiosk route parameterized by the printer's IP address in the URL.

    Responds with HTTP 422 when the URL value is not a valid IP.
    """
    try:
        ipaddress.ip_address(printer_ip)
        return app.send_static_file('kiosk.html')
    except ValueError:
        abort(422)  # invalid IP-address
def _reflink_linux(src, dst): import fcntl # pylint: disable=import-error from funcy import suppress FICLONE = 0x40049409 try: with open(src, "rb") as s, open(dst, "wb+") as d: fcntl.ioctl(d.fileno(), FICLONE, s.fileno()) except OSError: with suppress(OSError): os.unlink(dst) raise
def nonnegative(call, name=None):
    """Warn if the function's return value is negative and set it to 0.

    Args:
        call: zero-argument callable whose result is checked.
        name: human-readable name for the warning message; when omitted,
            derived from the wrapped function's name (e.g.
            ``_compute_foo_bar`` -> ``FooBar``).

    Returns:
        The call's result, replaced by 0.0 when it compared below zero.
        Results that do not support ``< 0`` (TypeError) pass through
        unchanged.
    """
    result = call()
    with suppress(TypeError):
        if result < 0:
            result = 0.0
            # Format a nice log message
            if name is None:
                try:
                    pieces = call._func.__name__.split('_')[1:]
                    name = ''.join(map(str.capitalize, pieces))
                except AttributeError:
                    # Fix: a missing `_func` attribute raises AttributeError,
                    # never RuntimeError — the old handler was dead code and
                    # the error escaped uncaught.
                    name = 'Result'
            logger.warning('%s should be non-negative.', name)
    return result
def start_new_feature(contrib_dir=None, **cc_kwargs):
    """Start a new feature within a ballet project

    By default, will prompt the user for input using cookiecutter's input
    interface.

    Renders the feature template into a temporary directory, then copies the
    feature files into the proper path within the contrib directory.

    Args:
        contrib_dir: directory under which to place contributed features
        **cc_kwargs: options for the cookiecutter template

    Raises:
        ballet.exc.BalletError: the new feature has the same name as an
            existing one
    """
    # Fix: `project` was previously bound only in the contrib_dir is None
    # branch, so passing an explicit contrib_dir raised NameError at the
    # detect_github_username call below.
    project = Project.from_path(pathlib.Path.cwd().resolve())
    if contrib_dir is None:
        contrib_dir = project.config.get('contrib.module_path')

    # inject default username into context
    default_username = detect_github_username(project)
    cc_kwargs.setdefault('extra_context', {})
    cc_kwargs['extra_context'].update({'_default_username': default_username})

    with tempfile.TemporaryDirectory() as tempdir:
        # render feature template
        output_dir = tempdir
        cc_kwargs['output_dir'] = output_dir
        rendered_dir = render_feature_template(**cc_kwargs)

        # clean pyc files from rendered dir
        for path in pwalk(rendered_dir, topdown=False):
            if path.suffix == '.pyc':
                path.unlink()
            if path.name == '__pycache__':
                with fy.suppress(OSError):
                    path.rmdir()

        # copy into contrib dir (fails if a like-named feature exists)
        src = rendered_dir
        dst = contrib_dir
        result = synctree(src, dst, onexist=_fail_if_feature_exists)

    _log_start_new_feature_success(result)

    return result
def delete_video_sql(video_id):
    """Delete an unpublished video and its dependent database rows.

    Raises:
        AssertionError: if the video has already been published.
    """
    video = db.session.query(Video).filter_by(id=video_id).one()
    assert video.published_at is None, 'Cannot delete a published video'

    # cancel transcoding jobs
    for job in db.session.query(TranscoderJob).filter_by(video_id=video.id):
        if job.status in (TranscoderStatus.pending, TranscoderStatus.processing):
            # best-effort: the remote job may already be gone or finished
            with suppress(Exception):
                cancel_job(job.id)

    # delete orphaned (non-cascading entries)
    db.session.query(TranscoderJob).filter_by(video_id=video_id).delete()
    db.session.query(VideoFrameAnalysis).filter_by(video_id=video_id).delete()

    db.session.delete(video)
    db.session.commit()
def __init__(self, gdata, current_page_num):
    """Initialize gallery state and prefetch the next page on first load.

    Args:
        gdata: gallery data object holding the current page's illusts and
            the pages cache.
        current_page_num: number of the page currently being shown.
    """
    self._current_page_num = current_page_num
    self.data = gdata
    # Defined in self.view_image
    self._selected_image_num: int
    # Defined in child classes
    self._main_path: str

    pure.print_multiple_imgs(self.data.current_page_illusts)
    print(f'Page {self._current_page_num}')
    # Make sure the following work:
    # Gallery -> next page -> image prompt -> back -> prev page
    if len(self.data.all_pages_cache) == 1:
        # Prefetch the next page on first gallery load
        with funcy.suppress(LastPageException):
            self._prefetch_next_page()
def pull_to(self, path, to_info):
    """Pull the repo-relative file/directory `path` into `to_info`.

    Handles both DVC-tracked outputs (via the cache) and Git-tracked files.
    """
    try:
        out = None
        # Resolve to a DVC output, if `path` is tracked by DVC.
        with suppress(OutputNotFoundError):
            out = self.find_out_by_relpath(path)

        if out and out.use_cache:
            self._pull_cached(out, to_info)
            return

        # Git handled files can't have absolute path
        if os.path.isabs(path):
            raise FileNotFoundError

        fs_copy(os.path.join(self.root_dir, path), fspath(to_info))
    except FileNotFoundError:
        raise PathMissingError(path, self.url)
def test_wrap_prop():
    """wrap_prop should wrap __get__ of both kinds of props, and __set__
    only for data descriptors."""
    calls = []

    # Not using @contextmanager to not make this a decorator
    class Manager:
        def __init__(self, name):
            self.name = name

        def __enter__(self):
            calls.append(self.name)
            return self

        def __exit__(self, *args):
            pass

    class A(object):
        @wrap_prop(Manager('p'))
        @property
        def prop(self):
            return 1

        @wrap_prop(Manager('cp'))
        @cached_property
        def cached_prop(self):
            return 1

    a = A()
    assert a.prop and calls == ['p']
    assert a.prop and calls == ['p', 'p']
    assert a.cached_prop and calls == ['p', 'p', 'cp']
    assert a.cached_prop and calls == ['p', 'p', 'cp']

    # Wrap __set__ for data props
    a = A()
    calls[:] = []
    with suppress(AttributeError):
        a.prop = 2
    assert calls == ['p']

    # Do not wrap __set__ for non-data props
    # Fix: was `a.cached_property = 2`, which set an unrelated attribute
    # and made the assertion below pass vacuously.
    a.cached_prop = 2
    assert calls == ['p']
def _synctree( src: pathlib.Path, dst: pathlib.Path, onexist: Callable[[pathlib.Path], None]) -> List[Tuple[pathlib.Path, str]]: result = [] cleanup = [] try: for _root, dirnames, filenames in os.walk(src): root = pathlib.Path(_root) relative_dir = root.relative_to(src) for dirname in dirnames: dstdir = dst.joinpath(relative_dir, dirname) if dstdir.exists(): if not dstdir.is_dir(): raise BalletError else: logger.debug(f'Making directory: {dstdir!s}') dstdir.mkdir() result.append((dstdir, 'dir')) cleanup.append(partial(os.rmdir, dstdir)) for filename in filenames: srcfile = root.joinpath(filename) dstfile = dst.joinpath(relative_dir, filename) if dstfile.exists(): onexist(dstfile) else: logger.debug(f'Copying file to destination: {dstfile!s}') copyfile(srcfile, dstfile) result.append((dstfile, 'file')) cleanup.append(partial(os.unlink, dstfile)) except Exception: with suppress(Exception): for f in reversed(cleanup): f() raise return result
def s3_thumb_success():
    """S3 upload-success callback for a custom video thumbnail.

    Records the uploaded key on the video's file mapper and starts resizing.
    Reads form fields `video_id` and `key`.
    """
    video = Video.query.filter_by(
        user_id=current_user.id,
        id=request.form.get('video_id'),
    ).first_or_404()
    video.file_mapper.s3_upload_thumbnail_key = request.form.get('key')
    db.session.add(video)
    db.session.commit()

    # start the thumbnail resizing
    process_thumbnails.delay(video.id)

    # if the bucket policy were public, we wouldn't need this
    with suppress(Exception):
        s3_make_public(
            video.file_mapper.s3_upload_bucket,
            video.file_mapper.s3_upload_thumbnail_key
        )

    return jsonify(
        video_id=video.id,
    )
def s3_avatar_success():
    """S3 upload-success callback for a channel avatar.

    Stores the uploaded key in the channel profile and starts resizing.
    Reads form fields `channel_id` and `key`.
    """
    channel = Channel.query.filter_by(
        user_id=current_user.id,
        id=request.form.get('channel_id'),
    ).first_or_404()
    # reassign the whole dict so the ORM notices the change
    profile = channel.profile or {}
    profile['s3_avatar_key'] = request.form.get('key')
    channel.profile = profile
    db.session.add(channel)
    db.session.commit()

    # start the thumbnail resizing
    process_avatar.delay(channel.id)

    # if the bucket policy were public, we wouldn't need this
    with suppress(Exception):
        s3_make_public(app.config['S3_UPLOADS_BUCKET'], profile['s3_avatar_key'])

    return jsonify(
        channel_id=channel.id,
    )
def _maybe_join_thread(self):
    """Implements abstractmethod: Wait for parse_thread to join (if any)"""
    try:
        self.parse_thread.join()
    except AttributeError:
        pass  # no parse thread was ever started; nothing to wait for
def _prefetch_next_image(self):
    """Kick off a background download of the next image, if any."""
    try:
        url = self.next_img_url
        if url:
            download.async_download_spinner(self.download_path, [url])
    except IndexError:
        pass  # already at the last image; nothing to prefetch
# Seed the registration database from a CSV fixture.
db = RegistrationDB(drop_first=True)
random.seed(42)  # deterministic fixture data across runs

rows = get_from_csv('input/data_2018_05_10.csv')
for row in rows:
    team_kwargs = {
        'name': row['team'],
        # NOTE(review): this value is a zero-arg callable, not a string —
        # presumably RegistrationDB calls it to (re)generate a team id,
        # e.g. on collisions; confirm against RegistrationDB.add_instance.
        'tid': lambda: random.choice(ascii_uppercase)
    }
    participant_kwargs = {
        'last_name': row['last'],
        'first_name': row['first'],
        'school': row['school'],
        'team': row['team'],
        'classname': random.choice([7, 8, 9]),
        # 'time_checked': func.now(),
        'json': {
            'fav_color': 'green',
            'height': 168,
        },
    }
    # Retry until the insert succeeds: IntegrityError (e.g. a duplicate
    # generated id) is suppressed and the loop tries again.
    while True:
        with suppress(IntegrityError):
            db.add_instance(team_kwargs, participant_kwargs)
            break
def start_new_feature(
        contrib_dir: Pathy = None,
        branching: bool = True,
        **cc_kwargs) -> List[Tuple[pathlib.Path, str]]:
    """Start a new feature within a ballet project

    If run from default branch, by default will attempt to switch to a new
    branch for this feature, given by `<username>/feature-<featurename>`.

    By default, will prompt the user for input using cookiecutter's input
    interface.

    Renders the feature template into a temporary directory, then copies the
    feature files into the proper path within the contrib directory.

    Args:
        contrib_dir: directory under which to place contributed features
        branching: whether to attempt to manage branching
        **cc_kwargs: options for the cookiecutter template

    Raises:
        ballet.exc.BalletError: the new feature has the same name as an
            existing one
    """
    if contrib_dir is not None:
        try:
            project = Project.from_path(contrib_dir, ascend=True)
            default_username = detect_github_username(project)
        except ConfigurationError:
            # Fix: `project` was previously left unbound on this path, so
            # the branching check below raised NameError. Without a project
            # we cannot manage branches, so record that explicitly.
            project = None
            default_username = '******'
    else:
        project = Project.from_cwd()
        contrib_dir = project.config.get('contrib.module_path')
        default_username = detect_github_username(project)

    # inject default username into context
    cc_kwargs.setdefault('extra_context', {})
    cc_kwargs['extra_context'].update({'_default_username': default_username})

    with tempfile.TemporaryDirectory() as tempdir:
        # render feature template
        output_dir = tempdir
        cc_kwargs['output_dir'] = output_dir
        rendered_dir = render_feature_template(**cc_kwargs)

        # clean pyc files from rendered dir
        for path in pwalk(rendered_dir, topdown=False):
            if path.suffix == '.pyc':
                path.unlink()
            if path.name == '__pycache__':
                with fy.suppress(OSError):
                    path.rmdir()

        # copy into contrib dir
        src = rendered_dir
        dst = contrib_dir
        result = synctree(src, dst, onexist=_fail_if_feature_exists)

    target_branch = None
    if branching and project is not None and project.on_master:
        # try to set the target branch name from the rendered feature path
        paths = [path for path, kind in result if kind == 'file']
        for path in paths:
            parts = pathlib.Path(path).parts
            subpackage, module = parts[-2], parts[-1]
            user_match = fy.re_find(SUBPACKAGE_NAME_REGEX, subpackage)
            feature_match = fy.re_find(FEATURE_MODULE_NAME_REGEX, module)
            if feature_match:
                username = user_match['username']
                featurename = feature_match['featurename'].replace('_', '-')
                target_branch = f'{username}/feature-{featurename}'

        if target_branch is not None:
            switch_to_new_branch(project.repo, target_branch)

    _log_start_new_feature_success(result)
    _log_switch_to_new_branch(target_branch)

    return result
def _safe_delete_remote(repo, name):
    """Best-effort removal of a git remote; any failure is ignored."""
    try:
        repo.delete_remote(name)
    except Exception:
        pass
def __getitem__(self, key):
    """Fetch the item from the parent collection and eagerly trigger its
    lazy load; errors during the load are ignored."""
    item = super(LazyModelCollection, self).__getitem__(key)
    try:
        item.get()
    except Exception:
        pass
    return item
def get_prep_value(self, value):
    # NOTE(review): method of an enclosing custom field class whose header is
    # not visible in this chunk; serializes a sequence as a comma-joined string.
    return ','.join(map(str, value))


class Weird(models.Model):
    # Model exercising a variety of field default styles.
    date_field = models.DateField(default=date(2000, 1, 1))
    datetime_field = models.DateTimeField(default=datetime(2000, 1, 1, 10, 10))
    time_field = models.TimeField(default=time(10, 10))
    list_field = IntegerArrayField(default=lambda: [])
    custom_field = CustomField(default=CustomValue('default'))

    objects = models.Manager()
    customs = CustomManager()


# contrib.postgres ArrayField
with suppress(ImportError):
    from django.contrib.postgres.fields import ArrayField

    class TaggedPost(models.Model):
        name = models.CharField(max_length=200)
        tags = ArrayField(models.IntegerField())


# 16
class Profile(models.Model):
    user = models.ForeignKey(User)
    tag = models.IntegerField()


# Proxy model
# NOTE(review): this chunk is truncated here — the body of Video (and the
# proxy class the comment above refers to) is not visible.
class Video(models.Model):
class Migration(migrations.Migration):
    # Initial schema for the test models.

    dependencies = [
        # ('contenttypes', '0002_remove_content_type_name'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='All',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('tag', models.IntegerField(null=True)),
            ],
        ),
        migrations.CreateModel(
            name='Brand',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
            ],
        ),
        migrations.CreateModel(
            name='BrandT',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
            ],
        ),
        migrations.CreateModel(
            name='CacheOnSaveModel',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('title', models.CharField(max_length=32)),
            ],
        ),
        migrations.CreateModel(
            name='Category',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('title', models.CharField(max_length=128)),
            ],
        ),
        migrations.CreateModel(
            name='Contained',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=30)),
            ],
        ),
        migrations.CreateModel(
            name='DbAgnostic',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
            ],
        ),
        migrations.CreateModel(
            name='DbBinded',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
            ],
        ),
        migrations.CreateModel(
            name='Extra',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('tag', models.IntegerField(unique=True, db_column='custom_column_name')),
            ],
        ),
        migrations.CreateModel(
            name='GenericContainer',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('object_id', models.PositiveIntegerField()),
                ('name', models.CharField(max_length=30)),
                ('content_type', models.ForeignKey(to='contenttypes.ContentType')),
            ],
        ),
        migrations.CreateModel(
            name='Label',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('text', models.CharField(default='', max_length=127, blank=True)),
            ],
        ),
        migrations.CreateModel(
            name='Labeling',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('tag', models.IntegerField()),
                ('brand', models.ForeignKey(to='tests.BrandT')),
            ],
        ),
        migrations.CreateModel(
            name='LabelT',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('text', models.CharField(default='', max_length=127, blank=True)),
            ],
        ),
        migrations.CreateModel(
            name='Local',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('tag', models.IntegerField(null=True)),
            ],
        ),
        migrations.CreateModel(
            name='Media',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=128)),
            ],
        ),
        migrations.CreateModel(
            name='Point',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('x', models.DecimalField(default=0.0, max_digits=8, decimal_places=6, blank=True)),
            ],
        ),
        migrations.CreateModel(
            name='Post',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('title', models.CharField(max_length=128)),
                ('visible', models.BooleanField(default=True)),
                ('category', models.ForeignKey(related_name='posts', to='tests.Category')),
            ],
        ),
        migrations.CreateModel(
            name='Product',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=32)),
            ],
        ),
        migrations.CreateModel(
            name='ProductReview',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('status', models.IntegerField()),
                ('product', models.ForeignKey(related_name='reviews', to='tests.Product', null=True)),
            ],
        ),
        migrations.CreateModel(
            name='Profile',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('tag', models.IntegerField()),
                ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='Video',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('title', models.CharField(max_length=128)),
            ],
        ),
        migrations.CreateModel(
            name='Weird',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('date_field', models.DateField(default=datetime.date(2000, 1, 1))),
                ('datetime_field', models.DateTimeField(
                    default=datetime.datetime(2000, 1, 1, 10, 10))),
                ('time_field', models.TimeField(default=datetime.time(10, 10))),
                ('list_field', tests.models.IntegerArrayField(default=list)),
                ('custom_field', tests.models.CustomField(
                    default=tests.models.custom_value_default)),
                ('binary_field', models.BinaryField()),
            ],
        ),
        # Multi-table inheritance children
        migrations.CreateModel(
            name='Movie',
            fields=[
                ('media_ptr', models.OneToOneField(parent_link=True, auto_created=True,
                    primary_key=True, serialize=False, to='tests.Media')),
                ('year', models.IntegerField()),
            ],
            bases=('tests.media', ),
        ),
        migrations.CreateModel(
            name='PremiumBrand',
            fields=[
                ('brand_ptr', models.OneToOneField(parent_link=True, auto_created=True,
                    primary_key=True, serialize=False, to='tests.Brand')),
                ('extra', models.CharField(default='', max_length=127, blank=True)),
            ],
            bases=('tests.brand', ),
        ),
        # Relations added after both endpoints exist
        migrations.AddField(
            model_name='labeling',
            name='label',
            field=models.ForeignKey(to='tests.LabelT'),
        ),
        migrations.AddField(
            model_name='extra',
            name='post',
            field=models.OneToOneField(to='tests.Post'),
        ),
        migrations.AddField(
            model_name='extra',
            name='to_tag',
            field=models.ForeignKey(to='tests.Extra', to_field='tag', null=True),
        ),
        migrations.AddField(
            model_name='brandt',
            name='labels',
            field=models.ManyToManyField(related_name='brands',
                through='tests.Labeling', to='tests.LabelT'),
        ),
        migrations.AddField(
            model_name='brand',
            name='labels',
            field=models.ManyToManyField(related_name='brands', to='tests.Label'),
        ),
        migrations.CreateModel(
            name='VideoProxy',
            fields=[],
            options={
                'proxy': True,
            },
            bases=('tests.video', ),
        ),
    ]

    # Conditionally add postgres-only models (class-body code: `operations`
    # is still a plain list at this point).
    from funcy import suppress
    with suppress(ImportError):
        import django.contrib.postgres.fields
        operations.append(
            migrations.CreateModel(
                name='TaggedPost',
                fields=[
                    ('id', models.AutoField(verbose_name='ID', serialize=False,
                        auto_created=True, primary_key=True)),
                    ('name', models.CharField(max_length=200)),
                    ('tags', django.contrib.postgres.fields.ArrayField(
                        base_field=models.IntegerField(), size=None)),
                ],
            ))

    if os.environ.get('CACHEOPS_DB') == 'postgis':
        import django.contrib.gis.db.models.fields
        operations.append(
            migrations.CreateModel(
                name='Geometry',
                fields=[
                    ('id', models.AutoField(auto_created=True, primary_key=True,
                        serialize=False, verbose_name='ID')),
                    ('point', django.contrib.gis.db.models.fields.PointField(
                        blank=True, default=None, dim=3, geography=True,
                        null=True, srid=4326)),
                ],
            ))
class Weird(models.Model):
    # Model exercising a variety of field default styles.
    date_field = models.DateField(default=date(2000, 1, 1))
    datetime_field = models.DateTimeField(default=datetime(2000, 1, 1, 10, 10))
    time_field = models.TimeField(default=time(10, 10))
    list_field = IntegerArrayField(default=lambda: [])
    custom_field = CustomField(default=CustomValue('default'))
    if hasattr(models, 'BinaryField'):
        # BinaryField does not exist on the oldest supported Django versions
        binary_field = models.BinaryField()

    objects = models.Manager()
    customs = CustomManager()


# contrib.postgres ArrayField
with suppress(ImportError):
    from django.contrib.postgres.fields import ArrayField

    class TaggedPost(models.Model):
        name = models.CharField(max_length=200)
        tags = ArrayField(models.IntegerField())


# 16
class Profile(models.Model):
    user = models.ForeignKey(User)
    tag = models.IntegerField()


# Proxy model
# NOTE(review): this chunk is truncated here — the body of Video (and the
# proxy class the comment above refers to) is not visible.
class Video(models.Model):