def __call__(self, value):
    """
    Validate that ``value`` parses as YAML.

    Raises ``ValidationError`` with code ``'invalid'`` when the stripped
    value is not valid YAML.
    """
    value = value.strip()
    try:
        yaml_load(stream=value)
    except yaml.YAMLError:
        # ``yaml.YAMLError`` is the same class as ``yaml.error.YAMLError``;
        # use the short alias for consistency with the other YAML handlers
        # in this codebase.
        raise ValidationError(
            _('Enter a valid YAML value.'), code='invalid'
        )
def clean(self):
    """Reject form submissions whose arguments field is not valid YAML."""
    arguments = self.cleaned_data['arguments']
    try:
        yaml_load(stream=arguments)
    except yaml.YAMLError:
        raise ValidationError(
            _('"%s" not a valid entry.') % arguments
        )
def clean(cls, form_data, request):
    """
    Validate that the workflow action's transformation arguments are
    valid YAML. Returns the form data unchanged on success.
    """
    arguments = form_data['action_data']['transformation_arguments']
    try:
        yaml_load(stream=arguments)
    except yaml.YAMLError:
        raise ValidationError(
            _('"%s" not a valid entry.') % arguments
        )

    return form_data
def smart_yaml_load(value):
    """
    Deserialize ``value`` from YAML unless it is already a dictionary.

    Falsy values (``None``, empty string) deserialize to an empty
    dictionary.
    """
    if isinstance(value, dict_type):
        return value

    return yaml_load(stream=value or '{}')
def get_upload_filename(self, instance, filename):
    """
    Compute the final upload filename by delegating to the configured
    filename generator backend, instantiated with its YAML-encoded
    arguments.
    """
    backend_class = BaseDocumentFilenameGenerator.get(
        name=self.filename_generator_backend
    )
    backend_arguments = yaml_load(
        stream=self.filename_generator_backend_arguments or '{}'
    )
    backend = backend_class(**backend_arguments)

    return backend.upload_to(instance=instance, filename=filename)
def render(self, context_string=None):
    """
    Render this template with the merged context.

    ``context_string`` allows the management command to pass extra
    context to this method as a YAML-encoded string (JSON, being a YAML
    subset, also works).
    """
    # get_context is merged last to serve as the override.
    context = {
        **self.get_context_defaults(),
        **self.get_settings_context(),
        **self.get_variables_context(),
        **self.get_context(),
    }

    if context_string:
        context.update(yaml_load(stream=context_string))

    if not self.template_string:
        return loader.render_to_string(
            template_name=self.get_template_name(), context=context
        )

    template = Template(template_string=self.template_string)
    return template.render(context=Context(dict_=context))
def cache_value(self):
    """
    Resolve and cache this setting's value.

    Resolution order: the ``MAYAN_<global_name>`` environment variable,
    then the configuration file, then the Django settings module, and
    finally the setting's default. The resolved value is stored in
    ``self.raw_value``, its serialized form in ``self.yaml``, and the
    instance is marked as loaded.
    """
    environment_value = os.environ.get('MAYAN_{}'.format(self.global_name))
    if environment_value:
        # Record the provenance of the value for later inspection.
        self.environment_variable = True
        try:
            self.raw_value = yaml_load(stream=environment_value)
        except yaml.YAMLError as exception:
            # Re-raise the same exception type but with a message that
            # identifies which environment variable failed to parse.
            raise type(exception)(
                'Error interpreting environment variable: {} with '
                'value: {}; {}'.format(
                    self.global_name, environment_value, exception
                )
            )
    else:
        try:
            # Try the config file
            self.raw_value = self.get_config_file_content()[self.global_name]
        except KeyError:
            try:
                # Try the Django settings variable
                self.raw_value = getattr(
                    settings, self.global_name
                )
            except AttributeError:
                # Finally set to the default value
                self.raw_value = self.default
        else:
            # Found in the config file, try to migrate the value
            self.migrate()

    self.yaml = Setting.serialize_value(self.raw_value)
    self.loaded = True
def _get_value(self):
    """
    Return the effective setting value as a single-line YAML string.

    Reads the environment variable when set, otherwise falls back to the
    default, then serializes to flow-style YAML with the document end
    marker and newlines stripped.
    """
    raw = os.environ.get(self.environment_name)
    value = yaml_load(stream=raw) if raw else self.default

    serialized = yaml_dump(
        data=value, allow_unicode=True, default_flow_style=True,
        width=999
    )
    return serialized.replace('...\n', '').replace('\n', '')
def load_environment_value(self):
    """
    Return the setting's value parsed from its environment variable.

    Raises ``SettingNamespaceSingleton.SettingNotFound`` when the
    environment variable is not set; exits the process on invalid YAML.
    """
    value = self._get_environment_value()
    if not value:
        raise SettingNamespaceSingleton.SettingNotFound

    try:
        return yaml_load(stream=value)
    except yaml.YAMLError as exception:
        exit(
            'Error loading setting environment value: {}; {}'.format(
                self.name, exception
            )
        )
def read_configuration_file(filepath):
    """
    Load and return the YAML configuration file at ``filepath``.

    Returns an empty dictionary when the file does not exist and the
    implicit ``None`` when the file exists but is empty. Exits the
    process with an error message on invalid YAML; any other I/O error
    is re-raised.
    """
    try:
        with open(filepath) as file_object:
            # Seek to the end to detect an empty file without reading it.
            file_object.seek(0, os.SEEK_END)
            if file_object.tell():
                file_object.seek(0)
                try:
                    return yaml_load(stream=file_object)
                except yaml.YAMLError as exception:
                    exit(
                        'Error loading configuration file: {}; {}'.format(
                            filepath, exception
                        )
                    )
    except IOError as exception:
        if exception.errno != errno.ENOENT:
            raise
        # No config file, return empty dictionary
        return {}
def documents_storage_backend_arguments_0001(self, value):
    """Migration 0001: deserialize the stored YAML arguments string."""
    stream = value or '{}'
    return yaml_load(stream=stream)
def sources_staging_file_cache_storage_backend_arguments_0001(self, value):
    """Migration 0001: parse the YAML arguments, defaulting to empty."""
    if not value:
        value = '{}'
    return yaml_load(stream=value)
def _process_message(source, message):
    """
    Recursively process one MIME message (or message part) from an email
    source.

    Returns a two-tuple ``(document_ids, metadata_dictionary)`` with the
    primary keys of the documents created and any metadata extracted
    from the designated metadata attachment.
    """
    counter = 1
    document_ids = []
    metadata_dictionary = {}

    # Messages are tree based, do nested processing of message parts until
    # a message with no children is found, then work out way up.
    if message.parts:
        for part in message.parts:
            part_document_ids, part_metadata_dictionary = EmailBaseModel._process_message(
                source=source, message=part,
            )

            document_ids.extend(part_document_ids)
            metadata_dictionary.update(part_metadata_dictionary)
    else:
        # Treat inlines as attachments, both are extracted and saved as
        # documents
        if message.is_attachment() or message.is_inline():
            # Reject zero length attachments
            if len(message.body) == 0:
                return document_ids, metadata_dictionary

            # Fall back to a generated label when the part carries no
            # filename of its own.
            label = message.detected_file_name or 'attachment-{}'.format(
                counter)
            counter = counter + 1

            with ContentFile(content=message.body, name=label) as file_object:
                if label == source.metadata_attachment_name:
                    # This attachment is the reserved metadata carrier;
                    # parse it instead of storing it as a document.
                    metadata_dictionary = yaml_load(
                        stream=file_object.read())
                    logger.debug('Got metadata dictionary: %s', metadata_dictionary)
                else:
                    documents = source.handle_upload(
                        document_type=source.document_type,
                        file_object=file_object,
                        expand=(source.uncompress == SOURCE_UNCOMPRESS_CHOICE_Y))
                    for document in documents:
                        document_ids.append(document.pk)
        else:
            # If it is not an attachment then it should be a body message
            # part. Another option is to use message.is_body()
            if message.detected_content_type == 'text/html':
                label = 'email_body.html'
            else:
                label = 'email_body.txt'

            if source.store_body:
                with ContentFile(content=force_bytes(message.body), name=label) as file_object:
                    # NOTE(review): ``expand`` receives the raw choice
                    # constant here, while the attachment branch above
                    # passes a boolean comparison — confirm whether
                    # SOURCE_UNCOMPRESS_CHOICE_N is intended (a non-empty
                    # string would be truthy).
                    documents = source.handle_upload(
                        document_type=source.document_type,
                        expand=SOURCE_UNCOMPRESS_CHOICE_N,
                        file_object=file_object)
                    for document in documents:
                        document_ids.append(document.pk)

    return document_ids, metadata_dictionary
def get_for_object(self, obj, as_classes=False, maximum_layer_order=None,
                   only_stored_layer=None, user=None):
    """
    Return the enabled transformations attached to ``obj``, filtered by
    layer access/exclude permissions for ``user``.

    as_classes == True returns the transformation classes from .classes
    ready to be feed to the converter class; otherwise the queryset of
    transformation model instances is returned.
    """
    Layer.update()
    StoredLayer = apps.get_model(
        app_label='converter', model_name='StoredLayer'
    )
    content_type = ContentType.objects.get_for_model(model=obj)
    transformations = self.filter(
        enabled=True, object_layer__content_type=content_type,
        object_layer__object_id=obj.pk, object_layer__enabled=True
    )
    access_layers = StoredLayer.objects.all()
    exclude_layers = StoredLayer.objects.none()

    # When a maximum layer order is given, split the stored layers into
    # those the caller may see and those that must be excluded.
    if maximum_layer_order:
        access_layers = StoredLayer.objects.filter(
            order__lte=maximum_layer_order
        )
        exclude_layers = StoredLayer.objects.filter(
            order__gt=maximum_layer_order
        )

    # Drop access layers whose access permission the user lacks.
    for stored_layer in access_layers:
        try:
            layer_class = stored_layer.get_layer()
        except KeyError:
            """
            This was a class defined but later erased. Ignore it.
            """
        else:
            access_permission = layer_class.permissions.get(
                'access_permission', None
            )
            if access_permission:
                try:
                    AccessControlList.objects.check_access(
                        obj=obj, permissions=(access_permission, ),
                        user=user
                    )
                except PermissionDenied:
                    access_layers = access_layers.exclude(
                        pk=stored_layer.pk
                    )

    # Keep excluding only the layers whose exclude permission the user
    # lacks; layers the user is allowed to see are un-excluded.
    for stored_layer in exclude_layers:
        exclude_permission = stored_layer.get_layer().permissions.get(
            'exclude_permission', None
        )
        if exclude_permission:
            try:
                AccessControlList.objects.check_access(
                    obj=obj, permissions=(exclude_permission, ), user=user
                )
            except PermissionDenied:
                pass
            else:
                exclude_layers = exclude_layers.exclude(pk=stored_layer.pk)

    if only_stored_layer:
        transformations = transformations.filter(
            object_layer__stored_layer=only_stored_layer
        )

    transformations = transformations.filter(
        object_layer__stored_layer__in=access_layers
    )
    transformations = transformations.exclude(
        object_layer__stored_layer__in=exclude_layers
    )

    if as_classes:
        result = []
        for transformation in transformations:
            try:
                transformation_class = BaseTransformation.get(
                    transformation.name
                )
            except KeyError:
                # Non existant transformation, but we don't raise an error
                logger.error(
                    'Non existant transformation: %s for %s',
                    transformation.name, obj
                )
            else:
                try:
                    # Some transformations don't require arguments
                    # return an empty dictionary as ** doesn't allow None
                    if transformation.arguments:
                        kwargs = yaml_load(
                            stream=transformation.arguments,
                        )
                    else:
                        kwargs = {}
                    result.append(transformation_class(**kwargs))
                except Exception as exception:
                    logger.error(
                        'Error while parsing transformation "%s", '
                        'arguments "%s", for object "%s"; %s',
                        transformation, transformation.arguments, obj,
                        exception
                    )
        return result
    else:
        return transformations
def deserialize_value(value):
    """Parse a YAML string back into its Python value."""
    result = yaml_load(stream=value)
    return result
def ocr_backend_arguments_0001(self, value):
    """Migration 0001: deserialize the OCR backend arguments from YAML."""
    return yaml_load(stream=value if value else '{}')
def get_widget_kwargs(self):
    """Return the widget keyword arguments deserialized from YAML."""
    kwargs = yaml_load(stream=self.widget_kwargs)
    return kwargs
def converter_graphics_backend_arguments_0001(self, value):
    """Migration 0001: parse the graphics backend arguments as YAML."""
    stream = value or '{}'
    return yaml_load(stream=stream)
def file_metadata_drivers_arguments_0001(self, value):
    """Migration 0001: parse the driver arguments, defaulting to empty."""
    if not value:
        value = '{}'
    return yaml_load(stream=value)