def __call__(self, data):
    """Validate an uploaded file's size and sniffed MIME content type.

    Args:
        data: An uploaded-file-like object exposing ``size``, ``read()``
            and ``seek()``.

    Raises:
        ValidationError: If the file exceeds ``max_size``, is smaller
            than ``min_size``, or has a content type not listed in
            ``self.content_types``.
    """
    if self.max_size is not None and data.size > self.max_size:
        params = {
            'max_size': filesizeformat(self.max_size),
            'size': filesizeformat(data.size),
        }
        raise ValidationError(self.error_messages['max_size'],
                              'max_size', params)
    if self.min_size is not None and data.size < self.min_size:
        # BUG FIX: was filesizeformat(self.mix_size) — attribute typo
        # that raised AttributeError instead of reporting the limit.
        params = {
            'min_size': filesizeformat(self.min_size),
            'size': filesizeformat(data.size),
        }
        raise ValidationError(self.error_messages['min_size'],
                              'min_size', params)
    if self.content_types:
        # Sniff the real MIME type from the file contents, then rewind
        # so later consumers can read from the beginning again.
        content_type = magic.from_buffer(data.read(), mime=True)
        data.seek(0)
        if content_type not in self.content_types:
            params = {'content_type': content_type}
            raise ValidationError(self.error_messages['content_type'],
                                  'content_type', params)
        # Tar archives get an additional structural plugin validation.
        # NOTE(review): comparison against b'application/x-tar' implies
        # magic.from_buffer returns bytes here — confirm the libmagic
        # binding in use.
        if content_type == b'application/x-tar':
            plugin_handler = PluginInformationHandler(data)
            plugin_handler.validate_tar()
            data.seek(0)
def get_required_plugins(self):
    """Return the plugin requirements declared in the archive's info file.

    Reads the info JSON out of this plugin's tar archive and returns the
    ``requires`` entries as a plain list.
    """
    plugin_handler = PluginInformationHandler(
        self.get_full_path_to_archive())
    info_file = plugin_handler.get_info()
    # The original identity comprehension was just a copy of the
    # sequence; list() is the idiomatic way to materialize it.
    return list(info_file['requires'])
def validate_required_plugins(self, plugin):
    """Check that *plugin* is an acceptable substitution candidate.

    Looks up the requirement entry in the archive's info file whose name
    matches ``plugin.name`` and collects the plugins that satisfy it.

    Raises:
        ValidationError: If *plugin* is not among the plugins that
            satisfy the matching requirement.
    """
    handler = PluginInformationHandler(self.get_full_path_to_archive())
    info = handler.get_info()
    candidates = []
    for requirement in info['requires']:
        if requirement['name'] == plugin.name:
            candidates = handler.find_required_plugins(requirement)
    if plugin not in candidates:
        raise ValidationError(
            'Plugin %s can not be used as substitution!' % str(plugin))
def get_substitution_plugin_for(self, plugin):
    """Find the best replacement for *plugin* among the archive's
    requirements.

    Re-reads the info file from the tar archive, locates the requirement
    entry matching ``plugin.name``, and returns the highest-versioned
    alternative (excluding *plugin* itself), or ``None`` when no
    alternative exists.
    """
    # Read the information in again from the tar archive.
    handler = PluginInformationHandler(self.get_full_path_to_archive())
    requirements = handler.get_info()['requires']
    best_match = None
    # Walk every declared requirement to find the one that must be
    # substituted.
    for requirement in requirements:
        if requirement['name'] != plugin.name:
            continue
        # All plugins matching this requirement statement, minus the
        # plugin we are trying to substitute away.
        candidates = [
            candidate
            for candidate in handler.find_required_plugins(requirement)
            if candidate != plugin
        ]
        if candidates:
            # Highest version wins.
            best_match = max(candidates, key=lambda c: c.version)
    return best_match
def load_from_json(self, archive):
    """Populate this plugin model from a tar *archive* and persist it.

    Reads the info and schema JSON via ``PluginInformationHandler``,
    fills the model fields, stores the schema in MongoDB, saves the
    model, and then records its dependencies and arguments.

    Raises:
        ValidationError: Propagated from ``full_clean()`` on the plugin
            or on any of its arguments.
    """
    plugin_handler = PluginInformationHandler(archive)
    info_json = plugin_handler.get_info()
    schema_json = plugin_handler.get_schema()
    self.name = info_json['name']
    self.author = info_json['author']
    self.version = info_json['version']
    self.plugin_type = info_json['plugin_type']
    self.description = info_json['description']
    self.linux_libraries = ','.join(info_json['linux_libraries'])
    self.archive = archive
    # Save the schema of the plugin in the mongodb
    handler.add_schema(schema_json, self)
    if settings.DATABASES['mongodb']['SHARDING']:
        handler.create_and_shard_collections(
            info_json['created_collections'])
    # Save plugin, afterwards we can add the dependencies.
    # FIX: the original wrapped full_clean() in
    # `except ValidationError as e: raise e`, which only re-raised the
    # same exception — the no-op try/except is removed (here and for the
    # arguments below); ValidationError still propagates unchanged.
    self.full_clean()
    self.save()
    # Add dependencies
    for db_plugin in plugin_handler.find_fitting_plugins():
        self.requires.add(db_plugin)
    # Add arguments of the plugin in the database
    for argument in plugin_handler.get_arguments():
        argument.plugin = self
        argument.full_clean()
        argument.save()
def load_from_json(self, archive):
    """Load this plugin's data from *archive* by delegating to the
    shared information-handler loading routine."""
    information_handler = PluginInformationHandler(archive)
    self.load_with_information_handler(information_handler, archive)