def process_image(self, environment, zip_temp_file, out_dir):
    """Unpack an uploaded environment zip and repack it as a squashfs image.

    Extracts ``zip_temp_file`` into ``out_dir``, builds a squashfs image from
    that directory and stores it on ``environment.fs_file``. Both temporary
    files are always removed, even on failure.

    Raises ObjectProcessingError when unzip or mksquashfs fails.
    """
    unzip_cmd = ['unzip', '-n', zip_temp_file.name, '-d', out_dir]
    try:
        subprocess.check_output(unzip_cmd, stderr=subprocess.STDOUT)
    except subprocess.CalledProcessError as err:
        self.get_logger().warn(
            'Unexpected unzip error during processing of %s in Instance[pk=%s]: %s',
            environment, self.instance.pk, err.output, exc_info=1)
        raise ObjectProcessingError('Error processing zip file.')
    finally:
        # The uploaded zip is no longer needed whether or not unzip worked.
        os.unlink(zip_temp_file.name)

    # Now process squashfs.
    image_file = tempfile.NamedTemporaryFile(delete=False)
    image_file.close()

    squash_cmd = ['mksquashfs', out_dir, image_file.name, '-comp', 'xz', '-noappend']
    try:
        subprocess.check_output(squash_cmd, stderr=subprocess.STDOUT)
        # Replace any previously stored image before saving the new one.
        if environment.fs_file:
            environment.fs_file.delete(save=False)
        with open(image_file.name, mode='rb') as image_fd:
            environment.fs_file.save('squashfs.img', File(image_fd), save=False)
    except subprocess.CalledProcessError as err:
        self.get_logger().error(
            'Unexpected mksquashfs error during processing of %s in Instance[pk=%s]: %s',
            environment, self.instance.pk, err.output, exc_info=1)
        raise ObjectProcessingError('Error processing image file.')
    finally:
        os.unlink(image_file.name)
def process(self, socket, dependencies, installed_objects):
    """Process a list of same-type socket dependencies.

    Resolves the processor class from the first dependency, runs it for every
    dependency and merges the returned data dicts.

    Returns a 2-tuple of (merged data dict, processor's ``yaml_type``).
    Raises ObjectProcessingError (annotated with the dependency's line number)
    wrapping any SyncanoException or serializer ValidationError.
    """
    processor_class = self.get_processor_class(dependencies[0])
    base_name = getattr(processor_class, 'verbose_type',
                        processor_class.socket_type.capitalize())
    data = {}

    for dependency in dependencies:
        lineno = dependency.get('lineno')
        # Build the display name fresh for each dependency. The previous code
        # reused a single variable across iterations, so named deps accumulated
        # ("Endpoint[a][b]") and a stale "[name]" leaked onto later unnamed ones.
        dep_name = base_name
        if 'name' in dependency:
            dep_name = '{}[{}]'.format(base_name, dependency['name'])

        try:
            proc_data = processor_class(socket, dependency, dependencies,
                                        installed_objects).process()
            if proc_data is not None:
                data.update(proc_data)
        except SyncanoException as ex:
            raise ObjectProcessingError(
                'Dependency {} processing error: {}'.format(
                    dep_name, ex.detail), lineno)
        except serializers.ValidationError as ex:
            raise ObjectProcessingError(
                'Dependency {} validation error. {}'.format(
                    dep_name, format_error(ex.detail)), lineno)
    return data, processor_class.yaml_type
def load_socket_spec(self, socket_spec):
    """Parse a raw socket specification with the marked YAML loader.

    Raises ObjectProcessingError when the spec is not valid YAML.
    """
    try:
        spec = marked_load(socket_spec)
    except yaml.YAMLError as err:
        raise ObjectProcessingError(
            'Error decoding socket: {}.'.format(force_text(str(err), errors='ignore')))
    return spec
def get_processor_class(self, dependency):
    """Return the processor registered for this dependency's ``type``.

    Raises ObjectProcessingError for an unknown dependency type.
    """
    dep_type = dependency['type']
    if dep_type in self.processors:
        return self.processors[dep_type]
    raise ObjectProcessingError(
        'Invalid dependency type: "{}".'.format(dep_type))
def read_file(self, path):
    """Read one entry from the socket's zip archive.

    Raises SocketMissingFile when the entry is absent and
    ObjectProcessingError when the archive itself is corrupted.
    """
    try:
        contents = self.zip_file.read(path)
    except KeyError:
        # ZipFile.read raises KeyError for a missing archive member.
        raise SocketMissingFile(path)
    except (BadZipfile, zlib.error) as err:
        raise ObjectProcessingError('Error unzipping "{}": {}.'.format(
            path, force_text(str(err), errors='ignore')))
    return contents
def zip_file(self):
    """Lazily open (and cache) the socket's zip archive.

    Raises ObjectProcessingError when the archive holds more entries than
    settings.SOCKETS_MAX_ZIP_FILE_FILES allows. Note the archive is cached
    on self._zip_file before the limit check, matching original behavior.
    """
    if self._zip_file is not None:
        return self._zip_file
    self._zip_file = ZipFile(self.socket.zip_file.file, 'r')
    if len(self._zip_file.filelist) > settings.SOCKETS_MAX_ZIP_FILE_FILES:
        raise ObjectProcessingError(
            'Error processing zip: Too many files.')
    return self._zip_file
def add_socket_for_installation(self, socket, dependencies, is_partial):
    """Queue a socket (with its dependencies) for installation.

    Updates the running endpoint/dependency counters and raises
    ObjectProcessingError when either exceeds its configured maximum.
    """
    num_endpoints = sum(1 for dep in dependencies if dep['type'] == 'endpoint')
    install = self.socket_install
    install['endpoints_count'] += num_endpoints
    install['dependencies_count'] += len(dependencies) - num_endpoints
    install['data'] += [(socket, dependencies, is_partial)]

    # Check for some more or less sane values of max dependencies/endpoints/sockets
    limits = (
        ('dependencies_count', self.sockets_max_dependencies,
         'Too many dependencies to be installed (max: {}).'),
        ('endpoints_count', self.sockets_max_endpoints,
         'Too many endpoints defined (max: {}).'),
    )
    for counter, limit, message in limits:
        if install[counter] > limit:
            raise ObjectProcessingError(message.format(limit))
def download_socket_zip(self, socket):
    """Download the socket's specification zip and attach it to ``socket.zip_file``.

    Downloads from ``socket.install_url`` (15s timeout, size-capped by
    settings.SOCKETS_MAX_ZIP_FILE_SIZE) into a temp file, then saves it on the
    socket without persisting the model (``save=False``).

    Raises ObjectProcessingError when the download fails.
    """
    with tempfile.NamedTemporaryFile() as fp:
        try:
            download_file(socket.install_url, timeout=15,
                          max_size=settings.SOCKETS_MAX_ZIP_FILE_SIZE, out=fp)
        except RequestException as ex:
            # Fix: errors='ignore' belongs to force_text(); it was previously
            # passed to str.format(), which silently ignores extra kwargs, so
            # undecodable exception text could still blow up here.
            raise ObjectProcessingError(
                'Error downloading socket "{}" specification zip file: {}.'.format(
                    socket.name, force_text(str(ex), errors='ignore')))
        fp.seek(0)
        socket.zip_file.save(os.path.basename(socket.install_url), File(fp), save=False)
def merge_class_schema_field(self, field, dep_fields, ref_fields, ref_props):
    """Merge one schema field into an already-existing class definition.

    ``field`` is the existing class's field dict (mutated in place with the
    dependency's version at the end); ``dep_fields`` maps field names to the
    dependency's field definitions (the merged entry is deleted); ``ref_fields``
    and ``ref_props`` track which socket pks reference each field/prop so they
    can be cleaned up later. Raises ObjectProcessingError when the field types
    conflict and the type was not the one this socket previously installed.
    """
    # What this socket previously installed for this class (empty on first install).
    installed_class = self.socket.installed.get(self.yaml_type, {}).get(self.name, {})
    socket_pk = self.socket.pk
    field_name = field['name']
    # Add reference to existing field.
    if field_name in ref_fields and socket_pk not in ref_fields[field_name]:
        ref_fields[field_name].append(socket_pk)

    # Add reference to field props.
    # Props this dependency enables: keys whose value is literally True.
    field_props = {
        prop for prop, val in dep_fields[field_name].items()
        if val is True
    }
    # Reconcile with props already tracked for this field: add our pk to props
    # we still want, remove it from props we no longer set. Props handled here
    # are dropped from field_props so only genuinely new ones remain below.
    for prop, prop_sockets in ref_props.get(field_name, {}).items():
        if prop in field_props:
            if socket_pk not in prop_sockets:
                prop_sockets.append(socket_pk)
            field_props.remove(prop)
        elif socket_pk in prop_sockets:
            prop_sockets.remove(socket_pk)

    # Add remaining new field props
    if field_props and field_name not in ref_props:
        ref_props[field_name] = {}
    for prop in field_props:
        ref_props[field_name][prop] = [socket_pk]

    # Check if types are compatible or if we previously installed that field type.
    # Otherwise, raise error for a conflict.
    if field['type'] != dep_fields[field_name]['type'] \
            and installed_class.get(field_name) != field['type']:
        raise ObjectProcessingError(
            'Class conflict. '
            'Class with name "{}" already exists with conflicting schema '
            '(contains field: "{}" of different type).'.format(
                self.name, field_name))
    # Merge the dependency's definition over the existing field and consume it.
    field.update(dep_fields[field_name])
    del dep_fields[field_name]
def add_size(self, size):
    """Accumulate *size* onto the socket's total and enforce the size cap.

    The total is updated before the check (matching original behavior), so the
    socket keeps the grown size even when ObjectProcessingError is raised.
    """
    sock = self.socket
    sock.size += size
    if sock.size <= self.max_socket_size:
        return
    raise ObjectProcessingError(
        'Socket total size exceeds maximum ({}).'.format(filesizeformat(self.max_socket_size)))