Exemplo n.º 1
0
    def update_element(self, element):
        """
        Extract meta-data from a .deb element to prepare the repository.

        Reads the ``control`` file from the package's ``control.tar.*``
        member and populates the element's attributes from it.

        :param element: Element to add to the repository (modified in place)
        :return: None
        :raise InvalidRepositoryException: if no control data can be found

        Roughly equivalent to:
            ar -x control.tar.gz
            tar -xf control.tar.gz control
        """
        archive_file = storage(settings.STORAGE_ARCHIVE).get_file(element.archive_key)
        ar_file = None
        control_file = None
        tar_file = None
        control_data = None
        try:
            ar_file = ArFile(element.filename, mode='r', fileobj=archive_file)
            control_file, control_file_name = self.get_subfile(ar_file, 'control.tar.')
            if control_file is None:
                raise InvalidRepositoryException('No control file found in .deb package')
            mode = 'r:*'
            # xz/lzma members are decompressed manually into a BytesIO buffer
            # before being handed to tarfile.
            if control_file_name.endswith(('.xz', '.lzma')):
                control_file_content = control_file.read()
                control_file.close()
                control_file = io.BytesIO(lzma.decompress(control_file_content))
                mode = 'r'
            tar_file = tarfile.open(name='control', mode=mode, fileobj=control_file)
            control_data = tar_file.extractfile('./control')
            if control_data is None:
                # extractfile() returns None for non-regular members
                raise InvalidRepositoryException('No control file found in .deb package')
            control_data_value = control_data.read().decode('utf-8')
        finally:
            # close everything even on failure (the original leaked here)
            for fileobj in (control_data, tar_file, control_file, ar_file, archive_file):
                if fileobj is not None:
                    fileobj.close()
        # populating different pieces of information on the element
        element.extra_data = control_data_value
        parsed_data = parse_control_data(control_data_value)
        element.archive = parsed_data['Package']
        element.version = parsed_data['Version']
        element.official_link = parsed_data.get('Homepage', '')
        element.long_description = parsed_data.get('Description', '')
Exemplo n.º 2
0
    def update_element(self, element):
        """
        Extract meta-data from a Python package to prepare the repository.

        :param element: Element to add to the repository (modified in place)
        :return: Unicode string containing meta-data
        :raise InvalidRepositoryException: if the archive cannot be opened
            or contains no PKG-INFO data
        """
        archive_file = storage(settings.STORAGE_ARCHIVE).get_file(element.archive_key)
        py_archive = self.open_file(element.filename, archive_file)
        if py_archive is None:
            raise InvalidRepositoryException(_('Unable to open file'))
        try:
            pkg_info = py_archive.get_pkg_info()
            if not pkg_info:
                raise InvalidRepositoryException(_('No control data in archive'))
            element.extra_data = pkg_info
            # PKG-INFO continuation lines are indented by eight spaces
            metadata = parse_control_data(pkg_info, continue_line='        ', skip_after_blank=True)
            field_map = {
                'Name': 'archive',
                'Version': 'version',
                'Home-page': 'official_link',
                'Description': 'long_description',
            }
            for field, attribute in field_map.items():
                if field in metadata:
                    setattr(element, attribute, metadata.get(field, ''))
            # strip separators so the archive name is comparison-friendly
            element.archive = element.archive.replace('-', '').replace('_', '')
            element.name = element.archive
        finally:
            py_archive.close()
            archive_file.close()
Exemplo n.º 3
0
    def update_element(self, element):
        """
        Populate ``element.name`` from the trailing dotted component of the
        archive identifier.

        :param element: Element to add to the repository (modified in place)
        """
        if element.archive:
            # 'a.b.c'.rpartition('.')[2] -> 'c'
            element.name = element.archive.rpartition('.')[2]
        # NOTE: branch below is deliberately disabled via `and False`;
        # JAR manifest extraction kept for reference.
        if element.filename.endswith('.jar') and False:
            archive_file = storage(settings.STORAGE_ARCHIVE).get_file(element.archive_key)
            jar = zipfile.ZipFile(archive_file)
            prefix = os.path.commonprefix(jar.namelist())
            manifest_file = jar.open(os.path.join(prefix, 'META-INF', 'MANIFEST.MF'))
            manifest_text = manifest_file.read().decode('utf-8')
            manifest_file.close()
            jar.close()
            archive_file.close()
            element.extra_data = manifest_text
            manifest_data = parse_control_data(manifest_text, continue_line=' ')
            mapping = (
                ('Bundle-SymbolicName', 'name'),
                ('Bundle-Version', 'version'),
                ('Implementation-Title', 'archive'),
                ('Implementation-Version', 'version'),
                ('Name', 'name'),
            )
            for field, attribute in mapping:
                if field in manifest_data:  # archive : PackageName, name : Organization Name
                    setattr(element, attribute, manifest_data.get(field, ''))
Exemplo n.º 4
0
    def update_element(self, element):
        """
        Extract meta-data from a Python package to prepare the repository.

        :param element: Element to add to the repository (modified in place)
        :raise InvalidRepositoryException: if the archive cannot be opened
            or contains no PKG-INFO data
        """
        archive_file = storage(settings.STORAGE_ARCHIVE).get_file(element.archive_key)
        py_archive = self.open_file(element.filename, archive_file)
        if py_archive is None:
            raise InvalidRepositoryException(_('Unable to open file'))
        try:
            control_data_value = py_archive.get_pkg_info()
            if not control_data_value:
                raise InvalidRepositoryException(_('No control data in archive'))
            element.extra_data = control_data_value
            # PKG-INFO continuation lines are indented by eight spaces
            control_data = parse_control_data(control_data_value, continue_line='        ', skip_after_blank=True)
            for key, attr in (('Name', 'archive'), ('Version', 'version'), ('Home-page', 'official_link'),
                              ('Description', 'long_description')):
                if key in control_data:
                    setattr(element, attr, control_data[key])
            # 'Name' may be missing from the control data, leaving
            # element.archive unset; guard against calling .replace() on None
            if element.archive:
                element.archive = element.archive.replace('-', '').replace('_', '')
            element.name = element.archive
        finally:
            py_archive.close()
            archive_file.close()
Exemplo n.º 5
0
    def update_element(self, element):
        """
        Derive ``element.name`` from the last dotted segment of the archive
        identifier.

        :param element: Element to add to the repository (modified in place)
        """
        if element.archive:
            # keep only the part after the final dot
            element.name = element.archive.rpartition('.')[2]
        # Disabled code path (note the `and False`): JAR manifest parsing,
        # retained for future reference.
        if element.filename.endswith('.jar') and False:
            archive_file = storage(settings.STORAGE_ARCHIVE).get_file(
                element.archive_key)
            zip_archive = zipfile.ZipFile(archive_file)
            common_prefix = os.path.commonprefix(zip_archive.namelist())
            manifest = zip_archive.open(
                os.path.join(common_prefix, 'META-INF', 'MANIFEST.MF'))
            manifest_text = manifest.read().decode('utf-8')
            manifest.close()
            zip_archive.close()
            archive_file.close()
            element.extra_data = manifest_text
            manifest_data = parse_control_data(manifest_text,
                                               continue_line=' ')
            attribute_pairs = (
                ('Bundle-SymbolicName', 'name'),
                ('Bundle-Version', 'version'),
                ('Implementation-Title', 'archive'),
                ('Implementation-Version', 'version'),
                ('Name', 'name'),
            )
            for field, attribute in attribute_pairs:
                if field in manifest_data:  # archive : PackageName, name : Organization Name
                    setattr(element, attribute, manifest_data.get(field, ''))
Exemplo n.º 6
0
    def update_element(self, element):
        """
        Extract meta-data from a .deb element to prepare the repository.

        The ``control`` file inside the package's ``control.tar.*`` member is
        read and its fields are copied onto the element.

        :param element: Element to add to the repository (modified in place)
        :return: None
        :raise InvalidRepositoryException: if no control data can be found

        Roughly equivalent to:
            ar -x control.tar.gz
            tar -xf control.tar.gz control
        """
        archive_file = storage(settings.STORAGE_ARCHIVE).get_file(
            element.archive_key)
        ar_file = None
        control_file = None
        tar_file = None
        control_data = None
        try:
            ar_file = ArFile(element.filename, mode='r', fileobj=archive_file)
            control_file, control_file_name = self.get_subfile(
                ar_file, 'control.tar.')
            if control_file is None:
                raise InvalidRepositoryException(
                    'No control file found in .deb package')
            mode = 'r:*'
            # xz/lzma payloads are decompressed by hand into an in-memory
            # buffer before tarfile opens them
            if control_file_name.endswith(('.xz', '.lzma')):
                control_file_content = control_file.read()
                control_file.close()
                control_file = io.BytesIO(
                    lzma.decompress(control_file_content))
                mode = 'r'
            tar_file = tarfile.open(name='control',
                                    mode=mode,
                                    fileobj=control_file)
            control_data = tar_file.extractfile('./control')
            if control_data is None:
                # extractfile() returns None for non-regular members
                raise InvalidRepositoryException(
                    'No control file found in .deb package')
            control_data_value = control_data.read().decode('utf-8')
        finally:
            # guarantee cleanup on every path (the original leaked on errors)
            for fileobj in (control_data, tar_file, control_file, ar_file,
                            archive_file):
                if fileobj is not None:
                    fileobj.close()
        # populating different pieces of information on the element
        element.extra_data = control_data_value
        parsed_data = parse_control_data(control_data_value)
        element.archive = parsed_data['Package']
        element.version = parsed_data['Version']
        element.official_link = parsed_data.get('Homepage', '')
        element.long_description = parsed_data.get('Description', '')
Exemplo n.º 7
0
    def generate_indexes(self, repository, states=None, validity=365):
        """
        Build and cache every APT index file for *repository*.

        Produces, inside the storage cache:
          * dists/(repo)/(state)/binary-(arch)/Packages[.gz/.bz2/.xz]
          * dists/(repo)/(state)/binary-(arch)/Release[.gz/.bz2/.xz]
          * dists/(repo)/Contents-(arch)[.gz/.bz2/.xz]
          * dists/(repo)/Release, Release.gpg and InRelease

        :param repository: repository whose indexes are regenerated
        :param states: optional list of ArchiveState objects; defaults to all
            non-empty states of the repository
        :param validity: number of days used for the Release 'Valid-Until'
        """
        default_architectures = {'amd64', }
        uid = self.storage_uid % repository.id
        repo_slug = repository.slug
        root_url = reverse('repository:%s:index' % self.archive_type, kwargs={'rid': repository.id, })
        if repository.is_private:
            root_url = 'authb-%s' % root_url
        if states is None:
            states = list(ArchiveState.objects.filter(repository=repository).order_by('name'))
        # keep only states that actually contain at least one element
        states = [state for state in states if
                  Element.objects.filter(repository=repository, states=state).count() > 0]

        all_states_architectures = set()
        all_states = set()
        open_files = {}
        complete_file_list = {}
        root = 'dists/%(repo)s/' % {'repo': repo_slug}
        # list all available architectures (required to add architecture-independent packages to all archs)
        for element in Element.objects.filter(repository=repository):
            control_data = parse_control_data(element.extra_data)
            architecture = control_data.get('Architecture', 'all')
            all_states_architectures.add(architecture)
        # build the following files:
        #   * dists/(group)/(state)/binary-(architecture)/Packages
        #   * dists/(group)/(state)/binary-(architecture)/Release
        # prepare data for:
        #   * dists/(group)/Contents-(architecture)
        if not all_states_architectures or all_states_architectures == {'all'}:
            all_states_architectures = default_architectures
        for state in states:
            state_architectures = set()
            all_states.add(state.name)
            for element in Element.objects.filter(repository=repository, states=state).order_by('filename'):
                control_data = parse_control_data(element.extra_data)
                architecture = control_data.get('Architecture', 'all')
                section = control_data.get('Section', 'contrib')
                package_file_list = ["%- 100s%s\n" % (x, section) for x in self.file_list(element, uid)]
                if architecture == 'all':
                    # arch-independent packages are indexed for every arch
                    elt_architectures = default_architectures
                else:
                    elt_architectures = {architecture, }
                state_architectures |= elt_architectures
                for architecture in elt_architectures:
                    complete_file_list.setdefault(architecture, [])
                    complete_file_list[architecture] += package_file_list

                    # NOTE(review): was state.name; the Release file below and
                    # all URLs use state.slug — use the slug here too so
                    # Packages and Release land in the same directory.
                    filename = 'dists/%(repo)s/%(state)s/binary-%(architecture)s/Packages' % {
                        'repo': repo_slug, 'state': state.slug, 'architecture': architecture, }
                    if filename not in open_files:
                        open_files[filename] = tempfile.TemporaryFile(mode='w+b', dir=settings.FILE_UPLOAD_TEMP_DIR)
                    package_file = open_files[filename]
                    package_file.write(element.extra_data.encode('utf-8'))
                    # append hash/size fields missing from the control data
                    for key, attr in (('MD5sum', 'md5'), ('SHA1', 'sha1'), ('SHA256', 'sha256'),
                                      ('Size', 'filesize')):
                        if key not in control_data:
                            package_file.write("{0}: {1}\n".format(key, getattr(element, attr)).encode('utf-8'))
                    package_url = reverse('repository:%s:get_file' % self.archive_type,
                                          kwargs={'rid': repository.id, 'repo_slug': repo_slug,
                                                  'filename': element.filename, 'state_slug': state.slug,
                                                  'folder': element.filename[0:1], })
                    # the Filename field must be relative to the repo root
                    package_url = os.path.relpath(package_url, root_url)
                    package_file.write("Filename: {0}\n".format(package_url).encode('utf-8'))
                    package_file.write("\n".encode('utf-8'))
            if len(state_architectures) == 0:
                state_architectures = default_architectures
            # one Release file per (state, architecture)
            for architecture in state_architectures:
                filename = 'dists/%(repo)s/%(state)s/binary-%(architecture)s/Release' % {
                    'repo': repo_slug, 'state': state.slug, 'architecture': architecture,
                }
                open_files[filename] = tempfile.TemporaryFile(mode='w+b', dir=settings.FILE_UPLOAD_TEMP_DIR)
                content = render_to_string('repositories/aptitude/architecture_release.txt',
                                           {'architecture': architecture, 'repository': repository, 'state': state, })
                open_files[filename].write(content.encode('utf-8'))
        # build the following files:
        #   * dists/(group)/Contents-(architecture)
        for architecture, file_list in complete_file_list.items():
            file_list.sort()
            filename = 'dists/%(repo)s/Contents-%(architecture)s' % {'repo': repo_slug,
                                                                     'architecture': architecture, }
            open_files[filename] = tempfile.TemporaryFile(mode='w+b', dir=settings.FILE_UPLOAD_TEMP_DIR)
            for info in file_list:
                open_files[filename].write(info.encode('utf-8'))
        # build the following files:
        #   * dists/(group)/Contents-(architecture).gz/.bz2/.xz
        #   * dists/(group)/(state)/binary-(architecture)/Packages.gz/.bz2/.xz
        #   * dists/(group)/(state)/binary-(architecture)/Release.gz/.bz2/.xz
        # store all files in the cache
        hash_controls = self.compress_files(open_files, root, uid)
        #   * dists/(group)/Release
        # store all files in the cache
        release_file = tempfile.TemporaryFile(mode='w+b', dir=settings.FILE_UPLOAD_TEMP_DIR)
        now = datetime.datetime.now(utc)
        now_str = now.strftime('%a, %d %b %Y %H:%M:%S UTC')  # 'Mon, 29 Nov 2010 08:12:51 UTC'
        until = (now + datetime.timedelta(validity)).strftime('%a, %d %b %Y %H:%M:%S UTC')
        content = render_to_string('repositories/aptitude/state_release.txt',
                                   {'architectures': all_states_architectures, 'until': until,
                                    'states': all_states, 'repository': repository, 'date': now_str})
        release_file.write(content.encode('utf-8'))
        # per-file hash sections (MD5Sum / SHA1 / SHA256) of the Release file
        for hash_value, index in (('MD5Sum', 1), ('SHA1', 2), ('SHA256', 3)):
            release_file.write("{0}:\n".format(hash_value).encode('utf-8'))
            for line in hash_controls:
                release_file.write((" %s % 8d %s\n" % (line[index], line[4], line[0])).encode('utf-8'))
        release_file.flush()
        release_file.seek(0)
        filename = 'dists/%(repo)s/Release' % {'repo': repo_slug, }
        storage(settings.STORAGE_CACHE).store_descriptor(uid, filename, release_file)
        # build the following files:
        #   * dists/(group)/Release.gpg (detached) and InRelease (clearsigned)
        # store all files in the cache
        release_file.seek(0)
        signature_content = GPGSigner().sign_file(release_file, detach=True)
        release_file.seek(0)
        inrelease_content = GPGSigner().sign_file(release_file, detach=False)
        release_file.close()

        signature_file = tempfile.TemporaryFile(mode='w+b', dir=settings.FILE_UPLOAD_TEMP_DIR)
        signature_file.write(signature_content.encode('utf-8'))
        signature_file.flush()
        signature_file.seek(0)
        filename = 'dists/%(repo)s/Release.gpg' % {'repo': repo_slug, }
        storage(settings.STORAGE_CACHE).store_descriptor(uid, filename, signature_file)
        signature_file.close()

        inrelease_file = tempfile.TemporaryFile(mode='w+b', dir=settings.FILE_UPLOAD_TEMP_DIR)
        inrelease_file.write(inrelease_content.encode('utf-8'))
        inrelease_file.flush()
        inrelease_file.seek(0)
        filename = 'dists/%(repo)s/InRelease' % {'repo': repo_slug, }
        storage(settings.STORAGE_CACHE).store_descriptor(uid, filename, inrelease_file)
        inrelease_file.close()
Exemplo n.º 8
0
    def generate_indexes(self, repository, states=None, validity=365):
        """
        Build and cache every APT index file for *repository*.

        Produces, inside the storage cache:
          * dists/(repo)/(state)/binary-(arch)/Packages[.gz/.bz2/.xz]
          * dists/(repo)/(state)/binary-(arch)/Release[.gz/.bz2/.xz]
          * dists/(repo)/Contents-(arch)[.gz/.bz2/.xz]
          * dists/(repo)/Release and Release.gpg

        :param repository: repository whose indexes are regenerated
        :param states: optional list of ArchiveState objects; defaults to all
            non-empty states of the repository
        :param validity: number of days used for the Release 'Valid-Until'
        """
        default_architectures = {
            'amd64',
        }
        uid = self.storage_uid % repository.id
        repo_slug = repository.slug
        root_url = reverse('%s:index' % self.archive_type,
                           kwargs={
                               'rid': repository.id,
                           })
        if repository.is_private:
            root_url = 'authb-%s' % root_url
        if states is None:
            states = list(
                ArchiveState.objects.filter(
                    repository=repository).order_by('name'))
        # keep only states that actually contain at least one element
        states = [
            state for state in states if Element.objects.filter(
                repository=repository, states=state).count() > 0
        ]

        all_states_architectures = set()
        all_states = set()
        open_files = {}
        complete_file_list = {}
        root = 'dists/%(repo)s/' % {'repo': repo_slug}
        # list all available architectures (required to add architecture-independent packages to all archs)
        for element in Element.objects.filter(repository=repository):
            control_data = parse_control_data(element.extra_data)
            architecture = control_data.get('Architecture', 'all')
            all_states_architectures.add(architecture)
        # build the following files:
        #   * dists/(group)/(state)/binary-(architecture)/Packages
        #   * dists/(group)/(state)/binary-(architecture)/Release
        # prepare data for:
        #   * dists/(group)/Contents-(architecture)
        if not all_states_architectures or all_states_architectures == {'all'}:
            all_states_architectures = default_architectures
        for state in states:
            state_architectures = set()
            all_states.add(state.name)
            for element in Element.objects.filter(
                    repository=repository, states=state).order_by('filename'):
                control_data = parse_control_data(element.extra_data)
                architecture = control_data.get('Architecture', 'all')
                section = control_data.get('Section', 'contrib')
                package_file_list = [
                    "%- 100s%s\n" % (x, section)
                    for x in self.file_list(element, uid)
                ]
                if architecture == 'all':
                    # arch-independent packages are indexed for every arch
                    elt_architectures = default_architectures
                else:
                    elt_architectures = {
                        architecture,
                    }
                state_architectures |= elt_architectures
                for architecture in elt_architectures:
                    complete_file_list.setdefault(architecture, [])
                    complete_file_list[architecture] += package_file_list

                    # NOTE(review): was state.name; the Release file below and
                    # all URLs use state.slug — use the slug here too so
                    # Packages and Release land in the same directory.
                    filename = 'dists/%(repo)s/%(state)s/binary-%(architecture)s/Packages' % {
                        'repo': repo_slug,
                        'state': state.slug,
                        'architecture': architecture,
                    }
                    if filename not in open_files:
                        open_files[filename] = tempfile.TemporaryFile(
                            mode='w+b', dir=settings.TEMP_ROOT)
                    package_file = open_files[filename]
                    package_file.write(element.extra_data.encode('utf-8'))
                    # append hash/size fields missing from the control data
                    for key, attr in (('MD5sum', 'md5'), ('SHA1', 'sha1'),
                                      ('SHA256', 'sha256'), ('Size',
                                                             'filesize')):
                        if key not in control_data:
                            package_file.write("{0}: {1}\n".format(
                                key, getattr(element, attr)).encode('utf-8'))
                    package_url = reverse('%s:get_file' % self.archive_type,
                                          kwargs={
                                              'rid': repository.id,
                                              'repo_slug': repo_slug,
                                              'filename': element.filename,
                                              'state_slug': state.slug,
                                              'folder': element.filename[0:1],
                                          })
                    # the Filename field must be relative to the repo root
                    package_url = os.path.relpath(package_url, root_url)
                    package_file.write(
                        "Filename: {0}\n".format(package_url).encode('utf-8'))
                    package_file.write("\n".encode('utf-8'))
            if len(state_architectures) == 0:
                state_architectures = default_architectures
            # one Release file per (state, architecture)
            for architecture in state_architectures:
                filename = 'dists/%(repo)s/%(state)s/binary-%(architecture)s/Release' % {
                    'repo': repo_slug,
                    'state': state.slug,
                    'architecture': architecture,
                }
                open_files[filename] = tempfile.TemporaryFile(
                    mode='w+b', dir=settings.TEMP_ROOT)
                content = render_to_string(
                    'repositories/aptitude/architecture_release.txt', {
                        'architecture': architecture,
                        'repository': repository,
                        'state': state,
                    })
                open_files[filename].write(content.encode('utf-8'))
        # build the following files:
        #   * dists/(group)/Contents-(architecture)
        for architecture, file_list in complete_file_list.items():
            file_list.sort()
            filename = 'dists/%(repo)s/Contents-%(architecture)s' % {
                'repo': repo_slug,
                'architecture': architecture,
            }
            open_files[filename] = tempfile.TemporaryFile(
                mode='w+b', dir=settings.TEMP_ROOT)
            open_files[filename].write(
                render_to_string('repositories/aptitude/contents.txt').encode(
                    'utf-8'))
            for info in file_list:
                open_files[filename].write(info.encode('utf-8'))
        # build the following files:
        #   * dists/(group)/Contents-(architecture).gz/.bz2/.xz
        #   * dists/(group)/(state)/binary-(architecture)/Packages.gz/.bz2/.xz
        #   * dists/(group)/(state)/binary-(architecture)/Release.gz/.bz2/.xz
        # store all files in the cache
        hash_controls = self.compress_files(open_files, root, uid)
        #   * dists/(group)/Release
        # store all files in the cache
        release_file = tempfile.TemporaryFile(mode='w+b',
                                              dir=settings.TEMP_ROOT)
        now = datetime.datetime.now(tz)
        now_str = now.strftime(
            '%a, %d %b %Y %H:%M:%S %z')  # 'Mon, 29 Nov 2010 08:12:51 UTC'
        until = (
            now +
            datetime.timedelta(validity)).strftime('%a, %d %b %Y %H:%M:%S %z')
        content = render_to_string(
            'repositories/aptitude/state_release.txt', {
                'architectures': all_states_architectures,
                'until': until,
                'states': all_states,
                'repository': repository,
                'date': now_str
            })
        release_file.write(content.encode('utf-8'))
        # per-file hash sections (MD5Sum / SHA1 / SHA256) of the Release file
        for hash_value, index in (('MD5Sum', 1), ('SHA1', 2), ('SHA256', 3)):
            release_file.write("{0}:\n".format(hash_value).encode('utf-8'))
            for line in hash_controls:
                release_file.write(
                    (" %s % 8d %s\n" %
                     (line[index], line[4], line[0])).encode('utf-8'))
        release_file.flush()
        release_file.seek(0)
        filename = 'dists/%(repo)s/Release' % {
            'repo': repo_slug,
        }
        storage(settings.STORAGE_CACHE).store_descriptor(
            uid, filename, release_file)
        # build the following files:
        #   * dists/(group)/Release.gpg
        # store all files in the cache
        release_file.seek(0)
        signature = GPGSigner().sign_file(release_file)
        release_file.close()

        gpg_file = tempfile.TemporaryFile(mode='w+b', dir=settings.TEMP_ROOT)
        gpg_file.write(signature.encode('utf-8'))
        gpg_file.flush()
        gpg_file.seek(0)
        filename = 'dists/%(repo)s/Release.gpg' % {
            'repo': repo_slug,
        }
        storage(settings.STORAGE_CACHE).store_descriptor(
            uid, filename, gpg_file)
        gpg_file.close()