Example #1
0
    def _writeArchMetadata(self, base_path, distro, arch, file_hashes,
                           file_sizes):
        """Write the per-architecture Release and Packages metadata files.

        Creates ``<base_path>/<component>/binary-<arch>/`` if needed, writes
        the Release and Packages files for the given *distro*/*arch*, and
        records each written file's hash tuple and byte size into the
        *file_hashes* and *file_sizes* dicts, keyed by the path relative to
        *base_path*.
        """
        dir_path = '{0}/{1}/binary-{2}'.format(base_path, self.component, arch)
        if not os.path.exists(dir_path):
            os.makedirs(dir_path)

        file_path = '{0}/binary-{1}/Release'.format(self.component, arch)
        full_path = os.path.join(base_path, file_path)
        # context manager guarantees the file is closed even if a write fails
        with open(full_path, 'w') as wrk:
            wrk.write('Component: {0}\n'.format(self.component))
            wrk.write('Origin: Packrat\n')
            wrk.write('Label: {0}\n'.format(self.repo_description))
            wrk.write('Architecture: {0}\n'.format(arch))
            wrk.write('Description: {0} of {1}\n'.format(
                self.repo_description, self.mirror_description))
        file_hashes[file_path] = hashFile(full_path)
        file_sizes[file_path] = os.path.getsize(full_path)

        file_path = '{0}/binary-{1}/Packages'.format(self.component, arch)
        full_path = os.path.join(base_path, file_path)
        try:
            filename_list = self.entry_list[distro][arch]
        except KeyError:
            filename_list = []
        with open(full_path, 'w') as wrk:
            for filename in filename_list:
                logging.debug('apt: Writing package "%s"', filename)
                (deb_path, sha1, sha256, md5, size, field_order,
                 fields) = self.entry_list[distro][arch][filename]

                # pass the control fields through in their original order,
                # except those we emit explicitly below
                for field in field_order:
                    if field in ('Filename', 'Size', 'SHA256', 'SHA1',
                                 'MD5sum', 'Description'):
                        continue
                    wrk.write('{0}: {1}\n'.format(field, fields[field]))

                wrk.write('Filename: {0}\n'.format(deb_path))
                wrk.write('Size: {0}\n'.format(size))
                wrk.write('SHA256: {0}\n'.format(sha256))
                wrk.write('SHA1: {0}\n'.format(sha1))
                wrk.write('MD5sum: {0}\n'.format(md5))
                wrk.write('Description: {0}\n'.format(fields['Description']))
                wrk.write('\n')
        file_hashes[file_path] = hashFile(full_path)
        file_sizes[file_path] = os.path.getsize(full_path)
Example #2
0
  def loadFile( self, filename, temp_file, distro, distro_version, arch ):
    """Move *temp_file* into the repo tree at
    <root_dir>/<distro>/<component>/<distro_version>/<arch>/<filename>,
    signing it first when a gpg key is configured.  Returns the sha256 of
    the installed file.  Raises on signing failure.
    """
    target_arch = 'noarch' if arch == 'all' else arch

    target_dir = '{0}/{1}/{2}/{3}/{4}'.format( self.root_dir, distro, self.component, distro_version, target_arch )
    if not os.path.exists( target_dir ):
      os.makedirs( target_dir )

    if self.gpg_key:
      logging.info( 'yum: signing "%s"', temp_file )
      # NOTE: the stock python3-rpm addSign is broken on bionic; to get a
      # working build: in an empty dir run `apt source python3-rpm`, apply
      # the patch from
      # https://bugs.launchpad.net/ubuntu/+source/rpm/+bug/1776815, then
      # `dpkg-buildpackage -b`, `cd ..`, and
      # `dpkg -i python3-rpm_4.14.1+dfsg1-2_amd64.deb`
      if not rpm.addSign( path=temp_file, keyid=self.gpg_key ):
        raise Exception( 'Error Signing "{0}"'.format( temp_file ) )

    dest_path = os.path.join( target_dir, filename )
    shutil.move( temp_file, dest_path )
    ( _, sha256, _ ) = hashFile( dest_path )
    return sha256
Example #3
0
    def addEntry(self, type, filename, distro, distro_version, arch):
        """Record a package entry under entry_list[arch][filename].

        Locates the file as <root_dir>/<package>/<filename>, hashes and
        sizes it, and stores (relative path, type, sha256, size).
        *distro* and *distro_version* are accepted for interface parity but
        not used here.
        """
        logging.debug('json: Got Entry for package: "%s" arch: "%s"',
                      filename, arch)
        (package, _) = _splitFileName(filename)
        rel_path = os.path.join(package, filename)
        abs_path = os.path.join(self.root_dir, rel_path)
        (_, sha256, _) = hashFile(abs_path)
        byte_count = os.path.getsize(abs_path)

        arch_entries = self.entry_list.setdefault(arch, {})
        arch_entries[filename] = (rel_path, type, sha256, byte_count)
Example #4
0
  def addEntry( self, type, filename, distro, distro_version, arch ):
    """Record a python package entry keyed by its simple-index name.

    Non-python entries are logged and ignored.  *distro*, *distro_version*
    and *arch* are accepted for interface parity but not used here.
    """
    if type != 'python':
      logging.warning( 'pypi: New entry not a python, skipping...' )
      return

    logging.debug( 'pypi: Got Entry for package: "%s"', filename )
    # simple-index name is everything before the first '-'; package files are
    # bucketed under its first 6 characters
    simple_dir = filename.split( '-', 1 )[ 0 ]
    bucket = simple_dir[ :6 ]
    package_path = '{0}/packages/{1}/{2}'.format( self.root_dir, bucket, filename )
    ( _, _, md5 ) = hashFile( package_path )

    self.entry_list.setdefault( simple_dir, {} )[ filename ] = ( package_path, md5 )
Example #5
0
    def addEntry(self, type, filename, distro, distro_version, arch):
        """Record a debian package for every architecture it serves.

        Only 'deb' entries for the 'debian' distro are accepted; anything
        else is logged and skipped.  The .deb's control fields are read from
        the pool copy and (deb_path, sha1, sha256, md5, size, field_order,
        fields) is stored under entry_list[distro_version][arch][filename]
        for each target architecture ('all' packages go to both i386 and
        amd64).
        """
        if type != 'deb':
            logging.warning('apt: New entry not a deb, skipping...')
            return

        if distro != 'debian':
            logging.warning('apt: Not a debian distro, skipping...')
            return

        if distro_version not in self.entry_list:
            self.entry_list[distro_version] = {}
            for tmp in self.arch_list:
                self.entry_list[distro_version][tmp] = {}

        logging.debug(
            'apt: Got Entry for package: "%s" arch: "%s" distro: "%s"',
            filename, arch, distro_version)
        # pool layout: pool/<first 6 chars of package name>/<filename>
        (pool_dir, _) = filename.split('_', 1)
        pool_dir = pool_dir[0:6]
        deb_path = 'pool/{0}/{1}'.format(pool_dir, filename)
        full_deb_path = os.path.join(self.root_dir, deb_path)
        deb = Deb(full_deb_path)
        (field_order, fields) = deb.getControlFields()

        if arch == 'x86_64':  # debian's name for x86_64 is amd64
            arch = 'amd64'

        if arch != fields['Architecture']:
            logging.warning('apt: New entry arch mismatched, skipping...')
            return

        if fields['Architecture'] == 'i386':
            arch_list = ('i386', )
        elif fields['Architecture'] == 'amd64':
            arch_list = ('amd64', )
        elif fields['Architecture'] == 'all':
            arch_list = ('i386', 'amd64')
        else:
            # previously fell through with arch_list unbound, crashing with a
            # NameError on any other architecture
            logging.warning('apt: Unsupported architecture "%s", skipping...',
                            fields['Architecture'])
            return

        size = os.path.getsize(full_deb_path)
        (sha1, sha256, md5) = hashFile(full_deb_path)
        for entry_arch in arch_list:
            self.entry_list[distro_version][entry_arch][filename] = (
                deb_path, sha1, sha256, md5, size, field_order, fields)
Example #6
0
    def checkFiles(
        self, file_list
    ):  # checks sha256 and file existance, removes file and entry upon problem
        """Verify each file's sha256; remove mismatching files and entries.

        *file_list* items are (filename, distro, distro_version, arch,
        sha256, signed256) tuples; when signed256 is not None it supersedes
        sha256 as the expected hash.  Removal happens under the repo update
        lock.
        """
        for (filename, distro, distro_version, arch, sha256,
             signed256) in file_list:
            file_path_list = self.manager.filePaths(filename, distro,
                                                    distro_version, arch)

            if len(file_path_list) > 1:
                return  # don't have support for checking sha256 of origional file  if that file  has been split up yet

            file_path = file_path_list[0]
            (_, file_sha256, _) = hashFile(file_path)
            if file_sha256 is None:  # file dosen't exist, no point trying to delete it
                continue

            if signed256 is not None:
                sha256 = signed256

            if sha256 != file_sha256:
                logging.info(
                    'libRepo: hash for "%s" is "%s" expected "%s", removing.',
                    file_path, file_sha256, sha256)
                logging.debug(
                    'libRepo: Acquiring update lock for repo during checkFiles-bad file removal "%s"',
                    self.name)
                self.update_lock.acquire()
                try:
                    self.manager.removeEntry(filename, distro, distro_version,
                                             arch)
                    # file_sha256 is known non-None here (checked above), so
                    # the file exists and can be unlinked
                    os.unlink(file_path)
                finally:
                    # release even when removeEntry/unlink raise, otherwise
                    # the repo stays locked forever
                    self.update_lock.release()
                logging.debug(
                    'libRepo: Released update lock for repo during checkFiles-bad file removal "%s"',
                    self.name)
Example #7
0
  def _writeArchMetadata( self, base_path, distro, distro_version ):
    """Write the yum repodata set (other.xml, filelists.xml, primary.xml and
    repomd.xml) for *distro*/*distro_version* under <base_path>/repodata.
    """
    timestamp = int( time.time() )
    repo_files = []
    dir_path = '{0}/repodata'.format( base_path )
    if not os.path.exists( dir_path ):
      os.makedirs( dir_path )

    try:
      filename_list = self.entry_list[ distro ][ distro_version ]
    except KeyError:
      filename_list = []

    other_full_path = '{0}/other.xml'.format( dir_path )
    filelists_full_path = '{0}/filelists.xml'.format( dir_path )
    primary_full_path = '{0}/primary.xml'.format( dir_path )

    # context managers guarantee the metadata files are closed (and flushed)
    # even if dumping a package raises
    with open( other_full_path, 'w' ) as other_fd, \
         open( filelists_full_path, 'w' ) as filelists_fd, \
         open( primary_full_path, 'w' ) as primary_fd:
      other_fd.write( '<?xml version="1.0" encoding="UTF-8"?>\n' )
      other_fd.write( '<otherdata xmlns="http://linux.duke.edu/metadata/other" packages="{0}">\n'.format( len( filename_list ) ) )

      filelists_fd.write( '<?xml version="1.0" encoding="UTF-8"?>\n' )
      filelists_fd.write( '<filelists xmlns="http://linux.duke.edu/metadata/filelists" packages="{0}">\n'.format( len( filename_list ) ) )

      primary_fd.write( '<?xml version="1.0" encoding="UTF-8"?>\n' )
      primary_fd.write( '<metadata packages="{0}" xmlns="http://linux.duke.edu/metadata/common" xmlns:rpm="http://linux.duke.edu/metadata/rpm">\n'.format( len( filename_list ) ) )

      for filename in filename_list:
        ( full_rpm_path, arch ) = self.entry_list[ distro ][ distro_version ][ filename ]
        pkg = YumLocalPackage( filename=full_rpm_path, relpath='{0}/{1}'.format( arch, filename ) )
        other_fd.write( pkg.xml_dump_other_metadata() )
        filelists_fd.write( pkg.xml_dump_filelists_metadata() )
        primary_fd.write( pkg.xml_dump_primary_metadata() )

      other_fd.write( '</otherdata>\n' )
      filelists_fd.write( '</filelists>\n' )
      primary_fd.write( '</metadata>\n' )

    # NOTE: the metadata files are not actually gzipped yet, so 'checksum'
    # (which should be the hash of the .gz'ed file) is currently identical to
    # 'open-checksum' (the hash of the plain file)
    for meta_type, full_path, href in (
        ( 'other', other_full_path, 'other.xml' ),
        ( 'filelists', filelists_full_path, 'filelists.xml' ),
        ( 'primary', primary_full_path, 'primary.xml' ) ):
      ( _, sha256orig, _ ) = hashFile( full_path )
      ( _, sha256, _ ) = hashFile( full_path )  # technically should be the .gz'ed one
      repo_files.append( { 'type': meta_type, 'href': href, 'checksum': sha256, 'open-checksum': sha256orig } )

    repomod_full_path = '{0}/repomd.xml'.format( dir_path )
    with open( repomod_full_path, 'w' ) as repomod_fd:
      repomod_fd.write( '<?xml version="1.0" encoding="UTF-8"?>\n' )
      repomod_fd.write( '<repomd xmlns="http://linux.duke.edu/metadata/repo">\n' )
      for entry in repo_files:
        repomod_fd.write( '  <data type="{0}">\n'.format( entry[ 'type' ] ) )
        repomod_fd.write( '    <location href="repodata/{0}"/>\n'.format( entry[ 'href' ] ) )
        repomod_fd.write( '    <timestamp>{0}</timestamp>\n'.format( timestamp ) )
        repomod_fd.write( '    <checksum type="sha256">{0}</checksum>\n'.format( entry[ 'checksum' ] ) )
        repomod_fd.write( '    <open-checksum type="sha256">{0}</open-checksum>\n'.format( entry[ 'open-checksum' ] ) )
        repomod_fd.write( '  </data>\n' )
      repomod_fd.write( '</repomd>\n' )