Example no. 1
    def calculate_hashes(self):
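        '''Write the name, SHA1 checksum and size of each spec file and its source archives to a "hashes" file'''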
        def write_archive_hash(urls, file_name):
            name, version = utils.parse_pkgname(file_name)
            for url in utils.parse_url_tag(urls, name, version):
                archive_name = os.path.basename(url)
                archive_path = os.path.join(conf.LPMSConfig().src_cache, archive_name)
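                # download the archive only if it is not already in the source cache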
                if not os.access(archive_path, os.F_OK):
                    fetcher.URLFetcher().run([url])
                sha1 = utils.sha1sum(archive_path)
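                # record "<archive name> <sha1> <size>" in the hashes file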
                shelltools.echo("hashes", "%s %s %s" % (archive_name, sha1, os.path.getsize(archive_path)))

        excepts = ('hashes',)  # trailing comma makes this a tuple; a bare string would turn "f in excepts" into a substring match
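        # start from a clean hashes file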
        shelltools.remove_file("hashes")
        if not self.files:
            self.files = os.listdir(self.current_dir)

        for f in self.files:
            if f in excepts:
                continue
            if f.endswith(cst.spec_suffix):
                out.normal("processing %s" % f)
                shelltools.echo("hashes", "%s %s %s" % (f, utils.sha1sum(f), os.path.getsize(f)))
                content = utils.import_script(f)
                metadata = utils.metadata_parser(content["metadata"])
                if "src_url" in metadata:
                    write_archive_hash(metadata["src_url"], f)
                elif "src_url" in content:
                    write_archive_hash(content["src_url"], f)
                else:
                    lpms.terminate("src_url was not defined in spec")
                del content
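            # hash the files inside subdirectories as well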
            elif os.path.isdir(f):
                for l in os.listdir(os.path.join(self.current_dir, f)):
                    path = os.path.join(f, l)
                    out.normal("processing %s" % path)
                    shelltools.echo("hashes", "%s %s %s" % (path, utils.sha1sum(path), os.path.getsize(path)))
Example no. 2
    def import_repo_news(self, repo):
        '''Import news items from the given repository'''
        my_news_dir = os.path.join(cst.repos, repo, cst.news_dir)
        if not os.path.isdir(my_news_dir):
            return

        for news in os.listdir(my_news_dir):
            local = utils.import_script(os.path.join(my_news_dir, news))
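            # skip news entries with malformed metadata instead of aborting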
            try:
                metadata = utils.metadata_parser(local["metadata"], keys=metadata_keys)
            except IndexError:
                out.warn("Syntax errors found in %s" % os.path.join(my_news_dir, news))
                continue
            self.data.append((repo, metadata, local["message"]))
Example no. 3
    def import_repo_news(self, repo):
        '''Import news items from the given repository'''
        my_news_dir = os.path.join(cst.repos, repo, cst.news_dir)
        if not os.path.isdir(my_news_dir):
            return

        for news in os.listdir(my_news_dir):
            local = utils.import_script(os.path.join(my_news_dir, news))
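            # skip news entries with malformed metadata instead of aborting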
            try:
                metadata = utils.metadata_parser(local["metadata"],
                                                 keys=metadata_keys)
            except IndexError:
                out.warn("Syntax errors found in %s" %
                         os.path.join(my_news_dir, news))
                continue
            self.data.append((repo, metadata, local["message"]))
Example no. 4
    def calculate_hashes(self):
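        '''Write the name, SHA1 checksum and size of each spec file and its source archives to a "hashes" file'''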
        def write_archive_hash(urls, file_name):
            name, version = utils.parse_pkgname(file_name)
            for url in utils.parse_url_tag(urls, name, version):
                archive_name = os.path.basename(url)
                archive_path = os.path.join(conf.LPMSConfig().src_cache,
                                            archive_name)
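                # download the archive only if it is not already in the source cache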
                if not os.access(archive_path, os.F_OK):
                    fetcher.URLFetcher().run([url])
                sha1 = utils.sha1sum(archive_path)
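                # record "<archive name> <sha1> <size>" in the hashes file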
                shelltools.echo(
                    "hashes", "%s %s %s" %
                    (archive_name, sha1, os.path.getsize(archive_path)))

        excepts = ('hashes',)  # trailing comma makes this a tuple; a bare string would turn "f in excepts" into a substring match
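        # start from a clean hashes file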
        shelltools.remove_file("hashes")
        if not self.files:
            self.files = os.listdir(self.current_dir)

        for f in self.files:
            if f in excepts:
                continue
            if f.endswith(cst.spec_suffix):
                out.normal("processing %s" % f)
                shelltools.echo(
                    "hashes",
                    "%s %s %s" % (f, utils.sha1sum(f), os.path.getsize(f)))
                content = utils.import_script(f)
                metadata = utils.metadata_parser(content["metadata"])
                if "src_url" in metadata:
                    write_archive_hash(metadata["src_url"], f)
                elif "src_url" in content:
                    write_archive_hash(content["src_url"], f)
                else:
                    lpms.terminate("src_url was not defined in spec")
                del content
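            # hash the files inside subdirectories as well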
            elif os.path.isdir(f):
                for l in os.listdir(os.path.join(self.current_dir, f)):
                    path = os.path.join(f, l)
                    out.normal("processing %s" % path)
                    shelltools.echo(
                        "hashes", "%s %s %s" %
                        (path, utils.sha1sum(path), os.path.getsize(path)))