import os

# lpms-internal imports; the module layout is inferred from how these names
# are used in this excerpt.
import lpms
from lpms import conf
from lpms import constants as cst
from lpms import fetcher
from lpms import out
from lpms import shelltools
from lpms import utils


def calculate_hashes(self):
    def write_archive_hash(urls, file_name):
        # Resolve every source archive referenced by the spec, fetch any
        # archive missing from the source cache, then append a
        # "name sha1 size" record to the hashes manifest.
        name, version = utils.parse_pkgname(file_name)
        for url in utils.parse_url_tag(urls, name, version):
            archive_name = os.path.basename(url)
            archive_path = os.path.join(conf.LPMSConfig().src_cache, archive_name)
            if not os.access(archive_path, os.F_OK):
                fetcher.URLFetcher().run([url])
            sha1 = utils.sha1sum(archive_path)
            shelltools.echo("hashes", "%s %s %s" % (archive_name, sha1,
                    os.path.getsize(archive_path)))

    # Must be a one-element tuple: the original `('hashes')` is just the
    # string "hashes", so `f in excepts` would do a substring test.
    excepts = ('hashes',)
    # Start from a clean manifest.
    shelltools.remove_file("hashes")
    if not self.files:
        self.files = os.listdir(self.current_dir)
    for f in self.files:
        if f in excepts:
            continue
        if f.endswith(cst.spec_suffix):
            out.normal("processing %s" % f)
            shelltools.echo("hashes", "%s %s %s" % (f, utils.sha1sum(f),
                    os.path.getsize(f)))
            content = utils.import_script(f)
            # Parse the metadata block once instead of twice.
            metadata = utils.metadata_parser(content["metadata"])
            if "src_url" in metadata:
                write_archive_hash(metadata["src_url"], f)
            elif "src_url" in content:
                write_archive_hash(content["src_url"], f)
            else:
                lpms.terminate("src_url was not defined in spec")
            del content
        elif os.path.isdir(f):
            # Hash every file in first-level subdirectories (e.g. patches).
            for item in os.listdir(os.path.join(self.current_dir, f)):
                path = os.path.join(f, item)
                out.normal("processing %s" % path)
                shelltools.echo("hashes", "%s %s %s" % (path, utils.sha1sum(path),
                        os.path.getsize(path)))
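# Usage sketch, for illustration only: in lpms, calculate_hashes() is a
# method on one of its command classes, which is not part of this excerpt.
# The hypothetical HashGenerator below assumes the method only needs the
# `files` and `current_dir` attributes, which is all it touches above. Each
# resulting manifest line has the form "name sha1 size".
class HashGenerator(object):
    calculate_hashes = calculate_hashes

    def __init__(self, current_dir, files=None):
        self.current_dir = current_dir
        # An empty list makes calculate_hashes() fall back to
        # os.listdir(current_dir) and process the whole directory.
        self.files = files or []


if __name__ == "__main__":
    HashGenerator(os.getcwd()).calculate_hashes()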
def echo(content, target):
    # Thin wrapper around shelltools.echo(): writes `content` after running
    # `target` through fix_target_path(), which is defined elsewhere in the
    # module.
    shelltools.echo(content, fix_target_path(target))