def fetch(self, url):
    """Download *url* into ``self.top_dir`` and return the unpacked directory.

    The retrieved path is extracted in place when it is an archive file;
    the first directory found under ``self.top_dir`` is returned either way.
    """
    self.prefix.log("fetch:", url)
    downloaded = util.retrieve_url(url, self.top_dir)
    if os.path.isfile(downloaded):
        click.echo("Extracting archive {0} ...".format(downloaded))
        util.extract_ar(archive=downloaded, dst=self.top_dir)
    # First (presumably only) directory produced by the extraction.
    return next(util.get_dirs(self.top_dir))
def fetch(self, url, hash=None):
    """Download *url* into ``self.top_dir``, optionally verify its hash,
    extract it, and return the first directory under ``self.top_dir``.

    Raises ``util.BuildError`` when *hash* is given and the downloaded
    file does not match it.
    """
    self.prefix.log("fetch:", url)
    archive = util.retrieve_url(url, self.top_dir)
    if os.path.isfile(archive):
        # Verify integrity before touching the archive contents.
        if hash and not util.check_hash(archive, hash):
            raise util.BuildError("Hash doesn't match for {0}: {1}".format(url, hash))
        click.echo("Extracting archive {0} ...".format(archive))
        util.extract_ar(archive=archive, dst=self.top_dir)
    return next(util.get_dirs(self.top_dir))
def fetch(self, url, hash=None, copy=False, insecure=False):
    """Download *url* into ``self.top_dir``, extract it if it is an archive,
    and return the first directory found under ``self.top_dir``.

    Parameters:
        url: location of the archive to download.
        hash: optional expected hash, forwarded to ``util.retrieve_url``.
        copy: forwarded to ``util.retrieve_url``.
        insecure: when true, downgrade an ``https://`` URL to ``http://``.
    """
    self.prefix.log("fetch:", url)
    if insecure and url.startswith('https://'):
        # Rewrite only the scheme prefix. The previous blanket
        # url.replace('https', 'http') also corrupted any later
        # occurrence of "https" in the URL path or query.
        url = 'http://' + url[len('https://'):]
    f = util.retrieve_url(url, self.top_dir, copy=copy, insecure=insecure, hash=hash)
    if os.path.isfile(f):
        click.echo("Extracting archive {0} ...".format(f))
        util.extract_ar(archive=f, dst=self.top_dir)
    return next(util.get_dirs(self.top_dir))
def fetch(self, url, fname, hash=None, copy=False, insecure=False, pkg=None):
    """Fetch an archive and install its contents as ``self.src_dir/fname``.

    Parameters:
        url: download location of the archive.
        fname: name of the directory the extracted source is renamed to.
        hash: optional expected hash, forwarded to ``util.retrieve_url``.
        copy: forwarded to ``util.retrieve_url``.
        insecure: when true, downgrade an ``https://`` URL to ``http://``.
        pkg: optional package mapping; ``pkg['archive']`` names a previously
            downloaded archive in ``self.arch_dir`` that is reused if present.

    Returns the extracted source directory, or — when the retrieved path is
    not a file — the first directory found under ``self.top_dir``.

    Raises ``util.BuildError`` when the archive does not contain exactly one
    top-level directory.
    """
    self.prefix.log("fetch:", url)
    if insecure and url.startswith('https://'):
        # Rewrite only the scheme prefix; a blanket str.replace would also
        # mangle "https" occurring later in the URL path or query.
        url = 'http://' + url[len('https://'):]
    # NOTE(review): previously `f` was only bound when pkg was not None,
    # which raised NameError for pkg=None; initialize and fall through to
    # a download whenever no cached archive file exists.
    f = None
    if pkg is not None:
        f = os.path.join(self.arch_dir, pkg['archive'])
    if f is None or not os.path.isfile(f):
        f = util.retrieve_url(url, self.arch_dir, copy=copy, insecure=insecure, hash=hash)
    if os.path.isfile(f):
        click.echo("Extracting archive {0} ...".format(f))
        # Extract into a scratch directory first so we can validate the
        # archive layout before disturbing self.src_dir.
        temp_dir = os.path.abspath('temp')
        util.delete_dir(temp_dir)
        os.mkdir(temp_dir)
        util.extract_ar(archive=f, dst=temp_dir)
        dirs = [
            o for o in os.listdir(temp_dir)
            if os.path.isdir(os.path.join(temp_dir, o))
        ]
        if len(dirs) != 1:
            # Was: raise Exception('wrong count') — now consistent with the
            # util.BuildError used elsewhere and actually diagnosable.
            raise util.BuildError(
                "Expected exactly one top-level directory in archive {0}, "
                "found {1}".format(f, len(dirs)))
        util.delete_dir(os.path.join(self.src_dir, fname))
        os.rename(os.path.join(temp_dir, dirs[0]), os.path.join(self.src_dir, fname))
        util.delete_dir(temp_dir)
        return os.path.join(self.src_dir, fname)
    # Nothing extracted: fall back to the first directory in top_dir.
    return next(util.get_dirs(self.top_dir))