def fetch(self, url):
    self.prefix.log("fetch:", url)
    f = util.retrieve_url(url, self.top_dir)
    if os.path.isfile(f):
        click.echo("Extracting archive {0} ...".format(f))
        util.extract_ar(archive=f, dst=self.top_dir)
    return next(util.get_dirs(self.top_dir))
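# util.get_dirs is project-internal and not shown here; the next(...) call
# above relies on it yielding the subdirectories of a path. A minimal sketch
# of that assumption, using only the standard library:
import os

def get_dirs(path):
    # Yield the full paths of the immediate subdirectories of `path`.
    return (os.path.join(path, d) for d in os.listdir(path)
            if os.path.isdir(os.path.join(path, d)))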
def fetch(self, url, hash=None):
    self.prefix.log("fetch:", url)
    f = util.retrieve_url(url, self.top_dir)
    if os.path.isfile(f):
        if hash and not util.check_hash(f, hash):
            raise util.BuildError("Hash doesn't match for {0}: {1}".format(url, hash))
        click.echo("Extracting archive {0} ...".format(f))
        util.extract_ar(archive=f, dst=self.top_dir)
    return next(util.get_dirs(self.top_dir))
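# util.check_hash is likewise project-internal; a minimal sketch, assuming the
# hash is given as "<algo>:<hexdigest>" (e.g. "sha256:abc123..."), with a bare
# digest treated as SHA-256. Both conventions are assumptions, not confirmed
# by the code above.
import hashlib

def check_hash(path, hash):
    if ':' in hash:
        algo, digest = hash.split(':', 1)
    else:
        algo, digest = 'sha256', hash
    h = hashlib.new(algo)
    with open(path, 'rb') as f:
        # Read in chunks so large archives need not fit in memory.
        for chunk in iter(lambda: f.read(8192), b''):
            h.update(chunk)
    return h.hexdigest() == digest.lower()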
def fetch(self, url, hash=None, copy=False, insecure=False):
    self.prefix.log("fetch:", url)
    if insecure:
        # Downgrade the scheme so the download bypasses TLS entirely.
        url = url.replace('https', 'http')
    f = util.retrieve_url(url, self.top_dir, copy=copy, insecure=insecure, hash=hash)
    if os.path.isfile(f):
        click.echo("Extracting archive {0} ...".format(f))
        util.extract_ar(archive=f, dst=self.top_dir)
    return next(util.get_dirs(self.top_dir))
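# util.retrieve_url is project-internal; a rough sketch of the behavior the
# caller above assumes: fetch `url` (or copy it, if it is a local path and
# copy=True) into `dst` and return the resulting file's path. The copy/local
# semantics and the error type are assumptions; `insecure` is handled by the
# caller's scheme downgrade in this sketch.
import os
import shutil
from urllib.request import urlretrieve

def retrieve_url(url, dst, copy=False, insecure=False, hash=None):
    name = url.rstrip('/').split('/')[-1]
    target = os.path.join(dst, name)
    if copy and os.path.exists(url):
        shutil.copy(url, target)  # treat `url` as a local file in copy mode
    else:
        urlretrieve(url, target)
    if hash and not check_hash(target, hash):  # check_hash sketched above
        raise RuntimeError('Hash mismatch for {0}'.format(url))
    return target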
def fetch(self):
    # `urlretrieve` is expected to be imported at module scope
    # (from urllib.request import urlretrieve).
    fn = os.path.join(self.config.arch_root(), self.archive)
    if os.path.isfile(fn):
        click.echo('Using cached archive {0}'.format(fn))
    else:
        try:
            click.echo('Fetching file {0} ...'.format(self.url))
            urlretrieve(self.url, fn)
            click.echo('Ok.')
        except ConnectionError as e:
            click.echo(str(e))
            sys.exit(2)
    temp_dir = os.path.abspath('temp')
    re_extract = False
    exists = os.path.isdir(temp_dir)
    if not exists or re_extract:
        click.echo("Extracting archive {0} ...".format(fn))
        if exists:
            util.delete_dir(temp_dir)
        os.mkdir(temp_dir)
        util.extract_ar(archive=fn, dst=temp_dir)
    else:
        click.echo("Extraction skipped: reusing temporary directory")
    dirs = [o for o in os.listdir(temp_dir)
            if os.path.isdir(os.path.join(temp_dir, o))]
    if len(dirs) != 1:
        raise Exception('Expected exactly one directory in the archive, found {0}'.format(len(dirs)))
    temp_src_dir = os.path.join(temp_dir, dirs[0])
    self.choose_builder(temp_src_dir)
    src_dir = self.src_dir()
    # Replace any previous checkout: delete it, then move the new tree into place.
    util.delete_dir(src_dir)
    os.rename(temp_src_dir, src_dir)
    util.delete_dir(temp_dir)
    self.stage = FETCHED
    self.save()
    click.echo("Sources placed in {0}".format(src_dir))
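# util.extract_ar is also project-internal; a minimal sketch, assuming it
# simply unpacks a tar/zip archive into `dst`. shutil.unpack_archive infers
# the format (.tar.gz, .tar.bz2, .zip, ...) from the file name.
import shutil

def extract_ar(archive, dst):
    shutil.unpack_archive(archive, dst)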
def fetch(self, url, fname, hash=None, copy=False, insecure=False, pkg=None):
    self.prefix.log("fetch:", url)
    if insecure:
        # Downgrade the scheme so the download bypasses TLS entirely.
        url = url.replace('https', 'http')
    if pkg is not None:
        # Reuse a previously downloaded archive when one is already cached.
        f = os.path.join(self.arch_dir, pkg['archive'])
        if not os.path.isfile(f):
            f = util.retrieve_url(url, self.arch_dir, copy=copy, insecure=insecure, hash=hash)
        if os.path.isfile(f):
            click.echo("Extracting archive {0} ...".format(f))
            temp_dir = os.path.abspath('temp')
            util.delete_dir(temp_dir)
            os.mkdir(temp_dir)
            util.extract_ar(archive=f, dst=temp_dir)
            dirs = [o for o in os.listdir(temp_dir)
                    if os.path.isdir(os.path.join(temp_dir, o))]
            if len(dirs) != 1:
                raise Exception('Expected exactly one directory in the archive, found {0}'.format(len(dirs)))
            util.delete_dir(os.path.join(self.src_dir, fname))
            os.rename(os.path.join(temp_dir, dirs[0]), os.path.join(self.src_dir, fname))
            util.delete_dir(temp_dir)
            return os.path.join(self.src_dir, fname)
    return next(util.get_dirs(self.top_dir))  # first directory found in top_dir
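# A hypothetical call site for the final variant above; `builder` stands in
# for whatever object carries prefix/arch_dir/src_dir, and the URL, name and
# digest are placeholders, since the surrounding class is not shown here.
src = builder.fetch(
    'https://example.com/pkg-1.0.tar.gz',
    'pkg-1.0',
    hash='sha256:abc123...',
    pkg={'archive': 'pkg-1.0.tar.gz'},
)
# With pkg given, `src` points at <src_dir>/pkg-1.0; without it, fetch falls
# back to returning the first directory found in top_dir.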