def download(self, ud, d):
    """Fetch urls

    Runs wget for a single url described by the FetchData 'ud'.
    Returns True on success; raises FetchError when wget reports
    success but the downloaded file is missing or empty.
    """
    fetchcmd = self.basecmd
    if 'downloadfilename' in ud.parm:
        # An explicit downloadfilename was requested, so tell wget exactly
        # where to write the file (under DL_DIR).
        dldir = d.getVar("DL_DIR", True)
        bb.utils.mkdirhier(os.path.dirname(dldir + os.sep + ud.localfile))
        fetchcmd += " -O " + dldir + os.sep + ud.localfile
    # Strip any url parameters (";name=value") before handing to wget
    uri = ud.url.split(";")[0]
    if os.path.exists(ud.localpath):
        # file exists, but we didnt complete it.. trying again..
        # (-c asks wget to continue the partial download)
        fetchcmd += d.expand(" -c -P ${DL_DIR} '%s'" % uri)
    else:
        fetchcmd += d.expand(" -P ${DL_DIR} '%s'" % uri)
    self._runwget(ud, d, fetchcmd, False)
    # Sanity check since wget can pretend it succeed when it didn't
    # Also, this used to happen if sourceforge sent us to the mirror page
    if not os.path.exists(ud.localpath):
        raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (uri, ud.localpath), uri)
    if os.path.getsize(ud.localpath) == 0:
        # A zero-length file counts as a failure; remove it so a retry
        # starts from scratch instead of "resuming" an empty file.
        os.remove(ud.localpath)
        raise FetchError("The fetch of %s resulted in a zero size file?! Deleting and failing since this isn't right." % (uri), uri)
    return True
def _npm_view():
    """Run 'npm view' for the requested package and return the parsed json.

    Raises FetchError when the package is unavailable or the registry
    response cannot be interpreted, and ParameterError on a version
    mismatch.  Uses 'ud' and 'd' from the enclosing scope.
    """
    query_args = [("json", "true"), ("registry", ud.registry)]
    spec = shlex.quote(ud.package + "@" + ud.version)
    cmd = ud.basecmd + " view %s" % spec
    env = NpmEnvironment(d)
    check_network_access(d, cmd, ud.registry)
    raw = env.run(cmd, args=query_args)
    if not raw:
        raise FetchError("Unavailable package %s" % spec, ud.url)
    # Note: errors raised inside this try are deliberately caught below and
    # re-wrapped as "Invalid view from npm" (same scope as the original).
    try:
        parsed = json.loads(raw)
        error = parsed.get("error")
        if error is not None:
            raise FetchError(error.get("summary"), ud.url)
        if ud.version == "latest":
            bb.warn("The npm package %s is using the latest " \
                    "version available. This could lead to " \
                    "non-reproducible builds." % spec)
        elif ud.version != parsed.get("version"):
            raise ParameterError("Invalid 'version' parameter", ud.url)
        return parsed
    except Exception as e:
        raise FetchError("Invalid view from npm: %s" % str(e), ud.url)
def download(self, ud, d, retries=3):
    """Fetch urls from Azure blob storage.

    Builds a wget command for the https uri of the blob (appending a
    Shared Access Signature from AZ_SAS when configured) and retries up
    to 'retries' times on transient SSL handshake failures.

    Raises FetchError when the download fails permanently or produces a
    missing/empty file.
    """
    # If were reaching the account transaction limit we might be refused a connection,
    # retrying allows us to avoid false negatives since the limit changes over time
    fetchcmd = self.basecmd + ' --retry-connrefused --waitretry=5'

    # We need to provide a localpath to avoid wget using the SAS
    # ud.localfile either has the downloadfilename or ud.path
    localpath = os.path.join(d.getVar("DL_DIR"), ud.localfile)
    bb.utils.mkdirhier(os.path.dirname(localpath))
    fetchcmd += " -O %s" % shlex.quote(localpath)

    if ud.user and ud.pswd:
        fetchcmd += " --user=%s --password=%s --auth-no-challenge" % (ud.user, ud.pswd)

    # Check if a Shared Access Signature was given and use it
    az_sas = d.getVar('AZ_SAS')
    if az_sas:
        azuri = '%s%s%s%s' % ('https://', ud.host, ud.path, az_sas)
    else:
        azuri = '%s%s%s' % ('https://', ud.host, ud.path)

    if os.path.exists(ud.localpath):
        # file exists, but we didnt complete it.. trying again.
        fetchcmd += d.expand(" -c -P ${DL_DIR} '%s'" % azuri)
    else:
        fetchcmd += d.expand(" -P ${DL_DIR} '%s'" % azuri)

    try:
        self._runwget(ud, d, fetchcmd, False)
    except FetchError as e:
        # Azure fails on handshake sometimes when using wget after some stress, producing a
        # FetchError from the fetcher, if the artifact exists retrying should succeed.
        # Fixes vs. original: only retry while the retry budget lasts (the old
        # code recursed forever), return the retry's result, and re-raise any
        # other FetchError instead of silently swallowing it.
        if 'Unable to establish SSL connection' in str(e) and retries > 0:
            logger.debug2('Unable to establish SSL connection: Retries remaining: %s, Retrying...' % retries)
            return self.download(ud, d, retries - 1)
        raise

    # Sanity check since wget can pretend it succeed when it didn't
    # Also, this used to happen if sourceforge sent us to the mirror page
    if not os.path.exists(ud.localpath):
        raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (azuri, ud.localpath), azuri)

    if os.path.getsize(ud.localpath) == 0:
        os.remove(ud.localpath)
        raise FetchError("The fetch of %s resulted in a zero size file?! Deleting and failing since this isn't right." % (azuri), azuri)

    return True
def _buildp4command(self, ud, d, command, depot_filename=None): """ Build a p4 commandline. Valid commands are "changes", "print", and "files". depot_filename is the full path to the file in the depot including the trailing '#rev' value. """ p4opt = "" if ud.user: p4opt += ' -u "%s"' % (ud.user) if ud.pswd: p4opt += ' -P "%s"' % (ud.pswd) if ud.host and not ud.usingp4config: p4opt += ' -p %s' % (ud.host) if hasattr(ud, 'revision') and ud.revision: pathnrev = '%s@%s' % (ud.path, ud.revision) else: pathnrev = '%s' % (ud.path) if depot_filename: if ud.keepremotepath: # preserve everything, remove the leading // filename = depot_filename.lstrip('/') elif ud.module: # remove everything up to the module path modulepath = ud.module.rstrip('/...') filename = depot_filename[depot_filename.rfind(modulepath):] elif ud.pathisdir: # Remove leading (visible) path to obtain the filepath filename = depot_filename[len(ud.path) - 1:] else: # Remove everything, except the filename filename = depot_filename[depot_filename.rfind('/'):] filename = filename[:filename.find('#')] # Remove trailing '#rev' if command == 'changes': p4cmd = '%s%s changes -m 1 //%s' % (ud.basecmd, p4opt, pathnrev) elif command == 'print': if depot_filename is not None: p4cmd = '%s%s print -o "p4/%s" "%s"' % ( ud.basecmd, p4opt, filename, depot_filename) else: raise FetchError( 'No depot file name provided to p4 %s' % command, ud.url) elif command == 'files': p4cmd = '%s%s files //%s' % (ud.basecmd, p4opt, pathnrev) else: raise FetchError('Invalid p4 command %s' % command, ud.url) return p4cmd
def download(self, ud, d):
    """Fetch url

    Resolves the npm dependency tree for the package and writes it out
    as json to ud.localpath.  If a full mirror tarball already exists it
    is unpacked instead of going to the network.
    """
    jsondepobj = {}
    shrinkobj = {}
    lockdown = {}

    # Nothing fetched yet and a mirror tarball exists: unpack it and stop.
    if not os.listdir(ud.pkgdatadir) and os.path.exists(ud.fullmirror):
        dest = d.getVar("DL_DIR")
        bb.utils.mkdirhier(dest)
        runfetchcmd("tar -xJf %s" % (ud.fullmirror), d, workdir=dest)
        return

    if ud.parm.get("noverify", None) != '1':
        # Load the shrinkwrap file (pins dependency versions) ...
        shwrf = d.getVar('NPM_SHRINKWRAP')
        logger.debug(2, "NPM shrinkwrap file is %s" % shwrf)
        if shwrf:
            try:
                with open(shwrf) as datafile:
                    shrinkobj = json.load(datafile)
            except Exception as e:
                raise FetchError('Error loading NPM_SHRINKWRAP file "%s" for %s: %s' % (shwrf, ud.pkgname, str(e)))
        elif not ud.ignore_checksums:
            logger.warning('Missing shrinkwrap file in NPM_SHRINKWRAP for %s, this will lead to unreliable builds!' % ud.pkgname)
        # ... and the lockdown file (pins checksums).
        lckdf = d.getVar('NPM_LOCKDOWN')
        logger.debug(2, "NPM lockdown file is %s" % lckdf)
        if lckdf:
            try:
                with open(lckdf) as datafile:
                    lockdown = json.load(datafile)
            except Exception as e:
                raise FetchError('Error loading NPM_LOCKDOWN file "%s" for %s: %s' % (lckdf, ud.pkgname, str(e)))
        elif not ud.ignore_checksums:
            logger.warning('Missing lockdown file in NPM_LOCKDOWN for %s, this will lead to unreproducible builds!' % ud.pkgname)

    # Without a usable shrinkwrap we walk the registry recursively;
    # with one we follow the pinned dependency tree.
    if ('name' not in shrinkobj):
        self._getdependencies(ud.pkgname, jsondepobj, ud.version, d, ud)
    else:
        self._getshrinkeddependencies(ud.pkgname, shrinkobj, ud.version, d, ud, lockdown, jsondepobj)

    with open(ud.localpath, 'w') as outfile:
        json.dump(jsondepobj, outfile)
def download(self, ud, d):
    """Fetch urls

    Downloads to a ".tmp" file first and renames it into place so other
    processes never observe a partially written download.
    """
    fetchcmd = self.basecmd

    localpath = os.path.join(d.getVar("DL_DIR"), ud.localfile) + ".tmp"
    bb.utils.mkdirhier(os.path.dirname(localpath))
    fetchcmd += " -O %s" % shlex.quote(localpath)

    if ud.user and ud.pswd:
        fetchcmd += " --auth-no-challenge"
        if ud.parm.get("redirectauth", "1") == "1":
            # An undocumented feature of wget is that if the
            # username/password are specified on the URI, wget will only
            # send the Authorization header to the first host and not to
            # any hosts that it is redirected to. With the increasing
            # usage of temporary AWS URLs, this difference now matters as
            # AWS will reject any request that has authentication both in
            # the query parameters (from the redirect) and in the
            # Authorization header.
            fetchcmd += " --user=%s --password=%s" % (ud.user, ud.pswd)

    # Strip url parameters before passing to wget.
    uri = ud.url.split(";")[0]
    if os.path.exists(ud.localpath):
        # file exists, but we didnt complete it.. trying again..
        fetchcmd += d.expand(" -c -P ${DL_DIR} '%s'" % uri)
    else:
        fetchcmd += d.expand(" -P ${DL_DIR} '%s'" % uri)

    self._runwget(ud, d, fetchcmd, False)

    # Remove the ".tmp" and move the file into position atomically
    # Our lock prevents multiple writers but mirroring code may grab incomplete files
    os.rename(localpath, localpath[:-4])

    # Sanity check since wget can pretend it succeed when it didn't
    # Also, this used to happen if sourceforge sent us to the mirror page
    if not os.path.exists(ud.localpath):
        raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (uri, ud.localpath), uri)

    if os.path.getsize(ud.localpath) == 0:
        os.remove(ud.localpath)
        raise FetchError("The fetch of %s resulted in a zero size file?! Deleting and failing since this isn't right." % (uri), uri)

    return True
def _buildosccommand(self, ud, d, command): """ Build up an ocs commandline based on ud command is "fetch", "update", "info" """ basecmd = d.getVar("FETCHCMD_osc") or "/usr/bin/env osc" proto = ud.parm.get('protocol', 'https') options = [] config = "-c %s" % self.generate_config(ud, d) if getattr(ud, 'revision', ''): options.append("-r %s" % ud.revision) coroot = self._strip_leading_slashes(ud.path) if command == "fetch": osccmd = "%s %s -A %s://%s co %s/%s %s" % ( basecmd, config, proto, ud.host, coroot, ud.module, " ".join(options)) elif command == "update": osccmd = "%s %s -A %s://%s up %s" % (basecmd, config, proto, ud.host, " ".join(options)) elif command == "api_source": osccmd = "%s %s -A %s://%s api source/%s/%s" % ( basecmd, config, proto, ud.host, coroot, ud.module) else: raise FetchError("Invalid osc command %s" % command, ud.url) return osccmd
def fetch_uri(uri, ud, d):
    """Fetch (or just check) a single uri with the configured command.

    Uses CHECKCOMMAND when 'checkonly' (from the enclosing scope) is
    set, RESUMECOMMAND when a partial download already exists, and
    FETCHCOMMAND otherwise.
    """
    if checkonly:
        fetchcmd = data.getVar("CHECKCOMMAND", d, True)
    elif os.path.exists(ud.localpath):
        # file exists, but we didnt complete it.. trying again..
        fetchcmd = data.getVar("RESUMECOMMAND", d, True)
    else:
        fetchcmd = data.getVar("FETCHCOMMAND", d, True)

    # Strip url parameters before substituting into the command template.
    uri = uri.split(";")[0]
    uri_decoded = list(decodeurl(uri))
    uri_type = uri_decoded[0]
    uri_host = uri_decoded[1]

    fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0])
    fetchcmd = fetchcmd.replace("${FILE}", ud.basename)
    logger.info("fetch " + uri)
    logger.debug(2, "executing " + fetchcmd)
    bb.fetch2.check_network_access(d, fetchcmd)
    runfetchcmd(fetchcmd, d)

    # Sanity check since wget can pretend it succeed when it didn't
    # Also, this used to happen if sourceforge sent us to the mirror page
    if not os.path.exists(ud.localpath) and not checkonly:
        raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (uri, ud.localpath), uri)
def _buildosccommand(self, ud, d, command):
    """
    Build up an ocs commandline based on ud
    command is "fetch", "update", "info"
    """
    osc_cmd = data.expand('${FETCHCMD_osc}', d)
    proto = ud.parm.get('protocol', 'ocs')  # note: not used below
    cfg = "-c %s" % self.generate_config(ud, d)

    rev_opts = ["-r %s" % ud.revision] if ud.revision else []
    co_root = self._strip_leading_slashes(ud.path)

    if command == "fetch":
        return "%s %s co %s/%s %s" % (osc_cmd, cfg, co_root, ud.module, " ".join(rev_opts))
    if command == "update":
        return "%s %s up %s" % (osc_cmd, cfg, " ".join(rev_opts))
    raise FetchError("Invalid osc command %s" % command, ud.url)
def _buildbzrcommand(self, ud, d, command):
    """
    Build up an bzr commandline based on ud
    command is "fetch", "update", "revno"
    """
    bzr_bin = data.expand('${FETCHCMD_bzr}', d)
    proto = ud.parm.get('protocol', 'http')
    bzrroot = ud.host + ud.path
    opts = []

    # 'revno' never takes a revision option.
    if command == "revno":
        return "%s revno %s %s://%s" % (bzr_bin, " ".join(opts), proto, bzrroot)

    # fetch/update may be pinned to a specific revision.
    if ud.revision:
        opts.append("-r %s" % ud.revision)

    if command == "fetch":
        return "%s branch %s %s://%s" % (bzr_bin, " ".join(opts), proto, bzrroot)
    if command == "update":
        return "%s pull %s --overwrite" % (bzr_bin, " ".join(opts))
    raise FetchError("Invalid bzr command %s" % command, ud.url)
def download(self, ud, d):
    """Fetch urls

    Checks out the svk module into a fresh temporary directory and packs
    it into a tarball at ud.localpath.

    Raises FetchError if the temporary directory cannot be created.
    """
    svkroot = ud.host + ud.path

    # Check out either by date or, when given, by explicit revision.
    svkcmd = "svk co -r {%s} %s/%s" % (ud.date, svkroot, ud.module)
    if ud.revision:
        svkcmd = "svk co -r %s %s/%s" % (ud.revision, svkroot, ud.module)

    # create temp directory
    localdata = data.createCopy(d)
    data.update_data(localdata)
    logger.debug(2, "Fetch: creating temporary directory")
    bb.utils.mkdirhier(data.expand('${WORKDIR}', localdata))
    data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata)
    tmpfile, errors = bb.process.run(data.getVar('MKTEMPDIRCMD', localdata, True) or "false")
    tmpfile = tmpfile.strip()
    if not tmpfile:
        # Bug fix: logger.error() was previously called with no message,
        # which itself raises a TypeError and masked the real failure.
        logger.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
        raise FetchError("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.", ud.url)

    # check out sources there
    os.chdir(tmpfile)
    logger.info("Fetch " + ud.url)
    logger.debug(1, "Running %s", svkcmd)
    runfetchcmd(svkcmd, d, cleanup=[tmpfile])

    os.chdir(os.path.join(tmpfile, os.path.dirname(ud.module)))
    # tar them up to a defined filename
    runfetchcmd("tar -czf %s %s" % (ud.localpath, os.path.basename(ud.module)), d, cleanup=[ud.localpath])

    # cleanup
    bb.utils.prunedir(tmpfile)
def _build_ccase_command(self, ud, command): """ Build up a commandline based on ud command is: mkview, setcs, rmview """ options = [] if "rcleartool" in ud.basecmd: options.append("-server %s" % ud.server) basecmd = "%s %s" % (ud.basecmd, command) if command == 'mkview': if not "rcleartool" in ud.basecmd: # Cleartool needs a -snapshot view options.append("-snapshot") options.append("-tag %s" % ud.viewname) options.append(ud.viewdir) elif command == 'rmview': options.append("-force") options.append("%s" % ud.viewdir) elif command == 'setcs': options.append("-overwrite") options.append(ud.configspecfile) else: raise FetchError("Invalid ccase command %s" % command) ccasecmd = "%s %s" % (basecmd, " ".join(options)) self.debug("ccasecmd = %s" % ccasecmd) return ccasecmd
def download(self, ud, d): """Fetch url""" # Make a fresh view bb.utils.mkdirhier(ud.ccasedir) self._write_configspec(ud, d) cmd = self._build_ccase_command(ud, 'mkview') logger.info("creating view [VOB=%s label=%s view=%s]", ud.vob, ud.label, ud.viewname) bb.fetch2.check_network_access(d, cmd, ud.url) try: runfetchcmd(cmd, d) except FetchError as e: if "CRCLI2008E" in e.msg: raise FetchError("%s\n%s\n" % (e.msg, "Call `rcleartool login` in your console to authenticate to the clearcase server before running bitbake.")) else: raise e # Set configspec: Setting the configspec effectively fetches the files as defined in the configspec cmd = self._build_ccase_command(ud, 'setcs'); logger.info("fetching data [VOB=%s label=%s view=%s]", ud.vob, ud.label, ud.viewname) bb.fetch2.check_network_access(d, cmd, ud.url) output = runfetchcmd(cmd, d, workdir=ud.viewdir) logger.info("%s", output) # Copy the configspec to the viewdir so we have it in our source tarball later shutil.copyfile(ud.configspecfile, os.path.join(ud.viewdir, ud.csname)) # Clean clearcase meta-data before tar runfetchcmd('tar -czf "%s" .' % (ud.localpath), d, cleanup = [ud.localpath]) # Clean up so we can create a new view next time self.clean(ud, d);
def download(self, uri, ud, d, checkonly = False):
    """Fetch urls

    When 'checkonly' is set only a wget --spider probe is performed and
    nothing is written to disk.  The command templates can be overridden
    via CHECKCOMMAND_wget / RESUMECOMMAND_wget / FETCHCOMMAND_wget.
    """
    basecmd = d.getVar("FETCHCMD_wget", True) or "/usr/bin/env wget -t 2 -T 30 -nv --passive-ftp --no-check-certificate"

    if not checkonly and 'downloadfilename' in ud.parm:
        # Honour an explicit downloadfilename by pointing wget at it.
        dldir = d.getVar("DL_DIR", True)
        bb.utils.mkdirhier(os.path.dirname(dldir + os.sep + ud.localfile))
        basecmd += " -O " + dldir + os.sep + ud.localfile

    if checkonly:
        fetchcmd = d.getVar("CHECKCOMMAND_wget", True) or d.expand(basecmd + " --spider '${URI}'")
    elif os.path.exists(ud.localpath):
        # file exists, but we didnt complete it.. trying again..
        fetchcmd = d.getVar("RESUMECOMMAND_wget", True) or d.expand(basecmd + " -c -P ${DL_DIR} '${URI}'")
    else:
        fetchcmd = d.getVar("FETCHCOMMAND_wget", True) or d.expand(basecmd + " -P ${DL_DIR} '${URI}'")

    # Strip url parameters before substituting into the command template.
    uri = uri.split(";")[0]

    fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0])
    fetchcmd = fetchcmd.replace("${FILE}", ud.basename)
    if not checkonly:
        logger.info("fetch " + uri)
        logger.debug(2, "executing " + fetchcmd)
    bb.fetch2.check_network_access(d, fetchcmd)
    runfetchcmd(fetchcmd, d, quiet=checkonly)

    # Sanity check since wget can pretend it succeed when it didn't
    # Also, this used to happen if sourceforge sent us to the mirror page
    if not os.path.exists(ud.localpath) and not checkonly:
        raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (uri, ud.localpath), uri)

    return True
def _p4listfiles(self, ud, d):
    """
    Return a list of the file names which are present in the depot using the
    'p4 files' command, including trailing '#rev' file revision indicator

    Raises FetchError when the depot listing comes back empty.
    """
    p4cmd = self._buildp4command(ud, d, 'files')
    bb.fetch2.check_network_access(d, p4cmd, ud.url)
    p4fileslist = runfetchcmd(p4cmd, d, True)
    p4fileslist = [f.rstrip() for f in p4fileslist.splitlines()]

    if not p4fileslist:
        raise FetchError('Unable to fetch listing of p4 files from %s@%s' % (ud.host, ud.path))

    # Fix: removed the unused 'count = 0' local present in the original.
    filelist = []
    for filename in p4fileslist:
        # 'p4 files' output lines look like "<depotfile>#<rev> - <action> ...";
        # skip files whose most recent action deleted them.
        item = filename.split(' - ')
        lastaction = item[1].split()
        logger.debug(1, 'File: %s Last Action: %s' % (item[0], lastaction[0]))
        if lastaction[0] == 'delete':
            continue
        filelist.append(item[0])

    return filelist
def urldata_init(self, ud, d):
    """
    init ClearCase specific variable within url data
    """
    ud.proto = "https"
    if 'protocol' in ud.parm:
        ud.proto = ud.parm['protocol']
    if not ud.proto in ('http', 'https'):
        raise ParameterError("Invalid protocol type", ud.url)

    ud.vob = ''
    if 'vob' in ud.parm:
        ud.vob = ud.parm['vob']
    else:
        msg = ud.url+": vob must be defined so the fetcher knows what to get."
        raise MissingParameterError('vob', msg)

    if 'module' in ud.parm:
        ud.module = ud.parm['module']
    else:
        ud.module = ""

    # Prefer cleartool, fall back to rcleartool (the remote client).
    ud.basecmd = d.getVar("FETCHCMD_ccrc") or "/usr/bin/env cleartool || rcleartool"

    if d.getVar("SRCREV") == "INVALID":
        raise FetchError("Set a valid SRCREV for the clearcase fetcher in your recipe, e.g. SRCREV = \"/main/LATEST\" or any other label of your choice.")

    ud.label = d.getVar("SRCREV", False)
    ud.customspec = d.getVar("CCASE_CUSTOM_CONFIG_SPEC")

    ud.server = "%s://%s%s" % (ud.proto, ud.host, ud.path)

    ud.identifier = "clearcase-%s%s-%s" % (
        ud.vob.replace("/", ""),
        ud.module.replace("/", "."),
        ud.label.replace("/", "."))

    # Fix: the original called d.getVar("DATETIME", d, True), passing the
    # datastore itself as the 'expand' argument - every other getVar call
    # in this method uses the plain form.
    ud.viewname = "%s-view%s" % (ud.identifier, d.getVar("DATETIME"))
    ud.csname = "%s-config-spec" % (ud.identifier)
    ud.ccasedir = os.path.join(d.getVar("DL_DIR"), ud.type)
    ud.viewdir = os.path.join(ud.ccasedir, ud.viewname)
    ud.configspecfile = os.path.join(ud.ccasedir, ud.csname)
    ud.localfile = "%s.tar.gz" % (ud.identifier)

    self.debug("host = %s" % ud.host)
    self.debug("path = %s" % ud.path)
    self.debug("server = %s" % ud.server)
    self.debug("proto = %s" % ud.proto)
    self.debug("type = %s" % ud.type)
    self.debug("vob = %s" % ud.vob)
    self.debug("module = %s" % ud.module)
    self.debug("basecmd = %s" % ud.basecmd)
    self.debug("label = %s" % ud.label)
    self.debug("ccasedir = %s" % ud.ccasedir)
    self.debug("viewdir = %s" % ud.viewdir)
    self.debug("viewname = %s" % ud.viewname)
    self.debug("configspecfile = %s" % ud.configspecfile)
    self.debug("localfile = %s" % ud.localfile)

    # Note: localfile is rewritten to an absolute path after the debug
    # output above (preserved from the original).
    ud.localfile = os.path.join(d.getVar("DL_DIR"), ud.localfile)
def _buildsvncommand(self, ud, d, command): """ Build up an svn commandline based on ud command is "fetch", "update", "info" """ proto = ud.parm.get('protocol', 'svn') svn_ssh = None if proto == "svn+ssh" and "ssh" in ud.parm: svn_ssh = ud.parm["ssh"] svnroot = ud.host + ud.path options = [] options.append("--no-auth-cache") if ud.user: options.append("--username %s" % ud.user) if ud.pswd: options.append("--password %s" % ud.pswd) if command == "info": svncmd = "%s info %s %s://%s/%s/" % (ud.basecmd, " ".join(options), proto, svnroot, ud.module) elif command == "log1": svncmd = "%s log --limit 1 %s %s://%s/%s/" % ( ud.basecmd, " ".join(options), proto, svnroot, ud.module) else: suffix = "" # externals may be either 'allowed' or 'nowarn', but not both. Allowed # will not issue a warning, but will log to the debug buffer what has likely # been downloaded by SVN. if not ("externals" in ud.parm and ud.parm["externals"] == "allowed"): options.append("--ignore-externals") if ud.revision: options.append("-r %s" % ud.revision) suffix = "@%s" % (ud.revision) if command == "fetch": transportuser = ud.parm.get("transportuser", "") svncmd = "%s co %s %s://%s%s/%s%s %s" % ( ud.basecmd, " ".join(options), proto, transportuser, svnroot, ud.module, suffix, ud.path_spec) elif command == "update": svncmd = "%s update %s" % (ud.basecmd, " ".join(options)) else: raise FetchError("Invalid svn command %s" % command, ud.url) if svn_ssh: svncmd = "SVN_SSH=\"%s\" %s" % (svn_ssh, svncmd) return svncmd
def _buildhgcommand(self, ud, d, command):
    """
    Build up an hg commandline based on ud
    command is "fetch", "update", "info"
    """
    basecmd = data.expand('${FETCHCMD_hg}', d)

    proto = ud.parm.get('protocol', 'http')

    host = ud.host
    if proto == "file":
        host = "/"
        ud.host = "localhost"

    if not ud.user:
        hgroot = host + ud.path
    else:
        if ud.pswd:
            hgroot = ud.user + ":" + ud.pswd + "@" + host + ud.path
        else:
            hgroot = ud.user + "@" + host + ud.path

    if command == "info":
        return "%s identify -i %s://%s/%s" % (basecmd, proto, hgroot, ud.module)

    options = []

    # Don't specify revision for the fetch; clone the entire repo.
    # This avoids an issue if the specified revision is a tag, because
    # the tag actually exists in the specified revision + 1, so it won't
    # be available when used in any successive commands.
    if ud.revision and command != "fetch":
        options.append("-r %s" % ud.revision)

    if command == "fetch":
        cmd = "%s clone %s %s://%s/%s %s" % (basecmd, " ".join(options), proto, hgroot, ud.module, ud.module)
    elif command == "pull":
        # do not pass options list; limiting pull to rev causes the local
        # repo not to contain it and immediately following "update" command
        # will crash
        if ud.user and ud.pswd:
            cmd = "%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" pull" % (basecmd, ud.user, ud.pswd, proto)
        else:
            cmd = "%s pull" % (basecmd)
    elif command == "update":
        # Fix: only pass the auth --config options when credentials were
        # actually supplied - the original embedded them unconditionally,
        # producing literal "None" username/password for anonymous urls
        # (inconsistent with the guarded "pull" branch above).
        if ud.user and ud.pswd:
            cmd = "%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" update -C %s" % (basecmd, ud.user, ud.pswd, proto, " ".join(options))
        else:
            cmd = "%s update -C %s" % (basecmd, " ".join(options))
    else:
        raise FetchError("Invalid hg command %s" % command, ud.url)

    return cmd
def _latest_revision(self, ud, d, name):
    """
    Fetch latest revision for the given package
    """
    api_cmd = self._buildosccommand(ud, d, "api_source")
    listing = runfetchcmd(api_cmd, d)

    # The api response is an xml element like <directory ... rev="N" ...>.
    found = re.match(r'<directory ?.* rev="(\d+)".*>', listing)
    if found is None:
        raise FetchError("Unable to parse osc response", ud.url)
    return found.group(1)
def urldata_init(self, ud, d):
    """
    Initialize perforce specific variables within url data.  If P4CONFIG is
    provided by the env, use it.  If P4PORT is specified by the recipe, use
    its values, which may override the settings in P4CONFIG.
    """
    ud.basecmd = d.getVar('FETCHCMD_p4')
    if not ud.basecmd:
        ud.basecmd = "/usr/bin/env p4"

    ud.dldir = d.getVar('P4DIR')
    if not ud.dldir:
        ud.dldir = '%s/%s' % (d.getVar('DL_DIR'), 'p4')

    path = ud.url.split('://')[1]
    path = path.split(';')[0]
    delim = path.find('@')
    if delim != -1:
        # Credentials were embedded in the url as user:password@path.
        (ud.user, ud.pswd) = path.split('@')[0].split(':')
        ud.path = path.split('@')[1]
    else:
        ud.path = path

    ud.usingp4config = False
    p4port = d.getVar('P4PORT')

    if p4port:
        logger.debug(1, 'Using recipe provided P4PORT: %s' % p4port)
        ud.host = p4port
    else:
        logger.debug(1, 'Trying to use P4CONFIG to automatically set P4PORT...')
        ud.usingp4config = True
        # Ask the p4 client which server its P4CONFIG resolves to.
        p4cmd = '%s info | grep "Server address"' % ud.basecmd
        bb.fetch2.check_network_access(d, p4cmd, ud.url)
        ud.host = runfetchcmd(p4cmd, d, True)
        ud.host = ud.host.split(': ')[1].strip()
        logger.debug(1, 'Determined P4PORT to be: %s' % ud.host)
        if not ud.host:
            raise FetchError('Could not determine P4PORT from P4CONFIG')

    # A trailing '/...' in the depot path means "fetch the whole tree".
    if ud.path.find('/...') >= 0:
        ud.pathisdir = True
    else:
        ud.pathisdir = False

    cleanedpath = ud.path.replace('/...', '').replace('/', '.')
    cleanedhost = ud.host.replace(':', '.')
    ud.pkgdir = os.path.join(ud.dldir, cleanedhost, cleanedpath)

    ud.setup_revisions(d)

    ud.localfile = d.expand('%s_%s_%s.tar.gz' % (cleanedhost, cleanedpath, ud.revision))
def _get_url(view):
    """Build the fetch uri for the package tarball described by 'view'.

    Prefers the stronger 'integrity' checksum over the legacy 'shasum'.
    Uses 'ud' from the enclosing scope.
    """
    dist = view.get("dist", {})

    tarball_url = dist.get("tarball")
    if tarball_url is None:
        raise FetchError("Invalid 'dist.tarball' in view", ud.url)

    uri = URI(tarball_url)
    uri.params["downloadfilename"] = ud.localfile

    integrity = dist.get("integrity")
    shasum = dist.get("shasum")
    if integrity is not None:
        checksum_name, checksum_expected = npm_integrity(integrity)
        uri.params[checksum_name] = checksum_expected
    elif shasum is not None:
        uri.params["sha1sum"] = shasum
    else:
        raise FetchError("Invalid 'dist.integrity' in view", ud.url)

    return str(uri)
def download(self, urldata, d):
    """Fetch urls (no-op for Local method)"""
    # no need to fetch local files, we'll deal with them in place.
    # Only verify that a checksummable file can actually be found, and
    # report every searched location when it cannot.
    if self.supports_checksum(urldata) and not os.path.exists(urldata.localpath):
        filespath = d.getVar('FILESPATH')
        locations = filespath.split(":") if filespath else []
        msg = "Unable to find file " + urldata.url + " anywhere. The paths that were searched were:\n " + "\n ".join(locations)
        raise FetchError(msg)

    return True
def _latest_revision(self, ud, d, name):
    """
    Return the latest upstream scm revision number
    """
    p4cmd = self._buildp4command(ud, d, "changes")
    bb.fetch2.check_network_access(d, p4cmd, ud.url)
    output = runfetchcmd(p4cmd, d, True)

    if not output:
        raise FetchError('Could not determine the latest perforce changelist')

    # 'p4 changes -m 1' prints "Change <num> on <date> ..."; field 1 is
    # the changelist number.
    changelist = output.split(' ')[1]
    logger.debug(1, 'p4 tip found to be changelist %s' % changelist)
    return changelist
def download(self, ud, d):
    """
    Fetch urls
    Assumes localpath was called first
    """
    fetch_cmd = ' '.join([ud.basecmd, 'cp', 's3://%s%s' % (ud.host, ud.path), ud.localpath])
    bb.fetch2.check_network_access(d, fetch_cmd, ud.url)
    runfetchcmd(fetch_cmd, d)

    # Additional sanity checks copied from the wget class (although there
    # are no known issues which mean these are required, treat the aws cli
    # tool with a little healthy suspicion).
    if not os.path.exists(ud.localpath):
        raise FetchError("The aws cp command returned success for s3://%s%s but %s doesn't exist?!" % (ud.host, ud.path, ud.localpath))

    if os.path.getsize(ud.localpath) == 0:
        os.remove(ud.localpath)
        raise FetchError("The aws cp command for s3://%s%s resulted in a zero size file?! Deleting and failing since this isn't right." % (ud.host, ud.path))

    return True
def _getdependencies(self, pkg, data, version, d, ud, optional=False, fetchedlist=None):
    """Recursively resolve and download the dependency tree for 'pkg'.

    Queries the registry with 'npm view', downloads the package tarball,
    and records the package plus its (optional) dependencies in 'data'.
    'fetchedlist' tracks already-downloaded tarball urls across the
    recursion so nothing is fetched twice.
    """
    if fetchedlist is None:
        fetchedlist = []
    pkgfullname = pkg
    # A version spec containing '/' is treated as a url/path, not a range.
    if version != '*' and not '/' in version:
        pkgfullname += "@'%s'" % version
    logger.debug(2, "Calling getdeps on %s" % pkg)
    fetchcmd = "npm view %s --json --registry %s" % (pkgfullname, ud.registry)
    output = runfetchcmd(fetchcmd, d, True)
    pdata = self._parse_view(output)
    if not pdata:
        raise FetchError("The command '%s' returned no output" % fetchcmd)
    if optional:
        # Optional deps may restrict the platforms they support via 'os';
        # skip ones that exclude (or don't include) linux.
        pkg_os = pdata.get('os', None)
        if pkg_os:
            if not isinstance(pkg_os, list):
                pkg_os = [pkg_os]
            blacklist = False
            for item in pkg_os:
                if item.startswith('!'):
                    blacklist = True
                    break
            if (not blacklist and 'linux' not in pkg_os) or '!linux' in pkg_os:
                logger.debug(2, "Skipping %s since it's incompatible with Linux" % pkg)
                return
    #logger.debug(2, "Output URL is %s - %s - %s" % (ud.basepath, ud.basename, ud.localfile))
    outputurl = pdata['dist']['tarball']
    data[pkg] = {}
    data[pkg]['tgz'] = os.path.basename(outputurl)
    # Already fetched this tarball during the recursion: record it, skip download.
    if outputurl in fetchedlist:
        return

    self._runwget(ud, d, "%s --directory-prefix=%s %s" % (self.basecmd, ud.prefixdir, outputurl), False)
    fetchedlist.append(outputurl)

    dependencies = pdata.get('dependencies', {})
    optionalDependencies = pdata.get('optionalDependencies', {})
    dependencies.update(optionalDependencies)
    depsfound = {}
    optdepsfound = {}
    data[pkg]['deps'] = {}
    # Partition deps so optional ones can be resolved with optional=True.
    for dep in dependencies:
        if dep in optionalDependencies:
            optdepsfound[dep] = dependencies[dep]
        else:
            depsfound[dep] = dependencies[dep]
    # NOTE: 'version' below shadows the parameter; each recursive call gets
    # the dependency's own version spec.
    for dep, version in optdepsfound.items():
        self._getdependencies(dep, data[pkg]['deps'], version, d, ud, optional=True, fetchedlist=fetchedlist)
    for dep, version in depsfound.items():
        self._getdependencies(dep, data[pkg]['deps'], version, d, ud, fetchedlist=fetchedlist)
def _buildsvncommand(self, ud, d, command): """ Build up an svn commandline based on ud command is "fetch", "update", "info" """ proto = ud.parm.get('protocol', 'svn') svn_ssh = None if proto == "svn+ssh" and "ssh" in ud.parm: svn_ssh = ud.parm["ssh"] svnroot = ud.host + ud.path options = [] options.append("--no-auth-cache") if ud.user: options.append("--username %s" % ud.user) if ud.pswd: options.append("--password %s" % ud.pswd) if command == "info": svncmd = "%s info %s %s://%s/%s/" % (ud.basecmd, " ".join(options), proto, svnroot, ud.module) elif command == "log1": svncmd = "%s log --limit 1 %s %s://%s/%s/" % ( ud.basecmd, " ".join(options), proto, svnroot, ud.module) else: suffix = "" if ud.revision: options.append("-r %s" % ud.revision) suffix = "@%s" % (ud.revision) if command == "fetch": transportuser = ud.parm.get("transportuser", "") svncmd = "%s co %s %s://%s%s/%s%s %s" % ( ud.basecmd, " ".join(options), proto, transportuser, svnroot, ud.module, suffix, ud.path_spec) elif command == "update": svncmd = "%s update %s" % (ud.basecmd, " ".join(options)) else: raise FetchError("Invalid svn command %s" % command, ud.url) if svn_ssh: svncmd = "SVN_SSH=\"%s\" %s" % (svn_ssh, svncmd) return svncmd
def download(self, ud, d):
    """
    Get the list of files, fetch each one
    """
    filelist = self._p4listfiles(ud, d)
    if not filelist:
        raise FetchError('No files found in depot %s@%s' % (ud.host, ud.path))

    # Start from an empty package directory so stale files never leak
    # into the resulting tarball.
    bb.utils.remove(ud.pkgdir, True)
    bb.utils.mkdirhier(ud.pkgdir)

    for depot_file in filelist:
        printcmd = self._buildp4command(ud, d, 'print', depot_file)
        bb.fetch2.check_network_access(d, printcmd, ud.url)
        runfetchcmd(printcmd, d, workdir=ud.pkgdir)

    runfetchcmd('tar -czf %s p4' % (ud.localpath), d, cleanup=[ud.localpath], workdir=ud.pkgdir)
def checkstatus(self, fetch, ud, d):
    """
    Check the status of a URL
    """
    target = 's3://%s%s' % (ud.host, ud.path)
    ls_cmd = '%s ls %s' % (ud.basecmd, target)
    bb.fetch2.check_network_access(d, ls_cmd, ud.url)
    output = runfetchcmd(ls_cmd, d)

    # "aws s3 ls s3://mybucket/foo" will exit with success even if the file
    # is not found, so check output of the command to confirm success.
    if not output:
        raise FetchError("The aws ls command for s3://%s%s gave empty output" % (ud.host, ud.path))
    return True
def _buildhgcommand(self, ud, d, command):
    """
    Build up an hg commandline based on ud
    command is "fetch", "update", "info"
    """
    basecmd = data.expand('${FETCHCMD_hg}', d)
    proto = ud.parm.get('proto', 'http')

    host = ud.host
    if proto == "file":
        # local repository: address it via localhost
        host = "/"
        ud.host = "localhost"

    # Embed the user in the repo root when credentials were supplied.
    if ud.user:
        hgroot = ud.user + "@" + host + ud.path
    else:
        hgroot = host + ud.path

    if command == "info":
        return "%s identify -i %s://%s/%s" % (basecmd, proto, hgroot, ud.module)

    rev_opts = []
    if ud.revision:
        rev_opts.append("-r %s" % ud.revision)

    if command == "fetch":
        return "%s clone %s %s://%s/%s %s" % (basecmd, " ".join(rev_opts), proto, hgroot, ud.module, ud.module)
    if command == "pull":
        # do not pass options list; limiting pull to rev causes the local
        # repo not to contain it and immediately following "update" command
        # will crash
        return "%s pull" % (basecmd)
    if command == "update":
        return "%s update -C %s" % (basecmd, " ".join(rev_opts))
    raise FetchError("Invalid hg command %s" % command, ud.url)
def _buildsvncommand(self, ud, d, command):
    """
    Build up an svn commandline based on ud
    command is "fetch", "update", "info"
    """
    basecmd = data.expand('${FETCHCMD_svn}', d)
    proto = ud.parm.get('proto', 'svn')

    svn_rsh = None
    if proto == "svn+ssh" and "rsh" in ud.parm:
        svn_rsh = ud.parm["rsh"]

    svnroot = ud.host + ud.path

    auth_opts = []
    if ud.user:
        auth_opts.append("--username %s" % ud.user)
    if ud.pswd:
        auth_opts.append("--password %s" % ud.pswd)

    if command == "info":
        svncmd = "%s info %s %s://%s/%s/" % (basecmd, " ".join(auth_opts), proto, svnroot, ud.module)
    else:
        suffix = ""
        if ud.revision:
            auth_opts.append("-r %s" % ud.revision)
            suffix = "@%s" % (ud.revision)
        if command == "fetch":
            svncmd = "%s co %s %s://%s/%s%s %s" % (basecmd, " ".join(auth_opts), proto, svnroot, ud.module, suffix, ud.module)
        elif command == "update":
            svncmd = "%s update %s" % (basecmd, " ".join(auth_opts))
        else:
            raise FetchError("Invalid svn command %s" % command, ud.url)

    if svn_rsh:
        # note: the lowercase 'svn_RSH' env var name is preserved from the
        # original code
        svncmd = "svn_RSH=\"%s\" %s" % (svn_rsh, svncmd)

    return svncmd