def fetch_uri(uri, ud, d, checkonly=False):
    """Fetch *uri* to ud.localpath using the configured wget command.

    Selects CHECKCOMMAND (URL check only), RESUMECOMMAND (partial download
    already on disk) or FETCHCOMMAND from the datastore, substitutes the
    ${URI} and ${FILE} placeholders, and runs the result.

    Bug fix: the original body read ``checkonly`` but the function had no
    such parameter and no binding for it, so every call raised NameError.
    It is now an explicit keyword parameter defaulting to False, which is
    backward compatible for existing three-argument callers.

    Raises:
        FetchError: if the command reports success but ud.localpath does
            not exist afterwards (e.g. wget "succeeded" on a mirror page).
    """
    if checkonly:
        fetchcmd = data.getVar("CHECKCOMMAND", d, True)
    elif os.path.exists(ud.localpath):
        # file exists, but we didnt complete it.. trying again..
        fetchcmd = data.getVar("RESUMECOMMAND", d, True)
    else:
        fetchcmd = data.getVar("FETCHCOMMAND", d, True)

    # Strip any ;name=value parameters before handing the URL to wget.
    uri = uri.split(";")[0]
    uri_decoded = list(decodeurl(uri))
    uri_type = uri_decoded[0]
    uri_host = uri_decoded[1]

    fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0])
    fetchcmd = fetchcmd.replace("${FILE}", ud.basename)
    if not checkonly:
        logger.info("fetch " + uri)
        logger.debug(2, "executing " + fetchcmd)
    bb.fetch2.check_network_access(d, fetchcmd)
    runfetchcmd(fetchcmd, d, quiet=checkonly)

    # Sanity check since wget can pretend it succeed when it didn't
    # Also, this used to happen if sourceforge sent us to the mirror page
    if not os.path.exists(ud.localpath) and not checkonly:
        raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (uri, ud.localpath), uri)
def download(self, uri, ud, d, checkonly=False):
    """Fetch urls

    Picks the wget invocation from the datastore (CHECKCOMMAND_wget /
    RESUMECOMMAND_wget / FETCHCOMMAND_wget) with built-in fallbacks,
    fills in the ${URI} and ${FILE} placeholders and executes it.
    Raises FetchError if wget claimed success but no file appeared.
    """
    # Choose the command: spider for check-only runs, resume (-c) when a
    # partial download is already on disk, plain fetch otherwise.
    if checkonly:
        cmd = data.getVar("CHECKCOMMAND_wget", d, True) or d.expand("/usr/bin/env wget --spider -t 5 --passive-ftp --no-check-certificate -P ${DL_DIR} '${URI}'")
    elif os.path.exists(ud.localpath):
        # file exists, but we didnt complete it.. trying again..
        cmd = data.getVar("RESUMECOMMAND_wget", d, True) or d.expand("/usr/bin/env wget -c -t 5 -nv --passive-ftp --no-check-certificate -P ${DL_DIR} '${URI}'")
    else:
        cmd = data.getVar("FETCHCOMMAND_wget", d, True) or d.expand("/usr/bin/env wget -t 5 -nv --passive-ftp --no-check-certificate -P ${DL_DIR} '${URI}'")

    # Drop any ;name=value suffix from the URL before substitution.
    uri = uri.split(";")[0]
    decoded = list(decodeurl(uri))
    uri_type = decoded[0]
    uri_host = decoded[1]

    cmd = cmd.replace("${URI}", uri.split(";")[0])
    cmd = cmd.replace("${FILE}", ud.basename)

    if not checkonly:
        logger.info("fetch " + uri)
        logger.debug(2, "executing " + cmd)
    bb.fetch2.check_network_access(d, cmd)
    runfetchcmd(cmd, d, quiet=checkonly)

    # Sanity check since wget can pretend it succeed when it didn't
    # Also, this used to happen if sourceforge sent us to the mirror page
    if not os.path.exists(ud.localpath) and not checkonly:
        raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (uri, ud.localpath), uri)

    return True
def fetch_uri(uri, ud, d, checkonly=False):
    """Fetch *uri* to ud.localpath using the configured wget command.

    Selects CHECKCOMMAND / RESUMECOMMAND / FETCHCOMMAND from the datastore
    depending on whether this is a check-only run or a partial download
    exists, substitutes ${URI} and ${FILE}, and runs the command.

    Bug fix: the original body used ``checkonly`` in the final sanity
    check, but the name was never defined in the function — so any fetch
    that left ud.localpath missing raised NameError instead of FetchError.
    It is now an explicit keyword parameter defaulting to False, which is
    backward compatible for existing three-argument callers.

    Raises:
        FetchError: if the command reports success but ud.localpath does
            not exist afterwards (e.g. wget "succeeded" on a mirror page).
    """
    if checkonly:
        fetchcmd = data.getVar("CHECKCOMMAND", d, True)
    elif os.path.exists(ud.localpath):
        # file exists, but we didnt complete it.. trying again..
        fetchcmd = data.getVar("RESUMECOMMAND", d, True)
    else:
        fetchcmd = data.getVar("FETCHCOMMAND", d, True)

    # Strip any ;name=value parameters before handing the URL to wget.
    uri = uri.split(";")[0]
    uri_decoded = list(decodeurl(uri))
    uri_type = uri_decoded[0]
    uri_host = uri_decoded[1]

    fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0])
    fetchcmd = fetchcmd.replace("${FILE}", ud.basename)

    logger.info("fetch " + uri)
    logger.debug(2, "executing " + fetchcmd)
    bb.fetch2.check_network_access(d, fetchcmd)
    runfetchcmd(fetchcmd, d)

    # Sanity check since wget can pretend it succeed when it didn't
    # Also, this used to happen if sourceforge sent us to the mirror page
    if not os.path.exists(ud.localpath) and not checkonly:
        raise FetchError(
            "The fetch command returned success for url %s but %s doesn't exist?!" % (uri, ud.localpath), uri)
def download(self, uri, ud, d, checkonly=False):
    """Fetch urls

    Builds the wget command from FETCHCMD_wget (or a built-in default),
    appends the mode-specific options (spider / resume / plain fetch),
    substitutes ${URI} and ${FILE}, and runs the result. Raises
    FetchError if wget claimed success but no file appeared.
    """
    base = d.getVar("FETCHCMD_wget", True) or "/usr/bin/env wget -t 2 -T 30 -nv --passive-ftp --no-check-certificate"

    if 'downloadfilename' in ud.parm:
        # Caller asked for a specific local filename; write there directly.
        base += " -O ${DL_DIR}/" + ud.localfile

    # Mode selection: spider for checks, -c to resume a partial file,
    # otherwise a fresh download into ${DL_DIR}.
    if checkonly:
        cmd = d.getVar("CHECKCOMMAND_wget", True) or d.expand(base + " --spider '${URI}'")
    elif os.path.exists(ud.localpath):
        # file exists, but we didnt complete it.. trying again..
        cmd = d.getVar("RESUMECOMMAND_wget", True) or d.expand(base + " -c -P ${DL_DIR} '${URI}'")
    else:
        cmd = d.getVar("FETCHCOMMAND_wget", True) or d.expand(base + " -P ${DL_DIR} '${URI}'")

    # Drop any ;name=value suffix from the URL before substitution.
    uri = uri.split(";")[0]
    decoded = list(decodeurl(uri))
    uri_type = decoded[0]
    uri_host = decoded[1]

    cmd = cmd.replace("${URI}", uri.split(";")[0])
    cmd = cmd.replace("${FILE}", ud.basename)

    if not checkonly:
        logger.info("fetch " + uri)
        logger.debug(2, "executing " + cmd)
    bb.fetch2.check_network_access(d, cmd)
    runfetchcmd(cmd, d, quiet=checkonly)

    # Sanity check since wget can pretend it succeed when it didn't
    # Also, this used to happen if sourceforge sent us to the mirror page
    if not os.path.exists(ud.localpath) and not checkonly:
        raise FetchError(
            "The fetch command returned success for url %s but %s doesn't exist?!" % (uri, ud.localpath), uri)

    return True
def get_recipe_upstream_version(rd):
    """
    Get upstream version of recipe using bb.fetch2 methods with support for
    http, https, ftp and git.

    bb.fetch2 exceptions can be raised:
        FetchError when don't have network access or upstream site don't respond.
        NoMethodError when uri latest_versionstring method isn't implemented.

    Returns a dictionary with version, repository revision, current_version,
    type and datetime.
    Type can be A for Automatic, M for Manual and U for Unknown.
    """
    from bb.fetch2 import decodeurl
    from datetime import datetime

    result = {
        'current_version': rd.getVar('PV'),
        'version': '',
        'type': 'U',
        'datetime': '',
        'revision': '',
    }

    # XXX: If don't have SRC_URI means that don't have upstream sources so
    # returns the current recipe version, so that upstream version check
    # declares a match.
    src_uris = rd.getVar('SRC_URI')
    if not src_uris:
        result['version'] = result['current_version']
        result['type'] = 'M'
        result['datetime'] = datetime.now()
        return result

    # XXX: we suppose that the first entry points to the upstream sources
    src_uri = src_uris.split()[0]
    uri_type = decodeurl(src_uri)[0]

    # Strip the SRCPV machinery from PV so versions compare cleanly.
    pv, _, _ = get_recipe_pv_without_srcpv(rd.getVar('PV'), uri_type)
    result['current_version'] = pv

    manual_version = rd.getVar("RECIPE_UPSTREAM_VERSION")
    if manual_version:
        # manual tracking of upstream version.
        result['version'] = manual_version
        result['type'] = 'M'
        check_date = rd.getVar("CHECK_DATE")
        result['datetime'] = (datetime.strptime(check_date, "%b %d, %Y")
                              if check_date else datetime.now())
    elif uri_type == "file":
        # files are always up-to-date
        result['version'] = pv
        result['type'] = 'A'
        result['datetime'] = datetime.now()
    else:
        ud = bb.fetch2.FetchData(src_uri, rd)
        if rd.getVar("UPSTREAM_CHECK_COMMITS") == "1":
            # Commit-tracking mode: flag when upstream has moved past SRCREV.
            revision = ud.method.latest_revision(ud, rd, 'default')
            upversion = pv
            if revision != rd.getVar("SRCREV"):
                upversion += "-new-commits-available"
        else:
            upversion, revision = ud.method.latest_versionstring(ud, rd)

        if upversion:
            result['version'] = upversion
            result['type'] = 'A'
        if revision:
            result['revision'] = revision
        result['datetime'] = datetime.now()

    return result
def get_recipe_upstream_version(rd):
    """
    Get upstream version of recipe using bb.fetch2 methods with support for
    http, https, ftp and git.

    bb.fetch2 exceptions can be raised,
        FetchError when don't have network access or upstream site don't response.
        NoMethodError when uri latest_versionstring method isn't implemented.

    Returns a dictonary with version, type and datetime.
    Type can be A for Automatic, M for Manual and U for Unknown.
    """
    from bb.fetch2 import decodeurl
    from datetime import datetime

    # Result skeleton: empty/Unknown until a branch below fills it in.
    ru = {}
    ru['version'] = ''
    ru['type'] = 'U'
    ru['datetime'] = ''

    pv = rd.getVar('PV', True)

    # XXX: If don't have SRC_URI means that don't have upstream sources so
    # returns the current recipe version, so that upstream version check
    # declares a match.
    src_uris = rd.getVar('SRC_URI', True)
    if not src_uris:
        ru['version'] = pv
        ru['type'] = 'M'
        ru['datetime'] = datetime.now()
        return ru

    # XXX: we suppose that the first entry points to the upstream sources
    src_uri = src_uris.split()[0]
    uri_type, _, _, _, _, _ = decodeurl(src_uri)

    manual_upstream_version = rd.getVar("RECIPE_UPSTREAM_VERSION", True)
    if manual_upstream_version:
        # manual tracking of upstream version.
        ru['version'] = manual_upstream_version
        ru['type'] = 'M'

        # CHECK_DATE records when the manual version was last verified;
        # fall back to "now" when it is unset.
        manual_upstream_date = rd.getVar("CHECK_DATE", True)
        if manual_upstream_date:
            date = datetime.strptime(manual_upstream_date, "%b %d, %Y")
        else:
            date = datetime.now()
        ru['datetime'] = date
    elif uri_type == "file":
        # files are always up-to-date
        ru['version'] = pv
        ru['type'] = 'A'
        ru['datetime'] = datetime.now()
    else:
        # Query the fetcher backend for the latest upstream version string.
        # NOTE(review): latest_versionstring appears to return a
        # (version, revision) pair here — confirm against the fetcher API.
        ud = bb.fetch2.FetchData(src_uri, rd)
        pupver = ud.method.latest_versionstring(ud, rd)
        (upversion, revision) = pupver

        # format git version version+gitAUTOINC+HASH
        if uri_type == 'git':
            # Split PV into base version plus the SRCPV prefix/suffix parts.
            (pv, pfx, sfx) = get_recipe_pv_without_srcpv(pv, uri_type)

            # if contains revision but not upversion use current pv
            if upversion == '' and revision:
                upversion = pv

            if upversion:
                # Rebuild the version string with the prefix, and append
                # the suffix plus a 10-char revision when a suffix exists.
                tmp = upversion
                upversion = ''

                if pfx:
                    upversion = pfx + tmp
                else:
                    upversion = tmp

                if sfx:
                    upversion = upversion + sfx + revision[:10]

        if upversion:
            ru['version'] = upversion
            ru['type'] = 'A'
            ru['datetime'] = datetime.now()

    return ru
def get_recipe_upstream_version(rd):
    """
    Get upstream version of recipe using bb.fetch2 methods with support for
    http, https, ftp and git.

    bb.fetch2 exceptions can be raised,
        FetchError when don't have network access or upstream site don't response.
        NoMethodError when uri latest_versionstring method isn't implemented.

    Returns a dictonary with version, type and datetime.
    Type can be A for Automatic, M for Manual and U for Unknown.
    """
    from bb.fetch2 import decodeurl
    from datetime import datetime

    # Result skeleton: empty/Unknown until a branch below fills it in.
    ru = {}
    ru['version'] = ''
    ru['type'] = 'U'
    ru['datetime'] = ''

    # XXX: we suppose that the first entry points to the upstream sources
    # NOTE(review): unlike later revisions, this assumes SRC_URI is set and
    # non-empty — an empty SRC_URI would raise IndexError here.
    src_uri = rd.getVar('SRC_URI', True).split()[0]
    uri_type, _, _, _, _, _ = decodeurl(src_uri)

    pv = rd.getVar('PV', True)

    manual_upstream_version = rd.getVar("RECIPE_UPSTREAM_VERSION", True)
    if manual_upstream_version:
        # manual tracking of upstream version.
        ru['version'] = manual_upstream_version
        ru['type'] = 'M'

        # CHECK_DATE records when the manual version was last verified;
        # fall back to "now" when it is unset.
        manual_upstream_date = rd.getVar("CHECK_DATE", True)
        if manual_upstream_date:
            date = datetime.strptime(manual_upstream_date, "%b %d, %Y")
        else:
            date = datetime.now()
        ru['datetime'] = date
    elif uri_type == "file":
        # files are always up-to-date
        ru['version'] = pv
        ru['type'] = 'A'
        ru['datetime'] = datetime.now()
    else:
        # Query the fetcher backend for the latest upstream version string.
        # NOTE(review): in this revision latest_versionstring appears to
        # return a plain string, not a tuple — confirm against fetcher API.
        ud = bb.fetch2.FetchData(src_uri, rd)
        pupver = ud.method.latest_versionstring(ud, rd)

        if uri_type == 'git':
            # Split PV into base version plus the SRCPV prefix/suffix parts,
            # then rebuild as prefix + version + suffix + short revision.
            (pv, pfx, sfx) = get_recipe_pv_without_srcpv(pv, uri_type)

            latest_revision = ud.method.latest_revision(ud, rd, ud.names[0])

            # if contains revision but not pupver use current pv
            if pupver == '' and latest_revision:
                pupver = pv

            if pupver != '':
                pupver = pfx + pupver + sfx + latest_revision[:10]

        if pupver != '':
            ru['version'] = pupver
            ru['type'] = 'A'
            ru['datetime'] = datetime.now()

    return ru