Example #1
    def _sortable_buildindex_disabled(self, url, ud, d, rev):
        """
        Return a suitable buildindex for the revision specified. This is done by counting revisions
        using "git rev-list" which may or may not work in different circumstances.
        """

        cwd = os.getcwd()

        # Check if we have the rev already

        if not os.path.exists(ud.clonedir):
            print("no repo")
            self.download(None, ud, d)
            if not os.path.exists(ud.clonedir):
                logger.error("GIT repository for %s doesn't exist in %s, cannot get sortable buildnumber, using old value", url, ud.clonedir)
                return None


        os.chdir(ud.clonedir)
        if not self._contains_ref(rev, d):
            self.download(None, ud, d)

        output = runfetchcmd("%s rev-list %s -- 2> /dev/null | wc -l" % (ud.basecmd, rev), d, quiet=True)
        os.chdir(cwd)

        buildindex = "%s" % output.split()[0]
        logger.debug(1, "GIT repository for %s in %s is returning %s revisions in rev-list before %s", url, ud.clonedir, buildindex, rev)
        return buildindex
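
Counting the output of "git rev-list" gives a number that increases monotonically along a branch's history, which is what makes it usable as a sortable build index. A minimal standalone sketch of the same idea (hypothetical repository path; assumes the git CLI is on PATH):

    import subprocess

    def count_revisions(clonedir, rev):
        # "git rev-list --count <rev>" counts the commits reachable from rev,
        # equivalent to piping "git rev-list <rev>" through "wc -l" as above.
        out = subprocess.check_output(["git", "rev-list", "--count", rev], cwd=clonedir)
        return int(out.strip())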
Example #2
    def download(self, loc, ud, d):
        """
        do fetch
        """
        # if the package has been downloaded, just return
        if os.access(os.path.join(data.getVar("DL_DIR", d, True), ud.localfile), os.R_OK):
            logger.debug(1, "%s already exists (or was stashed). Skipping gclient sync.", ud.localpath)
            return

        depot_dir = data.getVar("DEPOTDIR", d, True) or os.path.join(data.getVar("DL_DIR", d, True), "depot")
        sync_dir = os.path.join(depot_dir, ud.packname)

        bb.utils.mkdirhier(sync_dir)
        os.chdir(sync_dir)

        if not os.path.exists(os.path.join(sync_dir, ".gclient")):
            logger.info('First time syncing this depot; configuring it as http://%s%s'
                    % (ud.host, ud.path))
            runfetchcmd('gclient config http://%s%s' % (ud.host, ud.path), d)

        logger.info('Starting to sync the source code..')
        runfetchcmd('gclient fetch --jobs %s' % ud.njobs, d)

        logger.info('Creating tarball %s.' % ud.localfile)
        runfetchcmd('tar --exclude .svn --exclude .git -czf %s ./' %
                os.path.join(data.getVar("DL_DIR", d, True), ud.localfile), d)
Example #3
        def fetch_uri(uri, ud, d):
            if checkonly:
                fetchcmd = data.getVar("CHECKCOMMAND", d, True)
            elif os.path.exists(ud.localpath):
                # file exists, but we didn't complete it; trying again
                fetchcmd = data.getVar("RESUMECOMMAND", d, True)
            else:
                fetchcmd = data.getVar("FETCHCOMMAND", d, True)

            uri = uri.split(";")[0]
            uri_decoded = list(decodeurl(uri))
            uri_type = uri_decoded[0]
            uri_host = uri_decoded[1]

            fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0])
            fetchcmd = fetchcmd.replace("${FILE}", ud.basename)
            if not checkonly:
                logger.info("fetch " + uri)
                logger.debug(2, "executing " + fetchcmd)
            bb.fetch2.check_network_access(d, fetchcmd)
            runfetchcmd(fetchcmd, d, quiet=checkonly)

            # Sanity check, since wget can pretend it succeeded when it didn't
            # Also, this used to happen if sourceforge sent us to the mirror page
            if not os.path.exists(ud.localpath) and not checkonly:
                raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (uri, ud.localpath), uri)
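
The ${URI} and ${FILE} tokens above are literal placeholders in the configured command template, substituted with str.replace rather than shell or BitBake variable expansion. A minimal illustration (the template string and URL are made-up examples, not BitBake defaults):

    template = "/usr/bin/env wget -t 2 -nv -P /downloads '${URI}'"
    uri = "https://example.org/pkg-1.0.tar.gz;name=pkg"
    fetchcmd = template.replace("${URI}", uri.split(";")[0])  # strips url parameters
    fetchcmd = fetchcmd.replace("${FILE}", "pkg-1.0.tar.gz")  # no-op here: this template has no ${FILE}
    # -> /usr/bin/env wget -t 2 -nv -P /downloads 'https://example.org/pkg-1.0.tar.gz'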
Example #4
    def _p4listfiles(self, ud, d):
        """
        Return a list of the file names which are present in the depot using the
        'p4 files' command, including trailing '#rev' file revision indicator
        """
        p4cmd = self._buildp4command(ud, d, 'files')
        bb.fetch2.check_network_access(d, p4cmd, ud.url)
        p4fileslist = runfetchcmd(p4cmd, d, True)
        p4fileslist = [f.rstrip() for f in p4fileslist.splitlines()]

        if not p4fileslist:
            raise FetchError('Unable to fetch listing of p4 files from %s@%s' % (ud.host, ud.path))

        filelist = []

        for filename in p4fileslist:
            item = filename.split(' - ')
            lastaction = item[1].split()
            logger.debug(1, 'File: %s Last Action: %s' % (item[0], lastaction[0]))
            if lastaction[0] == 'delete':
                continue
            filelist.append(item[0])

        return filelist
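
The parsing loop assumes the one-line-per-file output format of 'p4 files'. A sketch with a hypothetical sample line:

    line = "//depot/project/src/main.c#4 - edit change 12345 (text)"
    item = line.split(' - ')       # ['//depot/project/src/main.c#4', 'edit change 12345 (text)']
    lastaction = item[1].split()   # ['edit', 'change', '12345', '(text)']
    # entries whose last action is 'delete' are skipped; item[0] keeps the '#rev' suffix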
Example #5
    def _runwget(self, ud, d, command, quiet, workdir=None):

        progresshandler = WgetProgressHandler(d)

        logger.debug(2, "Fetching %s using command '%s'" % (ud.url, command))
        bb.fetch2.check_network_access(d, command, ud.url)
        runfetchcmd(command + ' --progress=dot -v', d, quiet, log=progresshandler, workdir=workdir)
Example #6
    def download(self, ud, d):
        """Fetch url"""

        if os.access(os.path.join(d.getVar("DL_DIR"), ud.localfile), os.R_OK):
            logger.debug(1, "%s already exists (or was stashed). Skipping repo init / sync.", ud.localpath)
            return

        repodir = d.getVar("REPODIR") or (d.getVar("DL_DIR") + "/repo")
        gitsrcname = "%s%s" % (ud.host, ud.path.replace("/", "."))
        codir = os.path.join(repodir, gitsrcname, ud.manifest)

        if ud.user:
            username = ud.user + "@"
        else:
            username = ""

        repodir = os.path.join(codir, "repo")
        bb.utils.mkdirhier(repodir)
        if not os.path.exists(os.path.join(repodir, ".repo")):
            bb.fetch2.check_network_access(d, "%s init -m %s -b %s -u %s://%s%s%s" % (ud.basecmd, ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), ud.url)
            runfetchcmd("%s init -m %s -b %s -u %s://%s%s%s" % (ud.basecmd, ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), d, workdir=repodir)

        bb.fetch2.check_network_access(d, "%s sync %s" % (ud.basecmd, ud.url), ud.url)
        runfetchcmd("%s sync" % ud.basecmd, d, workdir=repodir)

        scmdata = ud.parm.get("scmdata", "")
        if scmdata == "keep":
            tar_flags = ""
        else:
            tar_flags = "--exclude='.repo' --exclude='.git'"

        # Create a cache
        runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.join(".", "*") ), d, workdir=codir)
Example #7
    def getcset(d, depot, host, user, pswd, parm):
        p4opt = ""
        if "cset" in parm:
            return parm["cset"]
        if user:
            p4opt += " -u %s" % (user)
        if pswd:
            p4opt += " -P %s" % (pswd)
        if host:
            p4opt += " -p %s" % (host)

        p4date = data.getVar("P4DATE", d, True)
        if "revision" in parm:
            depot += "#%s" % (parm["revision"])
        elif "label" in parm:
            depot += "@%s" % (parm["label"])
        elif p4date:
            depot += "@%s" % (p4date)

        p4cmd = data.getVar("FETCHCOMMAND_p4", d, True)
        logger.debug(1, "Running %s%s changes -m 1 %s", p4cmd, p4opt, depot)
        p4file = os.popen("%s%s changes -m 1 %s" % (p4cmd, p4opt, depot))
        cset = p4file.readline().strip()
        logger.debug(1, "READ %s", cset)
        if not cset:
            return -1

        return cset.split(" ")[1]
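
The final split relies on the 'p4 changes -m 1' output format, in which the changelist number is the second whitespace-separated field. Illustration with a made-up output line:

    cset_line = "Change 12345 on 2020/01/01 by user@client 'commit message'"
    print(cset_line.split(" ")[1])  # -> 12345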
Example #8
    def _crate_urldata_init(self, ud, d):
        """
        Sets up the download for a crate
        """

        # URL syntax is: crate://HOST/NAME/VERSION
        # break the URL apart by /
        parts = ud.url.split('/')
        if len(parts) < 5:
            raise bb.fetch2.ParameterError("Invalid URL: Must be crate://HOST/NAME/VERSION", ud.url)

        # the last field is the version
        version = parts[-1]
        # the second-to-last field is the name
        name = parts[-2]
        # host (this allows custom crate registries to be specified)
        host = '/'.join(parts[2:-2])

        # if using upstream just fix it up nicely
        if host == 'crates.io':
            host = 'crates.io/api/v1/crates'

        ud.url = "https://%s/%s/%s/download" % (host, name, version)
        ud.parm['downloadfilename'] = "%s-%s.crate" % (name, version)
        ud.parm['name'] = name

        logger.debug(2, "Fetching %s to %s" % (ud.url, ud.parm['downloadfilename']))
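
A worked example of the URL rewrite, using a hypothetical crate:

    url = "crate://crates.io/glob/0.2.11"
    parts = url.split('/')        # ['crate:', '', 'crates.io', 'glob', '0.2.11']
    version = parts[-1]           # '0.2.11'
    name = parts[-2]              # 'glob'
    host = '/'.join(parts[2:-2])  # 'crates.io', rewritten to the API endpoint
    # final URL: https://crates.io/api/v1/crates/glob/0.2.11/download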
Example #9
    def download(self, uri, ud, d, checkonly = False):
        """Fetch urls"""

        basecmd = d.getVar("FETCHCMD_wget", True) or "/usr/bin/env wget -t 2 -T 30 -nv --passive-ftp --no-check-certificate"

        if not checkonly and 'downloadfilename' in ud.parm:
            dldir = d.getVar("DL_DIR", True)
            bb.utils.mkdirhier(os.path.dirname(dldir + os.sep + ud.localfile))
            basecmd += " -O " + dldir + os.sep + ud.localfile

        if checkonly:
            fetchcmd = d.getVar("CHECKCOMMAND_wget", True) or d.expand(basecmd + " --spider '${URI}'")
        elif os.path.exists(ud.localpath):
            # file exists, but we didn't complete it; trying again
            fetchcmd = d.getVar("RESUMECOMMAND_wget", True) or d.expand(basecmd + " -c -P ${DL_DIR} '${URI}'")
        else:
            fetchcmd = d.getVar("FETCHCOMMAND_wget", True) or d.expand(basecmd + " -P ${DL_DIR} '${URI}'")

        uri = uri.split(";")[0]

        fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0])
        fetchcmd = fetchcmd.replace("${FILE}", ud.basename)
        if not checkonly:
            logger.info("fetch " + uri)
            logger.debug(2, "executing " + fetchcmd)
        bb.fetch2.check_network_access(d, fetchcmd)
        runfetchcmd(fetchcmd, d, quiet=checkonly)

        # Sanity check, since wget can pretend it succeeded when it didn't
        # Also, this used to happen if sourceforge sent us to the mirror page
        if not os.path.exists(ud.localpath) and not checkonly:
            raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (uri, ud.localpath), uri)

        return True
Example #10
    def download(self, loc, ud, d):
        """Fetch urls"""

        svkroot = ud.host + ud.path

        svkcmd = "svk co -r {%s} %s/%s" % (ud.date, svkroot, ud.module)

        if ud.revision:
            svkcmd = "svk co -r %s %s/%s" % (ud.revision, svkroot, ud.module)

        # create temp directory
        localdata = data.createCopy(d)
        data.update_data(localdata)
        logger.debug(2, "Fetch: creating temporary directory")
        bb.utils.mkdirhier(data.expand('${WORKDIR}', localdata))
        data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata)
        tmpfile, errors = bb.process.run(data.getVar('MKTEMPDIRCMD', localdata, True) or "false")
        tmpfile = tmpfile.strip()
        if not tmpfile:
            logger.error("Fetch: unable to create a temporary directory")
            raise FetchError("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.", loc)

        # check out sources there
        os.chdir(tmpfile)
        logger.info("Fetch " + loc)
        logger.debug(1, "Running %s", svkcmd)
        runfetchcmd(svkcmd, d, cleanup = [tmpfile])

        os.chdir(os.path.join(tmpfile, os.path.dirname(ud.module)))
        # tar them up to a defined filename
        runfetchcmd("tar -czf %s %s" % (ud.localpath, os.path.basename(ud.module)), d, cleanup = [ud.localpath])

        # cleanup
        bb.utils.prunedir(tmpfile)
Example #11
    def getcset(d, depot, host, user, pswd, parm):
        p4opt = ""
        if "cset" in parm:
            return parm["cset"]
        if user:
            p4opt += " -u %s" % (user)
        if pswd:
            p4opt += " -P %s" % (pswd)
        if host:
            p4opt += " -p %s" % (host)

        p4date = d.getVar("P4DATE", True)
        if "revision" in parm:
            depot += "#%s" % (parm["revision"])
        elif "label" in parm:
            depot += "@%s" % (parm["label"])
        elif p4date:
            depot += "@%s" % (p4date)

        p4cmd = d.getVar('FETCHCMD_p4', True) or "p4"
        logger.debug(1, "Running %s%s changes -m 1 %s", p4cmd, p4opt, depot)
        p4file, errors = bb.process.run("%s%s changes -m 1 %s" % (p4cmd, p4opt, depot))
        cset = p4file.strip()
        logger.debug(1, "READ %s", cset)
        if not cset:
            return -1

        return cset.split(' ')[1]
Example #12
    def download(self, uri, ud, d, checkonly = False):
        """Fetch urls"""

        if checkonly:
            fetchcmd = data.getVar("CHECKCOMMAND_wget", d, True) or d.expand("/usr/bin/env wget --spider -t 5 --passive-ftp --no-check-certificate -P ${DL_DIR} '${URI}'")
        elif os.path.exists(ud.localpath):
            # file exists, but we didn't complete it; trying again
            fetchcmd = data.getVar("RESUMECOMMAND_wget", d, True) or d.expand("/usr/bin/env wget -c -t 5 -nv --passive-ftp --no-check-certificate -P ${DL_DIR} '${URI}'")
        else:
            fetchcmd = data.getVar("FETCHCOMMAND_wget", d, True) or d.expand("/usr/bin/env wget -t 5 -nv --passive-ftp --no-check-certificate -P ${DL_DIR} '${URI}'")

        uri = uri.split(";")[0]
        uri_decoded = list(decodeurl(uri))
        uri_type = uri_decoded[0]
        uri_host = uri_decoded[1]

        fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0])
        fetchcmd = fetchcmd.replace("${FILE}", ud.basename)
        if not checkonly:
            logger.info("fetch " + uri)
            logger.debug(2, "executing " + fetchcmd)
        bb.fetch2.check_network_access(d, fetchcmd)
        runfetchcmd(fetchcmd, d, quiet=checkonly)

        # Sanity check, since wget can pretend it succeeded when it didn't
        # Also, this used to happen if sourceforge sent us to the mirror page
        if not os.path.exists(ud.localpath) and not checkonly:
            raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (uri, ud.localpath), uri)

        return True
Example #13
    def _unpackdep(self, ud, pkg, data, destdir, dldir, d):
        file = data[pkg]['tgz']
        logger.debug(2, "file to extract is %s" % file)
        if file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'):
            cmd = 'tar xz --strip 1 --no-same-owner -f %s/%s' % (dldir, file)
        else:
            bb.fatal("NPM package %s downloaded not a tarball!" % file)

        # Change to subdir before executing command
        save_cwd = os.getcwd()
        if not os.path.exists(destdir):
            os.makedirs(destdir)
        os.chdir(destdir)
        path = d.getVar('PATH', True)
        if path:
            cmd = "PATH=\"%s\" %s" % (path, cmd)
        bb.note("Unpacking %s to %s/" % (file, os.getcwd()))
        ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True)
        os.chdir(save_cwd)

        if ret != 0:
            raise UnpackError("Unpack command %s failed with return value %s" % (cmd, ret), ud.url)

        if 'deps' not in data[pkg]:
            return
        for dep in data[pkg]['deps']:
            self._unpackdep(ud, dep, data[pkg]['deps'], "%s/node_modules/%s" % (destdir, dep), dldir, d)
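
The recursion walks a nested manifest of the shape produced by the _getdependencies methods elsewhere on this page. A hypothetical example of that structure:

    data = {
        'left-pad': {
            'tgz': 'left-pad-1.3.0.tgz',
            'deps': {
                'wordwrap': {'tgz': 'wordwrap-1.0.0.tgz', 'deps': {}},
            },
        },
    }
    # _unpackdep(ud, 'left-pad', data, destdir, dldir, d) extracts the tarball
    # into destdir, then recurses into destdir/node_modules/wordwrap.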
Example #14
    def download(self, ud, d):
        """Fetch url"""
        jsondepobj = {}
        shrinkobj = {}
        lockdown = {}

        if not os.listdir(ud.pkgdatadir) and os.path.exists(ud.fullmirror):
            dest = d.getVar("DL_DIR", True)
            bb.utils.mkdirhier(dest)
            runfetchcmd("tar -xJf %s" % (ud.fullmirror), d, workdir=dest)
            return

        shwrf = d.getVar('NPM_SHRINKWRAP', True)
        logger.debug(2, "NPM shrinkwrap file is %s" % shwrf)
        try:
            with open(shwrf) as datafile:
                shrinkobj = json.load(datafile)
        except Exception:
            logger.warning('Missing shrinkwrap file in NPM_SHRINKWRAP for %s, this will lead to unreliable builds!' % ud.pkgname)
        lckdf = d.getVar('NPM_LOCKDOWN', True)
        logger.debug(2, "NPM lockdown file is %s" % lckdf)
        try:
            with open(lckdf) as datafile:
                lockdown = json.load(datafile)
        except Exception:
            logger.warning('Missing lockdown file in NPM_LOCKDOWN for %s, this will lead to unreproducible builds!' % ud.pkgname)

        if ('name' not in shrinkobj):
            self._getdependencies(ud.pkgname, jsondepobj, ud.version, d, ud)
        else:
            self._getshrinkeddependencies(ud.pkgname, shrinkobj, ud.version, d, ud, lockdown, jsondepobj)

        with open(ud.localpath, 'w') as outfile:
            json.dump(jsondepobj, outfile)
Example #15
    def _getdependencies(self, pkg, data, version, d, ud):
        pkgfullname = pkg
        if version:
            pkgfullname += "@%s" % version
        logger.debug(2, "Calling getdeps on %s" % pkg)
        fetchcmd = "npm view %s dist.tarball --registry %s" % (pkgfullname, ud.registry)
        output = runfetchcmd(fetchcmd, d, True)
        # npm may resolve multiple versions
        outputarray = output.strip().splitlines()
        # we just take the latest version npm resolved
        #logger.debug(2, "Output URL is %s - %s - %s" % (ud.basepath, ud.basename, ud.localfile))
        outputurl = outputarray[-1].rstrip()
        if len(outputarray) > 1:
            # remove the preceding version/name from npm output and then the
            # first and last quotes
            outputurl = outputurl.split(" ")[1][1:-1]
        data[pkg] = {}
        data[pkg]['tgz'] = os.path.basename(outputurl)
        self._runwget(ud, d, "%s %s" % (self.basecmd, outputurl), False)
        #fetchcmd = "npm view %s@%s dependencies --json" % (pkg, version)
        fetchcmd = "npm view %s dependencies --json --registry %s" % (pkgfullname, ud.registry)
        output = runfetchcmd(fetchcmd, d, True)
        try:
            depsfound = json.loads(output)
        except ValueError:
            # assume there are no deps to load here
            return
        data[pkg]['deps'] = {}
        for dep, version in depsfound.items():
            self._getdependencies(dep, data[pkg]['deps'], version, d, ud)
Example #16
File: git.py Project: jeremiah/isar
    def download(self, ud, d):
        """Fetch url"""

        no_clone = not os.path.exists(ud.clonedir)
        need_update = no_clone or self.need_update(ud, d)

        # A current clone is preferred to either tarball, a shallow tarball is
        # preferred to an out of date clone, and a missing clone will use
        # either tarball.
        if ud.shallow and os.path.exists(ud.fullshallow) and need_update:
            ud.localpath = ud.fullshallow
            return
        elif os.path.exists(ud.fullmirror) and no_clone:
            bb.utils.mkdirhier(ud.clonedir)
            runfetchcmd("tar -xzf %s" % ud.fullmirror, d, workdir=ud.clonedir)

        repourl = self._get_repo_url(ud)

        # If the repo still doesn't exist, fallback to cloning it
        if not os.path.exists(ud.clonedir):
            # We do this since git will use a "-l" option automatically for local urls where possible
            if repourl.startswith("file://"):
                repourl = repourl[7:]
            clone_cmd = "LANG=C %s clone --bare --mirror %s %s --progress" % (ud.basecmd, repourl, ud.clonedir)
            if ud.proto.lower() != 'file':
                bb.fetch2.check_network_access(d, clone_cmd, ud.url)
            progresshandler = GitProgressHandler(d)
            runfetchcmd(clone_cmd, d, log=progresshandler)

        # Update the checkout if needed
        needupdate = False
        for name in ud.names:
            if not self._contains_ref(ud, d, name, ud.clonedir):
                needupdate = True
        if needupdate:
            try: 
                runfetchcmd("%s remote rm origin" % ud.basecmd, d, workdir=ud.clonedir)
            except bb.fetch2.FetchError:
                logger.debug(1, "No Origin")

            runfetchcmd("%s remote add --mirror=fetch origin %s" % (ud.basecmd, repourl), d, workdir=ud.clonedir)
            fetch_cmd = "LANG=C %s fetch -f --prune --progress %s refs/*:refs/*" % (ud.basecmd, repourl)
            if ud.proto.lower() != 'file':
                bb.fetch2.check_network_access(d, fetch_cmd, ud.url)
            progresshandler = GitProgressHandler(d)
            runfetchcmd(fetch_cmd, d, log=progresshandler, workdir=ud.clonedir)
            runfetchcmd("%s prune-packed" % ud.basecmd, d, workdir=ud.clonedir)
            runfetchcmd("%s pack-refs --all" % ud.basecmd, d, workdir=ud.clonedir)
            runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d, workdir=ud.clonedir)
            try:
                os.unlink(ud.fullmirror)
            except OSError as exc:
                if exc.errno != errno.ENOENT:
                    raise
        for name in ud.names:
            if not self._contains_ref(ud, d, name, ud.clonedir):
                raise bb.fetch2.FetchError("Unable to find revision %s in branch %s even from upstream" % (ud.revisions[name], ud.branches[name]))
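
Stripped of the BitBake plumbing, the update branch above reduces to re-pointing a bare mirror at the remote and force-fetching every ref. A rough standalone sketch (hypothetical paths, no error handling):

    import subprocess

    def refresh_mirror(clonedir, repourl):
        # the fetcher ignores failure here (the origin remote may not exist yet)
        subprocess.run(["git", "remote", "rm", "origin"], cwd=clonedir)
        subprocess.run(["git", "remote", "add", "--mirror=fetch", "origin", repourl],
                       cwd=clonedir, check=True)
        # refs/*:refs/* force-updates every local ref to match upstream
        subprocess.run(["git", "fetch", "-f", "--prune", repourl, "refs/*:refs/*"],
                       cwd=clonedir, check=True)
        subprocess.run(["git", "prune-packed"], cwd=clonedir, check=True)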
Example #17
    def download(self, ud, d):
        """Fetch url"""

        if ud.user:
            username = ud.user + "@"
        else:
            username = ""

        ud.repochanged = not os.path.exists(ud.fullmirror)

        # If the checkout doesn't exist and the mirror tarball does, extract it
        if not os.path.exists(ud.clonedir) and os.path.exists(ud.fullmirror):
            bb.utils.mkdirhier(ud.clonedir)
            os.chdir(ud.clonedir)
            runfetchcmd("tar -xzf %s" % (ud.fullmirror), d)

        repourl = "%s://%s%s%s" % (ud.proto, username, ud.host, ud.path)

        # If the repo still doesn't exist, fallback to cloning it
        if not os.path.exists(ud.clonedir):
            # We do this since git will use a "-l" option automatically for local urls where possible
            if repourl.startswith("file://"):
                repourl = repourl[7:]
            clone_cmd = "%s clone --bare --mirror %s %s" % (ud.basecmd, repourl, ud.clonedir)
            if ud.proto.lower() != "file":
                bb.fetch2.check_network_access(d, clone_cmd)
            runfetchcmd(clone_cmd, d)

        os.chdir(ud.clonedir)
        # Update the checkout if needed
        needupdate = False
        for name in ud.names:
            if not self._contains_ref(ud, d, name):
                needupdate = True
        if needupdate:
            try:
                runfetchcmd("%s remote rm origin" % ud.basecmd, d)
            except bb.fetch2.FetchError:
                logger.debug(1, "No Origin")

            runfetchcmd("%s remote add --mirror=fetch origin %s" % (ud.basecmd, repourl), d)
            fetch_cmd = "%s fetch -f --prune %s refs/*:refs/*" % (ud.basecmd, repourl)
            if ud.proto.lower() != "file":
                bb.fetch2.check_network_access(d, fetch_cmd, ud.url)
            runfetchcmd(fetch_cmd, d)
            runfetchcmd("%s prune-packed" % ud.basecmd, d)
            runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d)
            ud.repochanged = True
        os.chdir(ud.clonedir)
        for name in ud.names:
            if not self._contains_ref(ud, d, name):
                raise bb.fetch2.FetchError(
                    "Unable to find revision %s in branch %s even from upstream"
                    % (ud.revisions[name], ud.branches[name])
                )
Example #18
    def _latest_revision(self, url, ud, d, name):
        """
        Return the latest upstream revision number
        """
        logger.debug(2, "BZR fetcher hitting network for %s", url)

        bb.fetch2.check_network_access(d, self._buildbzrcommand(ud, d, "revno"), ud.url)

        output = runfetchcmd(self._buildbzrcommand(ud, d, "revno"), d, True)

        return output.strip()
Example #19
    def _latest_revision(self, ud, d, name):
        """ Return the latest upstream scm revision number """
        p4cmd = self._buildp4command(ud, d, "changes")
        bb.fetch2.check_network_access(d, p4cmd, ud.url)
        tip = runfetchcmd(p4cmd, d, True)

        if not tip:
            raise FetchError('Could not determine the latest perforce changelist')

        tipcset = tip.split(' ')[1]
        logger.debug(1, 'p4 tip found to be changelist %s' % tipcset)
        return tipcset
Example #20
    def _latest_revision(self, ud, d, name):
        """ Return the latest upstream scm revision number """
        p4cmd = self._buildp4command(ud, d, "changes")
        bb.fetch2.check_network_access(d, p4cmd, ud.url)
        tip = runfetchcmd(p4cmd, d, True)

        if not tip:
            raise FetchError(
                'Could not determine the latest perforce changelist')

        tipcset = tip.split(' ')[1]
        logger.debug(1, 'p4 tip found to be changelist %s' % tipcset)
        return tipcset
Example #21
    def _runpack(self, ud, d, pkgfullname: str, quiet=False) -> str:
        """
        Runs npm pack on a full package name.
        Returns the filename of the downloaded package
        """
        bb.fetch2.check_network_access(d, pkgfullname, ud.registry)
        dldir = d.getVar("DL_DIR")
        dldir = os.path.join(dldir, ud.prefixdir)

        command = "npm pack {} --registry {}".format(pkgfullname, ud.registry)
        logger.debug(2, "Fetching {} using command '{}' in {}".format(pkgfullname, command, dldir))
        filename = runfetchcmd(command, d, quiet, workdir=dldir)
        return filename.rstrip()
Example #22
    def _latest_revision(self, ud, d, name):
        """
        Return the latest upstream revision number
        """
        logger.debug(2, "BZR fetcher hitting network for %s", ud.url)

        bb.fetch2.check_network_access(d,
                                       self._buildbzrcommand(ud, d,
                                                             "revno"), ud.url)

        output = runfetchcmd(self._buildbzrcommand(ud, d, "revno"), d, True)

        return output.strip()
Example #23
    def download(self, ud, d):
        """Fetch url"""

        # For performance reasons we do not use tar.gz if 'directpath' is specified in SRC_URI
        if ud.directpath != "1" and os.access(
                os.path.join(d.getVar("DL_DIR"), ud.localfile), os.R_OK):
            logger.debug(
                1,
                "%s already exists (or was stashed). Skipping repo init / sync.",
                ud.localpath)
            return

        if ud.user:
            username = ud.user + "@"
        else:
            username = ""

        if ud.groups:
            use_groups = "--groups " + ud.groups
        else:
            use_groups = ""

        if ud.depth:
            use_depth = "--depth=" + ud.depth
        else:
            use_depth = ""

        bb.utils.mkdirhier(ud.repodir)
        bb.fetch2.check_network_access(
            d, "repo init -m %s -b %s -u %s://%s%s%s" %
            (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path),
            ud.url)
        runfetchcmd("repo init %s %s -m %s -b %s -u %s://%s%s%s" %
                    (use_depth, use_groups, ud.manifest, ud.branch, ud.proto,
                     username, ud.host, ud.path),
                    d,
                    workdir=ud.repodir)

        bb.fetch2.check_network_access(d, "repo sync %s" % ud.url, ud.url)
        runfetchcmd("repo sync", d, workdir=ud.repodir)

        if ud.directpath != "1":
            scmdata = ud.parm.get("scmdata", "")
            if scmdata == "keep":
                tar_flags = ""
            else:
                tar_flags = "--exclude='.repo' --exclude='.git'"
            runfetchcmd("tar %s -cf - %s | pigz > %s" %
                        (tar_flags, os.path.join(".", "*"), ud.localpath),
                        d,
                        workdir=ud.codir)
Example #24
    def download(self, ud, d):
        """Fetch url"""

        logger.debug(
            2, "Fetch: checking for module directory '" + ud.moddir + "'")

        # If the checkout doesn't exist and the mirror tarball does, extract it
        if not os.path.exists(ud.pkgdir) and os.path.exists(ud.fullmirror):
            bb.utils.mkdirhier(ud.pkgdir)
            os.chdir(ud.pkgdir)
            runfetchcmd("tar -xzf %s" % (ud.fullmirror), d)

        if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK):
            # Found the source, check whether need pull
            updatecmd = self._buildhgcommand(ud, d, "update")
            os.chdir(ud.moddir)
            logger.debug(1, "Running %s", updatecmd)
            try:
                runfetchcmd(updatecmd, d)
            except bb.fetch2.FetchError:
                # Running pull in the repo
                pullcmd = self._buildhgcommand(ud, d, "pull")
                logger.info("Pulling " + ud.url)
                # update sources there
                os.chdir(ud.moddir)
                logger.debug(1, "Running %s", pullcmd)
                bb.fetch2.check_network_access(d, pullcmd, ud.url)
                runfetchcmd(pullcmd, d)
                try:
                    os.unlink(ud.fullmirror)
                except OSError as exc:
                    if exc.errno != errno.ENOENT:
                        raise

        # No source found, clone it.
        if not os.path.exists(ud.moddir):
            fetchcmd = self._buildhgcommand(ud, d, "fetch")
            logger.info("Fetch " + ud.url)
            # check out sources there
            bb.utils.mkdirhier(ud.pkgdir)
            os.chdir(ud.pkgdir)
            logger.debug(1, "Running %s", fetchcmd)
            bb.fetch2.check_network_access(d, fetchcmd, ud.url)
            runfetchcmd(fetchcmd, d)

        # Even when we clone (fetch), we still need to update as hg's clone
        # won't check out the specified revision if it's on a branch
        updatecmd = self._buildhgcommand(ud, d, "update")
        os.chdir(ud.moddir)
        logger.debug(1, "Running %s", updatecmd)
        runfetchcmd(updatecmd, d)
Example #25
    def download(self, ud, d):
        """Fetch url"""

        logger.debug2("Fetch: checking for module directory '" + ud.moddir + "'")

        lf = bb.utils.lockfile(ud.svnlock)

        try:
            if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK):
                svncmd = self._buildsvncommand(ud, d, "update")
                logger.info("Update " + ud.url)
                # We need to attempt to run svn upgrade first in case it's an older working copy format
                try:
                    runfetchcmd(ud.basecmd + " upgrade", d, workdir=ud.moddir)
                except FetchError:
                    pass
                logger.debug("Running %s", svncmd)
                bb.fetch2.check_network_access(d, svncmd, ud.url)
                runfetchcmd(svncmd, d, workdir=ud.moddir)
            else:
                svncmd = self._buildsvncommand(ud, d, "fetch")
                logger.info("Fetch " + ud.url)
                # check out sources there
                bb.utils.mkdirhier(ud.pkgdir)
                logger.debug("Running %s", svncmd)
                bb.fetch2.check_network_access(d, svncmd, ud.url)
                runfetchcmd(svncmd, d, workdir=ud.pkgdir)

            if not ("externals" in ud.parm and ud.parm["externals"] == "nowarn"):
                # Warn the user if this had externals (won't catch them all)
                output = runfetchcmd("svn propget svn:externals || true", d, workdir=ud.moddir)
                if output:
                    if "--ignore-externals" in svncmd.split():
                        bb.warn("%s contains svn:externals." % ud.url)
                        bb.warn("These should be added to the recipe SRC_URI as necessary.")
                        bb.warn("svn fetch has ignored externals:\n%s" % output)
                        bb.warn("To disable this warning add ';externals=nowarn' to the url.")
                    else:
                        bb.debug(1, "svn repository has externals:\n%s" % output)

            scmdata = ud.parm.get("scmdata", "")
            if scmdata == "keep":
                tar_flags = ""
            else:
                tar_flags = "--exclude='.svn'"

            # tar them up to a defined filename
            runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.path_spec), d,
                        cleanup=[ud.localpath], workdir=ud.pkgdir)
        finally:
            bb.utils.unlockfile(lf)
Example #26
    def download(self, loc, ud, d):
        """Fetch url"""

        if ud.user:
            username = ud.user + "@"
        else:
            username = ""

        ud.repochanged = not os.path.exists(ud.fullmirror)

        # If the checkout doesn't exist and the mirror tarball does, extract it
        if not os.path.exists(ud.clonedir) and os.path.exists(ud.fullmirror):
            bb.utils.mkdirhier(ud.clonedir)
            os.chdir(ud.clonedir)
            runfetchcmd("tar -xzf %s" % (ud.fullmirror), d)

        # If the repo still doesn't exist, fallback to cloning it
        if not os.path.exists(ud.clonedir):
            clone_cmd = "%s clone --bare --mirror %s://%s%s%s %s" % (
                ud.basecmd,
                ud.proto,
                username,
                ud.host,
                ud.path,
                ud.clonedir,
            )
            bb.fetch2.check_network_access(d, clone_cmd)
            runfetchcmd(clone_cmd, d)

        os.chdir(ud.clonedir)
        # Update the checkout if needed
        needupdate = False
        for name in ud.names:
            if not self._contains_ref(ud.revisions[name], d):
                needupdate = True
        if needupdate:
            try:
                runfetchcmd("%s remote prune origin" % ud.basecmd, d)
                runfetchcmd("%s remote rm origin" % ud.basecmd, d)
            except bb.fetch2.FetchError:
                logger.debug(1, "No Origin")

            runfetchcmd(
                "%s remote add --mirror origin %s://%s%s%s" % (ud.basecmd, ud.proto, username, ud.host, ud.path), d
            )
            fetch_cmd = "%s fetch --all -t" % ud.basecmd
            bb.fetch2.check_network_access(d, fetch_cmd, ud.url)
            runfetchcmd(fetch_cmd, d)
            runfetchcmd("%s prune-packed" % ud.basecmd, d)
            runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d)
            ud.repochanged = True
Example #27
    def localpaths(self, urldata, d):
        """
        Return the local filename of a given url assuming a successful fetch.
        """
        searched = []
        path = urldata.decodedurl
        newpath = path
        if path[0] == "/":
            return [path]
        filespath = d.getVar('FILESPATH')
        if filespath:
            logger.debug(2, "Searching for %s in paths:\n    %s" % (path, "\n    ".join(filespath.split(":"))))
            newpath, hist = bb.utils.which(filespath, path, history=True)
            searched.extend(hist)
        if not newpath:
            filesdir = d.getVar('FILESDIR')
            if filesdir:
                logger.debug(2, "Searching for %s in path: %s" % (path, filesdir))
                newpath = os.path.join(filesdir, path)
                searched.append(newpath)
        if (not newpath or not os.path.exists(newpath)) and path.find("*") != -1:
            # For expressions using '*', best we can do is take the first directory in FILESPATH that exists
            newpath, hist = bb.utils.which(filespath, ".", history=True)
            searched.extend(hist)
            logger.debug(2, "Searching for %s in path: %s" % (path, newpath))
            return searched
        if not os.path.exists(newpath):
            dldirfile = os.path.join(d.getVar("DL_DIR"), path)
            logger.debug(2, "Defaulting to %s for %s" % (dldirfile, path))
            bb.utils.mkdirhier(os.path.dirname(dldirfile))
            searched.append(dldirfile)
            return searched
        return searched
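
bb.utils.which here searches a colon-separated list of directories and also reports the candidate paths it tried. A simplified sketch of the lookup semantics (without the history bookkeeping):

    import os

    def which_first(filespath, relpath):
        # return the first FILESPATH entry under which relpath exists
        for directory in filespath.split(":"):
            candidate = os.path.join(directory, relpath)
            if os.path.exists(candidate):
                return candidate
        return ""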
Example #28
    def download(self, ud, d):
        """Fetch url"""

        logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")

        if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK):
            updatecmd = self._buildhgcommand(ud, d, "pull")
            logger.info("Update " + ud.url)
            # update sources there
            os.chdir(ud.moddir)
            logger.debug(1, "Running %s", updatecmd)
            bb.fetch2.check_network_access(d, updatecmd, ud.url)
            runfetchcmd(updatecmd, d)

        else:
            fetchcmd = self._buildhgcommand(ud, d, "fetch")
            logger.info("Fetch " + ud.url)
            # check out sources there
            bb.utils.mkdirhier(ud.pkgdir)
            os.chdir(ud.pkgdir)
            logger.debug(1, "Running %s", fetchcmd)
            bb.fetch2.check_network_access(d, fetchcmd, ud.url)
            runfetchcmd(fetchcmd, d)

        # Even when we clone (fetch), we still need to update as hg's clone
        # won't check out the specified revision if it's on a branch
        updatecmd = self._buildhgcommand(ud, d, "update")
        os.chdir(ud.moddir)
        logger.debug(1, "Running %s", updatecmd)
        runfetchcmd(updatecmd, d)
Example #29
    def _getshrinkeddependencies(self, pkg, data, version, d, ud, lockdown, manifest, toplevel=True):
        logger.debug(2, "NPM shrinkwrap file is %s" % data)
        if toplevel:
            name = data.get('name', None)
            if name and name != pkg:
                for obj in data.get('dependencies', []):
                    if obj == pkg:
                        self._getshrinkeddependencies(obj, data['dependencies'][obj], data['dependencies'][obj]['version'], d, ud, lockdown, manifest, False)
                        return

        pkgnameWithVersion = "{}@{}".format(pkg, version)
        logger.debug(2, "Get dependencies for {}".format(pkgnameWithVersion))
        filename = self._runpack(ud, d, pkgnameWithVersion)
        manifest[pkg] = {}
        manifest[pkg]['tgz'] = filename
        manifest[pkg]['deps'] = {}

        if pkg in lockdown:
            sha1_expected = lockdown[pkg][version]
            sha1_data = bb.utils.sha1_file("npm/%s/%s" % (ud.pkgname, manifest[pkg]['tgz']))
            if sha1_expected != sha1_data:
                msg = "\nFile: '%s' has %s checksum %s when %s was expected" % (manifest[pkg]['tgz'], 'sha1', sha1_data, sha1_expected)
                raise ChecksumError('Checksum mismatch!%s' % msg)
        else:
            logger.debug(2, "No lockdown data for %s@%s" % (pkg, version))

        if 'dependencies' in data:
            for obj in data['dependencies']:
                logger.debug(2, "Found dep is %s" % str(obj))
                self._getshrinkeddependencies(obj, data['dependencies'][obj], data['dependencies'][obj]['version'], d, ud, lockdown, manifest[pkg]['deps'], False)
Example #30
    def _getdependencies(self, pkg, data, version, d, ud, optional=False, fetchedlist=None):
        if fetchedlist is None:
            fetchedlist = []
        pkgfullname = pkg
        if version != '*' and '/' not in version:
            pkgfullname += "@'%s'" % version
        logger.debug(2, "Calling getdeps on %s" % pkg)
        fetchcmd = "npm view %s --json --registry %s" % (pkgfullname, ud.registry)
        output = runfetchcmd(fetchcmd, d, True)
        pdata = self._parse_view(output)
        if not pdata:
            raise FetchError("The command '%s' returned no output" % fetchcmd)
        if optional:
            pkg_os = pdata.get('os', None)
            if pkg_os:
                if not isinstance(pkg_os, list):
                    pkg_os = [pkg_os]
                blacklist = False
                for item in pkg_os:
                    if item.startswith('!'):
                        blacklist = True
                        break
                if (not blacklist and 'linux' not in pkg_os) or '!linux' in pkg_os:
                    logger.debug(2, "Skipping %s since it's incompatible with Linux" % pkg)
                    return
        #logger.debug(2, "Output URL is %s - %s - %s" % (ud.basepath, ud.basename, ud.localfile))
        outputurl = pdata['dist']['tarball']
        data[pkg] = {}
        data[pkg]['tgz'] = os.path.basename(outputurl)
        if outputurl in fetchedlist:
            return

        self._runwget(ud, d, "%s --directory-prefix=%s %s" % (self.basecmd, ud.prefixdir, outputurl), False)
        fetchedlist.append(outputurl)

        dependencies = pdata.get('dependencies', {})
        optionalDependencies = pdata.get('optionalDependencies', {})
        dependencies.update(optionalDependencies)
        depsfound = {}
        optdepsfound = {}
        data[pkg]['deps'] = {}
        for dep in dependencies:
            if dep in optionalDependencies:
                optdepsfound[dep] = dependencies[dep]
            else:
                depsfound[dep] = dependencies[dep]
        for dep, version in optdepsfound.items():
            self._getdependencies(dep, data[pkg]['deps'], version, d, ud, optional=True, fetchedlist=fetchedlist)
        for dep, version in depsfound.items():
            self._getdependencies(dep, data[pkg]['deps'], version, d, ud, fetchedlist=fetchedlist)
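
The 'os' filter treats the field as either an allowlist ('linux' must be present) or, when any entry starts with '!', a blocklist ('!linux' must be absent). The check can be isolated and exercised like this:

    def linux_compatible(pkg_os):
        # mirrors the skip condition in _getdependencies above
        if not isinstance(pkg_os, list):
            pkg_os = [pkg_os]
        blacklist = any(item.startswith('!') for item in pkg_os)
        return not ((not blacklist and 'linux' not in pkg_os) or '!linux' in pkg_os)

    assert linux_compatible(['linux'])
    assert not linux_compatible(['darwin'])
    assert linux_compatible(['!win32'])
    assert not linux_compatible(['!linux'])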
Example #31
    def _getshrinkeddependencies(self, pkg, data, version, d, ud, lockdown, manifest):
        logger.debug(2, "NPM shrinkwrap file is %s" % data)
        outputurl = "invalid"
        if ('resolved' not in data) or (not data['resolved'].startswith('http')):
            # will be the case for ${PN}
            fetchcmd = "npm view %s@%s dist.tarball --registry %s" % (pkg, version, ud.registry)
            logger.debug(2, "Found this matching URL: %s" % str(fetchcmd))
            outputurl = runfetchcmd(fetchcmd, d, True)
        else:
            outputurl = data['resolved']
        self._runwget(ud, d, "%s %s" % (self.basecmd, outputurl), False)
        manifest[pkg] = {}
        manifest[pkg]['tgz'] = os.path.basename(outputurl).rstrip()
        manifest[pkg]['deps'] = {}

        if pkg in lockdown:
            sha1_expected = lockdown[pkg][version]
            sha1_data = bb.utils.sha1_file("npm/%s/%s" % (ud.pkgname, manifest[pkg]['tgz']))
            if sha1_expected != sha1_data:
                msg = "\nFile: '%s' has %s checksum %s when %s was expected" % (manifest[pkg]['tgz'], 'sha1', sha1_data, sha1_expected)
                raise ChecksumError('Checksum mismatch!%s' % msg)
        else:
            logger.debug(2, "No lockdown data for %s@%s" % (pkg, version))

        if 'dependencies' in data:
            for obj in data['dependencies']:
                logger.debug(2, "Found dep is %s" % str(obj))
                self._getshrinkeddependencies(obj, data['dependencies'][obj], data['dependencies'][obj]['version'], d, ud, lockdown, manifest[pkg]['deps'])
Example #32
    def download(self, ud, d):
        """Fetch url"""

        logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")

        # If the checkout doesn't exist and the mirror tarball does, extract it
        if not os.path.exists(ud.pkgdir) and os.path.exists(ud.fullmirror):
            bb.utils.mkdirhier(ud.pkgdir)
            os.chdir(ud.pkgdir)
            runfetchcmd("tar -xzf %s" % (ud.fullmirror), d)

        if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK):
            # Found the source, check whether need pull
            updatecmd = self._buildhgcommand(ud, d, "update")
            os.chdir(ud.moddir)
            logger.debug(1, "Running %s", updatecmd)
            try:
                runfetchcmd(updatecmd, d)
            except bb.fetch2.FetchError:
                # Running pull in the repo
                pullcmd = self._buildhgcommand(ud, d, "pull")
                logger.info("Pulling " + ud.url)
                # update sources there
                os.chdir(ud.moddir)
                logger.debug(1, "Running %s", pullcmd)
                bb.fetch2.check_network_access(d, pullcmd, ud.url)
                runfetchcmd(pullcmd, d)
                try:
                    os.unlink(ud.fullmirror)
                except OSError as exc:
                    if exc.errno != errno.ENOENT:
                        raise

        # No source found, clone it.
        if not os.path.exists(ud.moddir):
            fetchcmd = self._buildhgcommand(ud, d, "fetch")
            logger.info("Fetch " + ud.url)
            # check out sources there
            bb.utils.mkdirhier(ud.pkgdir)
            os.chdir(ud.pkgdir)
            logger.debug(1, "Running %s", fetchcmd)
            bb.fetch2.check_network_access(d, fetchcmd, ud.url)
            runfetchcmd(fetchcmd, d)

        # Even when we clone (fetch), we still need to update as hg's clone
        # won't check out the specified revision if it's on a branch
        updatecmd = self._buildhgcommand(ud, d, "update")
        os.chdir(ud.moddir)
        logger.debug(1, "Running %s", updatecmd)
        runfetchcmd(updatecmd, d)
Example #33
    def _getshrinkeddependencies(self, pkg, data, version, d, ud, lockdown, manifest, toplevel=True):
        logger.debug(2, "NPM shrinkwrap file is %s" % data)
        if toplevel:
            name = data.get('name', None)
            if name and name != pkg:
                for obj in data.get('dependencies', []):
                    if obj == pkg:
                        self._getshrinkeddependencies(obj, data['dependencies'][obj], data['dependencies'][obj]['version'], d, ud, lockdown, manifest, False)
                        return
        outputurl = "invalid"
        if ('resolved' not in data) or (not data['resolved'].startswith('http')):
            # will be the case for ${PN}
            fetchcmd = "npm view %s@%s dist.tarball --registry %s" % (pkg, version, ud.registry)
            logger.debug(2, "Found this matching URL: %s" % str(fetchcmd))
            outputurl = runfetchcmd(fetchcmd, d, True)
        else:
            outputurl = data['resolved']
        self._runwget(ud, d, "%s --directory-prefix=%s %s" % (self.basecmd, ud.prefixdir, outputurl), False)
        manifest[pkg] = {}
        manifest[pkg]['tgz'] = os.path.basename(outputurl).rstrip()
        manifest[pkg]['deps'] = {}

        if pkg in lockdown:
            sha1_expected = lockdown[pkg][version]
            sha1_data = bb.utils.sha1_file("npm/%s/%s" % (ud.pkgname, manifest[pkg]['tgz']))
            if sha1_expected != sha1_data:
                msg = "\nFile: '%s' has %s checksum %s when %s was expected" % (manifest[pkg]['tgz'], 'sha1', sha1_data, sha1_expected)
                raise ChecksumError('Checksum mismatch!%s' % msg)
        else:
            logger.debug(2, "No lockdown data for %s@%s" % (pkg, version))

        if 'dependencies' in data:
            for obj in data['dependencies']:
                logger.debug(2, "Found dep is %s" % str(obj))
                self._getshrinkeddependencies(obj, data['dependencies'][obj], data['dependencies'][obj]['version'], d, ud, lockdown, manifest[pkg]['deps'], False)
Example #34
    def localpaths(self, urldata, d):
        """
        Return the local filename of a given url assuming a successful fetch.
        """
        searched = []
        path = urldata.decodedurl
        newpath = path
        if path[0] == "/":
            return [path]
        filespath = data.getVar('FILESPATH', d, True)
        if filespath:
            logger.debug(2, "Searching for %s in paths:\n    %s" % (path, "\n    ".join(filespath.split(":"))))
            newpath, hist = bb.utils.which(filespath, path, history=True)
            searched.extend(hist)
        if not newpath:
            filesdir = data.getVar('FILESDIR', d, True)
            if filesdir:
                logger.debug(2, "Searching for %s in path: %s" % (path, filesdir))
                newpath = os.path.join(filesdir, path)
                searched.append(newpath)
        if (not newpath or not os.path.exists(newpath)) and path.find("*") != -1:
            # For expressions using '*', best we can do is take the first directory in FILESPATH that exists
            newpath, hist = bb.utils.which(filespath, ".", history=True)
            searched.extend(hist)
            logger.debug(2, "Searching for %s in path: %s" % (path, newpath))
            return searched
        if not os.path.exists(newpath):
            dldirfile = os.path.join(d.getVar("DL_DIR", True), path)
            logger.debug(2, "Defaulting to %s for %s" % (dldirfile, path))
            bb.utils.mkdirhier(os.path.dirname(dldirfile))
            searched.append(dldirfile)
            return searched
        return searched
Example #35
    def download(self, ud, d):
        """Fetch url"""
        jsondepobj = {}
        shrinkobj = {}
        lockdown = {}

        if not os.listdir(ud.pkgdatadir) and os.path.exists(ud.fullmirror):
            dest = d.getVar("DL_DIR")
            bb.utils.mkdirhier(dest)
            runfetchcmd("tar -xJf %s" % (ud.fullmirror), d, workdir=dest)
            return

        if ud.parm.get("noverify", None) != '1':
            shwrf = d.getVar('NPM_SHRINKWRAP')
            logger.debug(2, "NPM shrinkwrap file is %s" % shwrf)
            if shwrf:
                try:
                    with open(shwrf) as datafile:
                        shrinkobj = json.load(datafile)
                except Exception as e:
                    raise FetchError(
                        'Error loading NPM_SHRINKWRAP file "%s" for %s: %s' %
                        (shwrf, ud.pkgname, str(e)))
            elif not ud.ignore_checksums:
                logger.warning(
                    'Missing shrinkwrap file in NPM_SHRINKWRAP for %s, this will lead to unreliable builds!'
                    % ud.pkgname)
            lckdf = d.getVar('NPM_LOCKDOWN')
            logger.debug(2, "NPM lockdown file is %s" % lckdf)
            if lckdf:
                try:
                    with open(lckdf) as datafile:
                        lockdown = json.load(datafile)
                except Exception as e:
                    raise FetchError(
                        'Error loading NPM_LOCKDOWN file "%s" for %s: %s' %
                        (lckdf, ud.pkgname, str(e)))
            elif not ud.ignore_checksums:
                logger.warning(
                    'Missing lockdown file in NPM_LOCKDOWN for %s, this will lead to unreproducible builds!'
                    % ud.pkgname)

        if ('name' not in shrinkobj):
            self._getdependencies(ud.pkgname, jsondepobj, ud.version, d, ud)
        else:
            self._getshrinkeddependencies(ud.pkgname, shrinkobj, ud.version, d,
                                          ud, lockdown, jsondepobj)

        with open(ud.localpath, 'w') as outfile:
            json.dump(jsondepobj, outfile)
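
For reference, the two JSON inputs are assumed to look roughly like this (hypothetical excerpts; the sha1 value is made up):

    shrinkwrap = {
        "name": "myapp",
        "version": "1.0.0",
        "dependencies": {
            "left-pad": {
                "version": "1.3.0",
                "resolved": "https://registry.npmjs.org/left-pad/-/left-pad-1.3.0.tgz"
            }
        }
    }
    lockdown = {
        "left-pad": {"1.3.0": "0000000000000000000000000000000000000000"}  # sha1 per version
    }
    # lockdown[pkg][version] supplies the expected sha1 checked in
    # _getshrinkeddependencies (Examples #29 and #33).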
Example #36
    def _getdependencies(self, pkg, data, version, d, ud, optional=False, fetchedlist=None):
        if fetchedlist is None:
            fetchedlist = []
        pkgfullname = pkg
        if version != '*' and '/' not in version:
            pkgfullname += "@'%s'" % version
        logger.debug(2, "Calling getdeps on %s" % pkg)
        fetchcmd = "npm view %s --json --registry %s" % (pkgfullname, ud.registry)
        output = runfetchcmd(fetchcmd, d, True)
        pdata = self._parse_view(output)
        if not pdata:
            raise FetchError("The command '%s' returned no output" % fetchcmd)
        if optional:
            pkg_os = pdata.get('os', None)
            if pkg_os:
                if not isinstance(pkg_os, list):
                    pkg_os = [pkg_os]
                blacklist = False
                for item in pkg_os:
                    if item.startswith('!'):
                        blacklist = True
                        break
                if (not blacklist and 'linux' not in pkg_os) or '!linux' in pkg_os:
                    logger.debug(2, "Skipping %s since it's incompatible with Linux" % pkg)
                    return
        #logger.debug(2, "Output URL is %s - %s - %s" % (ud.basepath, ud.basename, ud.localfile))
        outputurl = pdata['dist']['tarball']
        data[pkg] = {}
        data[pkg]['tgz'] = os.path.basename(outputurl)
        if outputurl in fetchedlist:
            return

        self._runwget(ud, d, "%s --directory-prefix=%s %s" % (self.basecmd, ud.prefixdir, outputurl), False)
        fetchedlist.append(outputurl)

        dependencies = pdata.get('dependencies', {})
        optionalDependencies = pdata.get('optionalDependencies', {})
        dependencies.update(optionalDependencies)
        depsfound = {}
        optdepsfound = {}
        data[pkg]['deps'] = {}
        for dep in dependencies:
            if dep in optionalDependencies:
                optdepsfound[dep] = dependencies[dep]
            else:
                depsfound[dep] = dependencies[dep]
        for dep, version in optdepsfound.items():
            self._getdependencies(dep, data[pkg]['deps'], version, d, ud, optional=True, fetchedlist=fetchedlist)
        for dep, version in depsfound.items():
            self._getdependencies(dep, data[pkg]['deps'], version, d, ud, fetchedlist=fetchedlist)
Example #37
    def download(self, ud, d):
        """Fetch url"""

        # If the checkout doesn't exist and the mirror tarball does, extract it
        if not os.path.exists(ud.clonedir) and os.path.exists(ud.fullmirror):
            bb.utils.mkdirhier(ud.clonedir)
            os.chdir(ud.clonedir)
            runfetchcmd("tar -xzf %s" % (ud.fullmirror), d)

        repourl = self._get_repo_url(ud)

        # If the repo still doesn't exist, fallback to cloning it
        if not os.path.exists(ud.clonedir):
            # We do this since git will use a "-l" option automatically for local urls where possible
            if repourl.startswith("file://"):
                repourl = repourl[7:]
            clone_cmd = "%s clone --bare --mirror %s %s" % (ud.basecmd, repourl, ud.clonedir)
            if ud.proto.lower() != 'file':
                bb.fetch2.check_network_access(d, clone_cmd)
            runfetchcmd(clone_cmd, d)

        os.chdir(ud.clonedir)
        # Update the checkout if needed
        needupdate = False
        for name in ud.names:
            if not self._contains_ref(ud, d, name):
                needupdate = True
        if needupdate:
            try: 
                runfetchcmd("%s remote rm origin" % ud.basecmd, d) 
            except bb.fetch2.FetchError:
                logger.debug(1, "No Origin")

            runfetchcmd("%s remote add --mirror=fetch origin %s" % (ud.basecmd, repourl), d)
            fetch_cmd = "%s fetch -f --prune %s refs/*:refs/*" % (ud.basecmd, repourl)
            if ud.proto.lower() != 'file':
                bb.fetch2.check_network_access(d, fetch_cmd, ud.url)
            runfetchcmd(fetch_cmd, d)
            runfetchcmd("%s prune-packed" % ud.basecmd, d)
            runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d)
            try:
                os.unlink(ud.fullmirror)
            except OSError as exc:
                if exc.errno != errno.ENOENT:
                    raise
        os.chdir(ud.clonedir)
        for name in ud.names:
            if not self._contains_ref(ud, d, name):
                raise bb.fetch2.FetchError("Unable to find revision %s in branch %s even from upstream" % (ud.revisions[name], ud.branches[name]))
Example #38
File: git.py Project: ack3000/poky
    def download(self, loc, ud, d):
        """Fetch url"""

        if ud.user:
            username = ud.user + '@'
        else:
            username = ""

        ud.repochanged = not os.path.exists(ud.fullmirror)

        # If the checkout doesn't exist and the mirror tarball does, extract it
        if not os.path.exists(ud.clonedir) and os.path.exists(ud.fullmirror):
            bb.utils.mkdirhier(ud.clonedir)
            os.chdir(ud.clonedir)
            runfetchcmd("tar -xzf %s" % (ud.fullmirror), d)

        repourl = "%s://%s%s%s" % (ud.proto, username, ud.host, ud.path)

        # If the repo still doesn't exist, fallback to cloning it
        if not os.path.exists(ud.clonedir):
            clone_cmd = "%s clone --bare --mirror %s %s" % (
                ud.basecmd, repourl, ud.clonedir)
            bb.fetch2.check_network_access(d, clone_cmd)
            runfetchcmd(clone_cmd, d)

        os.chdir(ud.clonedir)
        # Update the checkout if needed
        needupdate = False
        for name in ud.names:
            if not self._contains_ref(ud.revisions[name], d):
                needupdate = True
        if needupdate:
            try:
                runfetchcmd("%s remote prune origin" % ud.basecmd, d)
                runfetchcmd("%s remote rm origin" % ud.basecmd, d)
            except bb.fetch2.FetchError:
                logger.debug(1, "No Origin")

            runfetchcmd(
                "%s remote add --mirror=fetch origin %s" %
                (ud.basecmd, repourl), d)
            fetch_cmd = "%s fetch -f --prune %s refs/*:refs/*" % (ud.basecmd,
                                                                  repourl)
            bb.fetch2.check_network_access(d, fetch_cmd, ud.url)
            runfetchcmd(fetch_cmd, d)
            runfetchcmd("%s prune-packed" % ud.basecmd, d)
            runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd,
                        d)
            ud.repochanged = True
Example #39
    def download(self, ud, d):
        """Fetch packages"""
        fetchcmd = (d.getVar("NPM", True) or "npm") + " "
        fetchcmd += d.getVar("NPM_ARCHFLAGS", True) or ""
        fetchcmd += " install " + ud.fetchname
        fetchcmd += " --force"
        if not os.path.exists(ud.installdir):
            bb.utils.mkdirhier(ud.installdir)
        os.chdir(ud.installdir)
        with open("package.json", 'w') as f:
            f.write("{}\n")
        logger.info("npm install " + ud.url)
        logger.debug(2, "executing " + fetchcmd)
        bb.fetch2.check_network_access(d, fetchcmd)
        runfetchcmd(fetchcmd, d, quiet=False)
        return True
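
A standalone sketch of the trick used above: npm wants a project manifest before it will install into a directory, so the fetcher drops an empty package.json first and then runs the install. The function name and arguments are illustrative.

import os
import subprocess

def npm_fetch(installdir, package):
    os.makedirs(installdir, exist_ok=True)
    with open(os.path.join(installdir, "package.json"), "w") as f:
        f.write("{}\n")  # minimal manifest so npm treats this as a project
    subprocess.run(["npm", "install", "--force", package],
                   cwd=installdir, check=True)
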
Example #40
    def download(self, ud, d):
        """Fetch url"""

        if os.access(os.path.join(d.getVar("DL_DIR"), ud.localfile), os.R_OK):
            logger.debug(
                1,
                "%s already exists (or was stashed). Skipping repo init / sync.",
                ud.localpath)
            return

        repodir = d.getVar("REPODIR") or (d.getVar("DL_DIR") + "/repo")
        gitsrcname = "%s%s" % (ud.host, ud.path.replace("/", "."))
        codir = os.path.join(repodir, gitsrcname, ud.manifest)

        if ud.user:
            username = ud.user + "@"
        else:
            username = ""

        repodir = os.path.join(codir, "repo")
        bb.utils.mkdirhier(repodir)
        if not os.path.exists(os.path.join(repodir, ".repo")):
            bb.fetch2.check_network_access(
                d, "%s init -m %s -b %s -u %s://%s%s%s" %
                (ud.basecmd, ud.manifest, ud.branch, ud.proto, username,
                 ud.host, ud.path), ud.url)
            runfetchcmd("%s init -m %s -b %s -u %s://%s%s%s" %
                        (ud.basecmd, ud.manifest, ud.branch, ud.proto,
                         username, ud.host, ud.path),
                        d,
                        workdir=repodir)

        bb.fetch2.check_network_access(d, "%s sync %s" % (ud.basecmd, ud.url),
                                       ud.url)
        runfetchcmd("%s sync" % ud.basecmd, d, workdir=repodir)

        scmdata = ud.parm.get("scmdata", "")
        if scmdata == "keep":
            tar_flags = ""
        else:
            tar_flags = "--exclude='.repo' --exclude='.git'"

        # Create a cache
        runfetchcmd("tar %s -czf %s %s" %
                    (tar_flags, ud.localpath, os.path.join(".", "*")),
                    d,
                    workdir=codir)
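
A standalone sketch of the init-once/sync-always flow above: `repo init` is only needed the first time (it creates the .repo directory); afterwards a plain `repo sync` refreshes the tree. Default manifest and branch values are illustrative.

import os
import subprocess

def repo_checkout(repodir, manifest_url, manifest="default.xml", branch="master"):
    os.makedirs(repodir, exist_ok=True)
    if not os.path.exists(os.path.join(repodir, ".repo")):
        # First checkout: point repo at the manifest repository
        subprocess.run(["repo", "init", "-m", manifest, "-b", branch,
                        "-u", manifest_url], cwd=repodir, check=True)
    subprocess.run(["repo", "sync"], cwd=repodir, check=True)
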
Example #41
    def need_update(self, ud, d):
        if Git.need_update(self, ud, d):
            return True

        try:
            # Check for the nugget dropped by the download operation
            known_srcrevs = runfetchcmd("%s config --get-all bitbake.srcrev" % \
                                        (ud.basecmd), d, workdir=ud.clonedir)

            if ud.revisions[ud.names[0]] in known_srcrevs.split():
                return False
        except bb.fetch2.FetchError:
            pass

        need_update_list = []
        def need_update_submodule(ud, url, module, modpath, workdir, d):
            url += ";bareclone=1;nobranch=1"

            try:
                newfetch = Fetch([url], d, cache=False)
                new_ud = newfetch.ud[url]
                if new_ud.method.need_update(new_ud, d):
                    need_update_list.append(modpath)
            except Exception as e:
                logger.error('gitsm: submodule update check failed: %s %s' % (type(e).__name__, str(e)))
                # If the check itself failed, err on the side of refetching
                need_update_list.append(modpath)

        # If we're using a shallow mirror tarball it needs to be unpacked
        # temporarily so that we can examine the .gitmodules file
        if ud.shallow and os.path.exists(ud.fullshallow) and not os.path.exists(ud.clonedir):
            tmpdir = tempfile.mkdtemp(dir=d.getVar("DL_DIR"))
            runfetchcmd("tar -xzf %s" % ud.fullshallow, d, workdir=tmpdir)
            self.process_submodules(ud, tmpdir, need_update_submodule, d)
            shutil.rmtree(tmpdir)
        else:
            self.process_submodules(ud, ud.clonedir, need_update_submodule, d)
            if len(need_update_list) == 0:
                # We already have the required commits of all submodules. Drop
                # a nugget so we don't need to check again.
                runfetchcmd("%s config --add bitbake.srcrev %s" % \
                            (ud.basecmd, ud.revisions[ud.names[0]]), d, workdir=ud.clonedir)

        if len(need_update_list) > 0:
            logger.debug('gitsm: Submodules requiring update: %s' % (' '.join(need_update_list)))
            return True

        return False
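
A standalone sketch of the "nugget" cache this method relies on: once every submodule is known to be present for a given srcrev, that srcrev is recorded in the clone's git config, so later checks can skip the expensive recursion. The config key mirrors the code above; the function names are illustrative.

import subprocess

def srcrev_known(clonedir, srcrev, key="bitbake.srcrev"):
    """Return True if this srcrev was already verified for this clone."""
    result = subprocess.run(["git", "config", "--get-all", key],
                            cwd=clonedir, capture_output=True, text=True)
    return srcrev in result.stdout.split()

def record_srcrev(clonedir, srcrev, key="bitbake.srcrev"):
    """Drop the nugget after a successful full check."""
    subprocess.run(["git", "config", "--add", key, srcrev],
                   cwd=clonedir, check=True)
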
Example #42
    def download(self, uri, ud, d, checkonly=False):
        """Fetch urls"""

        basecmd = d.getVar(
            "FETCHCMD_wget", True
        ) or "/usr/bin/env wget -t 2 -T 30 -nv --passive-ftp --no-check-certificate"

        if 'downloadfilename' in ud.parm:
            basecmd += " -O ${DL_DIR}/" + ud.localfile

        if checkonly:
            fetchcmd = d.getVar(
                "CHECKCOMMAND_wget",
                True) or d.expand(basecmd + " --spider '${URI}'")
        elif os.path.exists(ud.localpath):
            # file exists, but we didn't complete it, so try again
            fetchcmd = d.getVar(
                "RESUMECOMMAND_wget",
                True) or d.expand(basecmd + " -c -P ${DL_DIR} '${URI}'")
        else:
            fetchcmd = d.getVar(
                "FETCHCOMMAND_wget",
                True) or d.expand(basecmd + " -P ${DL_DIR} '${URI}'")

        uri = uri.split(";")[0]
        uri_decoded = list(decodeurl(uri))
        uri_type = uri_decoded[0]
        uri_host = uri_decoded[1]

        fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0])
        fetchcmd = fetchcmd.replace("${FILE}", ud.basename)
        if not checkonly:
            logger.info("fetch " + uri)
            logger.debug(2, "executing " + fetchcmd)
        bb.fetch2.check_network_access(d, fetchcmd)
        runfetchcmd(fetchcmd, d, quiet=checkonly)

        # Sanity check since wget can pretend it succeeded when it didn't
        # Also, this used to happen if sourceforge sent us to the mirror page
        if not os.path.exists(ud.localpath) and not checkonly:
            raise FetchError(
                "The fetch command returned success for url %s but %s doesn't exist?!"
                % (uri, ud.localpath), uri)

        return True
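
A standalone sketch of the command-template substitution above: the fetch command is a user-overridable string with ${URI} and ${FILE} placeholders filled in just before execution. The default template here is illustrative, loosely modelled on the wget command in the example.

import subprocess

DEFAULT_FETCH = "wget -t 2 -T 30 -nv -P downloads '${URI}'"

def run_fetch(uri, basename, template=DEFAULT_FETCH):
    # Substitute placeholders, then hand the whole string to the shell,
    # as the fetcher above does via runfetchcmd()
    cmd = template.replace("${URI}", uri).replace("${FILE}", basename)
    subprocess.run(cmd, shell=True, check=True)
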
Example #43
    def download(self, ud, d):
        """Fetch url"""
        jsondepobj = {}
        shrinkobj = {}
        lockdown = {}

        if not os.listdir(ud.pkgdatadir) and os.path.exists(ud.fullmirror):
            dest = d.getVar("DL_DIR", True)
            bb.utils.mkdirhier(dest)
            save_cwd = os.getcwd()
            os.chdir(dest)
            runfetchcmd("tar -xJf %s" % (ud.fullmirror), d)
            os.chdir(save_cwd)
            return

        shwrf = d.getVar('NPM_SHRINKWRAP', True)
        logger.debug(2, "NPM shrinkwrap file is %s" % shwrf)
        try:
            with open(shwrf) as datafile:
                shrinkobj = json.load(datafile)
        except Exception:
            logger.warn(
                'Missing shrinkwrap file in NPM_SHRINKWRAP for %s, this will lead to unreliable builds!'
                % ud.pkgname)
        lckdf = d.getVar('NPM_LOCKDOWN', True)
        logger.debug(2, "NPM lockdown file is %s" % lckdf)
        try:
            with open(lckdf) as datafile:
                lockdown = json.load(datafile)
        except Exception:
            logger.warn(
                'Missing lockdown file in NPM_LOCKDOWN for %s, this will lead to unreproducible builds!'
                % ud.pkgname)

        if ('name' not in shrinkobj):
            self._getdependencies(ud.pkgname, jsondepobj, ud.version, d, ud)
        else:
            self._getshrinkeddependencies(ud.pkgname, shrinkobj, ud.version, d,
                                          ud, lockdown, jsondepobj)

        with open(ud.localpath, 'w') as outfile:
            json.dump(jsondepobj, outfile)
Example #44
File: npm.py Project: VCTLabs/poky
    def download(self, ud, d):
        """Fetch url"""
        jsondepobj = {}
        shrinkobj = {}
        lockdown = {}

        if not os.listdir(ud.pkgdatadir) and os.path.exists(ud.fullmirror):
            dest = d.getVar("DL_DIR")
            bb.utils.mkdirhier(dest)
            runfetchcmd("tar -xJf %s" % (ud.fullmirror), d, workdir=dest)
            return

        if ud.parm.get("noverify", None) != '1':
            shwrf = d.getVar('NPM_SHRINKWRAP')
            logger.debug(2, "NPM shrinkwrap file is %s" % shwrf)
            if shwrf:
                try:
                    with open(shwrf) as datafile:
                        shrinkobj = json.load(datafile)
                except Exception as e:
                    raise FetchError('Error loading NPM_SHRINKWRAP file "%s" for %s: %s' % (shwrf, ud.pkgname, str(e)))
            elif not ud.ignore_checksums:
                logger.warning('Missing shrinkwrap file in NPM_SHRINKWRAP for %s, this will lead to unreliable builds!' % ud.pkgname)
            lckdf = d.getVar('NPM_LOCKDOWN')
            logger.debug(2, "NPM lockdown file is %s" % lckdf)
            if lckdf:
                try:
                    with open(lckdf) as datafile:
                        lockdown = json.load(datafile)
                except Exception as e:
                    raise FetchError('Error loading NPM_LOCKDOWN file "%s" for %s: %s' % (lckdf, ud.pkgname, str(e)))
            elif not ud.ignore_checksums:
                logger.warning('Missing lockdown file in NPM_LOCKDOWN for %s, this will lead to unreproducible builds!' % ud.pkgname)

        if ('name' not in shrinkobj):
            self._getdependencies(ud.pkgname, jsondepobj, ud.version, d, ud)
        else:
            self._getshrinkeddependencies(ud.pkgname, shrinkobj, ud.version, d, ud, lockdown, jsondepobj)

        with open(ud.localpath, 'w') as outfile:
            json.dump(jsondepobj, outfile)
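
A standalone sketch of the policy this newer variant enforces: if a pin file is configured, a parse error is fatal; if none is configured, only warn that builds may not be reproducible. The function name and the RuntimeError are illustrative stand-ins for FetchError.

import json
import logging

logger = logging.getLogger(__name__)

def load_pinfile(path, what):
    """Load an optional JSON pin file (shrinkwrap/lockdown style)."""
    if not path:
        logger.warning("No %s configured; builds may not be reproducible", what)
        return {}
    try:
        with open(path) as f:
            return json.load(f)
    except (OSError, ValueError) as e:
        # Configured but unreadable or invalid: fail hard rather than
        # silently building against floating dependencies
        raise RuntimeError("Error loading %s file %r: %s" % (what, path, e))
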
Example #45
    def download(self, loc, ud, d):
        """Fetch urls"""

        svkroot = ud.host + ud.path

        svkcmd = "svk co -r {%s} %s/%s" % (ud.date, svkroot, ud.module)

        if ud.revision:
            svkcmd = "svk co -r %s %s/%s" % (ud.revision, svkroot, ud.module)

        # create temp directory
        localdata = data.createCopy(d)
        data.update_data(localdata)
        logger.debug(2, "Fetch: creating temporary directory")
        bb.utils.mkdirhier(data.expand('${WORKDIR}', localdata))
        data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX',
                                           localdata), localdata)
        tmppipe = os.popen(
            data.getVar('MKTEMPDIRCMD', localdata, True) or "false")
        tmpfile = tmppipe.readline().strip()
        if not tmpfile:
            logger.error("Fetch: unable to create temporary directory")
            raise FetchError(
                "Fetch: unable to create temporary directory; make sure 'mktemp' is in the PATH.",
                loc)

        # check out sources there
        os.chdir(tmpfile)
        logger.info("Fetch " + loc)
        logger.debug(1, "Running %s", svkcmd)
        runfetchcmd(svkcmd, d, cleanup=[tmpfile])

        os.chdir(os.path.join(tmpfile, os.path.dirname(ud.module)))
        # tar them up to a defined filename
        runfetchcmd("tar -czf %s %s" %
                    (ud.localpath, os.path.basename(ud.module)),
                    d,
                    cleanup=[ud.localpath])

        # cleanup
        bb.utils.prunedir(tmpfile)
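
The os.popen/mktemp dance above predates the standard library's tempfile module; a present-day sketch of the same checkout-then-tar flow (outside BitBake, names illustrative) could simply use tempfile.mkdtemp:

import os
import shutil
import subprocess
import tempfile

def svk_fetch(svkroot, module, revision, localpath):
    tmpdir = tempfile.mkdtemp(prefix="oesvk.")
    try:
        # Check the module out at the requested revision
        subprocess.run(["svk", "co", "-r", str(revision),
                        "%s/%s" % (svkroot, module)], cwd=tmpdir, check=True)
        # Tar it up to the defined filename
        subprocess.run(["tar", "-czf", localpath, os.path.basename(module)],
                       cwd=os.path.join(tmpdir, os.path.dirname(module)),
                       check=True)
    finally:
        shutil.rmtree(tmpdir)
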
Example #46
File: svn.py Project: kacf/poky
    def download(self, ud, d):
        """Fetch url"""

        logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")

        if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK):
            svnupdatecmd = self._buildsvncommand(ud, d, "update")
            logger.info("Update " + ud.url)
            # We need to attempt to run svn upgrade first in case it's an older working copy format
            try:
                runfetchcmd(ud.basecmd + " upgrade", d, workdir=ud.moddir)
            except FetchError:
                pass
            logger.debug(1, "Running %s", svnupdatecmd)
            bb.fetch2.check_network_access(d, svnupdatecmd, ud.url)
            runfetchcmd(svnupdatecmd, d, workdir=ud.moddir)
        else:
            svnfetchcmd = self._buildsvncommand(ud, d, "fetch")
            logger.info("Fetch " + ud.url)
            # check out sources there
            bb.utils.mkdirhier(ud.pkgdir)
            logger.debug(1, "Running %s", svnfetchcmd)
            bb.fetch2.check_network_access(d, svnfetchcmd, ud.url)
            runfetchcmd(svnfetchcmd, d, workdir=ud.pkgdir)

        scmdata = ud.parm.get("scmdata", "")
        if scmdata == "keep":
            tar_flags = ""
        else:
            tar_flags = "--exclude='.svn'"

        # tar them up to a defined filename
        runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.path_spec), d,
                    cleanup=[ud.localpath], workdir=ud.pkgdir)
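
A standalone sketch of the scmdata handling shared by several fetchers in this listing: SCM metadata (here .svn) is excluded from the cached tarball unless the URL carries scmdata=keep. Names are illustrative.

import subprocess

def make_tarball(workdir, localpath, path_spec, keep_scmdata=False):
    cmd = ["tar"]
    if not keep_scmdata:
        cmd += ["--exclude=.svn"]
    cmd += ["-czf", localpath, path_spec]
    subprocess.run(cmd, cwd=workdir, check=True)
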
Example #47
    def download(self, loc, ud, d):
        """Fetch url"""

        if os.access(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir), '.bzr'), os.R_OK):
            bzrcmd = self._buildbzrcommand(ud, d, "update")
            logger.debug(1, "BZR Update %s", loc)
            bb.fetch2.check_network_access(d, bzrcmd, ud.url)
            os.chdir(os.path.join(ud.pkgdir, os.path.basename(ud.path)))
            runfetchcmd(bzrcmd, d)
        else:
            bb.utils.remove(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir)), True)
            bzrcmd = self._buildbzrcommand(ud, d, "fetch")
            bb.fetch2.check_network_access(d, bzrcmd, ud.url)
            logger.debug(1, "BZR Checkout %s", loc)
            bb.utils.mkdirhier(ud.pkgdir)
            os.chdir(ud.pkgdir)
            logger.debug(1, "Running %s", bzrcmd)
            runfetchcmd(bzrcmd, d)

        os.chdir(ud.pkgdir)

        scmdata = ud.parm.get("scmdata", "")
        if scmdata == "keep":
            tar_flags = ""
        else:
            tar_flags = "--exclude '.bzr' --exclude '.bzrtags'"

        # tar them up to a defined filename
        runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(ud.pkgdir)), d, cleanup = [ud.localpath])
Example #48
    def download(self, loc, ud, d):
        """Fetch url"""

        logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")

        if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK):
            svnupdatecmd = self._buildsvncommand(ud, d, "update")
            logger.info("Update " + loc)
            # update sources there
            os.chdir(ud.moddir)
            logger.debug(1, "Running %s", svnupdatecmd)
            bb.fetch2.check_network_access(d, svnupdatecmd, ud.url)
            runfetchcmd(svnupdatecmd, d)
        else:
            svnfetchcmd = self._buildsvncommand(ud, d, "fetch")
            logger.info("Fetch " + loc)
            # check out sources there
            bb.utils.mkdirhier(ud.pkgdir)
            os.chdir(ud.pkgdir)
            logger.debug(1, "Running %s", svnfetchcmd)
            bb.fetch2.check_network_access(d, svnfetchcmd, ud.url)
            runfetchcmd(svnfetchcmd, d)

        scmdata = ud.parm.get("scmdata", "")
        if scmdata == "keep":
            tar_flags = ""
        else:
            tar_flags = "--exclude '.svn'"

        os.chdir(ud.pkgdir)
        # tar them up to a defined filename
        runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.module), d, cleanup = [ud.localpath])
Example #49
    def unpack(self, ud, destdir, d):
        """
        Make a local clone or export for the url
        """

        revflag = "-r %s" % ud.revision
        subdir = ud.parm.get("destsuffix", ud.module)
        codir = "%s/%s" % (destdir, subdir)

        scmdata = ud.parm.get("scmdata", "")
        if scmdata != "nokeep":
            proto = ud.parm.get('protocol', 'http')
            if not os.access(os.path.join(codir, '.hg'), os.R_OK):
                logger.debug(2, "Unpack: creating new hg repository in '" + codir + "'")
                runfetchcmd("%s init %s" % (ud.basecmd, codir), d)
            logger.debug(2, "Unpack: updating source in '" + codir + "'")
            if ud.user and ud.pswd:
                runfetchcmd("%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" pull %s" % (ud.basecmd, ud.user, ud.pswd, proto, ud.moddir), d, workdir=codir)
            else:
                runfetchcmd("%s pull %s" % (ud.basecmd, ud.moddir), d, workdir=codir)
            if ud.user and ud.pswd:
                runfetchcmd("%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" up -C %s" % (ud.basecmd, ud.user, ud.pswd, proto, revflag), d, workdir=codir)
            else:
                runfetchcmd("%s up -C %s" % (ud.basecmd, revflag), d, workdir=codir)
        else:
            logger.debug(2, "Unpack: extracting source to '" + codir + "'")
            runfetchcmd("%s archive -t files %s %s" % (ud.basecmd, revflag, codir), d, workdir=ud.moddir)
Example #50
    def download(self, ud, d):
        """Fetch url"""

        if os.access(os.path.join(d.getVar("DL_DIR"), ud.localfile), os.R_OK):
            logger.debug(
                1,
                "%s already exists (or was stashed). Skipping repo init / sync.",
                ud.localpath)
            return

        if ud.user:
            username = ud.user + "@"
        else:
            username = ""

        bb.utils.mkdirhier(ud.repodir)
        bb.fetch2.check_network_access(
            d, "repo init -m %s -b %s -u %s://%s%s%s" %
            (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path),
            ud.url)
        runfetchcmd(
            "repo init -m %s -b %s -u %s://%s%s%s" %
            (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path),
            d,
            workdir=ud.repodir)

        bb.fetch2.check_network_access(d, "repo sync %s" % ud.url, ud.url)
        runfetchcmd("repo sync", d, workdir=ud.repodir)

        scmdata = ud.parm.get("scmdata", "")
        if scmdata == "keep":
            tar_flags = ""
        else:
            tar_flags = "--exclude='.repo' --exclude='.git'"

        # Create a cache
        runfetchcmd("tar %s -cf - %s | pigz > %s" %
                    (tar_flags, os.path.join(".", "*"), ud.localpath),
                    d,
                    workdir=ud.codir)
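
A sketch of the pigz pipeline used above: piping tar's output through pigz parallelizes the gzip step, which matters for multi-gigabyte repo checkouts. The shell is needed for the pipe, as in the original.

import subprocess

def make_cache_tarball(workdir, localpath, excludes=(".repo", ".git")):
    flags = " ".join("--exclude='%s'" % e for e in excludes)
    cmd = "tar %s -cf - . | pigz > %s" % (flags, localpath)
    subprocess.run(cmd, shell=True, cwd=workdir, check=True)
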
Example #51
    def unpack(self, ud, destdir, d):
        """
        Make a local clone or export for the url
        """

        revflag = "-r %s" % ud.revision
        subdir = ud.parm.get("destsuffix", ud.module)
        codir = "%s/%s" % (destdir, subdir)

        scmdata = ud.parm.get("scmdata", "")
        if scmdata != "nokeep":
            if not os.access(os.path.join(codir, '.hg'), os.R_OK):
                logger.debug(
                    2, "Unpack: creating new hg repository in '" + codir + "'")
                runfetchcmd("%s init %s" % (ud.basecmd, codir), d)
            logger.debug(2, "Unpack: updating source in '" + codir + "'")
            runfetchcmd("%s pull %s" % (ud.basecmd, ud.moddir),
                        d,
                        workdir=codir)
            runfetchcmd("%s up -C %s" % (ud.basecmd, revflag),
                        d,
                        workdir=codir)
        else:
            logger.debug(2, "Unpack: extracting source to '" + codir + "'")
            runfetchcmd("%s archive -t files %s %s" %
                        (ud.basecmd, revflag, codir),
                        d,
                        workdir=ud.moddir)
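
A standalone sketch of the hg unpack flow above: initialise a working repository if none exists, pull from the local mirror, then force-update to the pinned revision. Names are illustrative.

import os
import subprocess

def hg_unpack(moddir, codir, revision):
    if not os.path.exists(os.path.join(codir, ".hg")):
        subprocess.run(["hg", "init", codir], check=True)
    # Pull from the already-fetched local mirror, then check out the rev
    subprocess.run(["hg", "pull", moddir], cwd=codir, check=True)
    subprocess.run(["hg", "up", "-C", "-r", revision], cwd=codir, check=True)
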
Example #52
    def localpath(self, url, urldata, d):
        """
        Return the local filename of a given url assuming a successful fetch.
        """
        path = urldata.decodedurl
        newpath = path
        if path[0] != "/":
            filespath = data.getVar('FILESPATH', d, True)
            if filespath:
                logger.debug(2, "Searching for %s in paths:\n    %s" % (path, "\n    ".join(filespath.split(":"))))
                newpath = bb.utils.which(filespath, path)
            if not newpath:
                filesdir = data.getVar('FILESDIR', d, True)
                if filesdir:
                    logger.debug(2, "Searching for %s in path: %s" % (path, filesdir))
                    newpath = os.path.join(filesdir, path)
            if not os.path.exists(newpath) and path.find("*") == -1:
                dldirfile = os.path.join(d.getVar("DL_DIR", True), path)
                logger.debug(2, "Defaulting to %s for %s" % (dldirfile, path))
                bb.utils.mkdirhier(os.path.dirname(dldirfile))
                return dldirfile
        return newpath
Example #53
    def download(self, ud, d):
        """Fetch url"""

        if os.access(
                os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir), '.bzr'),
                os.R_OK):
            bzrcmd = self._buildbzrcommand(ud, d, "update")
            logger.debug(1, "BZR Update %s", ud.url)
            bb.fetch2.check_network_access(d, bzrcmd, ud.url)
            os.chdir(os.path.join(ud.pkgdir, os.path.basename(ud.path)))
            runfetchcmd(bzrcmd, d)
        else:
            bb.utils.remove(
                os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir)), True)
            bzrcmd = self._buildbzrcommand(ud, d, "fetch")
            bb.fetch2.check_network_access(d, bzrcmd, ud.url)
            logger.debug(1, "BZR Checkout %s", ud.url)
            bb.utils.mkdirhier(ud.pkgdir)
            os.chdir(ud.pkgdir)
            logger.debug(1, "Running %s", bzrcmd)
            runfetchcmd(bzrcmd, d)

        os.chdir(ud.pkgdir)

        scmdata = ud.parm.get("scmdata", "")
        if scmdata == "keep":
            tar_flags = ""
        else:
            tar_flags = "--exclude='.bzr' --exclude='.bzrtags'"

        # tar them up to a defined filename
        runfetchcmd("tar %s -czf %s %s" %
                    (tar_flags, ud.localpath, os.path.basename(ud.pkgdir)),
                    d,
                    cleanup=[ud.localpath])
Example #54
    def download(self, ud, d, checkonly = False):
        """Fetch urls"""

        basecmd = d.getVar("FETCHCMD_wget", True) or "/usr/bin/env wget -t 2 -T 30 -nv --passive-ftp --no-check-certificate"

        if not checkonly and 'downloadfilename' in ud.parm:
            dldir = d.getVar("DL_DIR", True)
            bb.utils.mkdirhier(os.path.dirname(dldir + os.sep + ud.localfile))
            basecmd += " -O " + dldir + os.sep + ud.localfile

        if checkonly:
            fetchcmd = d.getVar("CHECKCOMMAND_wget", True) or d.expand(basecmd + " --spider '${URI}'")
        elif os.path.exists(ud.localpath):
            # file exists, but we didn't complete it, so try again
            fetchcmd = d.getVar("RESUMECOMMAND_wget", True) or d.expand(basecmd + " -c -P ${DL_DIR} '${URI}'")
        else:
            fetchcmd = d.getVar("FETCHCOMMAND_wget", True) or d.expand(basecmd + " -P ${DL_DIR} '${URI}'")

        uri = ud.url.split(";")[0]

        fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0])
        fetchcmd = fetchcmd.replace("${FILE}", ud.basename)
        if not checkonly:
            logger.info("fetch " + uri)
            logger.debug(2, "executing " + fetchcmd)
        bb.fetch2.check_network_access(d, fetchcmd)
        runfetchcmd(fetchcmd, d, quiet=checkonly)

        # Sanity check since wget can pretend it succeeded when it didn't
        # Also, this used to happen if sourceforge sent us to the mirror page
        if not os.path.exists(ud.localpath) and not checkonly:
            raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (uri, ud.localpath), uri)

        if not checkonly and os.path.getsize(ud.localpath) == 0:
            os.remove(ud.localpath)
            raise FetchError("The fetch of %s resulted in a zero size file?! Deleting and failing since this isn't right." % (uri), uri)

        return True
Example #55
File: git.py Project: thiagoss/poky
    def _sortable_buildindex_disabled(self, url, ud, d, rev):
        """
        Return a suitable buildindex for the revision specified. This is done by counting revisions
        using "git rev-list" which may or may not work in different circumstances.
        """

        cwd = os.getcwd()

        # Check if we have the rev already

        if not os.path.exists(ud.clonedir):
            logger.debug(1, "GIT repository for %s does not exist in %s. Downloading.", url, ud.clonedir)
            self.download(None, ud, d)
            if not os.path.exists(ud.clonedir):
                logger.error("GIT repository for %s does not exist in %s after download. "
                             "Cannot get sortable buildnumber, using old value", url, ud.clonedir)
                return None

        os.chdir(ud.clonedir)
        if not self._contains_ref(rev, d):
            self.download(None, ud, d)

        output = runfetchcmd("%s rev-list %s -- 2> /dev/null | wc -l" %
                             (ud.basecmd, rev),
                             d,
                             quiet=True)
        os.chdir(cwd)

        buildindex = "%s" % output.split()[0]
        logger.debug(
            1,
            "GIT repository for %s in %s is returning %s revisions in rev-list before %s",
            url, ud.clonedir, buildindex, rev)
        return buildindex
Example #56
    def localpaths(self, urldata, d):
        """
        Return the local filename of a given url assuming a successful fetch.
        """
        searched = []
        path = urldata.decodedurl
        newpath = path
        if path[0] == "/":
            return [path]
        filespath = d.getVar('FILESPATH')
        if filespath:
            logger.debug(2, "Searching for %s in paths:\n    %s" % (path, "\n    ".join(filespath.split(":"))))
            newpath, hist = bb.utils.which(filespath, path, history=True)
            searched.extend(hist)
        if not os.path.exists(newpath):
            dldirfile = os.path.join(d.getVar("DL_DIR"), path)
            logger.debug(2, "Defaulting to %s for %s" % (dldirfile, path))
            bb.utils.mkdirhier(os.path.dirname(dldirfile))
            searched.append(dldirfile)
            return searched
        return searched
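
A standalone sketch of the FILESPATH search above: walk a colon-separated list of directories, remember every candidate that was examined, and fall back to a DL_DIR path if the file is nowhere to be found. Names are illustrative.

import os

def local_paths(path, filespath, dl_dir):
    searched = []
    if os.path.isabs(path):
        return [path]
    found = ""
    for dirname in filespath.split(":"):
        candidate = os.path.join(dirname, path)
        searched.append(candidate)
        if os.path.exists(candidate):
            found = candidate
            break
    if not found:
        # Nothing matched anywhere on the search path
        searched.append(os.path.join(dl_dir, path))
    return searched
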
Example #57
    def _index_urldata_init(self, ud, d):
        """
        Sets up the download for the cargo index
        """

        # URL syntax is: crate-index://HOST/REV
        # break the URL apart by /
        parts = ud.url.split('/')
        if len(parts) != 4:
            raise bb.fetch2.ParameterError(
                "Invalid URL: Must be crate-index://HOST/REV", ud.url)

        # last field is the rev
        rev = parts[3]
        host = parts[2]

        if host == 'crates.io':
            host = 'github.com/rust-lang/crates.io-index'

        ud.url = "https://%s/archive/%s.tar.gz" % (host, rev)
        ud.parm['downloadfilename'] = 'cargo-index-%s.tar.gz' % rev
        ud.parm['name'] = "index"

        logger.debug(2, "Fetching crate index %s" % ud.url)
Example #58
    def _unpackdep(self, ud, pkg, data, destdir, dldir, d):
        file = data[pkg]['tgz']
        logger.debug(2, "file to extract is %s" % file)
        if file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'):
            cmd = 'tar xz --strip 1 --no-same-owner --warning=no-unknown-keyword -f %s/%s' % (dldir, file)
        else:
            bb.fatal("NPM package %s downloaded is not a tarball!" % file)

        # Change to subdir before executing command
        if not os.path.exists(destdir):
            os.makedirs(destdir)
        path = d.getVar('PATH', True)
        if path:
            cmd = "PATH=\"%s\" %s" % (path, cmd)
        bb.note("Unpacking %s to %s/" % (file, destdir))
        ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True, cwd=destdir)

        if ret != 0:
            raise UnpackError("Unpack command %s failed with return value %s" % (cmd, ret), ud.url)

        if 'deps' not in data[pkg]:
            return
        for dep in data[pkg]['deps']:
            self._unpackdep(ud, dep, data[pkg]['deps'], "%s/node_modules/%s" % (destdir, dep), dldir, d)
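
A standalone sketch of the recursion above: each dependency's tarball is extracted into node_modules/<name> beneath its parent, mirroring npm's on-disk layout. The dependency map shape follows the code above; names are illustrative.

import os
import subprocess

def unpack_tree(pkg, depmap, destdir, dldir):
    tgz = os.path.join(dldir, depmap[pkg]['tgz'])
    os.makedirs(destdir, exist_ok=True)
    # Strip the top-level "package/" directory that npm tarballs contain
    subprocess.run(["tar", "xz", "--strip-components=1", "--no-same-owner",
                    "-f", tgz], cwd=destdir, check=True)
    for dep in depmap[pkg].get('deps', {}):
        unpack_tree(dep, depmap[pkg]['deps'],
                    os.path.join(destdir, "node_modules", dep), dldir)
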