Exemplo n.º 1
0
 def _patch(self):
     """Apply each registered patch file to the unpacked package."""
     for patch_file in self._patches:
         utils.q_print("info: applying patch '%s'." % patch_file)
         # The trailing 'exit 0' forces success because 'patch -N'
         # returns status > 1 for patches that are already applied.
         cmd = "patch -N -p0 -s -d %s < %s; exit 0" % (self._package,
                                                       patch_file)
         utils.run(cmd)
Exemplo n.º 2
0
    def doc_stage(self, stage_dir):
        '''
        Copy the (sphinx) documentation into `stage_dir` and fix up
        intersphinx cross-references to be document root relative
        (see :issue:`4775`)

        :param stage_dir: the name of the directory into which a release
                          is staged before creating the tarball.
        '''
        sphinxdir = os.path.join("doc", "_build", "html")
        if not os.path.exists(sphinxdir):
            return
        utils.q_print("info: running the documentation stage process")
        dst = os.path.abspath(self._builddir).replace(self._askaproot,
                                                      stage_dir)
        relpath = os.path.relpath(self._askaproot)
        # Add a trailing separator (instead of recomputing the join as
        # before) so the replace below strips the whole path prefix.
        sphinxdir += os.path.sep
        dst = os.path.join(dst, self._builddir)
        if not os.path.exists(dst):
            os.makedirs(dst)
        utils.copy_tree(sphinxdir, dst, symlinks=True, overwrite=True)
        utils.q_print("info: updating documentation cross-references")

        # Rewrite absolute tree paths in the staged HTML so links work
        # relative to the document root.
        for dname, dirs, files in os.walk(dst):
            for fname in files:
                if not fname.endswith(".html"):
                    continue
                fpath = os.path.join(dname, fname)
                with open(fpath) as f:
                    s = f.read()
                s = s.replace(self._askaproot, relpath)
                s = s.replace(sphinxdir, "")
                with open(fpath, "w") as f:
                    f.write(s)
Exemplo n.º 3
0
    def _build(self):
        '''
        Parse the header files registered via add_points.  Parsing:

        1. extract XML tags from each header file
        2. parse the XML into a document tree
        3. expand out any nodes such as iocStructures or iocArrays
        '''
        source_files = []
        for header, library, epicsxml in self._adePoints:
            if library is None:
                filename = os.path.join(self._srcdir, header + '.h')
            else:
                incdir = os.path.join(self.dep.get_install_path(library),
                                      'include', library)
                filename = os.path.join(incdir, header + '.h')
            source_files.append((filename, library, epicsxml))

        headers = [entry[0] for entry in source_files]
        if self._adeParser.rebuild_needed(headers):
            self._adeParser.reset()
            for filename, library, epicsxml in source_files:
                self._adeParser.parse(filename, libName=library,
                                      epicsxml=epicsxml)
            # Generated files are registered for removal on clean.
            for generated in self._adeParser.generate_output():
                self.add_extra_clean_targets(generated)
                utils.q_print("generated " + generated)
Exemplo n.º 4
0
 def _get_archive_name(self, aname):
     """Return the archive file name for this package, or None.

     Tries the known archive suffixes against `aname` (falling back to
     the package name) and returns the first candidate that exists on
     disk.
     """
     base = aname or self._package
     for ext in (".tar.gz", ".tar.bz2", ".tgz", ".zip", ".jar"):
         candidate = base + ext
         if os.path.exists(candidate):
             return candidate
     # Only warn when an explicit archive name was expected in 3rdParty.
     if aname and os.path.abspath(os.curdir).find("3rdParty") > 0:
         utils.q_print("warn: Expected archive (%s) but none found." %
                       aname)
     return None
Exemplo n.º 5
0
    def _clean(self):
        """Clean the package build artefacts.

        For an ASKAP/EPICS application (``self._package == '.'``) the
        configure directory lives in the package root, unlike EPICS base
        and support modules whose tarballs expand into the pkgname
        directory.  To keep the clean command idempotent (Bug #2803) a
        RELEASE.<architecture> file must exist in the configure
        directory so 'make clean' can locate the EPICS base configure
        directory.
        """
        if utils.in_code_tree() and os.path.exists('setup.py'):
            utils.run("python setup.py clean")

        if self._package == '.':
            if self._epicsbase_configure_exists():
                # RELEASE.<arch> must exist in order to run make clean.
                # This prevents an error when running clean when the
                # package has already been cleaned.
                self._create_releasefile()
                if not os.path.exists(self._deps_file):
                    open(self._deps_file, 'w').write("")
                shutil.copy(self._releasefile,
                            os.path.join(self._package, "configure"))
                if self._oldreleasefile is not None:
                    shutil.copy(
                        self._releasefile,
                        os.path.join(self._package, "configure",
                                     self._oldreleasefile))
                curdir = os.path.abspath(os.curdir)
                # Enter the untarred package directory
                os.chdir(self._package)
                utils.run("make clean uninstall")
                os.chdir(curdir)
            else:
                # Fixed message: was "does not exists" and the missing
                # space made it render as "will notbe removed".
                utils.q_print(
                    "WARNING: EPICS base configure directory does "
                    "not exist (required by 'make clean'). "
                    "Some temporary files inside the package will not "
                    "be removed. Build EPICS base and re-run clean "
                    "target or delete temporary files manually.")
        # Execute base class method, which removes install directory and
        # additional clean targets
        Builder._clean(self)
Exemplo n.º 6
0
    def _install(self):
        """Install the python package, preferring setupegg.py when present."""
        setup_script = "setupegg.py" if os.path.exists("setupegg.py") \
            else "setup.py"
        cmd = "%s %s install" % (self._pycmd, setup_script)

        if utils.in_tools_tree():
            msg = "info: 'setuptools egg install', imports use default version."
        else:
            # Outside the Tools tree, install into the local prefix.
            msg = "info: 'setuptools in local 'install' directory."
            installdir = os.path.join(self._bdir, self._installdir)
            cmd = "%s --prefix %s" % (cmd, installdir)
            utils.create_python_tree(installdir)

        utils.run("%s %s" % (self._get_env(), cmd), self.nowarnings)
        utils.q_print(msg)
        self._version_install()
Exemplo n.º 7
0
    def _install(self):
        """Install the python package, preferring setupegg.py when present.

        In the Tools tree the egg installs into the default python;
        elsewhere it installs into the local 'install' directory.
        """
        if os.path.exists("setupegg.py"):
            cmd = "%s setupegg.py install" % self._pycmd
        else:
            cmd = "%s setup.py install" % self._pycmd

        # Use the shared helper instead of the ad-hoc
        # os.getcwd().find("Tools") > 0 check, for consistency with the
        # sibling _install implementation.
        if utils.in_tools_tree():
            msg = "info: 'setuptools egg install', imports use default version."
        else:
            msg = "info: 'setuptools in local 'install' directory."
            installdir = os.path.join(self._bdir, self._installdir)
            cmd += " --prefix %s" % installdir
            utils.create_python_tree(installdir)

        utils.run("%s %s" % (self._get_env(), cmd), self.nowarnings)
        utils.q_print(msg)
        self._version_install()
Exemplo n.º 8
0
 def _fetch_remote(self):
     """Download the remote archive into the current directory.

     No-op inside the Code tree, when no remote archive is configured,
     or when the archive file has already been downloaded.
     """
     if utils.in_code_tree() or self.remote_archive is None:
         return
     uitem = urllib2.urlparse.urlsplit(self.remote_archive)
     outfile = os.path.split(uitem.path)[-1]
     if os.path.isfile(outfile):
         return
     fullpath = self.remote_archive
     if not uitem.scheme:
         # A scheme-less archive name resolves against the remote root.
         root = os.environ["RBUILD_REMOTE_ARCHIVE"]
         fullpath = os.path.sep.join((root, self.remote_archive))
     remote = urllib2.urlopen(fullpath)
     utils.q_print("info: Fetching '{}'...".format(fullpath))
     with open(outfile, "wb") as of:
         of.write(remote.read())
     remote.close()
Exemplo n.º 9
0
    def _clean(self):
        """Clean the package build artefacts.

        For an ASKAP/EPICS application (``self._package == '.'``) the
        configure directory lives in the package root, unlike EPICS base
        and support modules whose tarballs expand into the pkgname
        directory.  To keep the clean command idempotent (Bug #2803) a
        RELEASE.<architecture> file must exist in the configure
        directory so 'make clean' can locate the EPICS base configure
        directory.
        """
        if utils.in_code_tree() and os.path.exists('setup.py'):
            utils.run("python setup.py clean")

        if self._package == '.':
            if self._epicsbase_configure_exists():
                # RELEASE.<arch> must exist in order to run make clean.
                # This prevents an error when running clean when the
                # package has already been cleaned.
                self._create_releasefile()
                if not os.path.exists(self._deps_file):
                    open(self._deps_file, 'w').write("")
                shutil.copy(self._releasefile,
                            os.path.join(self._package, "configure"))
                if self._oldreleasefile is not None:
                    shutil.copy(self._releasefile,
                                os.path.join(self._package,
                                             "configure", self._oldreleasefile))
                curdir = os.path.abspath(os.curdir)
                # Enter the untarred package directory
                os.chdir(self._package)
                utils.run("make clean uninstall")
                os.chdir(curdir)
            else:
                # Fixed message grammar ("does not exists" -> "does not
                # exist") and wrapped the previously overlong line.
                utils.q_print(
                    "WARNING: EPICS base configure directory does not "
                    "exist (required by 'make clean'). Some temporary "
                    "files inside the package will not be removed. "
                    "Build EPICS base and re-run clean target or "
                    "delete temporary files manually.")
        # Execute base class method, which removes install directory and
        # additional clean targets
        Builder._clean(self)
Exemplo n.º 10
0
 def _get_system_opts(self):
     '''
     Add in the default/platform/hostname options.

     Reads each existing environment file and appends its non-blank,
     non-comment lines to self._opts.
     '''
     envfiles = [
         '%s/env.default' % self._askaproot,
         '%s/env.%s' % (self._askaproot, self._system),
         'env.' + self._system,
         'env.' + self._hostname,
     ]
     for env in envfiles:
         if os.path.exists(env):
             utils.q_print("info: processing environment file: %s" % env)
             opts = []
             for line in open(env, "r"):
                 line = line.strip()
                 # Skip blank lines AND comment lines.  The previous
                 # condition ('not line or not line.startswith("#")')
                 # also appended empty strings for every blank line.
                 if line and not line.startswith("#"):
                     opts.append(line)
             self._opts += " " + " ".join(opts)
             utils.q_print("debug: using self._opts =>%s<=" % self._opts)
Exemplo n.º 11
0
    def add_install_iocboot(self, srcdir, appname=None):
        """Register an IOC boot directory for installation.

        When `appname` is omitted it is derived from the directory's
        basename (the part after a leading 'ioc' prefix).
        """
        srcdir = os.path.normpath(srcdir)  # strip any trailing "/" chars
        basename = os.path.basename(srcdir)

        if appname is None:
            appname = basename
            if basename.startswith('ioc'):
                # Everything after the 'ioc' prefix names the application.
                appname = basename[3:]
                if not appname:
                    utils.q_print('warn: IOC boot directory name is ioc and '
                                  'should be ioc<appname>')
                    utils.q_print('warn: please enter appname in '
                                  'add_install_iocboot() method')
                    appname = '@@@error: no appname@@@'

        if not os.path.isdir(srcdir):
            utils.q_print('warn: iocbootdir >%s< is not a directory.' % srcdir)
            return
        # Reject a basename that has already been registered.
        for registered in self._iocbootdirsdict:
            if basename == os.path.basename(registered):
                raise BuildError("IOC boot base directory name already "
                                 "exist")
        self._iocbootdirsdict[srcdir] = appname
Exemplo n.º 12
0
    def add_install_iocboot(self, srcdir, appname=None):
        """Register an IOC boot directory and its application name.

        `appname` defaults to the directory basename with any leading
        'ioc' prefix removed.
        """
        srcdir = os.path.normpath(srcdir)  # drops trailing "/" chars
        base = os.path.basename(srcdir)

        if appname is None:
            if not base.startswith('ioc'):
                appname = base
            else:
                appname = base[3:]  # text after the 'ioc' prefix
                if appname == '':
                    utils.q_print('warn: IOC boot directory name is ioc and '
                                  'should be ioc<appname>')
                    utils.q_print('warn: please enter appname in '
                                  'add_install_iocboot() method')
                    appname = '@@@error: no appname@@@'

        if os.path.isdir(srcdir):
            # A duplicate basename would clobber an earlier registration.
            clashes = [d for d in self._iocbootdirsdict
                       if os.path.basename(d) == base]
            if clashes:
                raise BuildError("IOC boot base directory name already "
                                 "exist")
            self._iocbootdirsdict[srcdir] = appname
        else:
            utils.q_print('warn: iocbootdir >%s< is not a directory.' % srcdir)
Exemplo n.º 13
0
    def _release(self):
        '''
        Create a tarball of the staged location.
        The rbuild program has -N option that stops the adding of virtualenv
        to the release.  This is for developers sharing purely python packages.
        In this case there is no point creating the release envfile and
        cannot update the easy-install.pth file.
        '''
        utils.q_print("info: running the release process")
        utils.q_print(
            "info: resetting python scripts to use '#!/usr/bin/env python'")
        bindir = os.path.join(self._stagedir, "bin")
        # Rewrite each staged script's shebang in place.
        for script in glob.glob("%s/*" % bindir):
            utils.runcmd(
                "sed -i -e '1s&^#\!.*/bin/python&#\!/usr/bin/env python&' %s"
                % script, shell=True)

        # Only a virtualenv release (stagedir contains bin/python) gets
        # an envfile and an easy-install.pth update.
        if os.path.exists(os.path.join(self._stagedir, "bin", "python")):
            self._create_release_envfile()
            self._update_easyinstall()
        parent, base = os.path.split(self._stagedir.rstrip("/"))
        parent = parent or '.'
        utils.q_print("info: creating %s.tgz" % self._releasename)
        utils.runcmd("tar -C %s -czf %s.tgz %s" % (parent, self._releasename,
                                                   base))
        utils.rmtree(self._stagedir)
Exemplo n.º 14
0
 def add_ioc_config(self, envfile=None, hostfile=None, iocname=None):
     """Add deployment configuration files for tuning the ioc.

     These are two csv files (<iocname>_env.csv and <iocname>_host.csv)
     containing the individual ioc environment and which IOCs run on
     which host.  They get installed into install/ioc-config.

     :raises IOError: if either csv file does not exist.
     """
     if not utils.in_code_tree():
         utils.q_print("warn: 'add_ioc_config' only works in Code tree")
         return
     if iocname is None:
         prefix = "" if self._appname.startswith("ioc") else "ioc"
         iocname = prefix + self._appname
     if envfile is None:
         envfile = os.path.join("files", iocname + "_env.csv")
     if not os.path.exists(envfile):
         raise IOError("'%s' doesn't exist" % envfile)
     if hostfile is None:
         hostfile = os.path.join("files", iocname + "_host.csv")
     if not os.path.exists(hostfile):
         raise IOError("'%s' doesn't exist" % hostfile)
     self._csv_files.append((envfile, hostfile, iocname))
Exemplo n.º 15
0
 def add_ioc_config(self, envfile=None, hostfile=None, iocname=None):
     """Register deployment csv files (<iocname>_env.csv and
     <iocname>_host.csv) describing the individual ioc environment and
     which IOCs run on which host.  Installed into install/ioc-config.

     :raises IOError: if a named or defaulted csv file is missing.
     """
     if not utils.in_code_tree():
         utils.q_print("warn: 'add_ioc_config' only works in Code tree")
         return
     if iocname is None:
         iocname = self._appname
         if not iocname.startswith("ioc"):
             iocname = "ioc" + iocname
     if envfile is None:
         envfile = os.path.join("files", "%s_env.csv" % iocname)
     if not os.path.exists(envfile):
         raise IOError("'%s' doesn't exist" % envfile)
     if hostfile is None:
         hostfile = os.path.join("files", "%s_host.csv" % iocname)
     if not os.path.exists(hostfile):
         raise IOError("'%s' doesn't exist" % hostfile)
     self._csv_files.append((envfile, hostfile, iocname))
Exemplo n.º 16
0
    def _stage(self):
        '''
        Stage packages (or dependent packages) into a temporary release
        tree as a prerequisite for creating a package tarball.

        From lib dirs only shared libraries and jars are staged (binary
        release); 'include' is skipped and each VERSION file is appended
        to a cumulative PACKAGE_VERSIONS file in the stage dir.
        '''
        utils.q_print("info: running the stage process")
        if not os.path.exists(self._stagedir):
            os.makedirs(self._stagedir)

        if os.path.exists(self._installdir):
            os.chdir(self._installdir)

            for src in glob.glob("*"):
                dst = os.path.join(self._stagedir, src)
                # we do a binary release
                if src == 'include':
                    continue
                if src in ['lib', 'lib64']:
                    # we do a binary release
                    pattern = '*.so*'
                    if sys.platform == 'darwin':
                        pattern = '*.dylib*'  # shared libs on macOS
                    utils.copy_tree(src,
                                    dst,
                                    symlinks=True,
                                    pattern=pattern,
                                    overwrite=True)
                    # Java artefacts are staged alongside the shared libs.
                    utils.copy_tree(src,
                                    dst,
                                    symlinks=True,
                                    pattern='*.jar*',
                                    overwrite=True)
                elif src == "VERSION":
                    # Accumulate every package's VERSION into one file.
                    with open(os.path.join(self._stagedir, "PACKAGE_VERSIONS"),
                              "a+") as f:
                        f.write(open(src).read())
                else:
                    utils.copy_tree(src, dst, overwrite=True)

            os.chdir("..")
            bindir = os.path.join(self._stagedir, "bin")
            # Remove auto-generated ASKAP shell wrapper scripts from the
            # staged bin directory.
            for shfile in glob.glob("%s/*.sh" % bindir):
                data = open(shfile).read()
                if data.find("ASKAP auto-generated") > 0:
                    utils.q_print(
                        "info: removing ASKAP shell wrapper file %s" % shfile)
                    os.remove(shfile)
        else:  # may be a document only package.
            utils.q_print('warn: %s does not exist.' % self._installdir)

        self.doc_stage(os.path.join(self._stagedir, "sphinxdoc"))
Exemplo n.º 17
0
    def get_parallel_opt(self, j='auto', max_j=16):
        '''
        Ask for parallel build. Default is 'auto' which scales with number
        of cores in a machine + 1.

        :param j:     The number of build threads (default is 'auto').
        :param max_j: The maximum number of build threads (default is '16').
        :returns: a build option string such as " -j4", or "" for a
                  serial build.
        '''
        if not self.parallel:
            return ""

        # A command-line 'j=<n>' option overrides the argument.
        for opt in self._comopts:
            if opt.startswith('j='):
                try:
                    return " -j%d" % abs(int(opt.split('=')[-1]))
                except ValueError:
                    # Previously a non-numeric value (e.g. 'j=x') raised
                    # ValueError here; fall back like the argument path.
                    utils.q_print(
                        "warn: non-numeric j value given - setting to 2")
                    return " -j2"

        if j in ['auto', '', None, 0]:
            j = utils.number_of_cpus() + 1
        else:
            try:
                j = abs(int(j))
            except ValueError:
                utils.q_print("warn: non-numeric j value given - setting to 2")
                j = 2
        try:
            max_j = abs(int(max_j))
        except ValueError:
            utils.q_print(
                "warn: non-integer max_j value given - setting to 16")
            max_j = 16
        if j > max_j:  # sanity check
            utils.q_print("warn: j value %d is greater than max_j value %d" %
                          (j, max_j))
            j = max_j
        if j > 1:
            return " -j%d" % j
        return ""
Exemplo n.º 18
0
 def q_print(self, msg):
     """Print `msg` via utils.q_print unless this builder is silenced."""
     if not self._silent:
         utils.q_print(msg)
Exemplo n.º 19
0
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA.
#
# @author Malte Marquarding <*****@*****.**>
#
import os, re
import sys

from askapdev.rbuild.dependencies.depends import Depends
from askapdev.rbuild.debian import *
from askapdev.rbuild.utils import q_print

if __name__ == "__main__":
    # Debianise a package and all of its ordered dependencies.
    if len(sys.argv) < 2:
        print "Usage: debianise.py <package_path or . >"
        sys.exit(1)
    rdir = sys.argv[1]
    a_root = os.getenv('ASKAP_ROOT')
    # The package path relative to the ASKAP tree root.
    myself = os.path.relpath(rdir, a_root)
    d = Depends(rdir)
    # Process dependencies first, then the package itself.
    for dep in d.ordered_dependencies+[myself]:
        # A NO_DEBIAN marker file opts a package out of debianisation.
        nodeb = os.path.join(a_root, dep, "NO_DEBIAN")
        if os.path.exists(nodeb):
            q_print("Ignoring package '{0}' which set to NO_DEBIAN".format(dep))
            continue
        name, version = get_versioned_name(dep)
        add_debian(dep)
    
Exemplo n.º 20
0
    def create_dependency_graph(self, pkg_path, depfilename=DEPFILENAME,
                                use_repo='second', recursion=True):
        '''
        Adds new edges to the self._graph object from a packages
        dependency.default file.
        From the dependency file, pull out from each line:
        * alias,
        * relative path to dependent package,
        * optional libraries.
        If edge already in the graph skip, else add the edge (package)
        to the graph via its add_edge() method.

        :param pkg_path:    package path relative to the tree root.
        :param depfilename: name of the dependency file to read.
        :param use_repo:    one of ['first', 'second', 'never']
          'first'  - always check repo and if not found skip
          'second' - check work area if it exists else if not found try repo
          'never'  - check work area if not found skip
        :param recursion: process each dependency's dependencies too.
        '''
        data = ''
        rel_depfile = os.path.join(pkg_path, depfilename)
        work_pkgdir = os.path.join(ASKAP_ROOT, pkg_path)
        work_depfile = os.path.join(ASKAP_ROOT, rel_depfile)
        repo_depfile = os.path.join(ASKAP_URL, rel_depfile)

        if use_repo in ['first', 'second'] and ASKAP_URL == '':
            utils.q_print('warn: depends module unable to determine ASKAP_ROOT repository URL.')
            use_repo = 'never'

        # Get contents of dependency.default file if it exists.
        # Can get from work area or from repository depending on use_repo
        # setting and whether work area and/or repo availability.
        #
        if use_repo == 'second' and os.path.exists(work_pkgdir):
            if os.path.exists(work_depfile):
                fh = open(work_depfile)
                data = fh.read()
                fh.close()
        elif use_repo != 'never':
            cmd = 'svn cat %s' % repo_depfile
            stdout, stderr, returncode = utils.runcmd(cmd)
            if returncode == 0:
                data = stdout

        # Process the dependency.default contents.
        # Format of dependencies.default line is:
        # <alias> = <relpath to dep pkg> [;] [<lib1> <lib2> ... <libN>]
        #
        for line in data.split('\n'):
            line = line.strip()
            if not line or line.startswith('#'):
                continue
            alias, pkg_spec = line.split('=', 1)
            pkg_spec = pkg_spec.split(';', 1)
            dep_pkg_path = pkg_spec[0].strip()
            if len(pkg_spec) == 2:
                dep_pkg_libs = pkg_spec[1].strip().split()
            else:
                dep_pkg_libs = []
            # Check if already processed rootpkgdir.
            # Need edge check not node check because the node may already
            # exist as dependency but have not processed its
            # dependencies yet.
            if self._graph.has_edge(pkg_path, dep_pkg_path):
                continue  # Already analyzed this node.
            else:
                self._graph.add_edge(pkg_path, dep_pkg_path,
                                     libs=dep_pkg_libs)
                if recursion:
                    # Propagate the caller's settings; previously the
                    # recursive call silently reverted to the defaults.
                    self.create_dependency_graph(dep_pkg_path,
                                                 depfilename=depfilename,
                                                 use_repo=use_repo,
                                                 recursion=recursion)
Exemplo n.º 21
0
 def _deploy(self):
     """Deprecated build target: print an error and terminate."""
     utils.q_print("error: deploy target is deprecated.")
     raise SystemExit(1)
Exemplo n.º 22
0
 def _clean(self):
     """Clean via scons; failures fall through to the generic clean."""
     try:
         utils.run_scons(extraargs="--clean . tidy",
                         version=self.sconsversion)
     except Exception as inst:
         # 'except E as e' (valid since Python 2.6) replaces the
         # Python-2-only 'except E, e' comma form.
         utils.q_print("info: could not run %s. Will try our clean." % inst)
Exemplo n.º 23
0
 def q_print(self, msg):
     """Emit `msg` through utils.q_print unless silenced."""
     if not self._silent:
         utils.q_print(msg)
Exemplo n.º 24
0
 def build(self):
     '''
     Run the complete build process, dispatching on the command options
     in self._comopts (bclean, clean, depends, install, test, functest,
     signature, stage, release, deploy, format, doc).

     XXX rbuild script only allows single build targets to be specified
         but 'python build.py <targets>' allows for multiple targets
         to be specified.
     '''
     # get tar file name in case of a remote archive
     self._determine_tarfile()
     if "bclean" in self._comopts:
         self._build_clean()
     if "clean" in self._comopts:
         self._clean()  # will remove .packagesig
     if "depends" in self._comopts:
         pass  # handled by rbuild but for completeness list here.
     if "install" in self._comopts:
         # Nothing to do when the package signature is current.
         if self.is_up_to_date():
             return
         # Run clean before building to get rid of possible old artifacts
         # in installdir.
         # The do_clean flag should only be set False for second builders.
         # In Code just remove the installdir rather than running _clean()
         # so rebuilds will be fast and handled by underlying build
         # system e.g. scons
         if self.do_clean:
             if utils.in_code_tree():
                 if os.path.exists(self._installdir):
                     utils.q_print("info:    pre-build removal of %s dir." %
                                   self._installdir)
                     utils.rmtree(self._installdir)
             else:
                 utils.q_print("info:    pre-build clean.")
                 self._clean()
         # Full fetch/unpack/patch/configure/build/install pipeline.
         self._get_system_opts()
         self._fetch_remote()
         self._unpack()
         self._copy(self._files, self._package)
         self._patch()
         self._replace()
         self._precommand()
         os.chdir(self._builddir)
         self._configure()
         self._build()
         self._install()
         os.chdir(self._bdir)
         # Post-install bookkeeping, back in the build directory.
         self._copy_to_install()
         self._lib64_symlink()
         self._create_info()
         self._create_init()
         self._postcommand()
         self._signature()
         self._save_clean_targets()
     # Unit and functional tests only run in the Code tree.
     if "test" in self._comopts and utils.in_code_tree():
         os.chdir(self._package)
         self._test()
         os.chdir(self._bdir)
     if "functest" in self._comopts and utils.in_code_tree():
         os.chdir(self._package)
         self._functest()
         os.chdir(self._bdir)
     if "signature" in self._comopts:
         self._signature()
     if "stage" in self._comopts:
         self._stage()
     if "release" in self._comopts:
         self._release()
     if "deploy" in self._comopts:
         self._deploy()
     if "format" in self._comopts:
         if utils.in_code_tree():
             utils.format_src_code()
         else:
             utils.q_print("warn: format only applies in the Code tree.")
     if "doc" in self._comopts:
         os.chdir(self._package)
         self._doc()
         os.chdir(self._bdir)
Exemplo n.º 25
0
    def create_dependency_graph(self,
                                pkg_path,
                                depfilename=DEPFILENAME,
                                use_repo='second',
                                recursion=True):
        '''
        Adds new edges to the self._graph object from a packages
        dependency.default file.
        From the dependency file, pull out from each line:
        * alias,
        * relative path to dependent package,
        * optional libraries.
        If edge already in the graph skip, else add the edge (package)
        to the graph via its add_edge() method.

        :param pkg_path:    package path relative to the tree root.
        :param depfilename: name of the dependency file to read.
        :param use_repo:    one of ['first', 'second', 'never']
          'first'  - always check repo and if not found skip
          'second' - check work area if it exists else if not found try repo
          'never'  - check work area if not found skip
        :param recursion: process each dependency's dependencies too.
        '''
        data = ''
        rel_depfile = os.path.join(pkg_path, depfilename)
        work_pkgdir = os.path.join(ASKAP_ROOT, pkg_path)
        work_depfile = os.path.join(ASKAP_ROOT, rel_depfile)
        repo_depfile = os.path.join(ASKAP_URL, rel_depfile)

        if use_repo in ['first', 'second'] and ASKAP_URL == '':
            utils.q_print(
                'warn: depends module unable to determine ASKAP_ROOT repository URL.'
            )
            use_repo = 'never'

        # Get contents of dependency.default file if it exists.
        # Can get from work area or from repository depending on use_repo
        # setting and whether work area and/or repo availability.
        #
        if use_repo == 'second' and os.path.exists(work_pkgdir):
            if os.path.exists(work_depfile):
                fh = open(work_depfile)
                data = fh.read()
                fh.close()
        elif use_repo != 'never':
            cmd = 'svn cat %s' % repo_depfile
            stdout, stderr, returncode = utils.runcmd(cmd)
            if returncode == 0:
                data = stdout

        # Process the dependency.default contents.
        # Format of dependencies.default line is:
        # <alias> = <relpath to dep pkg> [;] [<lib1> <lib2> ... <libN>]
        #
        for line in data.split('\n'):
            line = line.strip()
            if not line or line.startswith('#'):
                continue
            alias, pkg_spec = line.split('=', 1)
            pkg_spec = pkg_spec.split(';', 1)
            dep_pkg_path = pkg_spec[0].strip()
            if len(pkg_spec) == 2:
                dep_pkg_libs = pkg_spec[1].strip().split()
            else:
                dep_pkg_libs = []
            # Check if already processed rootpkgdir.
            # Need edge check not node check because the node may already
            # exist as dependency but have not processed its
            # dependencies yet.
            if self._graph.has_edge(pkg_path, dep_pkg_path):
                continue  # Already analyzed this node.
            else:
                self._graph.add_edge(pkg_path, dep_pkg_path, libs=dep_pkg_libs)
                if recursion:
                    # Propagate the caller's settings; previously the
                    # recursive call silently reverted to the defaults.
                    self.create_dependency_graph(dep_pkg_path,
                                                 depfilename=depfilename,
                                                 use_repo=use_repo,
                                                 recursion=recursion)
Exemplo n.º 26
0
 def _clean(self):
     """Clean via scons; failures fall through to the generic clean."""
     try:
         utils.run_scons(extraargs="--clean . tidy",
                         version=self.sconsversion)
     except Exception as inst:
         # 'except E as e' (valid since Python 2.6) replaces the
         # Python-2-only 'except E, e' comma form.
         utils.q_print("info: could not run %s. Will try our clean." % inst)