Example #1
 def read_conf(self):
     self.conf = dict()
     for line in open(self.arch_path('include', 'petscconf.h')):
         if line.startswith('#define '):
             define = line[len('#define '):]
             space = define.find(' ')
             key = define[:space]
             val = define[space+1:]
             self.conf[key] = val
     self.conf.update(parse_makefile(self.arch_path('lib','petsc','conf', 'petscvariables')))
     # allow parsing package additional configurations (if any)
     if self.pkg_name != 'petsc':
         f = self.pkg_arch_path('include', self.pkg_name + 'conf.h')
         if os.path.isfile(f):
             for line in open(f):
                 if line.startswith('#define '):
                     define = line[len('#define '):]
                     space = define.find(' ')
                     key = define[:space]
                     val = define[space+1:]
                     self.conf[key] = val
         f = self.pkg_arch_path('lib', self.pkg_name, 'conf', self.pkg_name + 'variables')
         if os.path.isfile(f):
             self.conf.update(parse_makefile(f))
     self.have_fortran = int(self.conf.get('PETSC_HAVE_FORTRAN', '0'))
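For reference, a minimal self-contained sketch (not one of the collected examples, and assuming a Python that still ships distutils) of what distutils.sysconfig.parse_makefile() returns: a dict of make variables, with $(VAR)/${VAR} references expanded against the other variables in the same file.

import os
import tempfile
from distutils.sysconfig import parse_makefile

# Write a tiny makefile-style variables file and parse it.
with tempfile.NamedTemporaryFile('w', suffix='.mk', delete=False) as fd:
    fd.write("PREFIX = /opt/petsc\n")
    fd.write("LIBDIR = $(PREFIX)/lib\n")

conf = parse_makefile(fd.name)
print(conf['LIBDIR'])    # -> /opt/petsc/lib
os.unlink(fd.name)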
Example #2
def parse_conf(ctx, conf_path, lib_dirs, libs):
    vars = {}
    sysconfig.parse_makefile(conf_path, vars)
    flag_dict = ctx.env.ParseFlags(vars['PACKAGES_LIBS'])
    lib_dirs.extend(flag_dict['LIBPATH'])
    for ii in range(len(libs)):
        libs[ii].extend(flag_dict['LIBS'])
Example #3
 def __init__(self, petsc_dir=None, petsc_arch=None, verbose=False):
     if petsc_dir is None:
         petsc_dir = os.environ.get("PETSC_DIR")
         if petsc_dir is None:
             try:
                 petsc_dir = parse_makefile(os.path.join("lib", "petsc", "conf", "petscvariables")).get("PETSC_DIR")
             finally:
                 if petsc_dir is None:
                     raise RuntimeError("Could not determine PETSC_DIR, please set in environment")
     if petsc_arch is None:
         petsc_arch = os.environ.get("PETSC_ARCH")
         if petsc_arch is None:
             try:
                 petsc_arch = parse_makefile(os.path.join(petsc_dir, "lib", "petsc", "conf", "petscvariables")).get(
                     "PETSC_ARCH"
                 )
             finally:
                 if petsc_arch is None:
                     raise RuntimeError("Could not determine PETSC_ARCH, please set in environment")
     self.petsc_dir = petsc_dir
     self.petsc_arch = petsc_arch
     self.read_conf()
     logging.basicConfig(filename=self.arch_path("lib", "petsc", "conf", "gmake.log"), level=logging.DEBUG)
     self.log = logging.getLogger("gmakegen")
     self.mistakes = Mistakes(debuglogger(self.log), verbose=verbose)
     self.gendeps = []
Example #4
def WriteNinja(petsc):
    conf = dict()
    parse_makefile(os.path.join(petsc.petsc_dir, 'lib', 'petsc','conf', 'variables'), conf)
    parse_makefile(petsc.arch_path('lib','petsc','conf', 'petscvariables'), conf)
    build_ninja = petsc.arch_path('build.ninja')
    fd = open(build_ninja, 'w')
    fd.write('objdir = obj-ninja\n')
    fd.write('libdir = lib\n')
    fd.write('c_compile = %(PCC)s\n' % conf)
    fd.write('c_flags = %(PETSC_CC_INCLUDES)s %(PCC_FLAGS)s %(CCPPFLAGS)s\n' % conf)
    fd.write('c_link = %(PCC_LINKER)s\n' % conf)
    fd.write('c_link_flags = %(PCC_LINKER_FLAGS)s\n' % conf)
    if petsc.have_fortran:
        fd.write('f_compile = %(FC)s\n' % conf)
        fd.write('f_flags = %(PETSC_FC_INCLUDES)s %(FC_FLAGS)s %(FCPPFLAGS)s\n' % conf)
        fd.write('f_link = %(FC_LINKER)s\n' % conf)
        fd.write('f_link_flags = %(FC_LINKER_FLAGS)s\n' % conf)
    fd.write('petsc_external_lib = %(PETSC_EXTERNAL_LIB_BASIC)s\n' % conf)
    fd.write('python = %(PYTHON)s\n' % conf)
    fd.write('\n')
    fd.write('rule C_COMPILE\n'
             '  command = $c_compile -MMD -MF $out.d $c_flags -c $in -o $out\n'
             '  description = CC $out\n'
             '  depfile = $out.d\n'
             # '  deps = gcc\n') # 'gcc' is default, 'msvc' only recognized by newer versions of ninja
             '\n')
    fd.write('rule C_LINK_SHARED\n'
             '  command = $c_link $c_link_flags -shared -o $out $in $petsc_external_lib\n'
             '  description = CLINK_SHARED $out\n'
             '\n')
    if petsc.have_fortran:
        fd.write('rule F_COMPILE\n'
                 '  command = $f_compile -MMD -MF $out.d $f_flags -c $in -o $out\n'
                 '  description = FC $out\n'
                 '  depfile = $out.d\n'
                 '\n')
        fd.write('rule F_LINK_SHARED\n'
                 '  command = $f_link $f_link_flags -shared -o $out $in $petsc_external_lib\n'
                 '  description = FLINK_SHARED $out\n'
                 '\n')
    fd.write('rule GEN_NINJA\n'
             '  command = $python $in --output=ninja\n'
             '  generator = 1\n'
             '\n')
    petsc.gen_ninja(fd)
    fd.write('\n')
    fd.write('build %s : GEN_NINJA | %s %s %s %s\n' % (build_ninja,
                                                       os.path.abspath(__file__),
                                                       os.path.join(petsc.petsc_dir, 'lib','petsc','conf', 'variables'),
                                                       petsc.arch_path('lib','petsc','conf', 'petscvariables'),
                                                       ' '.join(os.path.join(petsc.pkg_dir, dep) for dep in petsc.gendeps)))
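WriteNinja() above leans on the fact that parse_makefile() accepts an optional dictionary as its second argument, updating it in place and returning it; that is what lets the global 'variables' file and the per-arch 'petscvariables' file be merged into a single conf dict. A minimal sketch of that pattern using throwaway files (the variable names are only illustrative):

import os
import tempfile
from distutils.sysconfig import parse_makefile

conf = {}
with tempfile.NamedTemporaryFile('w', suffix='.mk', delete=False) as base:
    base.write("PCC = mpicc\n")
with tempfile.NamedTemporaryFile('w', suffix='.mk', delete=False) as arch:
    arch.write("PCC_FLAGS = -O2 -g\n")

# Both calls fill the same dict; later files add to (or override) earlier ones.
parse_makefile(base.name, conf)
parse_makefile(arch.name, conf)
print(conf['PCC'], conf['PCC_FLAGS'])    # -> mpicc -O2 -g
os.unlink(base.name)
os.unlink(arch.name)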
Example #5
def pkgsources(pkg):
  '''
  Walks the source tree associated with 'pkg', analyzes the conditional written into the makefiles,
  and returns a list of sources associated with each unique conditional (as a dictionary).
  '''
  from distutils.sysconfig import parse_makefile
  autodirs = set('ftn-auto ftn-custom f90-custom'.split()) # Automatically recurse into these, if they exist
  skipdirs = set('examples benchmarks'.split())            # Skip these during the build
  def compareDirLists(mdirs,dirs):
    smdirs = set(mdirs)
    sdirs  = set(dirs).difference(autodirs)
    if not smdirs.issubset(sdirs):
      MISTAKES.append('Makefile contains directory not on filesystem: %s: %r' % (root, sorted(smdirs - sdirs)))
    if not VERBOSE: return
    if smdirs != sdirs:
      from sys import stderr
      print >>stderr, ('Directory mismatch at %s:\n\t%s: %r\n\t%s: %r\n\t%s: %r'
                       % (root,
                          'in makefile   ',sorted(smdirs),
                          'on filesystem ',sorted(sdirs),
                          'symmetric diff',sorted(smdirs.symmetric_difference(sdirs))))
  def compareSourceLists(msources, files):
    smsources = set(msources)
    ssources  = set(f for f in files if os.path.splitext(f)[1] in ['.c', '.cxx', '.cc', '.cu', '.cpp', '.F'])
    if not smsources.issubset(ssources):
      MISTAKES.append('Makefile contains file not on filesystem: %s: %r' % (root, sorted(smsources - ssources)))
    if not VERBOSE: return
    if smsources != ssources:
      from sys import stderr
      print >>stderr, ('Source mismatch at %s:\n\t%s: %r\n\t%s: %r\n\t%s: %r'
                       % (root,
                          'in makefile   ',sorted(smsources),
                          'on filesystem ',sorted(ssources),
                          'symmetric diff',sorted(smsources.symmetric_difference(ssources))))
  allconditions = defaultdict(set)
  sources = defaultdict(deque)
  for root,dirs,files in os.walk(os.path.join('src',pkg)):
    conditions = allconditions[os.path.dirname(root)].copy()
    makefile = os.path.join(root,'makefile')
    if not os.path.exists(makefile):
      continue
    makevars = parse_makefile(makefile)
    mdirs = makevars.get('DIRS','').split() # Directories specified in the makefile
    compareDirLists(mdirs,dirs) # diagnostic output to find unused directories
    candidates = set(mdirs).union(autodirs).difference(skipdirs)
    dirs[:] = list(candidates.intersection(dirs))
    lines = open(makefile)
    def stripsplit(line):
      return filter(lambda c: c!="'", line[len('#requires'):]).split()
    conditions.update(set(tuple(stripsplit(line)) for line in lines if line.startswith('#requires')))
    lines.close()
    def relpath(filename):
      return os.path.join(root,filename)
    sourcecu = makevars.get('SOURCECU','').split()
    sourcec = makevars.get('SOURCEC','').split()
    sourcef = makevars.get('SOURCEF','').split()
    compareSourceLists(sourcec+sourcef+sourcecu, files) # Diagnostic output about unused source files
    sources[repr(sorted(conditions))].extend(relpath(f) for f in sourcec + sourcef + sourcecu)
    allconditions[root] = conditions
  return sources
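The inline stripsplit() helper above relies on Python 2's filter() returning a string, so that .split() works on its result; under Python 3, filter() returns an iterator and that call would fail. A hedged Python 3 equivalent (the quoted two-token '#requires' line shown is only an illustration of the format these makefiles use):

def stripsplit(line):
    # Drop the '#requires' prefix and any single quotes, then split on whitespace.
    return line[len('#requires'):].replace("'", "").split()

print(stripsplit("#requires 'define' 'PETSC_HAVE_MPI'"))
# -> ['define', 'PETSC_HAVE_MPI']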
Example #6
def pkgsources(pkg, mistakes):
  '''
  Walks the source tree associated with 'pkg', analyzes the conditional written into the makefiles,
  and returns a list of sources associated with each unique conditional (as a dictionary).
  '''
  from distutils.sysconfig import parse_makefile
  allconditions = defaultdict(set)
  sources = defaultdict(deque)
  for root,dirs,files in os.walk(os.path.join('src',pkg)):
    conditions = allconditions[os.path.dirname(root)].copy()
    makefile = os.path.join(root,'makefile')
    if not os.path.exists(makefile):
      continue
    makevars = parse_makefile(makefile)
    mdirs = makevars.get('DIRS','').split() # Directories specified in the makefile
    mistakes.compareDirLists(root,mdirs,dirs) # diagnostic output to find unused directories
    candidates = set(mdirs).union(AUTODIRS).difference(SKIPDIRS)
    dirs[:] = list(candidates.intersection(dirs))
    lines = open(makefile)
    conditions.update(set(tuple(stripsplit(line)) for line in lines if line.startswith('#requires')))
    lines.close()
    def relpath(filename):
      return os.path.join(root,filename)
    sourcecu = makevars.get('SOURCECU','').split()
    sourcec = makevars.get('SOURCEC','').split()
    sourcecxx = makevars.get('SOURCECXX','').split()
    sourcef = makevars.get('SOURCEF','').split()
    mistakes.compareSourceLists(root,sourcec+sourcecxx+sourcef+sourcecu, files) # Diagnostic output about unused source files
    sources[repr(sorted(conditions))].extend(relpath(f) for f in sourcec + sourcecxx + sourcef + sourcecu)
    allconditions[root] = conditions
  return sources
Example #7
    def gen_pkg(self, pkg):
        pkgsrcs = dict()
        for lang in LANGS:
            pkgsrcs[lang] = []
        for root, dirs, files in os.walk(os.path.join(self.petsc_dir, "src", pkg)):
            makefile = os.path.join(root, "makefile")
            if not os.path.exists(makefile):
                dirs[:] = []
                continue
            mklines = open(makefile)
            conditions = set(tuple(stripsplit(line)) for line in mklines if line.startswith("#requires"))
            mklines.close()
            if not all(self.inconf(key, val) for key, val in conditions):
                dirs[:] = []
                continue
            makevars = parse_makefile(makefile)
            mdirs = makevars.get("DIRS", "").split()  # Directories specified in the makefile
            self.mistakes.compareDirLists(root, mdirs, dirs)  # diagnostic output to find unused directories
            candidates = set(mdirs).union(AUTODIRS).difference(SKIPDIRS)
            dirs[:] = list(candidates.intersection(dirs))
            allsource = []

            def mkrel(src):
                return self.relpath(root, src)

            source = self.get_sources(makevars)
            for lang, s in source.items():
                pkgsrcs[lang] += map(mkrel, s)
                allsource += s
            self.mistakes.compareSourceLists(root, allsource, files)  # Diagnostic output about unused source files
            self.gendeps.append(self.relpath(root, "makefile"))
        return pkgsrcs
Example #8
def read_setup_file(filename):
    from distutils.sysconfig import \
         parse_makefile, expand_makefile_vars, _variable_rx
    from distutils.text_file import TextFile
    from distutils.util import split_quoted

    # First pass over the file to gather "VAR = VALUE" assignments.
    vars = parse_makefile(filename)

    # Second pass to gobble up the real content: lines of the form
    #   <module> ... [<sourcefile> ...] [<cpparg> ...] [<library> ...]
    file = TextFile(filename,
                    strip_comments=1, skip_blanks=1, join_lines=1,
                    lstrip_ws=1, rstrip_ws=1)
    extensions = []

    while 1:
        line = file.readline()
        if line is None:                # eof
            break
        if _variable_rx.match(line):    # VAR=VALUE, handled in first pass
            continue

        if line[0] == line[-1] == "*":
            file.warn("'%s' lines not handled yet" % line)
            continue

        #print "original line: " + line
        line = expand_makefile_vars(line, vars)
        words = split_quoted(line)
        #print "expanded line: " + line

        # NB. this parses a slightly different syntax than the old
        # makesetup script: here, there must be exactly one extension per
        # line, and it must be the first word of the line.  I have no idea
        # why the old syntax supported multiple extensions per line, as
        # they all wind up being the same.

        module = words[0]
        ext = Extension(module, [])
        append_next_word = None

        for word in words[1:]:
            if append_next_word is not None:
                append_next_word.append(word)
                append_next_word = None
                continue

            suffix = os.path.splitext(word)[1]
            switch = word[0:2] ; value = word[2:]

            if suffix in (".c", ".cc", ".cpp", ".cxx", ".c++", ".m", ".mm"):
                # hmm, should we do something about C vs. C++ sources?
                # or leave it up to the CCompiler implementation to
                # worry about?
                ext.sources.append(word)
            elif switch == "-I":
                ext.include_dirs.append(value)
            elif switch == "-D":
                equals = value.find("=")
                if equals == -1:        # bare "-DFOO" -- no value
                    ext.define_macros.append((value, None))
                else:                   # "-DFOO=blah"
                    ext.define_macros.append((value[0:equals],
                                              value[equals+2:]))
            elif switch == "-U":
                ext.undef_macros.append(value)
            elif switch == "-C":        # only here 'cause makesetup has it!
                ext.extra_compile_args.append(word)
            elif switch == "-l":
                ext.libraries.append(value)
            elif switch == "-L":
                ext.library_dirs.append(value)
            elif switch == "-R":
                ext.runtime_library_dirs.append(value)
            elif word == "-rpath":
                append_next_word = ext.runtime_library_dirs
            elif word == "-Xlinker":
                append_next_word = ext.extra_link_args
            elif word == "-Xcompiler":
                append_next_word = ext.extra_compile_args
            elif switch == "-u":
                ext.extra_link_args.append(word)
                if not value:
                    append_next_word = ext.extra_link_args
            elif suffix in (".a", ".so", ".sl", ".o", ".dylib"):
                # NB. a really faithful emulation of makesetup would
                # append a .o file to extra_objects only if it
                # had a slash in it; otherwise, it would s/.o/.c/
                # and append it to sources.  Hmmmm.
                ext.extra_objects.append(word)
            else:
                file.warn("unrecognized argument '%s'" % word)

        extensions.append(ext)

    return extensions
Example #9
def _guess_toolchain_kwargs_from_python_config():
    def strip_prefix(pfx, value):
        if value.startswith(pfx):
            return value[len(pfx):]
        else:
            return value

    from distutils.sysconfig import parse_makefile, get_makefile_filename
    make_vars = parse_makefile(get_makefile_filename())

    cc_cmdline = (make_vars["CXX"].split()
            + make_vars["CFLAGS"].split()
            + make_vars["CFLAGSFORSHARED"].split())
    object_suffix = '.' + make_vars['MODOBJS'].split()[0].split('.')[1]
    from os.path import join

    cflags = []
    defines = []
    undefines = []

    for cflag in cc_cmdline[1:]:
        if cflag.startswith("-D"):
            defines.append(cflag[2:])
        elif cflag.startswith("-U"):
            undefines.append(cflag[2:])
        else:
            cflags.append(cflag)

    # on Mac OS X, "libraries" can also be "frameworks"
    libraries = []
    for lib in make_vars["LIBS"].split():
        if lib.startswith("-l"):
            libraries.append(strip_prefix("-l", lib))
        else:
            cflags.append(lib)

    # need to add a workaround for bug at
    # http://bugs.python.org/issue3588
    if "PYTHONFRAMEWORKPREFIX" in make_vars:
        cflags.append("-F"+make_vars["PYTHONFRAMEWORKPREFIX"])

    return dict(
            cc=cc_cmdline[0],
            ld=make_vars["LDSHARED"].split()[0],
            cflags=cflags,
            ldflags=(
                make_vars["LDSHARED"].split()[1:]
                + make_vars["LINKFORSHARED"].split()
                ),
            libraries=libraries,
            include_dirs=[
                make_vars["INCLUDEPY"]
                ],
            library_dirs=[make_vars["LIBDIR"]],
            so_ext=make_vars["SO"],
            o_ext=object_suffix,
            defines=defines,
            undefines=undefines,
            )
Example #10
def find_conf(ctx, base, inc_dirs, lib_dirs, libs, extra_libs):
    # PETSc 3.1
    conf_path = os.path.join(base, 'conf', 'petscvariables')
    if os.path.exists(conf_path):
        parse_conf(ctx, conf_path, lib_dirs, libs)

    # PETSC 2.3.3
    conf_path = os.path.join(base, 'bmake', 'petscconf')
    if os.path.exists(conf_path):
        vars = {}
        sysconfig.parse_makefile(conf_path, vars)
        if 'PETSC_ARCH' in vars:
            arch = vars['PETSC_ARCH']
            inc_dirs.extend([os.path.join(base, 'bmake', arch)])
            lib_dirs.extend([os.path.join(base, 'lib', arch)])
            conf_path = os.path.join(base, 'bmake', arch, 'petscconf')
            parse_conf(ctx, conf_path, lib_dirs, libs)
Example #11
 def test_parse_makefile_literal_dollar(self):
     self.makefile = TESTFN
     fd = open(self.makefile, "w")
     fd.write(r"CONFIG_ARGS=  '--arg1=optarg1' 'ENV=\$$LIB'" "\n")
     fd.write("VAR=$OTHER\nOTHER=foo")
     fd.close()
     d = sysconfig.parse_makefile(self.makefile)
     self.assertEquals(d, {"CONFIG_ARGS": r"'--arg1=optarg1' 'ENV=\$LIB'", "OTHER": "foo"})
 def test_parse_makefile_base(self):
     self.makefile = TESTFN
     fd = open(self.makefile, 'w')
     fd.write(r"CONFIG_ARGS=  '--arg1=optarg1' 'ENV=LIB'" '\n')
     fd.write('VAR=$OTHER\nOTHER=foo')
     fd.close()
     d = sysconfig.parse_makefile(self.makefile)
     self.assertEquals(d, {'CONFIG_ARGS': "'--arg1=optarg1' 'ENV=LIB'",
                           'OTHER': 'foo'})
 def test_parse_makefile_literal_dollar(self):
     self.makefile = test.test_support.TESTFN
     fd = open(self.makefile, 'w')
     fd.write(r"CONFIG_ARGS=  '--arg1=optarg1' 'ENV=\$$LIB'" '\n')
     fd.write('VAR=$OTHER\nOTHER=foo')
     fd.close()
     d = sysconfig.parse_makefile(self.makefile)
     self.assertEquals(d, {'CONFIG_ARGS': r"'--arg1=optarg1' 'ENV=\$LIB'",
                           'OTHER': 'foo'})
 def test_parse_makefile_base(self):
     self.makefile = test.test_support.TESTFN
     fd = open(self.makefile, "w")
     try:
         fd.write(r"CONFIG_ARGS=  '--arg1=optarg1' 'ENV=LIB'" "\n")
         fd.write("VAR=$OTHER\nOTHER=foo")
     finally:
         fd.close()
     d = sysconfig.parse_makefile(self.makefile)
     self.assertEqual(d, {"CONFIG_ARGS": "'--arg1=optarg1' 'ENV=LIB'", "OTHER": "foo"})
 def read_conf(self):
     self.conf = dict()
     for line in open(self.petsc_path('include', 'petscconf.h')):
         if line.startswith('#define '):
             define = line[len('#define '):]
             space = define.find(' ')
             key = define[:space]
             val = define[space+1:]
             self.conf[key] = val
     self.conf.update(parse_makefile(self.petsc_path('conf', 'petscvariables')))
     for line in open(self.arch_path('include', 'slepcconf.h')):
         if line.startswith('#define '):
             define = line[len('#define '):]
             space = define.find(' ')
             key = define[:space]
             val = define[space+1:]
             self.conf[key] = val
     self.conf.update(parse_makefile(self.arch_path('conf', 'slepcvariables')))
     self.have_fortran = int(self.conf.get('PETSC_HAVE_FORTRAN', '0'))
    def get_flags_linker_so(self):
        opt = self.linker_so[1:]
        if sys.platform == 'darwin':
            target = os.environ.get('MACOSX_DEPLOYMENT_TARGET', None)
            # If MACOSX_DEPLOYMENT_TARGET is set, we simply trust the value
            # and leave it alone.  But, distutils will complain if the
            # environment's value is different from the one in the Python
            # Makefile used to build Python.  We let distutils handle this
            # error checking.
            if not target:
                # If MACOSX_DEPLOYMENT_TARGET is not set in the environment,
                # we try to get it first from the Python Makefile and then we
                # fall back to setting it to 10.3 to maximize the set of
                # versions we can work with.  This is a reasonable default
                # even when using the official Python dist and those derived
                # from it.
                import distutils.sysconfig as sc
                g = {}
                try:
                    get_makefile_filename = sc.get_makefile_filename
                except AttributeError:
                    pass  # i.e. PyPy
                else:
                    filename = get_makefile_filename()
                    sc.parse_makefile(filename, g)
                target = g.get('MACOSX_DEPLOYMENT_TARGET', '10.3')
                os.environ['MACOSX_DEPLOYMENT_TARGET'] = target
                if target == '10.3':
                    s = 'Env. variable MACOSX_DEPLOYMENT_TARGET set to 10.3'
                    warnings.warn(s, stacklevel=2)

            opt.extend(['-undefined', 'dynamic_lookup', '-bundle'])
        else:
            opt.append("-shared")
        if sys.platform.startswith('sunos'):
            # SunOS often has dynamically loaded symbols defined in the
            # static library libg2c.a  The linker doesn't like this.  To
            # ignore the problem, use the -mimpure-text flag.  It isn't
            # the safest thing, but seems to work. 'man gcc' says:
            # ".. Instead of using -mimpure-text, you should compile all
            #  source code with -fpic or -fPIC."
            opt.append('-mimpure-text')
        return opt
 def get_srcdir(self, command_info, mfname):
     if 'srcdir' in command_info:
         return command_info['srcdir']
     elif os.path.exists(mfname):
         return '.'
     elif os.path.exists('Makefile'):
         mf = parse_makefile('Makefile')
         return expand_makefile_vars('$(srcdir)', mf)
     else:
         return re.sub('/[^/]*/build/', '/src/', os.getcwd())
Example #18
    def get_flags_linker_so(self):
        opt = self.linker_so[1:]
        if sys.platform == 'darwin':
            target = os.environ.get('MACOSX_DEPLOYMENT_TARGET', None)
            # If MACOSX_DEPLOYMENT_TARGET is set, we simply trust the value
            # and leave it alone.  But, distutils will complain if the
            # environment's value is different from the one in the Python
            # Makefile used to build Python.  We let distutils handle this
            # error checking.
            if not target:
                # If MACOSX_DEPLOYMENT_TARGET is not set in the environment,
                # we try to get it first from the Python Makefile and then we
                # fall back to setting it to 10.3 to maximize the set of
                # versions we can work with.  This is a reasonable default
                # even when using the official Python dist and those derived
                # from it.
                import distutils.sysconfig as sc
                g = {}
                try:
                    get_makefile_filename = sc.get_makefile_filename
                except AttributeError:
                    pass # i.e. PyPy
                else: 
                    filename = get_makefile_filename()
                    sc.parse_makefile(filename, g)
                target = g.get('MACOSX_DEPLOYMENT_TARGET', '10.3')
                os.environ['MACOSX_DEPLOYMENT_TARGET'] = target
                if target == '10.3':
                    s = 'Env. variable MACOSX_DEPLOYMENT_TARGET set to 10.3'
                    warnings.warn(s, stacklevel=2)

            opt.extend(['-undefined', 'dynamic_lookup', '-bundle'])
        else:
            opt.append("-shared")
        if sys.platform.startswith('sunos'):
            # SunOS often has dynamically loaded symbols defined in the
            # static library libg2c.a  The linker doesn't like this.  To
            # ignore the problem, use the -mimpure-text flag.  It isn't
            # the safest thing, but seems to work. 'man gcc' says:
            # ".. Instead of using -mimpure-text, you should compile all
            #  source code with -fpic or -fPIC."
            opt.append('-mimpure-text')
        return opt
Example #19
 def __init__(self, petsc_dir=None, petsc_arch=None, pkg_dir=None, pkg_name=None, pkg_arch=None, pkg_pkgs=None, verbose=False):
     if petsc_dir is None:
         petsc_dir = os.environ.get('PETSC_DIR')
         if petsc_dir is None:
             try:
                 petsc_dir = parse_makefile(os.path.join('lib','petsc','conf', 'petscvariables')).get('PETSC_DIR')
             finally:
                 if petsc_dir is None:
                     raise RuntimeError('Could not determine PETSC_DIR, please set in environment')
     if petsc_arch is None:
         petsc_arch = os.environ.get('PETSC_ARCH')
         if petsc_arch is None:
             try:
                 petsc_arch = parse_makefile(os.path.join(petsc_dir, 'lib','petsc','conf', 'petscvariables')).get('PETSC_ARCH')
             finally:
                 if petsc_arch is None:
                     raise RuntimeError('Could not determine PETSC_ARCH, please set in environment')
     self.petsc_dir = os.path.normpath(petsc_dir)
     self.petsc_arch = petsc_arch.rstrip(os.sep)
     self.pkg_dir = pkg_dir
     self.pkg_name = pkg_name
     self.pkg_arch = pkg_arch
     if self.pkg_dir is None:
       self.pkg_dir = petsc_dir
       self.pkg_name = 'petsc'
       self.pkg_arch = self.petsc_arch
     if self.pkg_name is None:
       self.pkg_name = os.path.basename(os.path.normpath(self.pkg_dir))
     if self.pkg_arch is None:
       self.pkg_arch = self.petsc_arch
     self.pkg_pkgs = PetscPKGS
     if pkg_pkgs is not None:
       self.pkg_pkgs += list(set(pkg_pkgs.split(','))-set(self.pkg_pkgs))
     self.read_conf()
     try:
         logging.basicConfig(filename=self.pkg_arch_path('lib',self.pkg_name,'conf', 'gmake.log'), level=logging.DEBUG)
     except IOError:
         # Disable logging if path is not writeable (e.g., prefix install)
         logging.basicConfig(filename='/dev/null', level=logging.DEBUG)
     self.log = logging.getLogger('gmakegen')
     self.mistakes = Mistakes(debuglogger(self.log), verbose=verbose)
     self.gendeps = []
Example #20
 def _extras_paths(cls):
   standard_lib = sysconfig.get_python_lib(standard_lib=True)
   try:
     makefile = sysconfig.parse_makefile(sysconfig.get_makefile_filename())
   except (AttributeError, IOError):
     # This is not available by default in PyPy's distutils.sysconfig or it simply is
     # no longer available on the system (IOError ENOENT)
     makefile = {}
   extras_paths = filter(None, makefile.get('EXTRASPATH', '').split(':'))
   for path in extras_paths:
     yield os.path.join(standard_lib, path)
Example #21
def main(outdir, parameters=None):
    legend_fontsize = 30
    legend_handlelength = 0.3
    if parameters is not None:
        params = parse_makefile(parameters)

    # set plotting params
    axlabel_fontsize = 35
    ax_ticksize = 35
    t_final = 10

    width = 28
    height = 9

    # set up subplots
    gs = gridspec.GridSpec(1, 4, width_ratios=[1.08, 0.05, 1, 1])

    fig = plt.figure(figsize=(width, height))
    ax = [None, None, None]
    ax[0] = fig.add_subplot(gs[0])
    ax[1] = fig.add_subplot(gs[2])
    ax[2] = fig.add_subplot(gs[3], sharey=ax[1])

    # draw plots
    plot_heatmap(axis=ax[0],
                 axlabel_fontsize=axlabel_fontsize,
                 contour_linewidth=5,
                 contour_fontsize=30)

    # style whole plot some more

    # no zeroth tick, for clash avoidance:

    y_max = max([ax[axis_no].get_ylim()[1] for axis_no in [1, 2]])
    for axis in [ax[1], ax[2]]:
        axis.set_xlim([0, t_final])
        axis.set_xlabel("Time (days)", fontsize=axlabel_fontsize)

    ax[1].set_ylabel("free virions")
    plt.setp(ax[2].get_yticklabels(), visible=False)
    for axis in ax:
        labels = axis.get_yticklabels()
        labels[0].set_visible(False)

    leg_labels = [
        "Uninfected cells", "Wild-type virions", "Mutant virions",
        "Detection limit"
    ]
    ax[2].legend(labels=leg_labels,
                 fontsize=legend_fontsize,
                 handlelength=legend_handlelength)

    fig.savefig(outdir)
Example #22
 def _extras_paths(cls):
   standard_lib = sysconfig.get_python_lib(standard_lib=True)
   try:
     makefile = sysconfig.parse_makefile(sysconfig.get_makefile_filename())
   except (AttributeError, IOError):
     # This is not available by default in PyPy's distutils.sysconfig or it simply is
     # no longer available on the system (IOError ENOENT)
     makefile = {}
   extras_paths = filter(None, makefile.get('EXTRASPATH', '').split(':'))
   for path in extras_paths:
     yield os.path.join(standard_lib, path)
Example #23
 def read_conf(self):
     self.conf = dict()
     for line in open(self.arch_path('include', 'petscconf.h')):
         if line.startswith('#define '):
             define = line[len('#define '):]
             space = define.find(' ')
             key = define[:space]
             val = define[space+1:]
             self.conf[key] = val
     self.conf.update(parse_makefile(self.arch_path('lib','petsc','conf', 'petscvariables')))
     self.have_fortran = int(self.conf.get('PETSC_HAVE_FORTRAN', '0'))
Example #24
 def test_parse_makefile_literal_dollar(self):
     self.makefile = test.test_support.TESTFN
     fd = open(self.makefile, 'w')
     fd.write(r"CONFIG_ARGS=  '--arg1=optarg1' 'ENV=\$$LIB'" '\n')
     fd.write('VAR=$OTHER\nOTHER=foo')
     fd.close()
     d = sysconfig.parse_makefile(self.makefile)
     self.assertEquals(d, {
         'CONFIG_ARGS': r"'--arg1=optarg1' 'ENV=\$LIB'",
         'OTHER': 'foo'
     })
Example #25
 def read_conf(self):
     self.conf = dict()
     for line in open(self.arch_path("include", "petscconf.h")):
         if line.startswith("#define "):
             define = line[len("#define ") :]
             space = define.find(" ")
             key = define[:space]
             val = define[space + 1 :]
             self.conf[key] = val
     self.conf.update(parse_makefile(self.arch_path("lib", "petsc", "conf", "petscvariables")))
     self.have_fortran = int(self.conf.get("PETSC_HAVE_FORTRAN", "0"))
Example #26
 def test_parse_makefile_base(self):
     self.makefile = TESTFN
     fd = open(self.makefile, 'w')
     try:
         fd.write(r"CONFIG_ARGS=  '--arg1=optarg1' 'ENV=LIB'" '\n')
         fd.write('VAR=$OTHER\nOTHER=foo')
     finally:
         fd.close()
     d = sysconfig.parse_makefile(self.makefile)
     self.assertEqual(d, {'CONFIG_ARGS': "'--arg1=optarg1' 'ENV=LIB'",
                          'OTHER': 'foo'})
Example #27
def getMakeInfo():
    # NYI: This is Autotools-specific.
    makefile = parse_makefile("Makefile")
    info = {'package_name': makefile['PACKAGE_NAME'],
            'package': makefile['PACKAGE'],
            'version': makefile['VERSION'],
            'srcdir': makefile['top_srcdir'],
            'prefix': makefile['prefix'],
            'python_version': makefile['PYTHON_VERSION'],
            }
    return info
Example #28
def getMakeInfo():
    # NYI: This is Autotools-specific.
    makefile = parse_makefile("Makefile")
    info = {
        "package_name": makefile["PACKAGE_NAME"],
        "package": makefile["PACKAGE"],
        "version": makefile["VERSION"],
        "srcdir": makefile["top_srcdir"],
        "prefix": makefile["prefix"],
        "python_version": makefile["PYTHON_VERSION"],
    }
    return info
Example #29
    def test_parse_makefile_base(self):
        self.makefile = test.test_support.TESTFN
        fd = open(self.makefile, 'w')
        try:
            fd.write("CONFIG_ARGS=  '--arg1=optarg1' 'ENV=LIB'\n")
            fd.write('VAR=$OTHER\nOTHER=foo')
        finally:
            fd.close()

        d = sysconfig.parse_makefile(self.makefile)
        self.assertEqual(d, {'CONFIG_ARGS': "'--arg1=optarg1' 'ENV=LIB'",
         'OTHER': 'foo'})
Example #30
def _guess_toolchain_kwargs_from_python_config():
    def strip_prefix(pfx, value):
        if value.startswith(pfx):
            return value[len(pfx):]
        else:
            return value

    from distutils.sysconfig import parse_makefile, get_makefile_filename
    make_vars = parse_makefile(get_makefile_filename())

    cc_cmdline = (make_vars["CXX"].split() + make_vars["CFLAGS"].split() +
                  make_vars["CFLAGSFORSHARED"].split())
    object_names = [
        oname for oname in make_vars['OBJECT_OBJS'].split()
        if "(" not in oname and ")" not in oname
    ]

    object_suffix = '.' + object_names[0].split('.')[1]

    cflags = []
    defines = []
    undefines = []

    for cflag in cc_cmdline[1:]:
        if cflag.startswith("-D"):
            defines.append(cflag[2:])
        elif cflag.startswith("-U"):
            undefines.append(cflag[2:])
        else:
            cflags.append(cflag)

    # on Mac OS X, "libraries" can also be "frameworks"
    libraries = []
    for lib in make_vars["LIBS"].split():
        if lib.startswith("-l"):
            libraries.append(strip_prefix("-l", lib))
        else:
            cflags.append(lib)

    return dict(
        cc=cc_cmdline[0],
        ld=make_vars["LDSHARED"].split()[0],
        cflags=cflags,
        ldflags=(make_vars["LDSHARED"].split()[1:] +
                 make_vars["LINKFORSHARED"].split()),
        libraries=libraries,
        include_dirs=[make_vars["INCLUDEPY"]],
        library_dirs=[make_vars["LIBDIR"]],
        so_ext=make_vars["SO"] if 'SO' in make_vars else '.so',
        o_ext=object_suffix,
        defines=defines,
        undefines=undefines,
    )
Example #31
def _autotools_dist_builder(module):
    source_dir = module.get_source_dir()

    os.chdir(source_dir)
    command.run(["make", "distcheck"])

    makefile = parse_makefile(os.path.join(source_dir, "Makefile"))
    tarball = "%s-%s.tar.xz" % (module.name, makefile["VERSION"])

    shutil.move(os.path.join(source_dir, tarball),
                os.path.join(config.get_dist_dir(), tarball))

    return True
Example #32
 def __init__(self, petsc_dir=None, petsc_arch=None, verbose=False):
     if petsc_dir is None:
         petsc_dir = os.environ.get('PETSC_DIR')
         if petsc_dir is None:
             try:
                 petsc_dir = parse_makefile(
                     os.path.join('lib', 'petsc', 'conf',
                                  'petscvariables')).get('PETSC_DIR')
             finally:
                 if petsc_dir is None:
                     raise RuntimeError(
                         'Could not determine PETSC_DIR, please set in environment'
                     )
     if petsc_arch is None:
         petsc_arch = os.environ.get('PETSC_ARCH')
         if petsc_arch is None:
             try:
                 petsc_arch = parse_makefile(
                     os.path.join(petsc_dir, 'lib', 'petsc', 'conf',
                                  'petscvariables')).get('PETSC_ARCH')
             finally:
                 if petsc_arch is None:
                     raise RuntimeError(
                         'Could not determine PETSC_ARCH, please set in environment'
                     )
     self.petsc_dir = petsc_dir
     self.petsc_arch = petsc_arch
     self.read_conf()
     try:
         logging.basicConfig(filename=self.arch_path(
             'lib', 'petsc', 'conf', 'gmake.log'),
                             level=logging.DEBUG)
     except IOError:
         # Disable logging if path is not writeable (e.g., prefix install)
         logging.basicConfig(filename='/dev/null', level=logging.DEBUG)
     self.log = logging.getLogger('gmakegen')
     self.mistakes = Mistakes(debuglogger(self.log), verbose=verbose)
     self.gendeps = []
Example #33
 def __init__(self, petsc_dir=None, petsc_arch=None, verbose=False):
     if petsc_dir is None:
         petsc_dir = os.environ.get('PETSC_DIR')
         if petsc_dir is None:
             try:
                 petsc_dir = parse_makefile(os.path.join('conf', 'petscvariables')).get('PETSC_DIR')
             finally:
                 if petsc_dir is None:
                     raise RuntimeError('Could not determine PETSC_DIR, please set in environment')
     if petsc_arch is None:
         petsc_arch = os.environ.get('PETSC_ARCH')
         if petsc_arch is None:
             try:
                 petsc_arch = parse_makefile(os.path.join(petsc_dir, 'conf', 'petscvariables')).get('PETSC_ARCH')
             finally:
                 if petsc_arch is None:
                     raise RuntimeError('Could not determine PETSC_ARCH, please set in environment')
     self.petsc_dir = petsc_dir
     self.petsc_arch = petsc_arch
     self.read_conf()
     logging.basicConfig(filename=self.arch_path('conf', 'gmake.log'), level=logging.DEBUG)
     self.log = logging.getLogger('gmakegen')
     self.mistakes = Mistakes(debuglogger(self.log), verbose=verbose)
     self.gendeps = []
Example #34
def _guess_toolchain_kwargs_from_python_config():
    def strip_prefix(pfx, value):
        if value.startswith(pfx):
            return value[len(pfx):]
        else:
            return value

    from distutils.sysconfig import parse_makefile, get_makefile_filename
    make_vars = parse_makefile(get_makefile_filename())

    cc_cmdline = (make_vars["CXX"].split()
            + make_vars["CFLAGS"].split()
            + make_vars["CFLAGSFORSHARED"].split())
    object_suffix = '.' + make_vars['MODOBJS'].split()[0].split('.')[1]
    from os.path import join

    cflags = []
    defines = []
    undefines = []

    for cflag in cc_cmdline[1:]:
        if cflag.startswith("-D"):
            defines.append(cflag[2:])
        elif cflag.startswith("-U"):
            undefines.append(cflag[2:])
        else:
            cflags.append(cflag)

    return dict(
            cc=cc_cmdline[0],
            ld=make_vars["LDSHARED"].split()[0],
            cflags=cflags,
            ldflags=(
                make_vars["LDSHARED"].split()[1:]
                + make_vars["LINKFORSHARED"].split()
                ),
            libraries=[strip_prefix("-l", lib)
                for lib in make_vars["LIBS"].split()],
            include_dirs=[
                make_vars["INCLUDEPY"]
                ],
            library_dirs=[make_vars["LIBDIR"]],
            so_ext=make_vars["SO"],
            o_ext=object_suffix,
            defines=defines,
            undefines=undefines,
            )
Example #35
    def get_contact(self, mf):
        next_dir = os.getcwd()
        while mf is not None:
            if 'WATCHERS' in mf:
                return expand_makefile_vars('$(WATCHERS)', mf)
            elif next_dir is None:
                break
            mfname = os.path.join(next_dir, 'Makefile')
            if os.path.exists(mfname):
                mf = parse_makefile(mfname)
            else:
                break
            if next_dir == '/':
                next_dir = None
            else:
                next_dir = os.path.dirname(next_dir)

        return '-'
Example #36
def _guess_toolchain_kwargs_from_python_config():
    def strip_prefix(pfx, value):
        if value.startswith(pfx):
            return value[len(pfx):]
        else:
            return value

    from distutils.sysconfig import parse_makefile, get_makefile_filename
    make_vars = parse_makefile(get_makefile_filename())

    cc_cmdline = (make_vars["CXX"].split() + make_vars["CFLAGS"].split() +
                  make_vars["CFLAGSFORSHARED"].split())
    object_suffix = '.' + make_vars['MODOBJS'].split()[0].split('.')[1]
    from os.path import join

    cflags = []
    defines = []
    undefines = []

    for cflag in cc_cmdline[1:]:
        if cflag.startswith("-D"):
            defines.append(cflag[2:])
        elif cflag.startswith("-U"):
            undefines.append(cflag[2:])
        else:
            cflags.append(cflag)

    return dict(
        cc=cc_cmdline[0],
        ld=make_vars["LDSHARED"].split()[0],
        cflags=cflags,
        ldflags=(make_vars["LDSHARED"].split()[1:] +
                 make_vars["LINKFORSHARED"].split()),
        libraries=[
            strip_prefix("-l", lib) for lib in make_vars["LIBS"].split()
        ],
        include_dirs=[make_vars["INCLUDEPY"]],
        library_dirs=[make_vars["LIBDIR"]],
        so_ext=make_vars["SO"],
        o_ext=object_suffix,
        defines=defines,
        undefines=undefines,
    )
Example #37
def pkgsources(pkg, mistakes):
    '''
  Walks the source tree associated with 'pkg', analyzes the conditional written into the makefiles,
  and returns a list of sources associated with each unique conditional (as a dictionary).
  '''
    from distutils.sysconfig import parse_makefile
    allconditions = defaultdict(set)
    sources = defaultdict(deque)
    for root, dirs, files in os.walk(os.path.join('src', pkg)):
        dirs.sort()
        files.sort()
        conditions = allconditions[os.path.dirname(root)].copy()
        makefile = os.path.join(root, 'makefile')
        if not os.path.exists(makefile):
            continue
        makevars = parse_makefile(makefile)
        mdirs = makevars.get(
            'DIRS', '').split()  # Directories specified in the makefile
        mistakes.compareDirLists(
            root, mdirs, dirs)  # diagnostic output to find unused directories
        candidates = set(mdirs).union(AUTODIRS).difference(SKIPDIRS)
        dirs[:] = list(candidates.intersection(dirs))
        lines = open(makefile)
        conditions.update(
            set(
                tuple(stripsplit(line)) for line in lines
                if line.startswith('#requires')))
        lines.close()

        def relpath(filename):
            return os.path.join(root, filename)

        sourcecu = makevars.get('SOURCECU', '').split()
        sourcec = makevars.get('SOURCEC', '').split()
        sourcecxx = makevars.get('SOURCECXX', '').split()
        sourcef = makevars.get('SOURCEF', '').split()
        mistakes.compareSourceLists(
            root, sourcec + sourcecxx + sourcef + sourcecu,
            files)  # Diagnostic output about unused source files
        sources[repr(sorted(conditions))].extend(
            relpath(f) for f in sourcec + sourcecxx + sourcef + sourcecu)
        allconditions[root] = conditions
    return sources
Example #38
    def gen_pkg(self, pkg):
        pkgsrcs = dict()
        for lang in LANGS:
            pkgsrcs[lang] = []
        for root, dirs, files in os.walk(os.path.join(self.pkg_dir, 'src',
                                                      pkg)):
            dirs.sort()
            files.sort()
            makefile = os.path.join(root, 'makefile')
            if not os.path.exists(makefile):
                dirs[:] = []
                continue
            mklines = open(makefile)
            conditions = set(
                tuple(stripsplit(line)) for line in mklines
                if line.startswith('#requires'))
            mklines.close()
            if not all(self.inconf(key, val) for key, val in conditions):
                dirs[:] = []
                continue
            makevars = parse_makefile(makefile)
            mdirs = makevars.get(
                'DIRS', '').split()  # Directories specified in the makefile
            self.mistakes.compareDirLists(
                root, mdirs,
                dirs)  # diagnostic output to find unused directories
            candidates = set(mdirs).union(AUTODIRS).difference(SKIPDIRS)
            dirs[:] = list(candidates.intersection(dirs))
            allsource = []

            def mkrel(src):
                return self.relpath(root, src)

            source = self.get_sources(makevars)
            for lang, s in source.items():
                pkgsrcs[lang] += [mkrel(t) for t in s]
                allsource += s
            self.mistakes.compareSourceLists(
                root, allsource,
                files)  # Diagnostic output about unused source files
            self.gendeps.append(self.relpath(root, 'makefile'))
        return pkgsrcs
Example #39
  def _extras_paths(cls):
    standard_lib = sysconfig.get_python_lib(standard_lib=True)

    try:
      makefile = sysconfig.parse_makefile(sysconfig.get_makefile_filename())
    except (AttributeError, IOError):
      # This is not available by default in PyPy's distutils.sysconfig or it simply is
      # no longer available on the system (IOError ENOENT)
      makefile = {}

    extras_paths = filter(None, makefile.get('EXTRASPATH', '').split(':'))
    for path in extras_paths:
      yield os.path.join(standard_lib, path)

    # Handle .pth injected paths as extras.
    sitedirs = cls._get_site_packages()
    for pth_path in cls._scan_pth_files(sitedirs):
      TRACER.log('Found .pth file: %s' % pth_path, V=3)
      for extras_path in iter_pth_paths(pth_path):
        yield extras_path
Example #40
  def _extras_paths(cls):
    standard_lib = sysconfig.get_python_lib(standard_lib=True)

    try:
      makefile = sysconfig.parse_makefile(sysconfig.get_makefile_filename())
    except (AttributeError, IOError):
      # This is not available by default in PyPy's distutils.sysconfig or it simply is
      # no longer available on the system (IOError ENOENT)
      makefile = {}

    extras_paths = filter(None, makefile.get('EXTRASPATH', '').split(':'))
    for path in extras_paths:
      yield os.path.join(standard_lib, path)

    # Handle .pth injected paths as extras.
    sitedirs = cls._get_site_packages()
    for pth_path in cls._scan_pth_files(sitedirs):
      TRACER.log('Found .pth file: %s' % pth_path, V=3)
      for extras_path in iter_pth_paths(pth_path):
        yield extras_path
Example #41
def _distribute_autotools(module):
    makefile = parse_makefile("Makefile")
    filename = makefile["DIST_ARCHIVES"]
    version = makefile["VERSION"]

    git_module = git.get_module(module)

    version_revision = None
    description = git_module.describe()
    if description != "v%s" % version:
        match = re.match(r"(v[\d\.]+)", description)
        if match is None:
            print("No version tag was found")
            return False
        else:
            version_revision = match.groups()[0]

    if version_revision is not None:
        git_module.checkout(version_revision)

    return command.run(["make", "distcheck"])
Example #42
def generate_rb(config, glob, common_vars):
    mk_vars = parse_makefile("{GLOB_DIR}/tstfiles.mk".format(**common_vars))
    basenames = [v[:-4] for v in mk_vars["TEST_RES_SOURCE"].split()]
    basenames += [
        "casing",
        "mc",
        "root",
        "sh",
        "sh_YU",
        "te",
        "te_IN",
        "te_IN_REVISED",
        "testtypes",
        "testaliases",
        "testempty",
        "structLocale",
        "idna_rules",
        "conversion",
        "icuio",
        # "metaZones",
        # "timezoneTypes",
        # "windowsZones",
    ]
    return [
        # Inference rule for creating resource bundles
        # Some test data resource bundles are known to have warnings and bad data.
        # The -q option is there on purpose, so we don't see it normally.
        # TODO: Use option -k?
        RepeatedExecutionRequest(
            name = "testrb",
            category = "tests",
            input_files = [InFile("%s.txt" % bn) for bn in basenames],
            output_files = [OutFile("%s.res" % bn) for bn in basenames],
            tool = IcuTool("genrb"),
            args = "-q -s {IN_DIR} -d {OUT_DIR} {INPUT_FILE}",
            format_with = {},
            repeat_with = {}
        ),
        # Other standalone res files
        SingleExecutionRequest(
            name = "encoded",
            category = "tests",
            input_files = [InFile("encoded.utf16be")],
            output_files = [OutFile("encoded.res")],
            tool = IcuTool("genrb"),
            args = "-s {IN_DIR} -eUTF-16BE -d {OUT_DIR} {INPUT_FILES[0]}",
            format_with = {}
        ),
        SingleExecutionRequest(
            name = "zoneinfo64",
            category = "tests",
            input_files = [InFile("zoneinfo64.txt")],
            output_files = [TmpFile("zoneinfo64.res")],
            tool = IcuTool("genrb"),
            args = "-s {IN_DIR} -d {TMP_DIR} {INPUT_FILES[0]}",
            format_with = {}
        ),
        SingleExecutionRequest(
            name = "filtertest",
            category = "tests",
            input_files = [InFile("filtertest.txt")],
            output_files = [OutFile("filtertest.res")],
            tool = IcuTool("genrb"),
            args = "-s {IN_DIR} -d {OUT_DIR} -i {OUT_DIR} "
                "--filterDir {IN_DIR}/filters filtertest.txt",
            format_with = {}
        )
    ]
Example #43
def read_setup_file(filename):
    """Reads a Setup file and returns Extension instances."""
    from distutils.sysconfig import (parse_makefile, expand_makefile_vars,
                                     _variable_rx)

    from distutils.text_file import TextFile
    from distutils.util import split_quoted

    # First pass over the file to gather "VAR = VALUE" assignments.
    vars = parse_makefile(filename)

    # Second pass to gobble up the real content: lines of the form
    #   <module> ... [<sourcefile> ...] [<cpparg> ...] [<library> ...]
    file = TextFile(filename,
                    strip_comments=1, skip_blanks=1, join_lines=1,
                    lstrip_ws=1, rstrip_ws=1)
    extensions = []

    while True:
        line = file.readline()
        if line is None:                # eof
            break
        if _variable_rx.match(line):    # VAR=VALUE, handled in first pass
            continue

        if line[0] == line[-1] == "*":
            file.warn("'%s' lines not handled yet" % line)
            continue

        line = expand_makefile_vars(line, vars)
        words = split_quoted(line)

        # NB. this parses a slightly different syntax than the old
        # makesetup script: here, there must be exactly one extension per
        # line, and it must be the first word of the line.  I have no idea
        # why the old syntax supported multiple extensions per line, as
        # they all wind up being the same.

        module = words[0]
        ext = Extension(module, [])
        append_next_word = None

        for word in words[1:]:
            if append_next_word is not None:
                append_next_word.append(word)
                append_next_word = None
                continue

            suffix = os.path.splitext(word)[1]
            switch = word[0:2] ; value = word[2:]

            if suffix in (".c", ".cc", ".cpp", ".cxx", ".c++", ".m", ".mm"):
                # hmm, should we do something about C vs. C++ sources?
                # or leave it up to the CCompiler implementation to
                # worry about?
                ext.sources.append(word)
            elif switch == "-I":
                ext.include_dirs.append(value)
            elif switch == "-D":
                equals = value.find("=")
                if equals == -1:        # bare "-DFOO" -- no value
                    ext.define_macros.append((value, None))
                else:                   # "-DFOO=blah"
                    ext.define_macros.append((value[0:equals],
                                              value[equals+2:]))
            elif switch == "-U":
                ext.undef_macros.append(value)
            elif switch == "-C":        # only here 'cause makesetup has it!
                ext.extra_compile_args.append(word)
            elif switch == "-l":
                ext.libraries.append(value)
            elif switch == "-L":
                ext.library_dirs.append(value)
            elif switch == "-R":
                ext.runtime_library_dirs.append(value)
            elif word == "-rpath":
                append_next_word = ext.runtime_library_dirs
            elif word == "-Xlinker":
                append_next_word = ext.extra_link_args
            elif word == "-Xcompiler":
                append_next_word = ext.extra_compile_args
            elif switch == "-u":
                ext.extra_link_args.append(word)
                if not value:
                    append_next_word = ext.extra_link_args
            elif suffix in (".a", ".so", ".sl", ".o", ".dylib"):
                # NB. a really faithful emulation of makesetup would
                # append a .o file to extra_objects only if it
                # had a slash in it; otherwise, it would s/.o/.c/
                # and append it to sources.  Hmmmm.
                ext.extra_objects.append(word)
            else:
                file.warn("unrecognized argument '%s'" % word)

        extensions.append(ext)

    return extensions
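A hedged usage sketch for the read_setup_file() above, assuming it is importable together with distutils' Extension class; the one-module-per-line Setup syntax follows the comment in the function, and the file name and flags below are purely illustrative.

# Hypothetical Setup file: module name first, then sources, -I/-l/-L flags, etc.
with open("Setup.sample", "w") as f:
    f.write("demo demo.c -I/usr/local/include -ldemo -L/usr/local/lib\n")

for ext in read_setup_file("Setup.sample"):
    print(ext.name, ext.sources, ext.include_dirs, ext.libraries, ext.library_dirs)
# -> demo ['demo.c'] ['/usr/local/include'] ['demo'] ['/usr/local/lib']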
Example #44
def main():
    if sys.argv[1] == "Debug":
        print("OpenSSL debug builds aren't supported.")
    elif sys.argv[1] != "Release":
        raise ValueError('Unrecognized configuration: %s' % sys.argv[1])

    if sys.argv[2] == "Win32":
        platform = "VC-WIN32"
        suffix = '32'
    elif sys.argv[2] == "x64":
        platform = "VC-WIN64A"
        suffix = '64'
    else:
        raise ValueError('Unrecognized platform: %s' % sys.argv[2])

    # Have the distutils functions display information output
    log.set_verbosity(1)

    # Use the same properties that are used in the VS projects
    solution_dir = os.path.dirname(__file__)
    propfile = os.path.join(solution_dir, 'pyproject.vsprops')
    props = get_project_properties(propfile)

    # Ensure we have the necessary external dependencies
    ssl_dir = os.path.join(solution_dir, props['opensslDir'])
    if not os.path.isdir(ssl_dir):
        print("Could not find the OpenSSL sources, try running "
              "'build.bat -e'")
        sys.exit(1)

    # Ensure the executables used herein are available.
    if not find_executable('nmake.exe'):
        print('Could not find nmake.exe, try running env.bat')
        sys.exit(1)

    # add our copy of NASM to PATH.  It will be on the same level as openssl
    externals_dir = os.path.join(solution_dir, props['externalsDir'])
    for dir in os.listdir(externals_dir):
        if dir.startswith('nasm'):
            nasm_dir = os.path.join(externals_dir, dir)
            nasm_dir = os.path.abspath(nasm_dir)
            old_path = os.environ['PATH']
            os.environ['PATH'] = os.pathsep.join([nasm_dir, old_path])
            break
    else:
        if not find_executable('nasm.exe'):
            print('Could not find nasm.exe, please add to PATH')
            sys.exit(1)

    # If the ssl makefiles do not exist, we invoke PCbuild/prepare_ssl.py
    # to generate them.
    platform_makefile = MK1MF_FMT.format(suffix)
    if not os.path.isfile(os.path.join(ssl_dir, platform_makefile)):
        pcbuild_dir = os.path.join(os.path.dirname(externals_dir), 'PCbuild')
        prepare_ssl = os.path.join(pcbuild_dir, 'prepare_ssl.py')
        rc = subprocess.call([sys.executable, prepare_ssl, ssl_dir])
        if rc:
            print('Executing', prepare_ssl, 'failed (error %d)' % rc)
            sys.exit(rc)

    old_cd = os.getcwd()
    try:
        os.chdir(ssl_dir)

        # Get the variables defined in the current makefile, if it exists.
        makefile = MK1MF_FMT.format('')
        try:
            makevars = parse_makefile(makefile)
        except EnvironmentError:
            makevars = {'PLATFORM': None}

        # Rebuild the makefile when building for a different platform than
        # the last run.
        if makevars['PLATFORM'] != platform:
            print("Updating the makefile...")
            sys.stdout.flush()
            # Firstly, apply the changes for the platform makefile into
            # a temporary file to prevent any errors from this script
            # causing false positives on subsequent runs.
            new_makefile = makefile + '.new'
            fix_makefile(new_makefile, platform_makefile, suffix)
            makevars = parse_makefile(new_makefile)

            # Secondly, perform the make recipes that use Perl
            copy_files(new_makefile, makevars)

            # Set our build information in buildinf.h.
            # XXX: This isn't needed for a properly "prepared" SSL, but
            # it fixes the current checked-in external (as of 2017-05).
            fix_buildinf(makevars)

            # Finally, move the temporary file to its real destination.
            if os.path.exists(makefile):
                os.remove(makefile)
            os.rename(new_makefile, makefile)

        # Now run make.
        makeCommand = "nmake /nologo /f \"%s\" lib" % makefile
        print("Executing ssl makefiles:", makeCommand)
        sys.stdout.flush()
        rc = os.system(makeCommand)
        if rc:
            print("Executing", makefile, "failed (error %d)" % rc)
            sys.exit(rc)
    finally:
        os.chdir(old_cd)
    sys.exit(rc)
Example #45
def expand_addprefix(s):
    add_prefix = re.compile(r'\$[\(\{]addprefix (.*?),(.*?)[\}\)]')
    try:
        m = add_prefix.search(s)
    except TypeError:
        return s
    while m is not None:
        prefix, files = m.groups()
        s = add_prefix.sub(' '.join(prefix + f for f in files.split()), s, 1).strip()
        m = add_prefix.search(s)
    return s
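
The module-level code below also calls expand_addsuffix, which is not included in this excerpt. The following is only a hedged sketch of what such a helper might look like, assuming it mirrors expand_addprefix but for GNU make's $(addsuffix SUFFIX,FILES):

import re

def expand_addsuffix(s):
    # Hypothetical companion to expand_addprefix above (the real helper is not
    # shown here): expands $(addsuffix SUFFIX,FILES) by appending SUFFIX to
    # every whitespace-separated word in FILES.
    add_suffix = re.compile(r'\$[\(\{]addsuffix (.*?),(.*?)[\}\)]')
    try:
        m = add_suffix.search(s)
    except TypeError:
        return s
    while m is not None:
        suffix, files = m.groups()
        s = add_suffix.sub(' '.join(f + suffix for f in files.split()), s, 1).strip()
        m = add_suffix.search(s)
    return s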

atomeye_dir = os.path.join(os.getcwd(), '..')
os.system('cd %s && make -p atomeyelib > %s/make.log' % (atomeye_dir, os.getcwd()))
makefile = parse_makefile('make.log')
os.unlink('make.log')

syslibs = makefile['ATOMEYE_LIBS']
syslibs = expand_addprefix(syslibs)
syslibs = expand_addsuffix(syslibs)
syslibs = syslibs.split()

atomeye_libdirs = [ f[2:] for f in syslibs if f.startswith('-L') ]
atomeye_internal_libs = ['%s/lib%s.a' % (os.path.join(atomeye_dir, 'lib'), lib) for lib in [ 'AtomEye', 'AX', 'Atoms', 'VecMat3', 'VecMat', 'IO', 'Scalar', 'Timer'] ]
atomeye_extra_link_args = atomeye_internal_libs + [ f for f in syslibs if not f.startswith('-L')]

quip_root_dir = os.environ['QUIP_ROOT']
quip_arch = os.environ['QUIP_ARCH']

if 'QUIPPY_LDFLAGS' in makefile:
Beispiel #46
0
def generate_tree(config, glob, common_vars, sub_dir, out_sub_dir,
                  resfile_name, version_var, source_var, use_pool_bundle,
                  dep_targets):
    requests = []
    category = "%s_tree" % sub_dir
    out_prefix = "%s/" % out_sub_dir if out_sub_dir else ""
    # TODO: Clean this up for curr
    input_files = [InFile(filename) for filename in glob("%s/*.txt" % sub_dir)]
    if sub_dir == "curr":
        input_files.remove(InFile("curr/supplementalData.txt"))
    input_basenames = [v.filename[len(sub_dir) + 1:] for v in input_files]
    output_files = [
        OutFile("%s%s.res" % (out_prefix, v[:-4])) for v in input_basenames
    ]

    # Generate Pool Bundle
    if use_pool_bundle:
        input_pool_files = [OutFile("%spool.res" % out_prefix)]
        pool_target_name = "%s_pool_write" % sub_dir
        use_pool_bundle_option = "--usePoolBundle {OUT_DIR}/{OUT_PREFIX}".format(
            OUT_PREFIX=out_prefix, **common_vars)
        requests += [
            SingleExecutionRequest(
                name=pool_target_name,
                category=category,
                dep_targets=dep_targets,
                input_files=input_files,
                output_files=input_pool_files,
                tool=IcuTool("genrb"),
                args=
                "-s {IN_DIR}/{IN_SUB_DIR} -d {OUT_DIR}/{OUT_PREFIX} -i {OUT_DIR} "
                "--writePoolBundle -k "
                "{INPUT_BASENAMES_SPACED}",
                format_with={
                    "IN_SUB_DIR":
                    sub_dir,
                    "OUT_PREFIX":
                    out_prefix,
                    "INPUT_BASENAMES_SPACED":
                    utils.SpaceSeparatedList(input_basenames)
                }),
        ]
        dep_targets = dep_targets + [DepTarget(pool_target_name)]
    else:
        use_pool_bundle_option = ""

    # Generate Res File Tree
    requests += [
        RepeatedOrSingleExecutionRequest(
            name="%s_res" % sub_dir,
            category=category,
            dep_targets=dep_targets,
            input_files=input_files,
            output_files=output_files,
            tool=IcuTool("genrb"),
            args=
            "-s {IN_DIR}/{IN_SUB_DIR} -d {OUT_DIR}/{OUT_PREFIX} -i {OUT_DIR} "
            "{EXTRA_OPTION} -k "
            "{INPUT_BASENAME}",
            format_with={
                "IN_SUB_DIR": sub_dir,
                "OUT_PREFIX": out_prefix,
                "EXTRA_OPTION": use_pool_bundle_option
            },
            repeat_with={
                "INPUT_BASENAME": utils.SpaceSeparatedList(input_basenames)
            })
    ]

    # Generate index txt file
    # TODO: Change .mk files to .py files so they can be loaded directly.
    # Alternatively, figure out a way to not require reading this file altogether.
    # Right now, it is required for the index list file.
    # Reading these files as .py will be required for Bazel.
    mk_values = parse_makefile("{GLOB_DIR}/{IN_SUB_DIR}/{RESFILE_NAME}".format(
        IN_SUB_DIR=sub_dir, RESFILE_NAME=resfile_name, **common_vars))
    cldr_version = mk_values[
        version_var] if version_var and sub_dir == "locales" else None
    index_input_files = [
        InFile("%s/%s" % (sub_dir, basename))
        for basename in mk_values[source_var].split()
    ]
    index_file_txt = TmpFile("{IN_SUB_DIR}/{INDEX_NAME}.txt".format(
        IN_SUB_DIR=sub_dir, **common_vars))
    requests += [
        IndexTxtRequest(name="%s_index_txt" % sub_dir,
                        category=category,
                        input_files=index_input_files,
                        output_file=index_file_txt,
                        cldr_version=cldr_version)
    ]

    # Generate index res file
    index_res_file = OutFile("{OUT_PREFIX}{INDEX_NAME}.res".format(
        OUT_PREFIX=out_prefix, **common_vars))
    requests += [
        SingleExecutionRequest(
            name="%s_index_res" % sub_dir,
            category="%s_index" % sub_dir,
            dep_targets=[],
            input_files=[index_file_txt],
            output_files=[index_res_file],
            tool=IcuTool("genrb"),
            args=
            "-s {TMP_DIR}/{IN_SUB_DIR} -d {OUT_DIR}/{OUT_PREFIX} -i {OUT_DIR} "
            "-k "
            "{INDEX_NAME}.txt",
            format_with={
                "IN_SUB_DIR": sub_dir,
                "OUT_PREFIX": out_prefix
            })
    ]

    return requests
Beispiel #47
0
#! /usr/bin/env python
# $Id: setup.py,v 1.2 2002/01/08 07:13:21 jgg Exp $

from distutils.core import setup, Extension
from distutils.sysconfig import parse_makefile
from DistUtilsExtra.command import *
import glob, os, string

# The apt_pkg module
files = map(lambda source: "python/" + source,
            string.split(parse_makefile("python/makefile")["APT_PKG_SRC"]))
apt_pkg = Extension("apt_pkg", files, libraries=["apt-pkg"])

# The apt_inst module
files = map(lambda source: "python/" + source,
            string.split(parse_makefile("python/makefile")["APT_INST_SRC"]))
apt_inst = Extension("apt_inst", files, libraries=["apt-pkg", "apt-inst"])

# Replace the leading _ that is used in the templates for translation
templates = []
if not os.path.exists("build/data/templates/"):
    os.makedirs("build/data/templates")
for template in glob.glob('data/templates/*.info.in'):
    source = open(template, "r")
    build = open(os.path.join("build", template[:-3]), "w")
    lines = source.readlines()
    for line in lines:
        build.write(line.lstrip("_"))
    source.close()
    build.close()
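
The two parse_makefile calls above pull APT_PKG_SRC and APT_INST_SRC out of python/makefile; below is a small Python 3 restatement of that lookup, guarded so it is safe to run, with the example variable contents being assumptions rather than taken from the source:

# python/makefile is assumed to contain lines such as:
#   APT_PKG_SRC = apt_pkgmodule.cc generic.cc ...
import os
from distutils.sysconfig import parse_makefile

if os.path.isfile("python/makefile"):
    mk = parse_makefile("python/makefile")
    apt_pkg_files = ["python/" + src for src in mk.get("APT_PKG_SRC", "").split()]
    print(apt_pkg_files)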
Beispiel #48
0
for a in sys.argv:
    if a.startswith(w_cxx):
        buildroot = a[len(w_cxx):]
    elif a == '--help':
        filtered_args.append(a)
        print("""In addition to the standard options described below,
this script supports one custom option:

  --with-c++-build=DIR Build against the C++ Toolkit installation in DIR
                       (default: %s)
""" % (def_buildroot, ))
    else:
        filtered_args.append(a)
sys.argv = filtered_args

vv = parse_makefile(buildroot + '/build/Makefile.mk')


def mfv(v):
    return expand_makefile_vars('$(' + v + ')', vv)


unparsed_rx = re.compile(r'\$\(.*?\)')
for v in ('CXX', 'CXXFLAGS'):
    os.environ.setdefault(v, unparsed_rx.sub('', mfv(v)))
os.environ.setdefault('CC', unparsed_rx.sub('', mfv('CXX')))
os.environ.setdefault('CFLAGS', unparsed_rx.sub('', mfv('CXXFLAGS')))
os.environ.setdefault(
    'CPPFLAGS',
    unparsed_rx.sub('', mfv('CPPFLAGS')) + ' -DPYDBAPI_SUPPORT_DIR=\\"' +
    support_dir + '\\"' + ' -I' + mfv('top_srcdir') +
Beispiel #49
0
def read_setup_file(filename):
    from distutils.sysconfig import parse_makefile, expand_makefile_vars, _variable_rx
    from distutils.text_file import TextFile
    from distutils.util import split_quoted
    vars = parse_makefile(filename)
    file = TextFile(filename, strip_comments=1, skip_blanks=1, join_lines=1, lstrip_ws=1, rstrip_ws=1)
    try:
        extensions = []
        while 1:
            line = file.readline()
            if line is None:
                break
            if _variable_rx.match(line):
                continue
            if line[0] == line[-1] == '*':
                file.warn("'%s' lines not handled yet" % line)
                continue
            line = expand_makefile_vars(line, vars)
            words = split_quoted(line)
            module = words[0]
            ext = Extension(module, [])
            append_next_word = None
            for word in words[1:]:
                if append_next_word is not None:
                    append_next_word.append(word)
                    append_next_word = None
                    continue
                suffix = os.path.splitext(word)[1]
                switch = word[0:2]
                value = word[2:]
                if suffix in ('.c', '.cc', '.cpp', '.cxx', '.c++', '.m', '.mm'):
                    ext.sources.append(word)
                elif switch == '-I':
                    ext.include_dirs.append(value)
                elif switch == '-D':
                    equals = value.find('=')
                    if equals == -1:
                        ext.define_macros.append((value, None))
                    else:
                        ext.define_macros.append((value[0:equals],
                         value[equals + 2:]))
                elif switch == '-U':
                    ext.undef_macros.append(value)
                elif switch == '-C':
                    ext.extra_compile_args.append(word)
                elif switch == '-l':
                    ext.libraries.append(value)
                elif switch == '-L':
                    ext.library_dirs.append(value)
                elif switch == '-R':
                    ext.runtime_library_dirs.append(value)
                elif word == '-rpath':
                    append_next_word = ext.runtime_library_dirs
                elif word == '-Xlinker':
                    append_next_word = ext.extra_link_args
                elif word == '-Xcompiler':
                    append_next_word = ext.extra_compile_args
                elif switch == '-u':
                    ext.extra_link_args.append(word)
                    if not value:
                        append_next_word = ext.extra_link_args
                elif suffix in ('.a', '.so', '.sl', '.o', '.dylib'):
                    ext.extra_objects.append(word)
                else:
                    file.warn("unrecognized argument '%s'" % word)

            extensions.append(ext)

    finally:
        file.close()

    return extensions
Beispiel #50
0
    def gen_envs(self, loc):
        env = self.env.Clone()

        # Must have the architecture as well.
        self.arch = self.get_option('petsc_arch')

        # Try to find PETSc information.
        extra_libs = []
        petscconf = None
        if loc[0]:
            bmake_dir = os.path.join(loc[0], 'bmake')
            # If we don't already have an arch, try to read it.
            if not self.arch:
                petscconf = os.path.join(bmake_dir, 'petscconf')
                try:
                    inf = open(petscconf)
                    self.arch = inf.readline().split('=')[1].strip()
                except:
                    petscconf = None

            # Try looking for it the PETSc 3 uninstalled way.
            if not self.arch:
                try:
                    items = os.listdir(loc[0])
                    for i in items:
                        i = os.path.join(loc[0], i)
                        if os.path.isdir(i):
                            if os.path.exists(os.path.join(i, 'conf', 'petscvariables')):
                                self.arch = os.path.basename(i)
                                break
                except:
                    pass

            # If we were able to find an architecture.
            if self.arch is not None:
                # Try PETSc 2.x
                petscconf = os.path.join(bmake_dir, self.arch, 'petscconf')
                if os.path.exists(petscconf):
                    loc[1].append(os.path.dirname(petscconf))
                    loc[2].append(os.path.join(loc[2][0], self.arch))
                # Try PETSc 3 uninstalled.
                else:
                    petscconf = os.path.join(loc[0], self.arch, 'conf', 'petscvariables')
                    if os.path.exists(petscconf):
                        loc[1].append(os.path.join(loc[0], self.arch, 'include'))
                        loc[1].append(os.path.join(loc[0], 'include'))
                        loc[2].append(os.path.join(loc[0], self.arch, 'lib'))
                    else:
                        petscconf = None

            # Try PETSc 3 installed information.
            if not petscconf:
                petscconf = os.path.join(loc[0], 'conf', 'petscvariables')
                if not os.path.exists(petscconf):
                    petscconf = None

        # Can we locate static or shared libs?
        libs = ['petscsnes', 'petscksp', 'petscdm',
                'petscmat', 'petscvec', 'petsc']
        lib_types = self.find_libraries(loc[2], libs)
        if lib_types is None:
            libs = ['petsc']
            lib_types = self.find_libraries(loc[2], libs)
        if lib_types is not None:

            # Add basic environment.
            env.AppendUnique(CPPPATH=loc[1])
            env.AppendUnique(LIBPATH=loc[2])
            env.AppendUnique(RPATH=loc[2])
            
            # Add additional libraries.
            if petscconf is not None:
                from distutils import sysconfig
                vars = {}
                sysconfig.parse_makefile(petscconf, vars)
                flags = ''
                if 'PACKAGES_LIBS' in vars:
                    flags = sysconfig.expand_makefile_vars(vars['PACKAGES_LIBS'], vars)
            
                # Static libs? i.e. no shared libs. Must also do this if we are
                # linking static libraries.
                if lib_types[1] is None or self.static:
                    # Add a bunch of extra jazz.
                    if 'X11_INCLUDE' in vars:
                        flags += ' ' + sysconfig.expand_makefile_vars(str(vars['X11_INCLUDE']), vars)
                    if 'MPI_INCLUDE' in vars:
                        flags += ' ' + sysconfig.expand_makefile_vars(str(vars['MPI_INCLUDE']), vars)
                    if 'BLASLAPACK_INCLUDE' in vars:
                        flags += ' ' + sysconfig.expand_makefile_vars(str(vars['BLASLAPACK_INCLUDE']), vars)
                    if 'PCC_LINKER_FLAGS' in vars:
                        flags += ' ' + sysconfig.expand_makefile_vars(str(vars['PCC_LINKER_FLAGS']), vars)
                    if 'PCC_FLAGS' in vars:
                        flags += ' ' + sysconfig.expand_makefile_vars(str(vars['PCC_FLAGS']), vars)
                    if 'PCC_LINKER_LIBS' in vars:
                        flags += ' ' + sysconfig.expand_makefile_vars(str(vars['PCC_LINKER_LIBS']), vars)

                # Use SCons to parse the flags.
                flag_dict = env.ParseFlags(flags)
                # Keep the libs for a bit later.
                if 'LIBS' in flag_dict:
                    extra_libs = flag_dict['LIBS']
                    del flag_dict['LIBS']
                env.MergeFlags(flag_dict)

                # Add libs and return.
                env.PrependUnique(LIBS=libs)
                env.AppendUnique(LIBS=extra_libs)

            yield env
Beispiel #51
0
    def gen_envs(self, loc):
        # If we've been given an MPI compiler just try that.
        if os.path.basename(self.env['CC']) in ['mpicc', 'mpicxx']:
            yield self.env.Clone()
            return

        # If flagged to do so, try standard MPI compilers, unless
        # we were given other options to use.
        if self.try_compilers and \
                self.get_option('mpi_dir') is None and \
                self.get_option('mpi_inc_dir') is None and \
                self.get_option('mpi_lib_dir') is None and \
                self.get_option('mpi_libs') is None:
            env = self.env.Clone()
            env['CC'] = 'mpicc'
            env['CXX'] = 'mpicxx'
            yield env

        # Attempt to use mpicc to find the required libraries.
        import subprocess
        # Test for Open MPI.
        try:
            proc = subprocess.Popen(['mpicc', '-showme:libs'],
                                    stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            if proc.wait() == 0:
                libs = proc.communicate()[0].split()
                for env in Package.gen_envs(self, loc):
                    lib_env = env.Clone()
                    lib_env.PrependUnique(LIBS=libs)
                    yield lib_env
        except OSError:
            # mpicc is not available; fall back to the other detection methods below.
            pass

        # Attempt to find everything from mpicc.conf.
        mpiconf = None
        if loc[0]:
            mpiconf = os.path.join(loc[0], 'etc/mpicc.conf')
            if not os.path.exists(mpiconf):
                mpiconf = None

        if mpiconf:
            # Pick the MPI library settings out of the mpicc.conf file.
            from distutils import sysconfig
            vars = {}
            flags = ''
            # Get a list of all 'variable' names.
            sysconfig.parse_makefile(mpiconf, vars)
            if 'MPILIBNAME' in vars:
                flags += vars['MPILIBNAME']
            if 'LIBS' in vars:
                flags += ' ' + vars['LIBS']
            if 'MPI_OTHERLIBS' in vars:
                flags += ' ' + vars['MPI_OTHERLIBS']
            if flags:
                # Format the flags string and drop empty elements.
                flags_nice = flags.replace('"', '').replace('-l', ' ').split(' ')
                flags_nice = [f for f in flags_nice if f != '']

                for env in Package.gen_envs(self, loc):
                    lib_env = env.Clone()
                    lib_env.PrependUnique(LIBS=flags_nice)
                    yield lib_env


        for env in Package.gen_envs(self, loc):

            # Not sure which extra libraries to check for, so try them all.
            extra_libs = [[], ['rt'], ['pthread', 'rt'],
                          ['dl'], ['dl', 'rt'], ['dl', 'pthread'],
                          ['dl', 'pthread', 'rt']]
            for libs in extra_libs:

                # Check for OpenMPI with nsl and util libs.
                if self.find_libraries(loc[2], ['open-pal', 'nsl', 'util']):
                    lib_env = env.Clone()
                    lib_env.PrependUnique(LIBS=['mpi', 'open-rte', 'open-pal', 'nsl', 'util'] + libs)
                    yield lib_env

                # Check for OpenMPI.
                if self.find_libraries(loc[2], 'open-pal'):
                    lib_env = env.Clone()
                    lib_env.PrependUnique(LIBS=['mpi', 'open-rte', 'open-pal'] + libs)
                    yield lib_env

                # Check for general MPI.
                if self.find_libraries(loc[2], 'mpi'):
                    lib_env = env.Clone()
                    lib_env.PrependUnique(LIBS=['mpi'] + libs)
                    yield lib_env

                # Check for MPICH + pmpich.
                if self.find_libraries(loc[2], ['mpich', 'pmpich']):
                    lib_env = env.Clone()
                    lib_env.PrependUnique(LIBS=['pmpich', 'mpich'] + libs)
                    yield lib_env

                # Check for MPICH
                if self.find_libraries(loc[2], 'mpich'):
                    lib_env = env.Clone()
                    lib_env.PrependUnique(LIBS=['mpich'] + libs )
                    yield lib_env
Beispiel #52
0
    if have_warnings:
        cflags.append('-DHAVE_WARNINGS')
    if freeze_exceptions:
        cflags.append('-DFREEZE_EXCEPTIONS')
        cflags.append('-DEXCEPTIONS_LEN=%d' % codelen)
    if non_elf:
        cflags.append('-DNONELF')


#    libs = [os.path.join(sysconfig.get_config_vars('LIBDIR')[0], sysconfig.get_config_vars('INSTSONAME')[0])]

    somevars = {}
    if os.path.exists(makefile_in):
        print "Using '%s' as Makefile template" % makefile_in
        makevars = sysconfig.parse_makefile(makefile_in)
    else:
        raise ValueError, "Makefile '%s' not found" % makefile_in
    for key in makevars.keys():
        somevars[key] = makevars[key]

    somevars['CFLAGS'] = string.join(cflags)  # override
    if sys.platform.startswith("darwin"):
        somevars['LDFLAGS'] += " -F$(PYTHONFRAMEWORKPREFIX)"
        somevars['LDFLAGS'] += " -mmacosx-version-min=%s" % somevars[
            "MACOSX_DEPLOYMENT_TARGET"]
        somevars['LINKFORSHARED'] = ""  #override
    files = ['$(OPT)', '$(LDFLAGS)', '$(LINKFORSHARED)', 'getpath.c'] + \
            files + \
            ['$(MODLIBS)', '$(LIBS)', '$(SYSLIBS)', '-lz']  # XXX zlib not always -lz
Beispiel #53
0
(options, args) = p.parse_args()

from distutils import sysconfig

sysconfig.PREFIX = '@PREFIX@'
sysconfig.EXEC_PREFIX = '@PREFIX@'

if options.prefix:
    sysconfig.EXEC_PREFIX = options.prefix
    sysconfig.PREFIX = options.prefix

if options.cflags:
    sys.stdout.write('-I%s\n' % sysconfig.get_python_inc())

if options.ldflags:
    extra = "@EXTRA_LDFLAGS@"
    if 0:
        mf = sysconfig.get_makefile_filename()
        d = sysconfig.parse_makefile(mf)
        if options.verbose:
            sys.stderr.write(mf + '\n')
        # Using flags from native python build is asking for trouble,
        # ie, arch or $$ORIGIN may break things.
        extra = d['LDFLAGS']

    sys.stdout.write(
        '-L%s -L%s %s\n' %
        (sysconfig.get_python_lib(), sysconfig.PREFIX + '/lib/', extra))

## -*-Python-*-
Beispiel #54
0
def WriteNinja(petsc):
    conf = dict()
    parse_makefile(
        os.path.join(petsc.petsc_dir, 'lib', 'petsc', 'conf', 'variables'),
        conf)
    parse_makefile(petsc.arch_path('lib', 'petsc', 'conf', 'petscvariables'),
                   conf)
    build_ninja = petsc.arch_path('build.ninja')
    fd = open(build_ninja, 'w')
    fd.write('objdir = obj-ninja\n')
    fd.write('libdir = lib\n')
    fd.write('c_compile = %(PCC)s\n' % conf)
    fd.write('c_flags = %(PETSC_CC_INCLUDES)s %(PCC_FLAGS)s %(CCPPFLAGS)s\n' %
             conf)
    fd.write('c_link = %(PCC_LINKER)s\n' % conf)
    fd.write('c_link_flags = %(PCC_LINKER_FLAGS)s\n' % conf)
    if petsc.have_fortran:
        fd.write('f_compile = %(FC)s\n' % conf)
        fd.write(
            'f_flags = %(PETSC_FC_INCLUDES)s %(FC_FLAGS)s %(FCPPFLAGS)s\n' %
            conf)
        fd.write('f_link = %(FC_LINKER)s\n' % conf)
        fd.write('f_link_flags = %(FC_LINKER_FLAGS)s\n' % conf)
    fd.write('petsc_external_lib = %(PETSC_EXTERNAL_LIB_BASIC)s\n' % conf)
    fd.write('python = %(PYTHON)s\n' % conf)
    fd.write('\n')
    fd.write(
        'rule C_COMPILE\n'
        '  command = $c_compile -MMD -MF $out.d $c_flags -c $in -o $out\n'
        '  description = CC $out\n'
        '  depfile = $out.d\n'
        # '  deps = gcc\n') # 'gcc' is default, 'msvc' only recognized by newer versions of ninja
        '\n')
    fd.write(
        'rule C_LINK_SHARED\n'
        '  command = $c_link $c_link_flags -shared -o $out $in $petsc_external_lib\n'
        '  description = CLINK_SHARED $out\n'
        '\n')
    if petsc.have_fortran:
        fd.write(
            'rule F_COMPILE\n'
            '  command = $f_compile -MMD -MF $out.d $f_flags -c $in -o $out\n'
            '  description = FC $out\n'
            '  depfile = $out.d\n'
            '\n')
        fd.write(
            'rule F_LINK_SHARED\n'
            '  command = $f_link $f_link_flags -shared -o $out $in $petsc_external_lib\n'
            '  description = FLINK_SHARED $out\n'
            '\n')
    fd.write('rule GEN_NINJA\n'
             '  command = $python $in --output=ninja\n'
             '  generator = 1\n'
             '\n')
    petsc.gen_ninja(fd)
    fd.write('\n')
    fd.write(
        'build %s : GEN_NINJA | %s %s %s %s\n' %
        (build_ninja, os.path.abspath(__file__),
         os.path.join(petsc.petsc_dir, 'lib', 'petsc', 'conf', 'variables'),
         petsc.arch_path('lib', 'petsc', 'conf', 'petscvariables'), ' '.join(
             os.path.join(petsc.pkg_dir, dep) for dep in petsc.gendeps)))
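
WriteNinja leans on two idioms: parse_makefile can merge several variable files into one dict (its second argument is updated in place, with later files overriding earlier ones), and old-style '%(NAME)s' % dict formatting then substitutes those variables into the ninja rules. Below is a minimal standalone sketch of that pattern; the file paths and the PCC variable are illustrative assumptions.

from distutils.sysconfig import parse_makefile

conf = {}
for path in ('lib/petsc/conf/variables', 'lib/petsc/conf/petscvariables'):
    try:
        parse_makefile(path, conf)   # fills conf; later files override earlier definitions
    except OSError:
        pass                         # tolerate a missing file in this sketch
print('c_compile = %(PCC)s' % conf if 'PCC' in conf else 'PCC not defined')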
Beispiel #55
0
def get_params(parameter_filepath):
    print("Reading ", parameter_filepath)
    return parse_makefile(parameter_filepath)
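
A brief usage sketch for get_params; the file name and the variable key below are illustrative assumptions, not taken from the source above.

import os

if os.path.isfile('params.mk'):           # hypothetical file of "NAME = value" lines
    params = get_params('params.mk')      # returns a plain dict of makefile variables
    print(params.get('N_STEPS'))          # 'N_STEPS' is an assumed example key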
Beispiel #56
0
def pkgsources(pkg):
    '''
    Walks the source tree associated with 'pkg', analyzes the conditionals
    written into the makefiles, and returns the sources associated with each
    unique set of conditionals (as a dictionary).
    '''
    from distutils.sysconfig import parse_makefile
    autodirs = set('ftn-auto ftn-custom f90-custom'.split()
                   )  # Automatically recurse into these, if they exist
    skipdirs = set(
        'examples benchmarks'.split())  # Skip these during the build

    def compareDirLists(mdirs, dirs):
        smdirs = set(mdirs)
        sdirs = set(dirs).difference(autodirs)
        if not smdirs.issubset(sdirs):
            MISTAKES.append(
                'Makefile contains directory not on filesystem: %s: %r' %
                (root, sorted(smdirs - sdirs)))
        if not VERBOSE: return
        if smdirs != sdirs:
            from sys import stderr
            print >> stderr, (
                'Directory mismatch at %s:\n\t%s: %r\n\t%s: %r\n\t%s: %r' %
                (root, 'in makefile   ', sorted(smdirs), 'on filesystem ',
                 sorted(sdirs), 'symmetric diff',
                 sorted(smdirs.symmetric_difference(sdirs))))

    def compareSourceLists(msources, files):
        smsources = set(msources)
        ssources = set(
            f for f in files
            if os.path.splitext(f)[1] in ['.c', '.cxx', '.cc', '.cpp', '.F'])
        if not smsources.issubset(ssources):
            MISTAKES.append(
                'Makefile contains file not on filesystem: %s: %r' %
                (root, sorted(smsources - ssources)))
        if not VERBOSE: return
        if smsources != ssources:
            from sys import stderr
            print >> stderr, (
                'Source mismatch at %s:\n\t%s: %r\n\t%s: %r\n\t%s: %r' %
                (root, 'in makefile   ', sorted(smsources), 'on filesystem ',
                 sorted(ssources), 'symmetric diff',
                 sorted(smsources.symmetric_difference(ssources))))

    allconditions = defaultdict(set)
    sources = defaultdict(deque)
    for root, dirs, files in os.walk(os.path.join('src', pkg)):
        conditions = allconditions[os.path.dirname(root)].copy()
        makefile = os.path.join(root, 'makefile')
        if not os.path.exists(makefile):
            continue
        makevars = parse_makefile(makefile)
        mdirs = makevars.get(
            'DIRS', '').split()  # Directories specified in the makefile
        compareDirLists(mdirs,
                        dirs)  # diagnostic output to find unused directories
        candidates = set(mdirs).union(autodirs).difference(skipdirs)
        dirs[:] = list(candidates.intersection(dirs))
        with open(makefile) as lines:

            def stripsplit(line):
                return filter(lambda c: c != "'",
                              line[len('#requires'):]).split()

            conditions.update(
                set(
                    tuple(stripsplit(line)) for line in lines
                    if line.startswith('#requires')))

        def relpath(filename):
            return os.path.join(root, filename)

        sourcec = makevars.get('SOURCEC', '').split()
        sourcef = makevars.get('SOURCEF', '').split()
        compareSourceLists(
            sourcec + sourcef,
            files)  # Diagnostic output about unused source files
        sources[repr(sorted(conditions))].extend(
            relpath(f) for f in sourcec + sourcef)
        allconditions[root] = conditions
    return sources
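
A usage sketch for pkgsources; 'vec' is an assumed package name, and the call presumes the script runs from a source tree laid out as src/<pkg>/... with the module-level names the function references (defaultdict, deque, MISTAKES, VERBOSE) already defined.

if __name__ == '__main__':
    # Each key is the repr of a sorted set of '#requires' conditions; the value
    # holds the source paths guarded by exactly those conditions.
    for conditions, files in pkgsources('vec').items():
        print(conditions, '->', len(files), 'source files')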
Beispiel #57
0
    def package(self):
        if self.os == "Darwin":
            filename = "setup_darwin.sh"
            libsuffix = ".dylib"
        elif self.os == "Linux":
            filename = "setup_linux.sh"
            libsuffix = ".so"
        else:
            raise ValueError("Unknown os '%s'." % self.os)

        shutil.copyfile(os.path.join(self.build_config.src_dir, filename),
                        os.path.join(self.build_config.dest_dir, "setup.sh"))

        os.chdir(os.path.join(self.build_config.build_dir, "cencalvm-build"))
        from distutils.sysconfig import parse_makefile
        makefile = parse_makefile("Makefile")
        package = makefile["PACKAGE"]
        version = makefile["VERSION"]

        os.chdir(self.build_config.dest_dir)

        if self.os == "Darwin":
            self._update_darwinlinking()

        # Strip symbols from binaries and libraries
        strip = (
            "strip",
            "-x",
        ) if self.os == "Darwin" else ("strip", )
        strip_list = glob.glob("bin/*")
        libs = glob.glob("lib/lib*")
        libs += glob.glob("lib64/lib*")
        for lib in libs:
            if libsuffix in lib and not os.path.islink(
                    lib) and not lib.endswith("_s.so") and not lib.endswith(
                        ".py"):
                strip_list.append(lib)
        strip_list += [file for file in glob.glob("libexec/gcc/*/*/cc1*")]
        strip_list += [file for file in glob.glob("libexec/gcc/*/*/lto1*")]
        strip_list += [
            file for file in glob.glob("libexec/gcc/*/*/lt-wrapper*")
        ]
        cmd = strip + tuple(strip_list)
        run_cmd(cmd)

        orig_name = os.path.split(self.build_config.dest_dir)[1]
        base_name = "{0}-{1}".format(package, version)
        os.chdir("..")
        os.rename(orig_name, base_name)

        tarball = os.path.join(
            self.build_config.src_dir,
            "{package}-{version}-{os}-{arch}.tgz".format(package=package,
                                                         version=version,
                                                         os=self.os,
                                                         arch=self.arch))
        cmd = (
            "tar",
            "-zcf",
            tarball,
            "--exclude={0}/lib/*.a".format(base_name),
            "--exclude={0}/lib/*.la".format(base_name),
            base_name,
        )
        run_cmd(cmd)

        os.rename(base_name, orig_name)
        return
Beispiel #58
0
def read_setup_file(filename):
    """Reads a Setup file and returns Extension instances."""
    from distutils.sysconfig import (parse_makefile, expand_makefile_vars,
                                     _variable_rx)

    from distutils.text_file import TextFile
    from distutils.util import split_quoted

    # First pass over the file to gather "VAR = VALUE" assignments.
    vars = parse_makefile(filename)

    # Second pass to gobble up the real content: lines of the form
    #   <module> ... [<sourcefile> ...] [<cpparg> ...] [<library> ...]
    file = TextFile(filename,
                    strip_comments=1,
                    skip_blanks=1,
                    join_lines=1,
                    lstrip_ws=1,
                    rstrip_ws=1)
    try:
        extensions = []

        while True:
            line = file.readline()
            if line is None:  # eof
                break
            if _variable_rx.match(line):  # VAR=VALUE, handled in first pass
                continue

            if line[0] == line[-1] == "*":
                file.warn("'%s' lines not handled yet" % line)
                continue

            line = expand_makefile_vars(line, vars)
            words = split_quoted(line)

            # NB. this parses a slightly different syntax than the old
            # makesetup script: here, there must be exactly one extension per
            # line, and it must be the first word of the line.  I have no idea
            # why the old syntax supported multiple extensions per line, as
            # they all wind up being the same.

            module = words[0]
            ext = Extension(module, [])
            append_next_word = None

            for word in words[1:]:
                if append_next_word is not None:
                    append_next_word.append(word)
                    append_next_word = None
                    continue

                suffix = os.path.splitext(word)[1]
                switch = word[0:2]
                value = word[2:]

                if suffix in (".c", ".cc", ".cpp", ".cxx", ".c++", ".m",
                              ".mm"):
                    # hmm, should we do something about C vs. C++ sources?
                    # or leave it up to the CCompiler implementation to
                    # worry about?
                    ext.sources.append(word)
                elif switch == "-I":
                    ext.include_dirs.append(value)
                elif switch == "-D":
                    equals = value.find("=")
                    if equals == -1:  # bare "-DFOO" -- no value
                        ext.define_macros.append((value, None))
                    else:  # "-DFOO=blah"
                        ext.define_macros.append(
                            (value[0:equals], value[equals + 2:]))
                elif switch == "-U":
                    ext.undef_macros.append(value)
                elif switch == "-C":  # only here 'cause makesetup has it!
                    ext.extra_compile_args.append(word)
                elif switch == "-l":
                    ext.libraries.append(value)
                elif switch == "-L":
                    ext.library_dirs.append(value)
                elif switch == "-R":
                    ext.runtime_library_dirs.append(value)
                elif word == "-rpath":
                    append_next_word = ext.runtime_library_dirs
                elif word == "-Xlinker":
                    append_next_word = ext.extra_link_args
                elif word == "-Xcompiler":
                    append_next_word = ext.extra_compile_args
                elif switch == "-u":
                    ext.extra_link_args.append(word)
                    if not value:
                        append_next_word = ext.extra_link_args
                elif suffix in (".a", ".so", ".sl", ".o", ".dylib"):
                    # NB. a really faithful emulation of makesetup would
                    # append a .o file to extra_objects only if it
                    # had a slash in it; otherwise, it would s/.o/.c/
                    # and append it to sources.  Hmmmm.
                    ext.extra_objects.append(word)
                else:
                    file.warn("unrecognized argument '%s'" % word)

            extensions.append(ext)
    finally:
        file.close()

    return extensions
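
A brief usage sketch for read_setup_file; the Setup-file path below is an assumption, and distutils.extension is only available on Pythons that still ship distutils (before 3.12).

import os
from distutils.extension import read_setup_file

setup_path = 'Modules/Setup.local'        # illustrative makesetup-style file
if os.path.exists(setup_path):
    for ext in read_setup_file(setup_path):
        print(ext.name, len(ext.sources), 'sources,', ext.libraries)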
Beispiel #59
0
"""distutils.extension