Code example #1
def check_mumps_version(self):
    # translate special tags, not yet used
    dict_vers = {'5.0.2consortium': '5.0.2', '5.1.1consortium': '5.1.1'}
    fragment = r'''
#include <stdio.h>
#include "smumps_c.h"

int main(void){
    printf("%s", MUMPS_VERSION);
    return 0;
}'''
    self.start_msg('Checking mumps version')
    try:
        ret = self.check_cc(fragment=fragment,
                            use='MUMPS',
                            mandatory=True,
                            execute=True,
                            define_ret=True)
        self.env['MUMPS_VERSION'] = ret
        if dict_vers.get(ret, ret) not in ('5.0.2', '5.1.1'):
            raise Errors.ConfigurationError(
                "expected versions: {0}".format('5.0.2/5.1.1(consortium)'))
    except:
        self.end_msg('no', 'YELLOW')
        raise
    else:
        self.define('ASTER_MUMPS_VERSION', ret)
        self.end_msg(self.env['MUMPS_VERSION'])
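
The MUMPS checks above follow the usual waf configure idiom: open a progress message with start_msg, compile and run a small probe through check_cc with execute=True and define_ret=True so the probe's stdout comes back as the return value, and close the message on every path before raising Errors.ConfigurationError. A minimal sketch of that idiom for an imaginary library; check_foo_version, the 'FOO' use variable, foo.h and FOO_VERSION are placeholders, not taken from the examples:

from waflib import Errors
from waflib.Configure import conf

@conf
def check_foo_version(self):
    # hypothetical probe: print the version macro exposed by an imaginary 'foo' library
    fragment = r'''
#include <stdio.h>
#include "foo.h"

int main(void){
    printf("%s", FOO_VERSION);
    return 0;
}'''
    self.start_msg('Checking foo version')
    try:
        ret = self.check_cc(fragment=fragment, use='FOO',
                            mandatory=True, execute=True, define_ret=True)
        if ret not in ('1.0.0', '1.1.0'):
            raise Errors.ConfigurationError('expected versions: 1.0.0/1.1.0')
    except Exception:
        # the progress message must be closed even on failure
        self.end_msg('no', 'YELLOW')
        raise
    self.end_msg(ret)
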
Code example #2
def check_mumps_version(self):
    fragment = r'''
#include <stdio.h>
#include "smumps_c.h"

int main(void){
    printf("%s", MUMPS_VERSION);
    return 0;
}'''
    self.start_msg('Checking mumps version')
    try:
        ret = self.check_cc(fragment=fragment,
                            use='MUMPS',
                            mandatory=True,
                            execute=True,
                            define_ret=True)
        self.env['MUMPS_VERSION'] = ret
        vers = ret.replace("consortium", "")
        if vers not in ("5.2.1", "5.1.2"):
            raise Errors.ConfigurationError(
                "expected versions: {0}".format('5.2.1/5.1.2(consortium)'))
    except:
        self.end_msg('no', 'YELLOW')
        raise
    else:
        self.define('ASTER_MUMPS_VERSION', ret)
        self.end_msg(self.env['MUMPS_VERSION'])
Code example #3
def configure(conf):
    conf.load('brick_general')
    # this is a hack, because, when using ${CURRENT_RUNDIR} directly inside
    # the rule definition of the TaskChains, the concatenation with the
    # logfile name introduces a space between them
    conf.env.VLOG_LOGFILE = '/vlog_sv.log'
    conf.env.VCOM_LOGFILE = '/vcom.log'
    conf.env.VSIM_LOGFILE = conf.env.BRICK_LOGFILES + '/vsim.log'

    try:
        conf.env.INCLUDES_VENDOR = [
            os.environ['MODEL_SIM_ROOT'] + '/include/',
        ]
    except KeyError:
        raise Errors.ConfigurationError(
            "It seems that modelsim hasn't been installed. "
            "Please make sure that variable MODEL_SIM_ROOT is defined.")

    conf.env.VSIM_OPTIONS = ['-64']
    conf.env.MODELSIM_WORKLIBS = []

    conf.find_program('vlog', var='MODEL_VLOG')
    conf.find_program('vcom', var='MODEL_VCOM')
    conf.find_program('vsim', var='MODEL_VSIM')
    conf.find_program('vlib', var='MODEL_VLIB')
Code example #4
def check_prerequisites_package(self, yammdir, minvers):
    """Check for version of the prerequisites package.

    Can only be used if prerequisites are installed with a 'Yamm' package.

    Arguments:
        self (Configure): Configure object.
        yammdir (str): Directory path of prerequisites/tools installation.
        minvers (str): Minimal required version of the prerequisites package.
    """
    self.start_msg("Checking prerequisites version >= {0}".format(minvers))

    filename = osp.join(yammdir, 'VERSION')
    if osp.isfile(filename):
        with open(filename, 'r') as fvers:
            version = fvers.read().strip()
        ok = version >= minvers
    else:
        version = "not found"
        ok = False

    self.end_msg(version, 'GREEN' if ok else 'YELLOW')
    if not ok:
        msg = ("Official prerequisites not found! "
               "Please install updated prerequisites using: "
               "install_env --prerequisites")
        raise Errors.ConfigurationError(msg)
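
One caveat with the check above: 'version >= minvers' compares strings lexicographically, so for example '2.10.0' sorts before '2.9.1'. A hedged alternative; version_tuple is a hypothetical helper, not part of the project:

def version_tuple(text):
    """Turn '2.10.1' into (2, 10, 1) so comparisons are numeric, not lexicographic."""
    return tuple(int(part) for part in text.split('.') if part.isdigit())

# the string comparison gets this wrong, the tuple comparison does not:
assert not ('2.10.0' >= '2.9.1')
assert version_tuple('2.10.0') >= version_tuple('2.9.1')

# possible drop-in for the check above:
# ok = version_tuple(version) >= version_tuple(minvers)
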
Code example #5
def configure(self):
	if not self.env.PROJ_CONFIGURATION:
		self.to_log("A default project configuration was created since no custom one was given in the configure(conf) stage. Define your custom project settings by adding PROJ_CONFIGURATION to env. The env.PROJ_CONFIGURATION must be a dictionary with at least one key, where each key is the configuration name, and the value is a dictionary of key/value settings.\n")

	# Check for any added config files added by the tool 'c_config'.
	if 'cfg_files' in self.env:
		self.env.INCLUDES = Utils.to_list(self.env.INCLUDES) + [os.path.abspath(os.path.dirname(f)) for f in self.env.cfg_files]

	# Create default project configuration?
	if 'PROJ_CONFIGURATION' not in self.env:
		defaults = delete_invalid_values(self.env.get_merged_dict())
		self.env.PROJ_CONFIGURATION = {
			"Debug": defaults,
			"Release": defaults,
		}

	# Some build settings are required to be present by XCode. We will supply default values
	# if user hasn't defined any.
	defaults_required = [('PRODUCT_NAME', '$(TARGET_NAME)')]
	for cfgname, settings in self.env.PROJ_CONFIGURATION.items():
		for default_var, default_val in defaults_required:
			if default_var not in settings:
				settings[default_var] = default_val

	# Error check customization
	if not isinstance(self.env.PROJ_CONFIGURATION, dict):
		raise Errors.ConfigurationError("The env.PROJ_CONFIGURATION must be a dictionary with at least one key, where each key is the configuration name, and the value is a dictionary of key/value settings.")
Code example #6
def configure_python_module(ctx,
                            name,
                            url,
                            packtgz,
                            pack,
                            cmdline=None,
                            extracmd="",
                            forceinstall=False,
                            postinstall=None):
    import waflib.Logs
    import os
    from waflib import Errors
    import os.path as osp
    import autoinstall_lib as atl
    ctx.load("python")
    doit = False
    import sys

    iall = shouldIinstall_all(ctx, name)
    try:
        assert forceinstall == False and getattr(
            ctx.options, name + "_forceinstall") == False and iall == False
        check_python_module(ctx, name, extracmd)
    except Exception as e:
        if upgrade(ctx, name) or getattr(ctx.options, name + "_forceinstall",
                                         False) or iall:
            waflib.Logs.pprint("PINK", "Install python module '%s'" % name)
            atl.installsmthg_pre(ctx, url, packtgz)
            if not osp.exists(ctx.env.PYTHONDIR):
                os.makedirs(ctx.env.PYTHONDIR)
            if cmdline is None:
                cmdline = "cd build/%s; PYTHONPATH=%s:$PYTHONPATH %s setup.py build_ext -L=%s ;PYTHONPATH=%s:$PYTHONPATH %s setup.py install --install-lib=%s --install-scripts=%s" % (
                    pack, ctx.env.PYTHONDIR, ctx.env.PYTHON[0],
                    ctx.env.LIBPATH_PYEMBED[0], ctx.env.PYTHONDIR,
                    ctx.env.PYTHON[0], ctx.env.PYTHONDIR, ctx.env.BINDIR)
            waflib.Logs.pprint("PINK", cmdline)
            ret = ctx.exec_command(cmdline)
            if ret != 0:
                raise Errors.ConfigurationError("Cannot build %s" % name)
            # deal with eggs...
            if (not osp.exists(osp.join(ctx.env.PYTHONDIR, name))) and (
                    not osp.exists(osp.join(ctx.env.PYTHONDIR, name + ".py"))):
                eggdir = [
                    v for v in os.listdir(ctx.env.PYTHONDIR)
                    if name in v and osp.isdir(osp.join(ctx.env.PYTHONDIR, v))
                ][0]
                if eggdir != name and eggdir != name + ".py":
                    mdir = [
                        v for v in os.listdir(
                            osp.join(ctx.env.PYTHONDIR, eggdir)) if name in v
                    ][0]
                    os.symlink(osp.join(ctx.env.PYTHONDIR, eggdir, mdir),
                               osp.join(ctx.env.PYTHONDIR, name))
            check_python_module(ctx, name, extracmd)
            if postinstall:
                postinstall()
        else:
            raise e
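
A hypothetical call of the helper above; the package name, URL and archive names are placeholders invented for illustration:

def configure(ctx):
    # placeholder package: download, build and install 'foolib' into PYTHONDIR
    configure_python_module(ctx,
                            name='foolib',
                            url='https://example.com/dist/foolib-1.0.tar.gz',
                            packtgz='foolib-1.0.tar.gz',
                            pack='foolib-1.0')
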
Code example #7
 def check_folder(path, name):
     try:
         assert os.path.isdir(path)
     except AssertionError:
         raise Errors.ConfigurationError(
             f"{package_lo}_{name} ({path}) is not a folder!"
         )
     return path
Code example #8
def list2nodes(self, subdir, list_, silent_fail):
    nodes = set()
    for file_ in list_:
        n = subdir.find_node(file_)
        if n in nodes:
            raise Errors.ConfigurationError(
                "'{0}': Node '{1}' already exits in list '{2}'.".format(
                    self.name,
                    subdir.srcpath() + '/' + n, list_))
        elif n is not None:
            nodes.add(n)
        elif not silent_fail:
            raise Errors.ConfigurationError(
                "'{0}': Failed to find '{1}' on disk.".format(
                    self.name,
                    subdir.srcpath() + '/' + file_))
    return nodes
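
A hypothetical usage of list2nodes from a wscript, assuming unit is an object carrying this method and ctx is the current waf context; the directory and file names are placeholders:

# placeholders: resolve two RTL files below an 'rtl' directory next to the wscript
rtl_dir = ctx.path.find_dir('rtl')
nodes = unit.list2nodes(rtl_dir, ['top.sv', 'top_pkg.sv'], silent_fail=False)
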
Code example #9
	def check_folder(path, name):
		try:
			assert os.path.isdir(path)
		except AssertionError:
			raise Errors.ConfigurationError(
				"%s_%s (%s) is not a folder!" \
				% (package_lo, name, path))
		return path
Code example #10
 def addview(self, name, view, **kwargs):
     if name not in self.units:
         raise Errors.ConfigurationError(
             ("SoCManager: Error. Cannot find Module '{0}'"
              " from script '{1}' to add view '{2}'").format(
                  name, self.ctx.cur_script.srcpath(), view))
     else:
         self.units[name].addview(view, **kwargs)
Code example #11
def check_parmetis(self):
    opts = self.options
    if opts.enable_parmetis is False:
        raise Errors.ConfigurationError('PARMETIS disabled')
    if opts.parmetis_libs is None:
        opts.parmetis_libs = 'parmetis'
    if opts.parmetis_libs:
        self.check_parmetis_libs()
    self.check_parmetis_version()
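
The enable_parmetis test above relies on a tri-state option: None means auto-detect, False means explicitly disabled. One way such a flag pair could be declared in the options() stage; the flag names here are an assumption, not copied from the project:

def options(opt):
    group = opt.add_option_group('Parmetis options')
    # default None lets check_parmetis auto-detect; --disable-parmetis makes it raise early
    group.add_option('--enable-parmetis', dest='enable_parmetis',
                     action='store_true', default=None,
                     help='force the use of parmetis')
    group.add_option('--disable-parmetis', dest='enable_parmetis',
                     action='store_false',
                     help='disable parmetis support')
    group.add_option('--parmetis-libs', dest='parmetis_libs', default=None,
                     help='parmetis libraries to link against')
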
Code example #12
File: mathematics.py  Project: jacojvr/Code_Aster
def check_math_libs_call(self, color='RED'):
    """Compile and run a small blas/lapack program"""
    self.start_msg('Checking for a program using blas/lapack')
    try:
        ret = self.check_fc(fragment=blas_lapack_fragment,
                            use='MATH OPENMP MPI',
                            mandatory=False,
                            execute=True,
                            define_ret=True)
        values = list(map(float, ret.split() if ret else []))
        ref = [10.0, 5.0]
        if values != ref:
            raise Errors.ConfigurationError(
                'invalid result: %r (expecting %r)' % (values, ref))
    except Exception as exc:
        # the message must be closed
        self.end_msg('no', color=color)
        raise Errors.ConfigurationError(str(exc))
Code example #13
    def check_all(self):
        """
        Check all views
        """
        if self._check:
            raise Errors.ConfigurationError(
                "'{0}': Error. Module has already been checked.".format(
                    self.name))
        else:
            self._check = True

        try:
            for v in self._v:
                self._v[v].check()
        except SFFerrors.Error as e:
            raise Errors.ConfigurationError(
                ("Module '{0}': Error. View '{1}' failed check with "
                 "message: {2}".format(self.name, v, e.msg)))
Code example #14
    def addview(self, view, **kwargs):
        if view in self._v:
            raise Errors.ConfigurationError(
                ("SoCManager: Error. View names must be unique. Module '{0}'"
                 " from script '{1}' already has view '{2}'").format(
                     self.name, self.script.srcpath(), view))

        #Create & store the kwargs into the view
        self._v[view] = SFFView(self, **kwargs)
Code example #15
def check_scotch(self):
    opts = self.options
    if opts.enable_scotch is False:
        raise Errors.ConfigurationError('SCOTCH disabled')

    self.check_scotch_headers()
    self.check_scotch_version()

    if opts.scotch_libs is None:
        if self.env.SCOTCH_VERSION and self.env.SCOTCH_VERSION[0] < 5:
            opts.scotch_libs = 'scotch scotcherr scotcherrexit'
        else:
            # default or SCOTCH_VERSION >= 5
            opts.scotch_libs = 'esmumps scotch scotcherr'

    # Code_Aster v11.0.1: FICHE 016627
    if 'scotchmetis' in opts.scotch_libs:
        raise Errors.ConfigurationError(
            'scotchmetis variant library is not compatible with Code_Aster')

    if opts.scotch_libs:
        self.check_scotch_libs()
Code example #16
def add_cds_netlist_target(self):
    try:
        cellview = getattr(self, 'cellview', '')
        if cellview.find('.') == -1 or cellview.find(':') == -1:
            Logs.error(
                'Please specify a cellview of the form Lib.Cell:View with the \'cellview\' attribute when using the feature \'cds_netlist\'.'
            )
            return
        (self.libname, rest) = cellview.split(".")
        (self.cellname, self.viewname) = rest.split(":")

        try:
            config_file = self.path.find_dir(
                self.env['CDS_LIBS_FLAT'][self.libname])
        except KeyError:
            raise Errors.ConfigurationError(
                'Please specify a library path for library ' + self.libname +
                ' in conf.env[\'CDS_LIBS\'], No library path found.')

        if not config_file:
            raise Errors.ConfigurationError(
                'Library ' + self.libname + ' in ' +
                self.env['CDS_LIBS_FLAT'][self.libname] + ' not found')
        config_file = config_file.make_node(self.cellname + '/' +
                                            self.viewname + '/expand.cfg')
        #if not config_file:
        #	raise Errors.ConfigurationError('Cellview '+self.cellname+':'+self.viewname+' in library '+self.libname+' not found.')

        # logfile
        self.logfile = self.env.BRICK_LOGFILES + '/cadence_netlist_' + self.cellname + '.log'

        self.rundir = self.cellname

        t = self.create_task('cdsNetlistTask', config_file)
    except ValueError:
        raise Errors.ConfigurationError(
            'For feature "cds_netlist", you need to specify a parameter "toplevel" in the form of lib.cell:view'
        )
Code example #17
    def add(self, *args, **kwargs):
        """
        Creates a SFFUnit and inserts it into the unit dictionary if there
        isn't already an existing one.
        """
        unit = self.ctx.SFFUnit(*args, **kwargs)

        if unit.name in self.units:
            raise Errors.ConfigurationError(
                ("SoCManager: Error. Module names must be unique. Module '{0}'"
                 " from script '{1}' already defined by script '{2}'").format(
                     unit.name, self.ctx.cur_script.srcpath(),
                     self.getunit(unit.name).script.srcpath()))
        self.units[unit.name] = unit
Code example #18
def check_math_libs_call_blacs(self, color='RED'):
    """Compile and run a minimal blacs program"""
    if self.get_define('HAVE_MPI'):
        self.start_msg('Checking for a program using blacs')
        try:
            ret = self.check_fc(fragment=blacs_fragment,
                                use='MPI OPENMP MATH',
                                mandatory=True)
        except Exception as exc:
            # the message must be closed
            self.end_msg('no', color=color)
            raise Errors.ConfigurationError(str(exc))
        else:
            self.end_msg('yes')
Code example #19
def check_petsc(self):
    opts = self.options
    if opts.enable_petsc is False:
        raise Errors.ConfigurationError('PETSC disabled')

    optlibs = None
    if opts.petsc_libs is None:
        opts.petsc_libs = 'petsc'
        # add optional libs
        optlibs = 'ml HYPRE superlu stdc++'
    if opts.petsc_libs:
        self.check_petsc_libs(optlibs)

    self.check_petsc_headers()
    self.check_petsc_version()
Code example #20
def check_mumps(self):
    opts = self.options
    if opts.enable_mumps is False:
        raise Errors.ConfigurationError('MUMPS disabled')
    self.check_mumps_headers()
    self.check_mumps_version()
    if opts.mumps_libs is None:
        opts.mumps_libs = 'dmumps zmumps smumps cmumps mumps_common pord'
    if not opts.parallel:
        opts.mumps_libs += ' mpiseq'
    if opts.mumps_libs:
        self.check_mumps_libs()
    self.set_define_from_env('MUMPS_INT_SIZE',
                             'Setting size of Mumps integers',
                             'unexpected value for mumps int: %(size)s',
                             into=(4, 8),
                             default=4)
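
set_define_from_env is a project-specific helper that is not shown in these examples; a rough sketch of what it might do, which is an assumption and may differ from the real implementation: read a value from the environment, fall back to a default, validate it against the allowed choices in 'into', then publish it with self.define.

from waflib import Errors
from waflib.Configure import conf

@conf
def set_define_from_env(self, name, start, errmsg, into=None, default=None):
    # sketch only: read env[name], fall back to the default, validate and define
    self.start_msg(start)
    value = self.env[name] or default
    try:
        value = int(value)
    except (TypeError, ValueError):
        pass
    if into and value not in into:
        self.end_msg('no', 'YELLOW')
        raise Errors.ConfigurationError(errmsg % {'size': value})
    self.define(name, value)
    self.end_msg(value)
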
Code example #21
def configure(ctx):
    import waflib.Logs
    from waflib import Errors
    ctx.load("python")
    doit = False
    import sys
    sys.path += [ctx.env.PYTHONDIR]

    if ctx.options.h5py_install or ctx.options.upgrade_all_deps or ctx.options.install_all_deps:
        try:
            import h5py
            raise Exception()
        except Exception as e:
            doit = True
        if doit:
            import os.path as osp
            import autoinstall_lib as atl
            atl.installsmthg_pre(
                ctx, "http://h5py.googlecode.com/files/h5py-1.3.1.tar.gz",
                "h5py-1.3.1.tar.gz")
            if ctx.env.INCLUDES_hdf5:
                HDF5_DIR = osp.split(ctx.env.INCLUDES_hdf5[0])[0]
            else:
                fi = ctx.find_file("hdf5.h", ctx.env.INCLUDES_pmc)
                print(fi)
                HDF5_DIR = osp.split(osp.split(fi)[0])[0]
            HDF5_API = "18"
            print(HDF5_DIR)
            cmdline = "cd build/%s; HDF5_DIR=%s HDF5_API=%s python setup.py install --install-purelib=%s" % (
                "h5py-1.3.1", HDF5_DIR, HDF5_API, ctx.env.PYTHONDIR)
            waflib.Logs.pprint("PINK", cmdline)
            if ctx.cmd_and_log(cmdline) != 0:
                raise Errors.ConfigurationError("Cannot build h5py")
    try:
        import h5py
    except Exception as e:
        if not doit:
            waflib.Logs.pprint(
                "PINK",
                "You can install automatically h5py using cmdline option --h5py_install"
            )
        else:
            waflib.Logs.pprint("RED", "Autoinstall h5py has failed !")
        raise e
Code example #22
def check_math_libs_call_openmp(self, color='RED'):
    """Compile and run a minimal openmp program"""
    self.start_msg('Checking for a program using omp thread')
    try:
        ret = self.check_fc(fragment=omp_thread_fragment,
                            use='MATH OPENMP MPI',
                            mandatory=True,
                            execute=True,
                            define_ret=True)
        nbThreads = int((ret and ret.split() or [])[-1])
        refe = min(self.env['NPROC'], 2) if self.env.BUILD_OPENMP else 1
        if nbThreads < refe:
            raise ValueError(
                "expected at least {0} thread(s)".format(nbThreads))
    except Exception as exc:
        # the message must be closed
        self.end_msg('no', color=color)
        raise Errors.ConfigurationError(str(exc))
    else:
        self.end_msg('yes (on {0} threads)'.format(nbThreads))
Code example #23
    def _buildunitdeps(self, unit_order, unit):
        """
        Order a list which can be built from left to right to build all
        units from a dictionary of named units which have
        a subkey 'use' referring to other named prerequisite units.
        If memory becomes a problem then change to iterative from recursion.
        """
        if unit in unit_order:
            return unit_order
        elif 'use' in self.units[unit]._k:
            for u in self.units[unit]._k['use']:
                try:
                    unit_order = self._buildunitdeps(unit_order, u)
                except KeyError:
                    raise Errors.ConfigurationError(
                        ('Unit \'{0}\''
                         ' required by \'{1}\' defined in \'{2}\' has not been'
                         ' defined.').format(
                             u, unit, self.units[unit].script.srcpath()))

        unit_order.append(unit)
        return unit_order
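
_buildunitdeps is a depth-first walk that appends a unit only after all of its prerequisites, i.e. a recursive topological sort without cycle detection. The same idea on plain dictionaries, independent of the SFFUnit classes; the unit names are made up:

def build_order(units, name, order=None):
    """Depth-first post-order walk: prerequisites end up before their users."""
    if order is None:
        order = []
    if name in order:
        return order
    for dep in units[name].get('use', []):
        build_order(units, dep, order)
    order.append(name)
    return order

units = {
    'top': {'use': ['cpu', 'uart']},
    'cpu': {'use': ['regfile']},
    'uart': {},
    'regfile': {},
}
print(build_order(units, 'top'))   # ['regfile', 'cpu', 'uart', 'top']
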
Code example #24
def configure(ctx):
    if not ctx.options.top_level:
        raise Errors.ConfigurationError(
            'SoCManager: Please set a top level unit by running waf '
            'configure --top_level=<top_level>')
    ctx.env['top_level'] = ctx.options.top_level
    ctx.env['views'] = ctx.options.views
    ctx.env['check'] = ctx.options.check
    """Create class in the context to hold/manipulate the SFFUnits."""
    ctx.SFFUnits = ctx.SFFUnitsCont()
    """File extensions to compile as Verilog."""
    ctx.env.VLOG_EXT = ['.v']
    """File extensions to compile as VHDL."""
    ctx.env.VHDL_EXT = ['.vhdl', '.vhd']
    """File extensions to compile as System Verilog."""
    ctx.env.SVLOG_EXT = ['.sv']
    """File extensions to identify Synopsys Design Constraints (SDC) files."""
    ctx.env.SDC_EXT = ['.sdc']
    """
    Static version of Perl vppreproc http://www.veripool.org/ used to parse
    verilog for headers.
    """
    ctx.find_program('vppreproc')
Code example #25
File: planahead.py  Project: electronicvisions/brick
def scan_planAhead_script(self):
    self.tcl_file_node = self.path.find_node(getattr(self, 'tcl_file', None))
    if not self.tcl_file_node:
        raise Errors.ConfigurationError(
            'A TCL file for planAhead could not be found: ' +
            getattr(self, 'tcl_file', ''))

    # check if constraints and netlists for planahead exist
    # both rel and abs paths are needed due to tcl parsing behaviour below
    # ugly but works ...
    self.constraints_node = []
    self.c_path = {'rel': [], 'abs': []}
    for constraint in self.constraints:
        temp = self.path.find_node(constraint)
        if not temp:
            raise Errors.ConfigurationError(
                'A constraint file for planAhead could not be found: ' + constraint)
        self.constraints_node.append(temp)
        self.c_path['rel'].append(constraint)
        self.c_path['abs'].append(temp.abspath())
    self.netlists_node = []
    self.n_path = {'rel': [], 'abs': []}
    for netlist in self.netlists:
        temp = self.path.find_node(netlist)
        if not temp:
            raise Errors.ConfigurationError(
                'A ngc netlist file for planAhead could not be found: ' + netlist)
        self.netlists_node.append(temp)
        self.n_path['rel'].append(netlist)
        self.n_path['abs'].append(temp.abspath())

    inputs = [self.tcl_file_node]
    outputs = []
    variables = {
        'BRICK_RESULTS': './results',
        'PROJECT_ROOT': self.env.PROJECT_ROOT,
        'PROJECT_NAME': self.project_name,
        'CONSTRAINTS': ' '.join(self.c_path['rel']),
        'NETLISTS': ' '.join(self.n_path['rel'])
    }

    # help file
    project_file_name = os.path.split(self.tcl_file_node.abspath())[1]
    help_file = self.bld.bldnode.make_node('brick_' + project_file_name)
    with open(help_file.abspath(), 'w') as hf:
        hf.write('set BRICK_RESULTS ./results\n')
        hf.write('set PROJECT_ROOT ' + self.env.PROJECT_ROOT + '\n')
        hf.write('set PROJECT_NAME ' + self.project_name + '\n')
        hf.write('set CONSTRAINTS [list ' + ' '.join(self.c_path['abs']) +
                 ']\n')
        hf.write('set NETLISTS [list ' + ' '.join(self.n_path['abs']) + ']\n')

    #
    # Project file parsing
    #
    with open(self.tcl_file_node.abspath(), 'r') as tcl_handle:
        # This is ugly and will break!
        for line in tcl_handle:
            # skip comments
            if re.match('\s*#', line):
                continue

            # replace env variables
            get_env = re.search('\$env\s*\(\s*(\w+)\s*\)\s*', line)
            if get_env:
                line = re.sub('\$env\s*\(\w+\s*\)\s*',
                              self.env[get_env.group(1)], line)

            # replace variables
            get_vars = re.search('\$(\w+)', line)
            while get_vars:
                line = re.sub('\$' + get_vars.group(1),
                              variables[get_vars.group(1)], line)
                get_vars = re.search('\$(\w+)', line)

            # add the implicit dependencies
            m0 = re.search('add_files', line)
            if m0:
                # remove options
                line = re.sub('-norecurse\s+', '', line)
                line = re.sub('-quiet\s+', '', line)
                line = re.sub('-scan_for_includes\s+', '', line)
                line = re.sub('-verbose\s+', '', line)
                line = re.sub('-fileset\s+?.+?\s+?', '', line)

                m1 = re.search('add_files\s+{(.+)}', line)
                if m1:
                    files = m1.group(1).split(' ')
                    for file in files:
                        if self.env['BRICK_RESULTS'] in file:
                            input_node = self.path.get_bld().make_node(
                                os.path.join(self.path.bld_dir(), file))
                            inputs.append(input_node)
                        else:
                            input_node = self.path.make_node(file)
                            mroot = re.match('/', file)
                            if mroot:
                                input_node = self.bld.root.make_node(file)
                            if input_node:
                                inputs.append(input_node)
                            else:
                                raise Errors.ConfigurationError(
                                    'File ' + file +
                                    ' not found in project file for planAhead project.'
                                )
                else:
                    m2 = re.search('add_files\s+(.+)', line)
                    if m2:
                        file = m2.group(1)
                        if self.env['BRICK_RESULTS'] in file:
                            input_node = self.path.get_bld().make_node(
                                os.path.join(self.path.bld_dir(), file))
                            inputs.append(input_node)
                        else:
                            input_node = self.path.make_node(file)
                            mroot = re.match('/', file)
                            if mroot:
                                input_node = self.bld.root.make_node(file)
                            if input_node:
                                inputs.append(input_node)
                            else:
                                raise Errors.ConfigurationError(
                                    'File ' + file +
                                    ' not found in project file for planAhead project.'
                                )

            # look for variables
            m3 = re.search('set\s+(.+?)\s+(.+)', line)
            if m3:
                m3_1 = re.search('\[\s*\$env\s+(.+)\s*\]', m3.group(2))
                if m3_1:
                    variables[m3.group(1)] = self.env[m3_1.group(1)]
                else:
                    variables[m3.group(1)] = m3.group(2)

            # find out project dir and project name
            m4 = re.search('create_project', line)
            if m4:
                line = re.sub('-force\s+', '', line)
                line = re.sub('-part\s+?.+?\s+?', '', line)
                m5 = re.search(
                    'create_project\s+?([\.\-_\/\w]+)\s+?([\.\-_\/\w]+)', line)
                if m5:
                    self.project_name = m5.group(1)
                    self.project_dir = m5.group(2)
                else:
                    raise Errors.ConfigurationError(
                        'Project name and/or project dir could not be inferred from TCL file '
                        + self.tcl_file_node.abspath())

            # look for implementations
            m6 = re.search('launch_run\s+(\w+)', line)
            if m6:
                filename = os.path.join(self.project_dir,
                                        self.project_name + '.runs',
                                        m6.group(1),
                                        self.toplevel + '_routed.ncd')
                outputs.append(self.path.get_bld().make_node(
                    os.path.join(self.path.bld_dir(), filename)))

    outputs.append(outputs[0].parent.make_node(self.toplevel + '.pcf'))
    # save output file path to environment
    self.env['PLANAHEAD_OUTPUT'] = outputs[0].path_from(self.path)
    # create actual task
    self.planAheadTask = self.create_task('planAheadTask', inputs, outputs)
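
The parsing loop above leans on two regex substitutions: $env(NAME) references are resolved from the waf environment, and remaining $NAME references from the local variables table. A standalone demonstration of that substitution step, with a made-up line and tables:

import re

env = {'PROJECT_ROOT': '/work/chip'}
variables = {'CONSTRAINTS': 'top.ucf'}

line = 'add_files $env(PROJECT_ROOT)/constraints/$CONSTRAINTS'

# replace $env(NAME) with the value from the environment table
m = re.search(r'\$env\s*\(\s*(\w+)\s*\)', line)
if m:
    line = re.sub(r'\$env\s*\(\s*\w+\s*\)', env[m.group(1)], line)

# replace remaining $NAME references with entries from the variables table
m = re.search(r'\$(\w+)', line)
while m:
    line = re.sub(r'\$' + m.group(1), variables[m.group(1)], line)
    m = re.search(r'\$(\w+)', line)

print(line)   # add_files /work/chip/constraints/top.ucf
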
Code example #26
    def finalize(self):
        """
        If --check is not defined:
        1) Process the unit views' inheritance on the use and use_tb keys
        2) Search the unit tree from the top and build the dependency order
            from --top_level unit for syn and sim using use and use_tb
        3) Drop unused units from self.units to save memory and processing
        4) Process the unit views' inheritance on remaining keys
        5) Pickle internal state where necessary and store to env['SFFUnits']

        if --check is defined:
        1) Check every view of every unit
        2) Go back to 1) above
        TODO 1.5) Walk all deps

        """

        # Test the existence of the top_level unit key
        try:
            self.getunit(self.top_level)
        except KeyError:
            raise Errors.ConfigurationError(
                ('Top Level "{0}" not'
                 ' found. Please re-run "waf configure --top_level= " with the'
                 ' correct top_level name or check the unit names and recurses'
                 ' in your wscript files.').format(self.top_level))

        if self.check:
            self.ctx.msg('Option', '--check', color='BLUE')
            for m in self.units:
                self.units[m].check_all()

        # Apply inheritance on the use and tb_use directives
        for name, unit in self.units.items():
            self.units[name].applyinheritance(self.views, ('use', 'tb_use'))

        # Get the top_level unit dependencies from the use and tb_use keys
        synu, simu = self.get_unit_deps(self.top_level)
        self.synu_deps = synu
        self.simu_deps = simu

        # Prune the SFFUnits dictionary to only syn and sim units
        self.units = dict((k, self.units[k]) for k in simu + synu)

        # Apply inheritance on all keys
        for name, unit in self.units.items():
            self.units[name].applyinheritance(self.views)

        # Get and store the unit dependencies from the use and tb_use keys
        for name, unit in self.units.items():
            synu, simu = self.get_unit_deps(name)
            self.units[name].set_deps(self.getunit(synu), self.getunit(simu))

        self.ctx.msg('top_level set to',
                     '{0}'.format(self.top_level),
                     color='BLUE')
        self.ctx.msg('Units for simulation',
                     '{0}'.format(self.simu_deps),
                     color='BLUE')
        self.ctx.msg('Units for synthesis',
                     '{0}'.format(self.synu_deps),
                     color='BLUE')

        # Context contains one or more waflib.Nodes.Nod3 which cannot be
        #  pickled so we have to get rid of it. Also the configuration
        #  context is not valid in build etc.
        for m in self.units:
            self.units[m].pack()
        env = self.ctx.env
        delattr(self, 'ctx')
        env['SFFUnits'] = pickle.dumps(self)
        self._packed = True
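
Because the configuration context cannot be pickled, the container strips it and stores itself in env['SFFUnits']; a later command would restore it roughly like this. This is only a sketch, the project may wrap the restore step differently:

import pickle

def build(bld):
    # sketch: recover the SFFUnits container that configure() pickled into the environment
    units = pickle.loads(bld.env['SFFUnits'])
    for name in units.units:
        bld.to_log('unit known from configure: %s\n' % name)
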
Code example #27
def cds_ius_prepare(self):
	# save worklib to env
	self.env.WORKLIB = getattr(self,'worklib',self.env.CDS_WORKLIB)
	# create task to generate worklib (if necessary)
	self.check_create_worklib_task(self.env.WORKLIB)
	#
	# transform search paths to the format used for ncvlog
	#
	vsp = getattr(self,'verilog_search_paths',[])
	self.env.VERILOG_SEARCH_PATHS = []
	vid = []
	if len(vsp) > 0:
		for path in vsp:
			self.env.VERILOG_SEARCH_PATHS.append(path.abspath())
			vid.append('-INCDIR')
			vid.append(path.abspath())

	if len(vid) > 0:
		self.env.VERILOG_INC_DIRS = vid

	if not hasattr(self,'source'):
		raise Errors.ConfigurationError('Please specify the source attribute for task generator '+getattr(self,'name','?noname? (and give it a name, too!)'))

	if not hasattr(self,'name'):
		self.name = Node.split_path(self.source[0])[-1]

	# generate the logfile name
	self.logfile = self.get_logdir_node().make_node(self.env.NCVLOG_LOGFILE+'_'+self.name).abspath()

	# process source here, skip default process_source
	self.source_vams = []
	self.source_string_vams = []
	self.source_sv   = []
	self.source_string_sv   = []
	self.source_v    = []
	self.source_string_v    = []
	remove_sources = []
	for src in getattr(self,'source',[]):
		if src.suffix() == '.vams' or src.suffix() == '.va':
			self.source_string_vams.append(src.abspath())
			self.source_vams.append(src)
			remove_sources.append(src)
		elif src.suffix() == '.v':
			self.source_string_v.append(src.abspath())
			self.source_v.append(src)
			remove_sources.append(src)
		elif src.suffix() == '.sv':
			self.source_string_sv.append(src.abspath())
			self.source_sv.append(src)
			remove_sources.append(src)

	for src in remove_sources:
		self.source.remove(src)
	#print self.name
	#print len(self.source_string_vams), len(self.source_string_v), len(self.source_string_sv)

	if hasattr(self,'view'):
		self.ncvlog_add_options = ['-VIEW',self.view]
	else:
		self.ncvlog_add_options = []

	if len(self.source_string_vams) > 0:
		task = self.create_task("CadenceVamslogTask",self.source_vams,[])
	if len(self.source_string_v) > 0:
		task = self.create_task("CadenceVlogTask",self.source_v,[])
	if len(self.source_string_sv) > 0:
		task = self.create_task("CadenceSvlogTask",self.source_sv,[])
Code example #28
def _check_minimum_python_version(major, minor):
    if sys.version_info[:2] < (major, minor):
        raise Errors.ConfigurationError(
            "Python version not supported: {0}, "
            "required minimum version: {1}.{2}".format(sys.version_info[:3],
                                                       major, minor))
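
Typical use is a guard at the top of the configure stage; the minimum version below is only an example value:

def configure(self):
    # example values: refuse to configure on anything older than Python 3.6
    _check_minimum_python_version(3, 6)
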
Code example #29
File: mathematics.py  Project: jacojvr/Code_Aster
        # the message must be closed
        self.end_msg('no', color=color)
        raise Errors.ConfigurationError(str(exc))
    else:
        self.end_msg('yes')

    if self.get_define('HAVE_MPI'):
        self.start_msg('Checking for a program using blacs')
        try:
            ret = self.check_fc(fragment=blacs_fragment,
                                use='MATH OPENMP MPI',
                                mandatory=True)
        except Exception as exc:
            # the message must be closed
            self.end_msg('no', color=color)
            raise Errors.ConfigurationError(str(exc))
        else:
            self.end_msg('yes')

    self.start_msg('Checking for a program using omp thread')
    try:
        ret = self.check_fc(fragment=omp_thread_fragment,
                            use='MATH OPENMP MPI',
                            mandatory=True,
                            execute=True,
                            define_ret=True)
        nbThreads = int((ret and ret.split() or [])[-1])
        refe = min(self.env['NPROC'], 2) if self.env.BUILD_OPENMP else 1
        if nbThreads < refe:
            raise ValueError(
                "expected at least {0} thread(s)".format(nbThreads))
Code example #30
def scan_synplify_project_file(self):
	"""This function extracts the output file and inputs files for synthesis from a synplify project (i.e. tcl) file."""

	result_file = None

	self.project_file_node = self.path.find_node(getattr(self,'project_file',None))
	if not self.project_file_node:
		raise Errors.ConfigurationError('Project file for synplify not found: '+getattr(self,'project_file',''))

	# help file
	project_file_name = os.path.split(self.project_file_node.abspath())[1]
	help_file = self.bld.bldnode.make_node('brick_'+project_file_name)
	with open(help_file.abspath(),'w') as hf:
		hf.write('set results_dir ./results')

	# open the project file template
	input = open(self.project_file_node.abspath(),'r')
	inputs = [self.project_file_node]
	# split the filename into parts
	project_file_split = Node.split_path(self.project_file_node.abspath())
	# create the target project file
	self.project_file_node = self.path.get_bld().make_node(os.path.join(self.path.bld_dir(),project_file_split[len(project_file_split)-1]))
	output = open(self.project_file_node.abspath(),'w')
	variables = {}
	outputs = []
	for line in input:
		# copy file line by line
		output.write(line)
		# skip comments
		if re.match('\s*#',line):
			continue
		# replace env variables
		get_env = re.search('\[\s*get_env\s+(\w+)\s*\]',line)
		if get_env:
			if get_env.group(1) not in self.env:
				raise Errors.ConfigurationError('The environment variable '+get_env.group(1)+' used in synplify project file '+self.project_file_node.abspath()+' has not been defined.')

			line = re.sub('\[\s*get_env\s+\w+\s*\]',self.env[get_env.group(1)],line)

		# keep the rest
		#  _
		#  |
		#  v
		#
		# look for the results file
		m0 = re.search('project\s+-result_file\s+"(.+)"',line)
		if m0:
			# check if the line contains a reference to a variable
			m0_1 = re.search('\$(\w+)',m0.group(1))
			if m0_1:
				try:
					result_file = re.sub('\$(\w+)',variables[m0_1.group(1)],m0.group(1))
				except KeyError:
					print "Variable "+m0_1.group(1)+" not found in "+self.project_file

				outputs.append(self.bld.bldnode.make_node(result_file))
			else:
				# if the result path is given as a relative path,
				# synplify save the results relative to the project_file path,
				# not relative to the path where the program is executed in
				outputs.append(self.bld.bldnode.make_node(m0.group(1)))


		# look for variables
		m3 = re.search('set\s+(.+?)\s+(.+)',line)
		if m3:
			m3_1 = re.search('\[\s*get_env\s+(.+)\s*\]',m3.group(2))
			if m3_1:
				variables[m3.group(1)] = self.env[m3_1.group(1)]
			else:
				variables[m3.group(1)] = m3.group(2)

	input.close()

	for file in getattr(self,'source_files',[]):
		node = self.path.find_node(file)
		if not node:
			raise Errors.ConfigurationError('File '+file+' not found in task ' + self.name)

		if node.suffix() == '.v':
			output.write('add_file -verilog "'+node.abspath()+'"\n')
		elif node.suffix() == '.sv' or node.suffix() == '.svh':
			output.write('add_file -verilog -vlog_std sysv "'+node.abspath()+'"\n')
		elif node.suffix() == '.vhd' or node.suffix() == '.vhdl':
			output.write('add_file -vhdl "'+node.abspath()+'"\n')
		elif node.suffix() == '.sdc':
			output.write('add_file -constraint "'+node.abspath()+'"\n')
		else:
			raise Errors.ConfigurationError('Extension of file '+node.abspath()+' unknown.')

		inputs.append(node)

	for directory in getattr(self,'include_paths',[]):
		node = self.path.find_dir(directory)
		if not node:
			raise Errors.ConfigurationError('Include directory '+directory+' not found in synplify task.')

		output.write('set_option -include_path "'+node.abspath()+'"\n')

	output.close()

	self.logfile = outputs[0].change_ext('.srr')
	outputs.append(outputs[0].change_ext('.ncf'))
	outputs.append(outputs[0].parent.make_node('synplicity.ucf'))

	self.logfile = self.env.BRICK_LOGFILES+'/'+Node.split_path(self.project_file_node.abspath())[-1]

	# generate synthesis task
	self.synplify_task = self.create_task('synplifyTask', inputs, outputs)