def get_cc(self):
    """
    Get the C compiler used to compile NMODL files

    Returns
    -------
    cc : str
        Name of the C compiler used to compile NMODL files
    """
    # Get the path to nrnmech_makefile, which should sit next to nrnivmodl
    nrnmech_makefile_path = os.path.join(
        os.path.dirname(os.path.realpath(self.nrnivmodl_path)),
        'nrnmech_makefile')
    # Extract the C compiler used in nrnmech_makefile
    try:
        with open(nrnmech_makefile_path) as f:
            contents = f.read()
    except OSError:
        raise Pype9BuildError(
            "Could not read nrnmech_makefile at '{}'"
            .format(nrnmech_makefile_path))
    matches = re.findall(r'\s*CC\s*=\s*(.*)', contents)
    if len(matches) != 1:
        raise Pype9BuildError(
            "Could not extract CC variable from nrnmech_makefile at '{}'"
            .format(nrnmech_makefile_path))
    cc = matches[0]
    return cc
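# Hedged sketch (illustration only, not part of the class above): how the CC
# regex in get_cc() behaves on a typical nrnmech_makefile fragment. The sample
# makefile text is an assumption, not taken from a real NEURON installation.
def _example_extract_cc(makefile_text="CFLAGS = -O2\nCC = mpicc\n"):
    import re
    matches = re.findall(r'\s*CC\s*=\s*(.*)', makefile_text)
    assert len(matches) == 1
    return matches[0]  # -> 'mpicc'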
def compile_source_files(self, compile_dir, component_name):
    # Run make and make install in the compile directory
    os.chdir(compile_dir)
    logger.info("Compiling NEST model class in '{}' directory.".format(
        compile_dir))
    stdout, stderr = self.run_command(
        ['make', '-j{}'.format(self._build_cores)],
        fail_msg=("Compilation of '{}' NEST module failed (see compile "
                  "directory '{}'):\n\n {{}}".format(
                      component_name, compile_dir)))
    if re.search(r'error:', stderr):  # Ignores warnings
        raise Pype9BuildError(
            "Compilation of '{}' NEST module directory failed:\n\n{}\n{}"
            .format(compile_dir, stdout, stderr))
    logger.debug("make '{}':\nstdout:\n{}stderr:\n{}\n".format(
        compile_dir, stdout, stderr))
    stdout, stderr = self.run_command(
        ['make', 'install'],
        fail_msg=("Installation of '{}' NEST module failed (see compile "
                  "directory '{}'):\n\n {{}}".format(
                      component_name, compile_dir)))
    if stderr:
        raise Pype9BuildError(
            "Installation of '{}' NEST module directory failed:\n\n{}\n{}"
            .format(compile_dir, stdout, stderr))
    logger.debug("make install '{}':\nstdout:\n{}stderr:\n{}\n".format(
        compile_dir, stdout, stderr))
    logger.info("Compilation of '{}' NEST module completed "
                "successfully".format(component_name))
def _get_specials_dir(self):
    # Create a temporary directory to run nrnivmodl in
    tmp_dir_path = os.path.join(tempfile.gettempdir(), str(uuid.uuid4()))
    try:
        os.mkdir(tmp_dir_path)
    except OSError:
        raise Pype9BuildError("Error creating temporary directory '{}'"
                              .format(tmp_dir_path))
    orig_dir = os.getcwd()
    os.chdir(tmp_dir_path)
    # Run nrnivmodl to see what build directory is created
    try:
        with open(os.devnull, "w") as fnull:
            sp.check_call(self.nrnivmodl_path, stdout=fnull, stderr=fnull)
    except sp.CalledProcessError as e:
        raise Pype9BuildError("Error test running nrnivmodl:\n{}".format(e))
    # Get the name of the specials directory
    try:
        specials_dir = os.listdir(tmp_dir_path)[0]
    except IndexError:
        raise Pype9BuildError(
            "Error test running nrnivmodl, no build directory created in "
            "'{}'".format(tmp_dir_path))
    # Return back to the original directory
    os.chdir(orig_dir)
    return specials_dir
def get_nest_install_prefix(cls):
    # Make doubly sure that the loaded nest install appears first on the
    # PYTHONPATH (not sure if this is necessary, but can't hurt)
    pynest_install_dir = os.path.join(os.path.dirname(nest.__file__), '..')
    env = os.environ.copy()
    env['PYTHONPATH'] = os.pathsep.join(
        (pynest_install_dir, env.get('PYTHONPATH', '')))
    try:
        process = sp.Popen(
            [sys.executable, '-c', "import nest; nest.sysinfo()"],
            stdout=sp.PIPE, stderr=sp.PIPE, env=env)
        stdout, _ = process.communicate()
    except sp.CalledProcessError as e:
        raise Pype9BuildError(
            "Error trying to run 'import nest; nest.sysinfo()' in "
            "subprocess:\n{}".format(e))
    if PY3:
        stdout = str(stdout.decode('utf-8'))
    match = re.search(r'\(([^\)]+)/share/nest/sli\)', stdout)
    if match is None:
        raise Pype9BuildError(
            "Could not find nest install prefix by searching for "
            "'share/nest/sli' in output from nest.sysinfo:\n{}".format(
                stdout))
    return match.group(1)
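# Hedged sketch (illustration only): how the regex in get_nest_install_prefix()
# pulls the install prefix out of nest.sysinfo() output. The sample output line
# is an assumption.
def _example_extract_nest_prefix(
        sysinfo_text="SLI library path: (/opt/nest/share/nest/sli)\n"):
    import re
    match = re.search(r'\(([^\)]+)/share/nest/sli\)', sysinfo_text)
    return match.group(1) if match else None  # -> '/opt/nest'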
def compile_source_files(self, compile_dir, name):
    """
    Builds all NMODL files in a directory

    Parameters
    ----------
    compile_dir : str
        The path of the directory containing the NMODL files to build
    name : str
        Name of the component class being built (used in log and error
        messages)
    """
    # Change working directory to model directory
    os.chdir(compile_dir)
    logger.info("Building NMODL mechanisms in '{}' directory."
                .format(compile_dir))
    # Check the created units by running modlunit
    if __debug__ and self.modlunit_path is not None:
        for fname in os.listdir('.'):
            if fname.endswith('.mod'):
                try:
                    stdout, stderr = self.run_command([self.modlunit_path,
                                                       fname])
                    assert '<<ERROR>>' not in stderr, (
                        "Incorrect units assigned in NMODL file:\n {}{}"
                        .format(stdout, stderr))
                except sp.CalledProcessError as e:
                    raise Pype9BuildError(
                        "Could not run 'modlunit' to check dimensions in "
                        "NMODL file: {}\n{}".format(fname, e))
    # Run nrnivmodl command in src directory
    nrnivmodl_cmd = [self.nrnivmodl_path, '-loadflags',
                     ' '.join(self.nrnivmodl_flags)]
    logger.debug("Building nrnivmodl in {} with {}".format(
        compile_dir, nrnivmodl_cmd))
    stdout, stderr = self.run_command(nrnivmodl_cmd, fail_msg=(
        "Compilation of NMODL files for '{}' model failed. See src "
        "directory '{}':\n\n{{}}".format(name, compile_dir)))
    if stderr.strip().endswith('Error 1'):
        raise Pype9BuildError(
            "Generated mod file failed to compile with output:\n{}\n{}"
            .format(stdout, stderr))
    logger.info("Compilation of NEURON (NMODL) files for '{}' "
                "completed successfully".format(name))
def run_cmd(self, cmd, work_dir, fail_msg):
    p = sp.Popen(cmd, shell=True, stdin=sp.PIPE, stdout=sp.PIPE,
                 stderr=sp.STDOUT, close_fds=True, cwd=work_dir)
    # Decode output lines so they can be joined into the error message
    stdout = [l.decode('utf-8') if isinstance(l, bytes) else l
              for l in p.stdout.readlines()]
    result = p.wait()
    # Test whether the command was successful
    if result != 0:
        raise Pype9BuildError(
            "{}:\n{}".format(fail_msg, ' '.join([''] + stdout)))
def clean_src_dir(self, src_dir, component_name):  # @UnusedVariable
    # Clean existing src directories from previous builds.
    shutil.rmtree(src_dir, ignore_errors=True)
    try:
        os.makedirs(src_dir)
    except OSError as e:
        raise Pype9BuildError(
            "Could not create source directory ({}), please check the "
            "required permissions or specify a different \"build dir "
            "base\" ('build_dir_base'):\n{}".format(src_dir, e))
def clean_install_dir(self, install_dir):
    # Clean existing compile & install directories from previous builds
    shutil.rmtree(install_dir, ignore_errors=True)
    try:
        os.makedirs(install_dir)
    except OSError as e:
        raise Pype9BuildError(
            "Could not create install directory ({}), please check the "
            "required permissions or specify a different \"build dir "
            "base\" ('build_dir_base'):\n{}".format(install_dir, e))
def clean_compile_dir(self, compile_dir, purge=False, **kwargs):  # @UnusedVariable @IgnorePep8
    if purge:
        try:
            shutil.rmtree(compile_dir)
        except OSError as e:
            if e.errno != errno.ENOENT:  # Ignore if missing
                raise
    if not path.exists(compile_dir):
        try:
            os.makedirs(compile_dir)
        except OSError as e:
            raise Pype9BuildError(
                "Could not make compile directory '{}': {}".format(
                    compile_dir, e))
    else:
        orig_dir = os.getcwd()
        os.chdir(compile_dir)
        try:
            stdout, stderr = self.run_command(['make', 'clean'])
            os.chdir(orig_dir)
        except (sp.CalledProcessError, OSError):
            # 'make clean' failed, so recreate the compile directory from
            # scratch instead
            os.chdir(orig_dir)
            shutil.rmtree(compile_dir, ignore_errors=True)
            try:
                os.makedirs(compile_dir)
            except OSError as e:
                raise Pype9BuildError(
                    "Could not create build directory ({}), please check "
                    "the required permissions or specify a different "
                    "build directory:\n{}".format(compile_dir, e))
        else:
            if stderr and 'No rule to make target' not in stderr:
                raise Pype9BuildError(
                    "Clean of '{}' NEST module directory failed:\n\n{}\n{}"
                    .format(compile_dir, stdout, stderr))
            logger.debug("make clean '{}':\nstdout:\n{}stderr:\n{}\n".format(
                compile_dir, stdout, stderr))
def get_neuron_util_path(cls, util_name, **kwargs):
    util_path = os.path.join(cls.get_neuron_bin_path(), util_name)
    if not os.path.exists(util_path):
        try:
            default_path = kwargs['default']
            logger.warning("Did not find '{}' at expected path '{}'"
                           .format(util_name, util_path))
            util_path = default_path
        except KeyError:
            raise Pype9BuildError(
                "Did not find '{}' at expected path '{}'"
                .format(util_name, util_path))
    return util_path
def get_neuron_bin_path(cls):
    path = neuron.h.neuronhome()
    path_contents = os.listdir(path)
    if 'examples' in path_contents:  # returned NRNHOME/share/nrn
        nrnhome = os.path.join(path, '..', '..')
        if os.path.exists(os.path.join(nrnhome, 'x86_64')):
            bin_path = os.path.join(nrnhome, 'x86_64', 'bin')
        else:
            bin_path = os.path.join(nrnhome, 'bin')
    elif 'bin' in path_contents:
        bin_path = os.path.join(path, 'bin')
    elif 'nrnivmodl' in path_contents:
        bin_path = path
    else:
        raise Pype9BuildError(
            "Could not determine NEURON 'bin' path from neuronhome() "
            "'{}'".format(path))
    if not os.path.exists(bin_path):
        raise Pype9BuildError(
            "Did not find NEURON 'bin' path at expected '{}' location"
            .format(bin_path))
    return bin_path
def run_command(self, cmd, fail_msg=None, **kwargs):
    env = os.environ.copy()
    try:
        process = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE, env=env,
                           **kwargs)
        stdout, stderr = process.communicate()
        if PY3:
            stdout = str(stdout.decode('utf-8'))
            stderr = str(stderr.decode('utf-8'))
        logger.debug("'{}' stdout:\n{}".format(cmd, stdout))
        logger.debug("'{}' stderr:\n{}".format(cmd, stderr))
    except sp.CalledProcessError as e:
        if fail_msg is None:
            raise
        else:
            msg = fail_msg.format(e)
            raise Pype9BuildError(msg)
    return stdout, stderr
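# Hedged sketch (illustration only): callers of run_command() pre-format the
# component name and directory into fail_msg and escape the final placeholder
# as '{{}}', so that run_command() can later fill it with the exception text
# via fail_msg.format(e). The names below are hypothetical.
def _example_fail_msg(name='MyCell', compile_dir='/tmp/build'):
    fail_msg = ("Compilation of '{}' NEST module failed (see compile "
                "directory '{}'):\n\n {{}}".format(name, compile_dir))
    # At this point fail_msg still contains a single '{}' placeholder
    return fail_msg.format('<exception text>')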
def configure_build_files(self, name, src_dir, compile_dir, install_dir,
                          **kwargs):  # @UnusedVariable
    # Generate Makefile if it is not present
    if not path.exists(path.join(compile_dir, 'Makefile')):
        if not path.exists(compile_dir):
            os.mkdir(compile_dir)
        logger.info("Configuring build files in '{}' directory".format(
            compile_dir))
        orig_dir = os.getcwd()
        config_args = {'name': name,
                       'src_dir': src_dir,
                       # NB: ODE solver currently ignored
                       # 'ode_solver': kwargs.get('ode_solver',
                       #                          self.ODE_SOLVER_DEFAULT),
                       'version': pype9.__version__,
                       'executable': sys.executable}
        self.render_to_file('CMakeLists.txt.tmpl', config_args,
                            'CMakeLists.txt', src_dir)
        os.chdir(compile_dir)
        stdout, stderr = self.run_command(
            ['cmake',
             '-Dwith-nest={}'.format(self.nest_config),
             '-DCMAKE_INSTALL_PREFIX={}'.format(install_dir),
             src_dir],
            fail_msg=("Cmake of '{}' NEST module failed (see src "
                      "directory '{}'):\n\n {{}}".format(name, src_dir)))
        if stderr:
            raise Pype9BuildError(
                "Configure of '{}' NEST module failed (see src "
                "directory '{}'):\n\n{}\n{}".format(
                    name or src_dir, src_dir, stdout, stderr))
        logger.debug("cmake '{}':\nstdout:\n{}stderr:\n{}\n".format(
            compile_dir, stdout, stderr))
        os.chdir(orig_dir)
def get_gsl_prefixes(self):
    """
    Get the library paths used to link GSL to PyNEST

    Returns
    -------
    lib_paths : list(str)
        List of library paths passed to the PyNEST compile
    """
    try:
        # Used to attempt to determine the location of the GSL library
        nest_config_path = self.path_to_utility('nest-config')
        libs = str(sp.check_output('{} --libs'.format(nest_config_path),
                                   shell=True))
    except Pype9CommandNotFoundError:
        prefixes = []
    except sp.CalledProcessError:
        raise Pype9BuildError(
            "Could not run '{} --libs'".format(nest_config_path))
    else:
        prefixes = [
            p[2:-3] for p in libs.split()
            if p.startswith('-L') and p.endswith('lib') and 'gsl' in p]
    return prefixes
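# Hedged sketch (illustration only): how the list comprehension in
# get_gsl_prefixes() turns 'nest-config --libs' output into GSL install
# prefixes. The sample linker line is an assumption.
def _example_gsl_prefixes(
        libs="-L/usr/local/gsl/lib -lgsl -lgslcblas -L/usr/lib -lm"):
    return [p[2:-3] for p in libs.split()
            if p.startswith('-L') and p.endswith('lib') and 'gsl' in p]
    # -> ['/usr/local/gsl/'] (the leading '-L' and trailing 'lib' are sliced
    #    off, leaving the install prefix with a trailing slash)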
def _transform_full_component(self, trfrm, component_class, v, **kwargs):
    # -----------------------------------------------------------------
    # Remove all analog send ports with 'current' dimension so they
    # don't get confused with the converted voltage time derivative
    # expression
    # -----------------------------------------------------------------
    for port in list(trfrm.analog_send_ports):
        if port.dimension == un.current:
            trfrm.remove(port)
    # -----------------------------------------------------------------
    # Insert membrane capacitance if not present
    # -----------------------------------------------------------------
    # Get or guess the location of the membrane capacitance
    try:
        name = kwargs['membrane_capacitance']
        try:
            orig_cm = component_class.parameter(name)
        except KeyError:
            raise Pype9BuildError(
                "Could not find specified membrane capacitance '{}'"
                .format(name))
        cm = trfrm.parameter(orig_cm.name)
    except KeyError:  # 'membrane_capacitance' was not specified
        candidate_cms = [ccm for ccm in component_class.parameters
                         if ccm.dimension == un.capacitance]
        if len(candidate_cms) == 1:
            orig_cm = candidate_cms[0]
            cm = trfrm.parameter(orig_cm.name)
            logger.info("Guessing that '{}' is the membrane capacitance"
                        .format(orig_cm))
        elif len(candidate_cms) > 1:
            raise Pype9BuildError(
                "Could not guess the membrane capacitance, candidates:"
                " '{}'".format("', '".join(candidate_cms)))
        else:
            cm = Parameter("cm___pype9", dimension=un.capacitance)
            trfrm.add(cm)
            cm.annotations.set((BUILD_TRANS, PYPE9_NS), TRANSFORM_SRC,
                               None)
    trfrm.annotations.set((BUILD_TRANS, PYPE9_NS), MEMBRANE_CAPACITANCE,
                          cm.name)
    # -----------------------------------------------------------------
    # Replace membrane voltage equation with membrane current
    # -----------------------------------------------------------------
    # Determine the regimes in which each state variable has a time
    # derivative
    has_td = defaultdict(list)
    # List which regimes need to be clamped to their last voltage
    # (as it has no time derivative)
    clamped_regimes = []
    # The voltage clamp equation where v_clamp is the last voltage
    # value and g_clamp_ is a large conductance
    clamp_i = sympy.sympify('g_clamp___pype9 * (v - v_clamp___pype9)')
    memb_is = []
    for regime in trfrm.regimes:
        # Add an appropriate membrane current
        try:
            # Convert the voltage time derivative into a membrane
            # current
            dvdt = regime.time_derivative(v.name)
            regime.remove(dvdt)
            i = -dvdt.rhs * cm
            memb_is.append(i)
        except KeyError:
            i = clamp_i
            clamped_regimes.append(regime)
        regime.add(Alias('i___pype9', i))
        # Record state vars that have a time deriv. in this regime
        for var in regime.time_derivative_variables:
            if var != 'v':
                has_td[var].append(regime)
    # Pick the most popular membrane current to be the alias in
    # the global scope
    assert memb_is, "No regimes contain voltage time derivatives"
    memb_i = Alias('i___pype9', max(memb_is, key=memb_is.count))
    # Add membrane current along with an analog send port
    trfrm.add(memb_i)
    i_port = AnalogSendPort('i___pype9', dimension=un.current)
    i_port.annotations.set((BUILD_TRANS, PYPE9_NS), ION_SPECIES,
                           NONSPECIFIC_CURRENT)
    trfrm.add(i_port)
    # Remove membrane currents that match the membrane current in the
    # outer scope
    for regime in trfrm.regimes:
        if regime.alias('i___pype9') == memb_i:
            regime.remove(regime.alias('i___pype9'))
    # If there are clamped regimes add extra parameters and set the
    # voltage to clamp to in the regimes that transition to them
    if clamped_regimes:
        trfrm.add(StateVariable('v_clamp___pype9', un.voltage))
        trfrm.add(Constant('g_clamp___pype9', 1e8, un.uS))
        for trans in trfrm.transitions:
            if trans.target_regime in clamped_regimes:
                # Assign v_clamp_ to the value
                try:
                    v_clamp_rhs = trans.state_assignment('v').rhs
                except KeyError:
                    v_clamp_rhs = 'v'
                trans.add(StateAssignment('v_clamp___pype9', v_clamp_rhs))
    # -----------------------------------------------------------------
    trfrm.annotations.set(
        (BUILD_TRANS, PYPE9_NS), NO_TIME_DERIVS,
        ','.join(['v'] + [sv for sv in trfrm.state_variable_names
                          if sv not in has_td]))
    trfrm.annotations.set((BUILD_TRANS, PYPE9_NS), NUM_TIME_DERIVS,
                          len(has_td))
    # -----------------------------------------------------------------
    # Remove the external input currents
    # -----------------------------------------------------------------
    # Analog receive or reduce ports that are of dimension current and
    # are purely additive to the membrane current and nothing else
    # (actually subtractive as it is outward current)
    try:
        ext_is = []
        for i_name in kwargs['external_currents']:
            try:
                ext_i = trfrm.analog_receive_port(i_name)
            except KeyError:
                try:
                    ext_i = trfrm.analog_reduce_port(i_name)
                except KeyError:
                    raise Pype9BuildError(
                        "Did not find specified external current port "
                        "'{}'".format(i_name))
            if ext_i.dimension != un.current:
                raise Pype9BuildError(
                    "Analog receive port matching specified external "
                    "current '{}' does not have 'current' dimension "
                    "({})".format(ext_i.name, ext_i.dimension))
            ext_is.append(ext_i)
    except KeyError:
        ext_is = []
        for port in chain(component_class.analog_receive_ports,
                          component_class.analog_reduce_ports):
            # Check to see if the receive/reduce port has current dimension
            if port.dimension != un.current:
                continue
            # Check to see if the current appears in the membrane current
            # expression
            # FIXME: This test should check to see if the port is additive
            #        to the membrane current and substitute all aliases.
            if port.name not in memb_i.rhs_symbol_names:
                continue
            # Check the number of expressions the receive port appears in
            if len([e for e in component_class.all_expressions
                    if port.symbol in e.free_symbols]) > 1:
                continue
            # If all those conditions are met guess that the port is an
            # external current that can be removed (ports that don't meet
            # these conditions will have to be specified separately)
            ext_is.append(port)
        if ext_is:
            logger.info("Guessing '{}' are external currents to be removed"
                        .format(ext_is))
    trfrm.annotations.set((BUILD_TRANS, PYPE9_NS), EXTERNAL_CURRENTS,
                          ','.join(p.name for p in ext_is))
    # Remove external input current ports (as NEURON handles them)
    for ext_i in ext_is:
        trfrm.remove(ext_i)
        for expr in chain(trfrm.aliases, trfrm.all_time_derivatives()):
            expr.subs(ext_i, 0)
            expr.simplify()
def transform_for_build(self, name, component_class, **kwargs):
    """
    Copies and transforms the component class to match the format of the
    simulator (overridden in derived class)

    Parameters
    ----------
    name : str
        The name of the transformed component class
    component_class : nineml.Dynamics
        The component class to be transformed
    """
    self._set_build_props(component_class, **kwargs)
    if not isinstance(component_class, WithSynapses):
        raise Pype9RuntimeError(
            "'component_class' must be a DynamicsWithSynapses object")
    # ---------------------------------------------------------------------
    # Clone original component class
    # ---------------------------------------------------------------------
    trfrm = component_class.dynamics.flatten()
    # ---------------------------------------------------------------------
    # Get the membrane voltage and convert it to 'v'
    # ---------------------------------------------------------------------
    try:
        name = kwargs['membrane_voltage']
        try:
            orig_v = component_class.element(
                name, nineml_children=Dynamics.nineml_children)
        except KeyError:
            raise Pype9BuildError(
                "Could not find specified membrane voltage '{}'"
                .format(name))
    except KeyError:  # Guess voltage from its dimension if not supplied
        candidate_vs = [cv for cv in component_class.state_variables
                        if cv.dimension == un.voltage]
        if len(candidate_vs) == 0:
            candidate_vs = [
                cv for cv in component_class.analog_receive_ports
                if cv.dimension == un.voltage]
        if len(candidate_vs) == 1:
            orig_v = candidate_vs[0]
            logger.info("Guessing that '{}' is the membrane voltage"
                        .format(orig_v))
        elif len(candidate_vs) > 1:
            try:
                orig_v = next(c for c in candidate_vs if c.name == 'v')
                logger.info("Guessing that '{}' is the membrane voltage"
                            .format(orig_v))
            except StopIteration:
                raise Pype9BuildError(
                    "Could not guess the membrane voltage, candidates: "
                    "'{}'"
                    .format("', '".join(v.name for v in candidate_vs)))
        else:
            orig_v = None
            logger.info(
                "Can't find candidate for the membrane voltage in "
                "state_variables '{}' or analog_receive_ports '{}', "
                "treating '{}' as an \"artificial cell\"".format(
                    "', '".join(
                        sv.name for sv in component_class.state_variables),
                    "', '".join(
                        p.name
                        for p in component_class.analog_receive_ports),
                    component_class.name))
    if orig_v is not None:
        # Map voltage to hard-coded 'v' symbol
        if orig_v.name != 'v':
            trfrm.rename_symbol(orig_v.name, 'v')
            v = trfrm.state_variable('v')
            v.annotations.set((BUILD_TRANS, PYPE9_NS), TRANSFORM_SRC,
                              orig_v)
        else:
            v = trfrm.state_variable('v')
        # Add annotations to the original and build models
        component_class.annotations.set((BUILD_TRANS, PYPE9_NS),
                                        MEMBRANE_VOLTAGE, orig_v.name)  # @IgnorePep8
        trfrm.annotations.set((BUILD_TRANS, PYPE9_NS), MEMBRANE_VOLTAGE,
                              'v')
        # Remove associated analog send port if present
        try:
            trfrm.remove(trfrm.analog_send_port('v'))
        except KeyError:
            pass
        # Need to convert to AnalogReceivePort if v is a StateVariable
        if isinstance(v, StateVariable):
            self._transform_full_component(trfrm, component_class, v,
                                           **kwargs)
            trfrm.annotations.set((BUILD_TRANS, PYPE9_NS), MECH_TYPE,
                                  FULL_CELL_MECH)
        else:
            raise NotImplementedError(
                "Building sub-components is not supported in PyPe9 v0.1")
    else:
        trfrm.annotations.set((BUILD_TRANS, PYPE9_NS), MECH_TYPE,
                              ARTIFICIAL_CELL_MECH)
    # -----------------------------------------------------------------
    # Insert dummy aliases for parameters (such as capacitance) that
    # now do not show up in the inferred interface for the transformed
    # class (i.e. that were only present in the voltage time derivative)
    # -----------------------------------------------------------------
    # Infer required parameters
    inferred = DynamicsInterfaceInferer(trfrm)
    for parameter in list(trfrm.parameters):
        if parameter.name not in inferred.parameter_names:
            trfrm.add(Alias(parameter.name + '___dummy', parameter.name))
    # -----------------------------------------------------------------
    # Validate the transformed component class and construct prototype
    # -----------------------------------------------------------------
    trfrm.validate()
    trfrm_with_syn = DynamicsWithSynapses(
        name, trfrm, component_class.synapses,
        component_class.connection_parameter_sets)
    # Return a prototype of the transformed class
    return trfrm_with_syn
def generate_source_files(self, component_class, src_dir, name=None,
                          debug_print=None, **kwargs):
    if name is None:
        name = component_class.name
    # Collect the template arguments used to render the C++/SLI sources
    tmpl_args = {
        'component_name': name,
        'component_class': component_class,
        'version': pype9.__version__,
        'src_dir': src_dir,
        'timestamp': datetime.now().strftime('%a %d %b %y %I:%M:%S%p'),
        'unit_handler': UnitHandler(component_class),
        'sorted_regimes': sorted(
            component_class.regimes,
            key=lambda r: component_class.index_of(r)),
        'jacobian_approx_step': kwargs.get(
            'jacobian_approx_step', self.GSL_JACOBIAN_APPROX_STEP_DEFAULT),
        'max_step_size': kwargs.get('max_step_size',
                                    self.MAX_STEP_SIZE_DEFAULT),
        'abs_tolerance': kwargs.get('abs_tolerance',
                                    self.ABS_TOLERANCE_DEFAULT),
        'rel_tolerance': kwargs.get('rel_tolerance',
                                    self.REL_TOLERANCE_DEFAULT),
        'max_simultaneous_transitions': kwargs.get(
            'max_simultaneous_transitions',
            self.MAX_SIMULTANEOUS_TRANSITIONS),
        'parameter_scales': [],
        'v_threshold': kwargs.get('v_threshold', self.V_THRESHOLD_DEFAULT),
        'regime_varname': self.REGIME_VARNAME,
        'debug_print': [] if debug_print is None else debug_print}
    ode_solver = kwargs.get('ode_solver', self.ODE_SOLVER_DEFAULT)
    ss_solver = kwargs.get('ss_solver', self.SS_SOLVER_DEFAULT)
    if ode_solver is None:
        raise Pype9BuildError("'ode_solver' cannot be None")
    switches = {'ode_solver': ode_solver, 'ss_solver': ss_solver}
    # Render C++ header file
    self.render_to_file('header.tmpl', tmpl_args, name + '.h', src_dir,
                        switches=switches)
    # Render C++ class file
    self.render_to_file('main.tmpl', tmpl_args, name + '.cpp', src_dir,
                        switches=switches,
                        post_hoc_subs=self._inline_random_implementations)
    # Render Loader header file
    self.render_to_file('module-header.tmpl', tmpl_args,
                        name + 'Module.h', src_dir)
    # Render Loader C++ class
    self.render_to_file('module-cpp.tmpl', tmpl_args,
                        name + 'Module.cpp', src_dir)
    # Render SLI initializer
    self.render_to_file('module_sli_init.tmpl', tmpl_args,
                        name + 'Module-init.sli',
                        path.join(src_dir, 'sli'))
def generate(self, component_class, build_mode='lazy', url=None, **kwargs):
    """
    Generates and builds the required simulator-specific files for a given
    NineML cell class

    Parameters
    ----------
    component_class : nineml.Dynamics
        9ML Dynamics object
    name : str
        Name of the generated cell class
    install_dir : str
        Path to the directory where the NMODL files will be generated
        and compiled
    build_mode : str
        Available build options:
            lazy - only build if files are modified
            force - always generate and build
            purge - remove all config files, generate and rebuild
            require - require built binaries are present
            build_only - build and then quit
            generate_only - generate src and then quit
            recompile - don't generate src but compile
    build_version : str
        A suffix appended to the cell build name to distinguish it from
        other code generated from the component class
    url : str
        The URL where the component class is stored (used to form the
        build path)
    kwargs : dict
        A dictionary of (potentially simulator-specific) template
        arguments
    """
    # Save original working directory to reinstate it afterwards (just to
    # be polite)
    name = component_class.name
    orig_dir = os.getcwd()
    if url is None:
        url = component_class.url
    # Calculate compile directory path within build directory
    src_dir = self.get_source_dir(name, url)
    compile_dir = self.get_compile_dir(name, url)
    install_dir = self.get_install_dir(name, url)
    # Path of the built component class
    built_comp_class_pth = os.path.join(src_dir, self._BUILT_COMP_CLASS)
    # Determine whether the installation needs rebuilding or whether there
    # is an existing library module to use.
    if build_mode == 'purge':
        remove_ignore_missing(src_dir)
        remove_ignore_missing(install_dir)
        remove_ignore_missing(compile_dir)
        generate_source = compile_source = True
    elif build_mode in ('force', 'build_only'):  # Force build
        generate_source = compile_source = True
    elif build_mode == 'require':
        # Just check that a prebuilt installation is present
        generate_source = compile_source = False
        if not os.path.exists(install_dir):
            raise Pype9BuildError(
                "Prebuilt installation directory '{}' is not present, "
                "and is required for the 'require' build option"
                .format(install_dir))
    elif build_mode == 'generate_only':  # Only generate
        generate_source = True
        compile_source = False
    elif build_mode == 'lazy':  # Generate if source has been modified
        compile_source = True
        if not os.path.exists(built_comp_class_pth):
            generate_source = True
        else:
            try:
                built_component_class = read(built_comp_class_pth)[name]
                if built_component_class.equals(component_class,
                                                annotations_ns=[PYPE9_NS]):
                    generate_source = False
                    logger.info("Found existing source in '{}' directory, "
                                "code generation skipped (set 'build_mode'"
                                " argument to 'force' or 'build_only' to "
                                "enforce regeneration)".format(src_dir))
                else:
                    generate_source = True
                    logger.info("Found existing source in '{}' directory, "
                                "but the component classes differ so "
                                "regenerating sources".format(src_dir))
            except (NineMLNameError, NineMLSerializationError):
                generate_source = True
                logger.info("Found existing source in '{}' directory, "
                            "but could not find '{}' component class so "
                            "regenerating sources".format(src_dir, name))
    else:
        raise Pype9BuildError(
            "Unrecognised build option '{}', must be one of ('{}')".format(
                build_mode, "', '".join(self.BUILD_MODE_OPTIONS)))
    # Generate source files from NineML code
    if generate_source:
        self.clean_src_dir(src_dir, name)
        self.generate_source_files(name=name,
                                   component_class=component_class,
                                   src_dir=src_dir,
                                   compile_dir=compile_dir,
                                   install_dir=install_dir,
                                   **kwargs)
        component_class.write(built_comp_class_pth, preserve_order=True,
                              version=2.0)
    if compile_source:
        # Clean existing compile & install directories from previous builds
        if generate_source:
            self.clean_compile_dir(compile_dir,
                                   purge=(build_mode == 'purge'))
            self.configure_build_files(
                name=name, src_dir=src_dir, compile_dir=compile_dir,
                install_dir=install_dir, **kwargs)
            self.clean_install_dir(install_dir)
        self.compile_source_files(compile_dir, name)
    # Switch back to original dir
    os.chdir(orig_dir)
    # Cache any dimension maps that were calculated during the generation
    # process
    return install_dir
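# Hedged usage sketch (illustration only, names are hypothetical): how the
# build modes described in the generate() docstring are typically selected.
# `make_code_generator` and `load_component_class` stand in for whatever
# constructs the code generator and loads the 9ML class in a real script.
def _example_generate(make_code_generator, load_component_class, url):
    code_gen = make_code_generator()
    component_class = load_component_class(url)
    # 'lazy' only regenerates sources when the stored component class differs
    install_dir = code_gen.generate(component_class, build_mode='lazy',
                                    url=url)
    return install_dir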