def simulate(spk_interval=1000, sim_time=2000):
    """Drive one my_aeif_cond_exp neuron with a spike generator through a
    tsodyks_synapse targeting the AMPA_1 receptor, and record the resulting
    conductance with a multimeter.

    :param spk_interval: interval (ms) between generated input spikes
    :param sim_time: upper bound (ms) for generated spike times; the
        simulation runs until the last generated spike time
    :return: tuple (t, y) of recording times and g_AMPA_1 conductance values
    """
    nest.ResetKernel()
    sg = nest.Create('spike_generator', 1)
    # NOTE: the original code also called gen_spikes(3, 100, 900) here, but
    # its result was immediately overwritten by the next line, so the dead
    # call has been removed.
    st = numpy.arange(20., sim_time, spk_interval)
    pp(nest.SetStatus(sg, {'spike_times': st}))

    n = nest.Create('my_aeif_cond_exp', 1)
    # Map receptor names (e.g. 'AMPA_1') to the integer ports NEST expects.
    df = nest.GetDefaults('my_aeif_cond_exp')['receptor_types']
    receptor = 'AMPA_1'
    recordables = ['g_AMPA_1']
    print(nest.version())

    # Tsodyks-Markram short-term plasticity parameters for the connection.
    nest.Connect(sg, n, model='tsodyks_synapse', params={
        'U': 0.29,
        'tau_fac': 53.,
        'tau_rec': 902.0,
        'tau_psc': 8.,
        'receptor_type': df[receptor]})
    print(nest.version())

    pp(df)
    p_mm = {"withgid": True,
            'to_file': False,
            'to_memory': True,
            'record_from': recordables}
    mm = nest.Create('multimeter', params=p_mm)
    pp(nest.GetStatus(mm))
    nest.Connect(mm, n)

    nest.Simulate(st[-1])
    t = nest.GetStatus(mm)[0]['events']['times']
    y = nest.GetStatus(mm)[0]['events']['g_AMPA_1']
    return t, y
def simulate(spk_interval=1000, sim_time=2000): nest.ResetKernel() #ip=nest.Create('spike_generator',params={'spike_times':[10.,20.]}) sg=nest.Create('spike_generator',1) st=gen_spikes(3, 100, 900) st=numpy.arange(20.,sim_time,spk_interval) pp(nest.SetStatus(sg, {'spike_times':st})) # pp(nest.GetStatus(sg)) n=nest.Create('my_aeif_cond_exp',1) df=nest.GetDefaults('my_aeif_cond_exp')['receptor_types'] # df=nest.GetDefaults('my_aeif_cond_exp')['recordables'] receptor='AMPA_1' recordables=['g_AMPA_1'] print nest.version() # nest.Connect(sg, n,model='tsodyks_synapse', params={ # 'U':0.0192, # 'tau_fac': 623., # 'tau_rec':559. , # 'receptor_type':df[receptor]}) nest.Connect(sg, n,model='tsodyks_synapse', params={ 'U':0.29, 'tau_fac': 53., 'tau_rec':902.0, 'tau_psc':8. , 'receptor_type':df[receptor]}) print nest.version() # dic['nest']['M1_SN_gaba']['U'] = 0.0192 # dic['nest']['M1_SN_gaba']['tau_fac'] = 623. # dic['nest']['M1_SN_gaba']['tau_rec'] = 559. # dic['nest']['M1_SN_gaba']['tau_psc'] = 5.2 pp(df) p_mm={"withgid": True, 'to_file':False, 'to_memory':True, 'record_from':recordables } mm=nest.Create('multimeter', params=p_mm) pp(nest.GetStatus(mm)) nest.Connect(mm,n) nest.Simulate(st[-1]) t=nest.GetStatus(mm)[0]['events']['times'] y=nest.GetStatus(mm)[0]['events']['g_AMPA_1'] return t,y
def save_data(sim_type, conn_type, output_dir):
    """Saves the expected results.

    Writes provenance metadata and the spike trains from ``output_dir`` to
    ``expected/sim_output_<sim_type>.h5``.

    :param sim_type: label used to name the output HDF5 file
    :param conn_type: unused in the visible code -- TODO confirm
    :param output_dir: directory containing the 'spikes.h5' produced by a run
    """
    from bmtk import __version__ as bmtk_version
    import platform
    if not os.path.exists('expected'):
        os.mkdir('expected')
    save_file = 'expected/sim_output_{}.h5'.format(sim_type)
    sample_data = h5py.File(save_file, 'w')
    root_group = sample_data['/']
    # Provenance: tool versions and platform the baseline was generated on.
    root_group.attrs['bmtk'] = bmtk_version
    root_group.attrs['date'] = str(datetime.datetime.now())
    root_group.attrs['python'] = '{}.{}'.format(*sys.version_info[0:2])
    root_group.attrs['NEST'] = nest.version()
    root_group.attrs['system'] = platform.system()
    root_group.attrs['arch'] = platform.machine()

    # spikes data
    input_spikes = SpikesFile(os.path.join(output_dir, 'spikes.h5'))
    spikes_df = input_spikes.to_dataframe()
    sample_data.create_dataset('/spikes/gids',
                               data=np.array(spikes_df['gids']))
    sample_data.create_dataset('/spikes/timestamps',
                               data=np.array(spikes_df['timestamps']))
    sample_data['/spikes/gids'].attrs['sorting'] = 'time'
def extract_nestvalid_dict(d, param_type='neuron'):
    """
    Verify whether the parameters dictionary are in the correct format, with
    adequate keys, in agreement with the nest parameters dictionaries so that
    they can later be passed as direct input to nest.

    :param d: parameter dictionary
    :param param_type: type of parameters - kernel, neuron, population,
        network, connections, topology
    :return: valid dictionary
    """
    # NOTE(review): Python 2 only -- relies on dict.iteritems() and on
    # .keys() returning a list (list.remove below); port before running
    # under Python 3.
    if param_type == 'neuron' or param_type == 'synapse' or param_type == 'device':
        # The model must already be registered with the NEST kernel.
        assert d['model'] in nest.Models(
        ), "Model %s not currently implemented in %s" % (d['model'], nest.version())
        accepted_keys = nest.GetDefaults(d['model']).keys()
        # 'model' itself is not a settable parameter.
        accepted_keys.remove('model')
        nest_dict = {k: v for k, v in d.iteritems() if k in accepted_keys}
    elif param_type == 'kernel':
        accepted_keys = nest.GetKernelStatus().keys()
        nest_dict = {k: v for k, v in d.iteritems() if k in accepted_keys}
    else:
        # TODO
        logger.error("{!s} not implemented yet".format(param_type))
        exit(-1)
    return nest_dict
def index():
    """Root endpoint: report the server name, the running NEST version and
    every NEST-related environment variable as a JSON payload."""
    nest_env = {key: value for key, value in os.environ.items()
                if 'NEST' in key}
    response = {
        'name': 'NEST server simulation',
        'version': nest.version().split(' ')[1],
        'ref': 'http://www.github.com/babsey/nest-server-simulation',
        'env': nest_env,
    }
    return jsonify(response)
def versions():
    """Report the installed NEST version as JSON, or 'failed' when NEST
    cannot be imported.

    :return: flask JSON response of the form {"nest": "<version or failed>"}
    """
    try:
        import nest
        nest_version = nest.version()
    except Exception:
        # NEST missing or broken -- report a sentinel rather than crashing.
        # (The original used a bare except and an unused `req` dict, and its
        # flow could leave the result unconditionally set to 'failed'.)
        nest_version = 'failed'
    return jsonify(nest=nest_version)
def index():
    """Describe the running nest-server release and its NEST simulator."""
    git_tag = 'v' + '.'.join(__version__.split('.')[:-1])
    simulator_version = nest.version().split('-')[1]
    response = {
        'server': {
            'version': __version__,
            'git': {
                'ref': 'http://www.github.com/babsey/nest-server',
                'tag': git_tag,
            },
        },
        'simulator': {
            'version': simulator_version,
        },
    }
    return jsonify(response)
def get_version():
    """Trys to get NEST version major, minor, and patch (optional) of the
    current running version of nest.

    Will return as a list of ints [major, minor, patch], although patch may
    be None.

    :return: [major, minor, patch] if able to parse version, None if fails.
        'patch' may be None value.
    """
    # Try to get the version string
    version_str = None
    try:
        # NEST 2.* uses .version() to get the version string.
        version_str = nest.version()
    except AttributeError:
        pass

    if version_str is None:
        try:
            # NEST 3.1+ stores the string in the __version__ attribute.
            version_str = nest.__version__
            if version_str.upper() == 'UNKNOWN':
                return [3, None, None]
        except AttributeError:
            pass

    if version_str is None:
        return None

    # Parse the version string to get major, minor and patch numbers.
    # NEST reports its version both as "nest-2.20.0" and as "NEST 2.12.0",
    # so accept a '-' or whitespace separator, case-insensitively (the
    # original pattern only matched the lowercase dashed form and therefore
    # failed on the "NEST x.y.z" strings that nest.version() returns).
    try:
        version_pattern = re.compile(
            r'.*nest[-\s]+(\d+)\.(\d+)(?:\.(\d+))?.*', re.IGNORECASE)
        m = version_pattern.match(version_str)
        ver_major = int(m.group(1))
        ver_minor = int(m.group(2))
        ver_patch = int(m.group(3)) if m.group(3) is not None else None
        return [ver_major, ver_minor, ver_patch]
    except (AttributeError, IndexError, ValueError, TypeError):
        return None
def get_default_module_paths(home_module):
    """Resolve the ml_module library path and SLI search path for the
    running NEST release.

    For NEST 2.2.2 and 2.12.0 only the bare module name is needed (empty
    SLI path); otherwise both paths are built from the given install prefix.

    :param home_module: install prefix of the user-built module
    :return: tuple (module_path, sli_path)
    """
    if nest.version() in ['NEST 2.2.2', 'NEST 2.12.0']:
        return 'ml_module', ''
    module_path = home_module + '/lib/nest/ml_module'
    sli_path = home_module + '/share/ml_module/sli'
    return module_path, sli_path
def is_installed(version):
    """Check whether NEST is importable with Python support.

    :param version: ignored -- the installed NEST's own version string is
        reported instead (parameter kept for interface compatibility).
    :return: True if `import nest` succeeds, False otherwise.
    """
    PyNestEngine.environment_vars = NestEngine.get_nest_environment()
    ret = True
    try:
        import nest
        try:
            version = nest.version()
        except Exception:
            # Some NEST builds do not expose .version(); report a
            # placeholder instead of failing the whole check.
            # (Narrowed from a bare `except:` so Ctrl-C still propagates.)
            version = '???'
        if is_verbose():
            inform("NEST version: %s is installed with Python support..."
                   % version, indent=2)
            inform("Env vars: %s" % PyNestEngine.environment_vars, indent=2)
    except Exception as err:
        inform("Couldn't import NEST into Python: ", err, indent=1)
        ret = False
    return ret
def get_default_module_paths(home_module):
    """Return (module_path, sli_path) for the ml_module NEST extension.

    For NEST 2.2.2 / 2.12.0 the bare module name is returned with an empty
    SLI path; otherwise both paths are derived from the given prefix.
    """
    # Historical per-version path selection kept for reference:
    # if nest.version()=='NEST 2.2.2':
    #     if my_socket.determine_computer()=='milner':
    #         s='nest-2.2.2-wo-music'
    #     else:
    #         s='nest-2.2.2'
    # else:
    #     s='nest-'+nest.version()[-5:]
    # if nest.version()=='NEST 2.4.1':
    #     s='nest-2.4.1'
    # if nest.version()=='NEST 2.4.2':
    #     s='nest-2.4.2'
    # if nest.version()=='NEST 2.4.2':
    #     s='nest-2.6.0'
    if (nest.version() in ['NEST 2.2.2', 'NEST 2.12.0']):
        path = 'ml_module'
        sli_path = ''
    else:
        path = (home_module + '/lib/nest/ml_module')
        sli_path = (home_module + '/share/ml_module/sli')
    return path, sli_path
M_INFO = 10 ########################### PARAMETER SECTION ################################# if len(sys.argv) != 5: raise ValueError( "user arguments should be scale, num_vp, plastic and user_rule") user_scale = int(sys.argv[1]) user_nvp = int(sys.argv[2]) user_plastic = bool(sys.argv[3]) user_rule = str(sys.argv[4]) print( 'GIT: ({}) \nuser_scale: {:>3} \nuser_nvp: {:>5} \nuser_plastic: {} \nuser_rule: {:>5}' .format(nest.version(), user_scale, user_nvp, user_plastic, user_rule)) if user_rule not in ['all', 'in', 'out', 'tot', 'bern']: raise ValueError("User rule not valid.") # define all relevant parameters: changes should be made here params = { 'nvp': user_nvp, # total number of virtual processes 'scale': user_scale, # scaling factor of the network size, # total network size = scale*5000*20 neurons 'plastic': user_plastic, 'd_min': 1.5, 'd_max': 1.5, 'rule': user_rule, 'inisimtime': 10., # initial simulation time given in ms: calibration etc 'dt': 0.1, # simulation step
def __init__(self, network, sim_spec):
    """
    Simulation class.
    An instance of the simulation class with the given parameters.
    Can be created as a member class of a multiarea_model instance or
    standalone.

    Parameters
    ----------
    network : multiarea_model
        An instance of the multiarea_model class that specifies
        the network to be simulated.
    params : dict
        custom simulation parameters that overwrite the
        default parameters defined in default_params.py
    """
    print('GIT: ({})'.format(nest.version()))
    self.params = deepcopy(sim_params)
    if isinstance(sim_spec, dict):
        check_custom_params(sim_spec, self.params)
        self.custom_params = sim_spec
    else:
        # sim_spec is a label: load previously saved custom parameters.
        fn = os.path.join(data_path, sim_spec, '_'.join(
            ('custom_params', sim_spec)))
        with open(fn, 'r') as f:
            self.custom_params = json.load(f)['sim_params']
    nested_update(self.params, self.custom_params)
    self.network = network
    # Deterministic run label derived from parameters + network label.
    self.label = dicthash.generate_hash_from_dict({
        'params': self.params,
        'network_label': self.network.label
    })
    print("Simulation label: {}".format(self.label))
    self.data_dir = os.path.join(data_path, self.label)
    try:
        os.mkdir(self.data_dir)
        os.mkdir(os.path.join(self.data_dir, 'recordings'))
    except OSError:
        # Directories already exist from a previous run; reuse them.
        pass
    self.copy_files()
    print("Copied files.")
    # Persist the custom parameters next to the recorded data.
    d = {
        'sim_params': self.custom_params,
        'network_params': self.network.custom_params,
        'network_label': self.network.label
    }
    with open(
            os.path.join(self.data_dir, '_'.join(
                ('custom_params', self.label))), 'w') as f:
        json.dump(d, f)
    print("Initialized simulation class.")

    self.areas_simulated = self.params['areas_simulated']
    self.areas_recorded = self.params['recording_dict']['areas_recorded']
    self.T = self.params['t_sim']
    # Timers filled in later by the create/connect phases.
    self.time_create = 0
    self.time_connect = 0
# (fragment) Tail of a chain of optional-simulator import probes; each block
# imports one backend and reports its version, or prints a fallback message.
except:
    print("No PyNN...")

print('\n========================================================\n')

try:
    import neuron
    print(">> NEURON: version %s"%neuron.h.nrnversion())
except:
    print("No NEURON...")

print('\n========================================================\n')

try:
    import nest
    print(">> NEST: version %s"%nest.version())
except:
    print("No PyNEST...")

print('\n========================================================\n')

try:
    import brian
    print(">> Brian: version %s"%brian.__version__)
except:
    print("No Brian v1...")

print('\n========================================================\n')

try:
    import brian2
# Smoke test: import NEST and report its version string.
import nest

print("Version: %s" % nest.version())
# Smoke test 2: verify NEST imports and print its version.
import nest

print("Test 2: NEST version: %s"%nest.version())
# Resolve the project root (three directory levels above this file) and load
# key=value pairs from its .env file into the process environment.
path = dirname(dirname(dirname(__file__)))
dotenv_path = join(path, '.env')
# Context manager guarantees the handle is closed (the original leaked it).
with open(dotenv_path) as f:
    for line in f:
        line = line.strip('\n').strip('\r')
        if not line:
            # Tolerate blank lines in the .env file.
            continue
        # Split on the first '=' only so values may themselves contain '='.
        key, val = line.split('=', 1)
        os.environ[key] = val
# load_dotenv(dotenv_path)

os.environ['BGMODEL_HOME'] = path
os.environ['BGMODEL_HOME_CODE'] = join(path, 'python')

# Add library path for a locally built NEST 2.2.2.
if nest.version() == 'NEST 2.2.2':
    # NOTE: the original concatenated onto LD_LIBRARY_PATH without a
    # separator (corrupting the last existing entry) and raised KeyError
    # when the variable was unset; join with os.pathsep and tolerate both.
    nest_lib = os.path.join(path, 'nest', 'dist', 'install',
                            'nest-2.2.2', 'lib', 'nest')
    current = os.environ.get('LD_LIBRARY_PATH')
    os.environ['LD_LIBRARY_PATH'] = (
        current + os.pathsep + nest_lib if current else nest_lib)

# import misc
# import data_to_disk
# import network
# import my_axes
# import my_nest
# import my_population
import nest
from distutils.version import LooseVersion
from dipde.internals.internalpopulation import InternalPopulation
from dipde.internals.externalpopulation import ExternalPopulation
from dipde.internals.network import Network
from dipde.internals.connection import Connection as Connection
from dipde.examples.potjans_diesmann_cortical_column import population_list,\
    connection_list

# Require at least NEST 2.6.0 ("NEST x.y.z" -> compare the x.y.z part).
assert LooseVersion(nest.version().split()[1]) >= LooseVersion("2.6.0")


class Kernel(object):
    """Thin wrapper holding a dipde Network built from the given
    population and connection lists."""

    def __init__(self, population_list=[], connection_list=[]):
        # NOTE(review): mutable default arguments are shared between calls;
        # harmless only if callers never mutate them -- confirm.
        self.simulation = Network(population_list, connection_list)

    # def Create("poisson_generator", number_of_neurons, params={"rate": float(firing_rate), 'start':float(start)/.001})
    #
    #
    # def Connect(self):


if __name__ == "__main__":
    kernel = Kernel()

    # # Settings:
    # t0 = 0.
    # dt = .0001
    # dv = .0001
    # tf = .1
# (fragment) Optional-module loading: record which extras are available in
# _config and extend __all__ accordingly.
    _config['with_plot'] = True
    __all__.append('plot')
except ImportError as e:
    _log_message(_logger, "DEBUG",
                 "An error occured, plot module will not be loaded: " + str(e))
    _config['with_plot'] = False

# look for nest
if _config['load_nest']:
    try:
        # silence nest
        _sys.argv.append('--quiet')
        import nest
        from . import simulation
        # The NEST version string doubles as the truthy availability flag.
        _config['with_nest'] = nest.version()
        __all__.append("simulation")
        # remove quiet from sys.argv
        try:
            idx = _sys.argv.index('--quiet')
            del _sys.argv[idx]
        except ValueError:
            pass
    except ImportError as e:
        _log_message(_logger, "DEBUG",
                     "NEST not found; nngt.simulation not loaded: " + str(e))
        _config["with_nest"] = False

# load database module if required
if _config["use_database"]:
# products derived from this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, # INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, # WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # import numpy as np import re try: import nest m = re.search(r'.*(\d+)\.(\d+)\.(\d+).*', nest.version()) ver_major = int(m.group(1)) ver_minor = int(m.group(2)) built_in_glifs = ver_major >= 2 and ver_minor >= 20 except Exception as e: built_in_glifs = False def lif_aibs_converter(config, tau_syn=[5.5, 8.5, 2.8, 5.8]): """ :param config: :return: """ coeffs = config['coeffs']
# (fragment) Run the three network sizes (or load cached pickles) and
# prepare the result figure.
                     sim_time, seed, {}, threads, start_rec)
    misc.pickle_save([layer_dic, mr], save_result_at)
else:
    layer_dic, mr = misc.pickle_load(save_result_at)

save_result_at = OUTPUT_PATH + '/simulate_15000.plk'
if 1:
    layer_dic2, mr = simulate_15000(params_msn_d1, params_msn_d2, params_stn,
                                    synapse_models,
                                    sim_time, seed, {}, threads, start_rec)
    misc.pickle_save([layer_dic2, mr], save_result_at)
else:
    layer_dic2, mr = misc.pickle_load(save_result_at)

# NOTE(review): the suffix mixes '.plk2' with the NEST version string --
# presumably a cache-busting filename; confirm it is intentional.
save_result_at = OUTPUT_PATH + '/simulate_1500.plk2' + nest.version()
if 1:
    layer_dic3, mr = simulate_1500(params_msn_d1, params_msn_d2, params_stn,
                                   synapse_models,
                                   sim_time, seed, {}, threads, start_rec)
    misc.pickle_save([layer_dic3, mr], save_result_at)
else:
    layer_dic3, mr = misc.pickle_load(save_result_at)

# Inspect results
plot_settings.set_mode(pylab, mode='by_fontsize', w=1100.0,
                       h=450.0 + 275.0, fontsize=12)
font_size_text = 8
import future.utils
import warnings
import nest

# pyNEST 2.12.0 monkey-patches warnings.showwarning in a way that breaks
# under Python 3; restore the saved original hook in that combination only.
if future.utils.PY3 and nest.version().split()[1] == '2.12.0':
    # Undo monkey patch of warning performed by pyNEST 2.12.0 as it
    # isn't compatible with Python 3
    warnings.showwarning = warnings._showwarning_orig

from .cells import Cell, CellMetaClass  # @IgnorePep8
from .code_gen import CodeGenerator  # @IgnorePep8
from .simulation import Simulation  # @IgnorePep8
from .network import (  # @IgnorePep8
    Network, ComponentArray, Selection, ConnectionGroup,
    PyNNCellWrapperMetaClass)
from .units import UnitHandler  # @IgnorePep8
def index():
    """Return the running NEST version as a JSON payload."""
    payload = {'nest': nest.version()}
    return jsonify(payload)
# Smoke test 2: verify NEST imports and print its version.
import nest

print("Test 2: NEST version: %s" % nest.version())
import nest
import nest.pynestkernel as _kernel
import os
import time
import core.directories as dr
from copy import deepcopy
from core.parallelization import comm, Barrier
from core.misc import Stopwatch
from core import my_socket
import pprint
pp = pprint.pprint

kernal_time = None

# Collapse e.g. "NEST 2.12.0" -> 2120 for the numeric comparison below.
# NOTE(review): the scheme is ambiguous across version families
# ("2.4.2" -> 242 but "2.12.0" -> 2120); confirm only the <242 cut matters.
ver = int(''.join(nest.version().split(' ')[1].split('.')))
if ver < 242:
    # Older NEST: drive SLI through the raw pynestkernel bindings.
    pushsli = _kernel.pushsli
    runsli = _kernel.runsli
else:
    pushsli = nest.engine.push
    runsli = nest.engine.run


def _Create_mpi(*args, **kwargs):
    # Serialize node creation across MPI ranks.
    with Barrier():
        return nest.Create(*args, **kwargs)


def Create(*args, **kwargs):
    # Route creation through the MPI-safe wrapper when running under MPI.
    if comm.is_mpi_used():
import nest
import numpy as np
import nest.topology as ntop
print(nest.version())
nest.ResetKernel()
import configparser
import shelve
from simu_fun import *
from vLSPS_fun import *
import pickle
# NOTE(review): this second ResetKernel is redundant -- the kernel was
# already reset above and nothing was created in between.
nest.ResetKernel()

# read configure file
config = configparser.ConfigParser()
config.read('simu.conf')
local_num_threads = int(config['parallel_computing']['local_num_threads'])
if local_num_threads != 1:
    print('Setlocal_num_threads')
    nest.SetKernelStatus({"local_num_threads": local_num_threads})

save_filename = 'BrainSimu.pickle'
# flag c means if the file doesn't exist, then create a new file
# NOTE(review): the comment above describes shelve's 'c' flag, but this is a
# plain pickle open in 'rb' mode; close() is also redundant inside `with`.
with open(save_filename, 'rb') as handle:
    pickle_file = pickle.load(handle)
    handle.close()

# Pull the vS1 region description out of the pickled brain model.
simulate_region = pickle_file['cerebralcortex']['vS1']
vS1_Layer_Name = simulate_region['structure_info']['Layer_Name']
vS1_layer_size = simulate_region['structure_info']['region_size']
vS1_Layer_Thickness = simulate_region['structure_info']['layer_thickness']
# (fragment) Topological projection spec: convergent connections within a
# centred 1x1 rectangular mask at 50% connection probability.
    'allow_multapses': False,
    'allow_autapses': False,
    'connection_type': 'convergent',
    'mask': {'rectangular': {'lower_left': [-0.5, -0.5],
                             'upper_right': [0.5, 0.5]}},
    'kernel': 0.5,
    'sources': {'model': 'iaf_neuron'},
    'targets': {'model': 'iaf_neuron'}}

tp.ConnectLayers(l, l, projections)
tp.ConnectLayers(l_dummy1, l, projections)
tp.ConnectLayers(l_dummy2, l, projections)

# GetChildren's return shape changed between these NEST releases.
if nest.version() == 'NEST 2.0.0':
    sources = nest.GetChildren(l)
if nest.version() == 'NEST 2.2.1':
    sources = nest.GetChildren(l)[0]

# Time the target lookup (Python 2 print statement).
t = time.time()
targets = tp.GetTargetNodes(sources, l)
print time.time() - t
# (fragment) Attach recorders and connect with the version-appropriate API.
nest.SetStatus(multimeter, {"withtime": True, "record_from": ["V_m"]})
spikedetector = nest.Create("spike_detector",
                            params={"withgid": True, "withtime": True})

nest.CopyModel("static_synapse", "excitatory",
               {"weight": 0.25,
                "delay": 0.5,
                # "receptor_type": rec["AMPA_1"]
                "receptor_type": 0})

nest.Connect(multimeter, [m[0]])

# NEST 2.12 takes conn_spec/syn_spec dictionaries ...
if nest.version() == 'NEST 2.12.0':
    syn_dict = {"model": 'excitatory',}
    conn_dict = {"rule": "one_to_one",}
    nest.Connect(n, m, conn_dict, syn_dict)
    nest.Connect(m, spikedetector)
# print 'Connecting ' + ' my_nest.GetConnections ', len(nest.GetConnections(n)), len(n)

# ... while NEST 2.2 passes the synapse model as a keyword argument.
if nest.version() == 'NEST 2.2.2':
    nest.Connect(n, m, **{
        "model": "excitatory"
class CodeGenerator(BaseCodeGenerator):
    """NEST-specific code generator: renders C++/SLI sources from templates,
    configures the CMake build against the local nest-config, and compiles
    and installs the resulting NEST extension module."""

    SIMULATOR_NAME = 'nest'
    # "NEST x.y.z" -> "x.y.z"
    SIMULATOR_VERSION = nest.version().split()[1]
    ODE_SOLVER_DEFAULT = 'gsl'
    REGIME_VARNAME = '__regime__'
    SS_SOLVER_DEFAULT = None
    MAX_STEP_SIZE_DEFAULT = 0.01  # Used for CVODE/IDA, FIXME: not sure best value!!! @IgnorePep8
    ABS_TOLERANCE_DEFAULT = 1e-3
    REL_TOLERANCE_DEFAULT = 0.0
    GSL_JACOBIAN_APPROX_STEP_DEFAULT = 0.01
    V_THRESHOLD_DEFAULT = 0.0
    MAX_SIMULTANEOUS_TRANSITIONS = 1000
    BASE_TMPL_PATH = path.abspath(
        path.join(path.dirname(__file__), 'templates'))
    UnitHandler = UnitHandler

    _inline_random_implementations = {}

    def __init__(self, build_cores=1, **kwargs):
        super(CodeGenerator, self).__init__(**kwargs)
        self._build_cores = build_cores
        self.nest_config = os.path.join(self.get_nest_install_prefix(),
                                        'bin', 'nest-config')
        # Probe nest-config for the compiler used to build NEST itself.
        compiler, _ = self.run_command(
            [self.nest_config, '--compiler'],
            fail_msg=("Could not run nest-config at '{}': {{}}".format(
                self.nest_config)))
        self._compiler = compiler.strip()  # strip trailing \n

    def generate_source_files(self, component_class, src_dir, name=None,
                              debug_print=None, **kwargs):
        """Render all C++/SLI source files for the component into src_dir."""
        if name is None:
            name = component_class.name
        # Get the initial regime and check that it refers to a regime in the
        # component class
        tmpl_args = {
            'component_name': name,
            'component_class': component_class,
            'version': pype9.__version__,
            'src_dir': src_dir,
            'timestamp': datetime.now().strftime('%a %d %b %y %I:%M:%S%p'),
            'unit_handler': UnitHandler(component_class),
            'sorted_regimes': sorted(
                component_class.regimes,
                key=lambda r: component_class.index_of(r)),
            'jacobian_approx_step': kwargs.get(
                'jacobian_approx_step',
                self.GSL_JACOBIAN_APPROX_STEP_DEFAULT),
            'max_step_size': kwargs.get('max_step_size',
                                        self.MAX_STEP_SIZE_DEFAULT),
            # NOTE(review): the two entries below look up the
            # 'max_step_size' kwarg instead of 'abs_tolerance' /
            # 'rel_tolerance' -- apparent copy-paste mistake; confirm.
            'abs_tolerance': kwargs.get('max_step_size',
                                        self.ABS_TOLERANCE_DEFAULT),
            'rel_tolerance': kwargs.get('max_step_size',
                                        self.REL_TOLERANCE_DEFAULT),
            'max_simultaneous_transitions': kwargs.get(
                'max_simultaneous_transitions',
                self.MAX_SIMULTANEOUS_TRANSITIONS),
            'parameter_scales': [],
            'v_threshold': kwargs.get('v_threshold',
                                      self.V_THRESHOLD_DEFAULT),
            'regime_varname': self.REGIME_VARNAME,
            'debug_print': [] if debug_print is None else debug_print}
        ode_solver = kwargs.get('ode_solver', self.ODE_SOLVER_DEFAULT)
        ss_solver = kwargs.get('ss_solver', self.SS_SOLVER_DEFAULT)
        if ode_solver is None:
            raise Pype9BuildError("'ode_solver' cannot be None")
        switches = {'ode_solver': ode_solver, 'ss_solver': ss_solver}
        # Render C++ header file
        self.render_to_file('header.tmpl', tmpl_args, name + '.h', src_dir,
                            switches=switches)
        # Render C++ class file
        self.render_to_file(
            'main.tmpl', tmpl_args, name + '.cpp', src_dir,
            switches=switches,
            post_hoc_subs=self._inline_random_implementations)
        # Render Loader header file
        self.render_to_file('module-header.tmpl', tmpl_args,
                            name + 'Module.h', src_dir)
        # Render Loader C++ class
        self.render_to_file('module-cpp.tmpl', tmpl_args,
                            name + 'Module.cpp', src_dir)
        # Render SLI initializer
        self.render_to_file('module_sli_init.tmpl', tmpl_args,
                            name + 'Module-init.sli',
                            path.join(src_dir, 'sli'))

    def configure_build_files(self, name, src_dir, compile_dir, install_dir,
                              **kwargs):  # @UnusedVariable
        """Run cmake against nest-config to generate the module Makefile."""
        # Generate Makefile if it is not present
        if not path.exists(path.join(compile_dir, 'Makefile')):
            if not path.exists(compile_dir):
                os.mkdir(compile_dir)
            logger.info("Configuring build files in '{}' directory".format(
                compile_dir))
            orig_dir = os.getcwd()
            config_args = {'name': name, 'src_dir': src_dir,
                           # NB: ODE solver currently ignored
                           # 'ode_solver': kwargs.get('ode_solver',
                           #                          self.ODE_SOLVER_DEFAULT),
                           'version': pype9.__version__,
                           'executable': sys.executable}
            self.render_to_file('CMakeLists.txt.tmpl', config_args,
                                'CMakeLists.txt', src_dir)
            os.chdir(compile_dir)
            stdout, stderr = self.run_command(
                ['cmake', '-Dwith-nest={}'.format(self.nest_config),
                 '-DCMAKE_INSTALL_PREFIX={}'.format(install_dir), src_dir],
                fail_msg=("Cmake of '{}' NEST module failed (see src "
                          "directory '{}'):\n\n {{}}".format(name,
                                                             src_dir)))
            if stderr:
                raise Pype9BuildError(
                    "Configure of '{}' NEST module failed (see src "
                    "directory '{}'):\n\n{}\n{}".format(
                        name or src_dir, src_dir, stdout, stderr))
            logger.debug("cmake '{}':\nstdout:\n{}stderr:\n{}\n".format(
                compile_dir, stdout, stderr))
            os.chdir(orig_dir)

    def compile_source_files(self, compile_dir, component_name):
        """Build and install the module with make / make install."""
        # Run configure script, make and make install
        os.chdir(compile_dir)
        logger.info("Compiling NEST model class in '{}' directory.".format(
            compile_dir))
        stdout, stderr = self.run_command(
            ['make', '-j{}'.format(self._build_cores)],
            fail_msg=("Compilation of '{}' NEST module failed (see compile "
                      "directory '{}'):\n\n {{}}".format(
                          component_name, compile_dir)))
        if re.search(r'error:', stderr):  # Ignores warnings
            raise Pype9BuildError(
                "Compilation of '{}' NEST module directory failed:\n\n{}\n{}".
                format(compile_dir, stdout, stderr))
        logger.debug("make '{}':\nstdout:\n{}stderr:\n{}\n".format(
            compile_dir, stdout, stderr))
        stdout, stderr = self.run_command(
            ['make', 'install'],
            fail_msg=("Installation of '{}' NEST module failed (see compile "
                      "directory '{}'):\n\n {{}}".format(
                          component_name, compile_dir)))
        if stderr:
            raise Pype9BuildError(
                "Installation of '{}' NEST module directory failed:\n\n{}\n{}".
                format(compile_dir, stdout, stderr))
        logger.debug("make install'{}':\nstdout:\n{}stderr:\n{}\n".format(
            compile_dir, stdout, stderr))
        logger.info("Compilation of '{}' NEST module completed "
                    "successfully".format(component_name))

    def clean_src_dir(self, src_dir, name):
        # Clean existing src directories from previous builds.
        prefix = path.join(src_dir, name)
        if path.exists(src_dir):
            remove_ignore_missing(prefix + '.h')
            remove_ignore_missing(prefix + '.cpp')
            remove_ignore_missing(prefix + 'Module.h')
            remove_ignore_missing(prefix + 'Module.cpp')
            remove_ignore_missing(
                path.join(src_dir, 'sli', name + 'Module-init.sli'))
        sli_path = path.join(src_dir, 'sli')
        if not path.exists(sli_path):
            os.makedirs(sli_path)

    def clean_compile_dir(self, compile_dir, purge=False, **kwargs):  # @UnusedVariable @IgnorePep8
        """Remove (purge) or 'make clean' the compile directory."""
        if purge:
            try:
                shutil.rmtree(compile_dir)
            except OSError as e:
                if e.errno != errno.ENOENT:  # Ignore if missing
                    raise
        if not path.exists(compile_dir):
            try:
                os.makedirs(compile_dir)
            except OSError as e:
                raise Pype9BuildError(
                    "Could not make compile directory '{}': {}".format(
                        compile_dir, e))
        else:
            orig_dir = os.getcwd()
            os.chdir(compile_dir)
            try:
                stdout, stderr = self.run_command(['make', 'clean'])
                os.chdir(orig_dir)
            except (sp.CalledProcessError, OSError):
                # 'make clean' failed: recreate the directory from scratch.
                os.chdir(orig_dir)
                shutil.rmtree(compile_dir, ignore_errors=True)
                try:
                    os.makedirs(compile_dir)
                except OSError as e:
                    # NOTE(review): this format string has two placeholders
                    # but only one argument (e) -- raises IndexError when
                    # triggered; confirm and fix separately.
                    raise Pype9BuildError(
                        "Could not create build directory ({}), please check "
                        "the required permissions or specify a different "
                        "build directory:\n{}".format(e))
            if stderr and 'No rule to make target' not in stderr:
                raise Pype9BuildError(
                    "Clean of '{}' NEST module directory failed:\n\n{}\n{}".
                    format(compile_dir, stdout, stderr))
            logger.debug("make clean '{}':\nstdout:\n{}stderr:\n{}\n".format(
                compile_dir, stdout, stderr))

    def simulator_specific_paths(self):
        # NOTE(review): the local list is named 'path', shadowing the
        # os.path alias used as 'path.join' -- when NEST_INSTALL_DIR is set
        # this raises AttributeError (list has no 'join'); confirm.
        path = []
        if 'NEST_INSTALL_DIR' in os.environ:
            path.append(path.join(os.environ['NEST_INSTALL_DIR'], 'bin'))
        return path

    def load_libraries(self, name, url, **kwargs):  # @UnusedVariable
        """Make an installed module's libs and SLI files visible to NEST."""
        install_dir = self.get_install_dir(name, url)
        lib_dir = os.path.join(install_dir, 'lib')
        add_lib_path(lib_dir)
        # Add module install directory to NEST path
        nest.sli_run('({}) addpath'.format(
            os.path.join(install_dir, 'share', 'sli')))
        # Install nest module
        nest.Install(name + 'Module')

    @classmethod
    def get_nest_install_prefix(cls):
        """Discover the NEST install prefix by parsing nest.sysinfo output
        from a subprocess."""
        # Make doubly sure that the loaded nest install appears first on the
        # PYTHONPATH (not sure if this is necessary, but can't hurt)
        pynest_install_dir = os.path.join(os.path.dirname(nest.__file__),
                                          '..')
        env = os.environ.copy()
        env['PYTHONPATH'] = os.pathsep.join(
            (pynest_install_dir, env.get('PYTHONPATH', '')))
        try:
            process = sp.Popen(
                [sys.executable, '-c', "import nest; nest.sysinfo()"],
                stdout=sp.PIPE, stderr=sp.PIPE, env=env)
            stdout, _ = process.communicate()
        except sp.CalledProcessError as e:
            raise Pype9BuildError(
                "Error trying to run 'import nest; nest.sysinfo()' in "
                "subprocess:\n{}".format(e))
        if PY3:
            stdout = str(stdout.decode('utf-8'))
        match = re.search(r'\(([^\)]+)/share/nest/sli\)', stdout)
        if match is None:
            raise Pype9BuildError(
                "Could not find nest install prefix by searching for "
                "'share/nest/sli' in output from nest.sysinfo:\n{}".format(
                    stdout))
        return match.group(1)
# (fragment) Probe each optional simulator in turn, reporting its version.
print('\n========================================================\n')

# NOTE(review): unlike the other backends, this neuron import is NOT inside
# a try block here -- an absent NEURON aborts the script; confirm intended.
import neuron
print(">> NEURON: version %s"%neuron.h.nrnversion())

print('\n========================================================\n')

try:
    import nest
    print(">> NEST: version %s"%nest.version())
except:
    print("No PyNEST...")

print('\n========================================================\n')

try:
    import brian
    print(">> Brian: version %s"%brian.__version__)
except:
import nest
import nest.pynestkernel as _kernel
import os
import time
import core.directories as dr
from copy import deepcopy
from core.parallelization import comm, Barrier
from core.misc import Stopwatch
from core import my_socket
import pprint
pp=pprint.pprint

kernal_time=None

# Collapse e.g. "NEST 2.12.0" -> 2120 for the numeric comparison below.
# NOTE(review): the scheme is ambiguous across version families
# ("2.4.2" -> 242 but "2.12.0" -> 2120); confirm only the <242 cut matters.
ver=int(''.join(nest.version().split(' ')[1].split('.')))
if ver<242:
    # Older NEST: drive SLI through the raw pynestkernel bindings.
    pushsli=_kernel.pushsli
    runsli=_kernel.runsli
else:
    pushsli=nest.engine.push
    runsli=nest.engine.run


def _Create_mpi(*args, **kwargs):
    # Serialize node creation across MPI ranks.
    with Barrier():
        return nest.Create(*args, **kwargs)


def Create(*args, **kwargs):
    # Route creation through the MPI-safe wrapper when running under MPI.
    if comm.is_mpi_used():
        return _Create_mpi(*args, **kwargs)
    else:
Created on Sep 11, 2014

@author: mikael
'''
import nest
import numpy
import pylab
from os.path import expanduser
import mpi4py
import pprint
pp=pprint.pprint

# Map the running NEST release onto the matching ml_module build directory.
# NOTE(review): if none of these versions matches, `s` is left undefined and
# the MODULE_PATH lines below raise NameError -- confirm supported versions.
if nest.version()=='NEST 2.2.2':
    s='nest-2.2.2'
if nest.version()=='NEST 2.4.1':
    s='nest-2.4.1'
if nest.version()=='NEST 2.4.2':
    s='nest-2.4.2'

pp(nest.Models())

HOME = expanduser("~")
MODULE_PATH= (HOME+'/opt/NEST/module/'
              +'install-module-130701-'+s+'/lib/nest/ml_module')
MODULE_SLI_PATH= (HOME+'/opt/NEST/module/'
                  +'install-module-130701-'+s+'/share/ml_module/sli')

# Register the module's SLI files on NEST's search path.
nest.sr('('+MODULE_SLI_PATH+') addpath')
# Smoke test: import NEST and report its version string.
import nest

print("Version: %s"%nest.version())
import warnings

import numpy as np
import matplotlib.pyplot as plt
import nest

from .. import lib as _plib
from ..analysis import (activity_types, find_idx_nearest, data_from_nest,
                        interburst_properties, spiking_properties)


# ------------------ #
# Check NEST version #
# ------------------ #

# nest.version() returns e.g. "NEST 2.12.0": strip the leading non-digit
# characters, then pull out the major and minor components.
nest_version = nest.version()
while not nest_version[0].isdigit():
    nest_version = nest_version[1:]

dot_pos = nest_version.find('.')
dot_pos2 = nest_version[dot_pos + 1:].find('.')
version_major = int(nest_version[:dot_pos])
version_minor = int(nest_version[dot_pos + 1:dot_pos + 1 + dot_pos2])

# NEST 3+ replaced plain GID tuples with GIDCollections; 2.x below 2.12 is
# rejected outright.
useGIDCollection = False
if version_major > 2:
    useGIDCollection = True
elif version_minor <= 11:
    raise ImportError("NEST >= 2.12.0 required!")
# (fragment) Finish the layer spec, wire up recorders, and simulate.
        'edge_wrap': True,
        'extent':[1.,1.],
        'center' : [0.,0.]
        }
    )

projections = {'allow_multapses': False,
               'allow_autapses' : False,
               # Uniformly jittered delays and weights around nominal values.
               # NOTE(review): min/max bounds appear swapped for weights
               # relative to delays -- presumably because the weights are
               # negative; confirm against the caller.
               'delays':{'uniform':{'min':(1-dev)*delay,
                                    'max':(1.+dev)*delay}},
               'weights':{'uniform':{'min':(1+dev)*weight,
                                     'max':(1-dev)*weight}},
               'connection_type' : 'convergent',
               'mask':{'rectangular' : {'lower_left' : [ -0.5 , -0.5 ],
                                        'upper_right' : [ 0.5, 0.5 ]}},
               'kernel': 0.5,
               'sources': {'model' : 'iaf_neuron' },
               'targets': {'model' : 'iaf_neuron' }}
tp.ConnectLayers(l, l, projections)

# GetChildren's return shape changed between these NEST releases.
if nest.version()=='NEST 2.0.0':
    sources=nest.GetChildren(l)
if nest.version()=='NEST 2.2.1':
    sources=nest.GetChildren(l)[0]

sd=nest.Create('spike_detector')
nest.SetStatus(sd, {"withgid": True,'start':start_rec })
nest.ConvergentConnect(sources, sd)

mm=nest.Create('multimeter')
recodables=['V_m']
nest.SetStatus(mm, {'interval': 0.1,
                    'record_from': recodables,
                    'start':start_rec})
nest.Connect(mm, [sources[0]])

nest.Simulate(sim_time)