def write_indices(self, chx, loc=""):
    """
    Create NIX Source objects to represent individual indices based on the
    provided ``chx`` (ChannelIndex) and write them to the NIX file under
    the already-mapped parent ChannelIndex Source.

    :param chx: The Neo ChannelIndex
    :param loc: Path to the CHX
    """
    parent_source = self._get_mapped_object(chx)
    for idx, channel in enumerate(chx.index):
        channame = "{}.ChannelIndex{}".format(
            chx.annotations["nix_name"], idx)
        # reuse the child source if it already exists, otherwise create it
        if channame in parent_source.sources:
            nixchan = parent_source.sources[channame]
        else:
            nixchan = parent_source.create_source(channame,
                                                  "neo.channelindex")
            nixchan.metadata = parent_source.metadata.create_section(
                nixchan.name, "neo.channelindex.metadata")
        nixchan.definition = parent_source.definition
        chanmd = nixchan.metadata
        if len(chx.channel_names):
            chanmd["neo_name"] = nix.Value(
                stringify(chx.channel_names[idx]))
        chanmd["index"] = nix.Value(int(channel))
        if chx.coordinates is not None:
            coords = chx.coordinates[idx]
            coordunits = stringify(coords[0].dimensionality)
            nixcoords = tuple(
                nix.Value(c.rescale(coordunits).magnitude.item())
                for c in coords)
            # drop stale coordinates before re-creating the property
            if "coordinates" in chanmd:
                del chanmd["coordinates"]
            chanprop = chanmd.create_property("coordinates", nixcoords)
            chanprop.unit = coordunits
def test_properties(tmpdir):
    """Create a section with properties of each data type and set values."""
    nixfilepath = os.path.join(str(tmpdir), "proptest.nix")
    nix_file = nix.File.open(nixfilepath, mode=nix.FileMode.Overwrite,
                             backend="h5py")
    sec = nix_file.create_section("test section", "proptest")
    sec.create_property("test property", nix.Value(0))
    sec.create_property("test str", nix.DataType.String)
    sec.create_property("other property", nix.DataType.Int64)
    # one property per supported data type
    for typename in ("Int32", "Int64", "UInt32", "UInt64",
                     "Float", "Double", "String", "Bool"):
        sec.create_property("prop " + typename,
                            getattr(nix.DataType, typename))
    sec.props[0].mapping = "mapping"
    sec.props[1].definition = "def"
    sec.props[0].values = [nix.Value(101)]
    sec.props[1].values = [nix.Value(s) for s in ("foo", "bar", "baz")]
    sec.props["prop Float"].values = [
        nix.Value(f) for f in (10.0, 33.3, 1.345, 90.2)
    ]
    nix_file.close()
def test_value_bool(self):
    """Boolean Value: data type, equality, truthiness, and reassignment."""
    truthy = nix.Value(True)
    falsy = nix.Value(False)
    assert truthy.data_type == nix.DataType.Bool
    assert truthy == truthy
    assert truthy != falsy
    assert truthy
    # flipping the underlying value makes the two compare equal
    truthy.value = False
    assert truthy == falsy
def _write_channelindex(self, chx, nixblock):
    """
    Convert the provided Neo ChannelIndex to a NIX Source and write it to
    the NIX file. For each index in the ChannelIndex object, a child NIX
    Source is also created.

    :param chx: The Neo ChannelIndex to be written
    :param nixblock: NIX Block where the Source will be created
    """
    # reuse a previously assigned NIX name, or mint and remember a new one
    try:
        nix_name = chx.annotations["nix_name"]
    except KeyError:
        nix_name = "neo.channelindex.{}".format(self._generate_nix_name())
        chx.annotate(nix_name=nix_name)

    nixsource = nixblock.create_source(nix_name, "neo.channelindex")
    nixsource.metadata = nixblock.metadata.create_section(
        nix_name, "neo.channelindex.metadata")
    metadata = nixsource.metadata
    metadata["neo_name"] = chx.name if chx.name is not None else ""
    nixsource.definition = chx.description
    if chx.annotations:
        for key, value in chx.annotations.items():
            self._write_property(metadata, key, value)

    # one child Source per channel index
    for idx, channel in enumerate(chx.index):
        channame = "{}.ChannelIndex{}".format(nix_name, idx)
        nixchan = nixsource.create_source(channame, "neo.channelindex")
        nixchan.metadata = nixsource.metadata.create_section(
            nixchan.name, "neo.channelindex.metadata")
        nixchan.definition = nixsource.definition
        chanmd = nixchan.metadata
        chanmd["index"] = nix.Value(int(channel))
        if len(chx.channel_names):
            chanmd["neo_name"] = nix.Value(
                stringify(chx.channel_names[idx]))
        if len(chx.channel_ids):
            chanmd["channel_id"] = nix.Value(chx.channel_ids[idx])
        if chx.coordinates is not None:
            coords = chx.coordinates[idx]
            coordunits = stringify(coords[0].dimensionality)
            nixcoords = tuple(nix.Value(c.magnitude.item())
                              for c in coords)
            chanprop = chanmd.create_property("coordinates", nixcoords)
            chanprop.unit = coordunits

    # Descend into Units
    for unit in chx.units:
        self._write_unit(unit, nixsource)
def insert_metadata(root, d):
    """
    Recursively write the nested dict ``d`` into the NIX section ``root``.

    Dict-valued entries become child sections (typed ``relacs.<key>``);
    leaf values are parsed with ``get_number_and_unit`` and stored as
    properties — numeric values get their extracted unit, everything else
    is stored as its string representation.

    :param root: nix Section to populate
    :param d: (possibly nested) dict of metadata
    """
    # fix: d.iteritems() is Python-2-only; .items() works on both 2 and 3
    for key, val in d.items():
        if isinstance(val, dict):
            sec = root.create_section(key, 'relacs.{0:s}'.format(key))
            insert_metadata(sec, val)
        else:
            number, unit = get_number_and_unit(str(val))
            if unit is None:
                root.create_property(key, [nix.Value(str(val))])
            else:
                prop = root.create_property(key, [nix.Value(number)])
                prop.unit = unit
def test_value_float(self):
    """Float Value: data type detection, comparisons, and reassignment."""
    value = nix.Value(47.11)
    other = nix.Value(3.14)
    assert value.data_type == nix.DataType.Double
    # compares equal to itself and to the raw float, unequal to others
    assert value == value
    assert value == 47.11
    assert value != other
    assert value != 3.14
    value.value = 66.6
    assert value == nix.Value(66.6)
    assert value == 66.6
    assert value.value == 66.6
def test_value_str(self):
    """String Value: data type detection, comparisons, and reassignment."""
    value = nix.Value("foo")
    other = nix.Value("bar")
    assert value.data_type == nix.DataType.String
    # compares equal to itself and to the raw string, unequal to others
    assert value == value
    assert value == "foo"
    assert value != other
    assert value != "bar"
    value.value = "wrtlbrmpft"
    assert value == nix.Value("wrtlbrmpft")
    assert value == "wrtlbrmpft"
    assert value.value == "wrtlbrmpft"
def test_value_int(self):
    """Integer Value: data type detection, comparisons, and reassignment."""
    value = nix.Value(10)
    other = nix.Value(11)
    assert value.data_type == nix.DataType.Int64
    # compares equal to itself and to the raw int, unequal to others
    assert value == value
    assert value == 10
    assert value != other
    assert value != 11
    value.value = 20
    assert value == nix.Value(20)
    assert value == 20
    assert value.value == 20
def write_recording(recording, save_path, overwrite=False):
    """
    Write a spikeinterface recording (traces plus channel properties) to a
    NIX file at ``save_path``.

    :param recording: spikeinterface recording extractor
    :param save_path: destination file path
    :param overwrite: when False, refuse to clobber an existing file
    :raises ImportError: if nixio is not installed
    :raises FileExistsError: if ``save_path`` exists and not ``overwrite``
    """
    if not HAVE_NIXIO:
        raise ImportError(missing_nixio_msg)
    if os.path.exists(save_path) and not overwrite:
        raise FileExistsError("File exists: {}".format(save_path))
    nf = nix.File.open(save_path, nix.FileMode.Overwrite)
    # use the file name to name the top-level block
    fname = os.path.basename(save_path)
    block = nf.create_block(fname, "spikeinterface.recording")
    da = block.create_data_array("traces", "spikeinterface.traces",
                                 data=recording.get_traces())
    da.unit = "uV"
    labels = recording.get_channel_ids()
    if not labels:
        # channel IDs not specified; just number them
        labels = list(range(recording.get_num_channels()))
    chandim = da.append_set_dimension()
    chandim.labels = labels
    sfreq = recording.get_sampling_frequency()
    timedim = da.append_sampled_dimension(sampling_interval=1. / sfreq)
    timedim.unit = "s"
    # Channel properties are stored one sub-section per channel:
    #   traces.metadata (Section)
    #     -> <channel id> (Section)
    #          -> <property name> (Property) ...
    traces_md = nf.create_section("traces.metadata",
                                  "spikeinterface.properties")
    da.metadata = traces_md
    for chanid in recording.get_channel_ids():
        chan_md = traces_md.create_section(str(chanid),
                                           "spikeinterface.properties")
        for propname in recording.get_channel_property_names(chanid):
            propvalue = recording.get_channel_property(chanid, propname)
            if nf.version <= (1, 1, 0):
                # old file versions need explicit nix.Value wrapping
                if isinstance(propvalue, Iterable):
                    values = [nix.Value(v) for v in propvalue]
                else:
                    values = nix.Value(propvalue)
            else:
                values = propvalue
            chan_md.create_property(propname, values)
    nf.close()
def _write_property(self, section, name, v):
    """
    Create a metadata property with a given name and value on the
    provided metadata section.

    :param section: The metadata section to hold the new property
    :param name: The name of the property
    :param v: The value to write
    :return: The newly created property
    """
    # NOTE: the branch order matters — strings and bytes are Iterable,
    # so they must be tested before the generic Iterable branch.
    if isinstance(v, pq.Quantity):
        if len(v.shape):
            section[name] = [nix.Value(mag) for mag in v.magnitude]
        else:
            section[name] = nix.Value(v.magnitude.item())
        section.props[name].unit = str(v.dimensionality)
    elif isinstance(v, datetime):
        section[name] = nix.Value(calculate_timestamp(v))
    elif isinstance(v, string_types):
        section[name] = nix.Value(v)
    elif isinstance(v, bytes):
        section[name] = nix.Value(v.decode())
    elif isinstance(v, Iterable):
        values = []
        unit = None
        for element in v:
            if isinstance(element, pq.Quantity):
                unit = str(element.dimensionality)
                element = nix.Value(element.magnitude.item())
            elif isinstance(element, Iterable):
                self.logger.warn("Multidimensional arrays and nested "
                                 "containers are not currently supported "
                                 "when writing to NIX.")
                return None
            elif type(element).__module__ == "numpy":
                element = nix.Value(element.item())
            else:
                element = nix.Value(element)
            values.append(element)
        section[name] = values
        section.props[name].unit = unit
    elif type(v).__module__ == "numpy":
        section[name] = nix.Value(v.item())
    else:
        section[name] = nix.Value(v)
    return section.props[name]
def fix_property(args):
    """
    Set the value (and optionally unit) of a uniquely-named property,
    creating it first when ``args.add_property`` is set.

    :param args: parsed CLI arguments (file, property, value, unit,
                 section, add_property)
    :raises ValueError: if more than one property matches the name
    """
    nix_file = nix.File.open(args.file, nix.FileMode.ReadWrite)
    b = nix_file.blocks[0]
    props = find_property(nix_file, args.property, True, True)
    props = filter_props(props, args)

    if len(props.keys()) > 1:
        nix_file.close()
        raise ValueError("Property name %s is not unique! Exiting!"
                         % args.property)

    if len(props.keys()) == 0:
        if args.add_property:
            # pick the target section: a named one (found or created),
            # or the block's own metadata section
            if len(args.section.strip()) > 0:
                secs = find_section(nix_file, args.section, True, True)
                if len(secs) > 0:
                    s = secs[-1]
                else:
                    s = b.metadata.create_section(args.section,
                                                  args.section)
            else:
                s = b.metadata
            if is_number(args.value):
                val = nix.Value(float(args.value))
                p = s.create_property(args.property, val)
                if len(args.unit.strip()) > 0:
                    p.unit = args.unit
            else:
                s[args.property] = args.value
        else:
            print("Property %s was not found!" % args.property)
    else:
        # exactly one match: replace its values in place
        p = props[list(props.keys())[0]][0]
        p.delete_values()
        if is_number(args.value):
            if p.data_type == np.bytes_:
                # string-typed property: keep the value as a string
                p.values = [nix.Value(args.value)]
            else:
                p.values = [nix.Value(float(args.value))]
                if len(args.unit.strip()) > 0:
                    p.unit = args.unit
        else:
            p.values = [nix.Value(args.value)]
    nix_file.close()
def test_section_properties(self):
    """Exercise property creation, lookup, easy-creation, and deletion."""
    assert len(self.section) == 0

    prop = self.section.create_property("test prop", nix.DataType.String)
    assert len(self.section) == 1
    for p in self.section:
        assert p in self.section

    assert self.section.has_property_by_name("test prop")
    assert not self.section.has_property_by_name("notexist")
    assert self.section.get_property_by_name("test prop") is not None
    assert self.section.get_property_by_name("notexist") is None
    assert len(self.section.inherited_properties()) == 1

    # containment works by object, by id, and by name
    assert prop in self.section
    assert prop.id in self.section
    assert prop.name in self.section
    assert "notexist" not in self.section

    props = dict(self.section.items())
    assert props["test prop"] == prop
    assert prop.id == self.section.props[0].id
    assert prop.id == self.section.props[-1].id

    # easy prop creation
    self.section['ep_str'] = 'str'
    self.section['ep_int'] = 23
    self.section['ep_float'] = 42.0
    self.section['ep_list'] = [1, 2, 3]
    self.section['ep_val'] = nix.Value(1.0)
    self.section['ep_val'] = 2.0
    assert all(name in self.section
               for name in ['ep_str', 'ep_int', 'ep_float'])
    assert self.section['ep_str'] == 'str'
    assert self.section['ep_int'] == 23
    assert self.section['ep_float'] == 42.0
    assert self.section['ep_list'] == [1, 2, 3]

    # heterogeneous value lists are rejected
    def create_hetero_section():
        self.section['ep_ex'] = [1, 1.0]
    self.assertRaises(ValueError, create_hetero_section)

    # deleting every property empties the section again
    for prop_id in [x.id for x in self.section]:
        del self.section[prop_id]
    assert len(self.section) == 0
def setUp(self):
    """Open a fresh test file and create the section and properties
    shared by the test cases."""
    self.file = nix.File.open(self.testfilename, nix.FileMode.Overwrite,
                              backend=self.backend)
    self.section = self.file.create_section("test section",
                                            "recordingsession")
    # an int-valued, a string-typed, and an extra Int64 property
    self.prop = self.section.create_property("test property",
                                             nix.Value(0))
    self.prop_s = self.section.create_property("test str",
                                               nix.DataType.String)
    self.other = self.section.create_property("other property",
                                              nix.DataType.Int64)
def write_sorting(sorting, save_path, overwrite=False):
    """
    Write a spikeinterface sorting (spike trains plus unit properties) to
    a NIX file at ``save_path``.

    :param sorting: spikeinterface sorting extractor
    :param save_path: destination file path
    :param overwrite: when False, refuse to clobber an existing file
    :raises ImportError: if nixio is not installed
    :raises FileExistsError: if ``save_path`` exists and not ``overwrite``
    """
    if not HAVE_NIXIO:
        raise ImportError(missing_nixio_msg)
    if os.path.exists(save_path) and not overwrite:
        raise FileExistsError("File exists: {}".format(save_path))
    sfreq = sorting.get_sampling_frequency()
    # spike times are stored in multiples of the sampling interval
    if sfreq is None:
        unit = None
    elif sfreq == 1:
        unit = "s"
    else:
        unit = "{} s".format(1. / sfreq)
    nf = nix.File.open(save_path, nix.FileMode.Overwrite)
    # use the file name to name the top-level block
    fname = os.path.basename(save_path)
    block = nf.create_block(fname, "spikeinterface.sorting")
    commonmd = nf.create_section(fname, "spikeinterface.sorting.metadata")
    if sfreq is not None:
        commonmd["sampling_frequency"] = sfreq
    # one DataArray of spike times per unit
    spikes_das = []
    for unit_id in sorting.get_unit_ids():
        da = block.create_data_array(
            "spikes-{}".format(unit_id), "spikeinterface.spikes",
            data=sorting.get_unit_spike_train(unit_id))
        da.unit = unit
        da.label = str(unit_id)
        spikes_das.append(da)
    spikes_md = nf.create_section("spikes.metadata",
                                  "spikeinterface.properties")
    for da in spikes_das:
        da.metadata = spikes_md
    # unit properties live in one sub-section per unit
    for unit_id in sorting.get_unit_ids():
        unit_md = spikes_md.create_section(str(unit_id),
                                           "spikeinterface.properties")
        for propname in sorting.get_unit_property_names(unit_id):
            propvalue = sorting.get_unit_property(unit_id, propname)
            if nf.version <= (1, 1, 0):
                # old file versions need explicit nix.Value wrapping
                if isinstance(propvalue, Iterable):
                    values = [nix.Value(v) for v in propvalue]
                else:
                    values = nix.Value(propvalue)
            else:
                values = propvalue
            unit_md.create_property(propname, values)
    nf.close()
def test_properties(self):
    """Round-trip section properties of each data type and compare the
    written and re-read attributes after every mutation."""
    sec = self.write_file.create_section("test section", "proptest")
    sec.create_property("test property", nix.Value(0))
    sec.create_property("test str", nix.DataType.String)
    sec.create_property("other property", nix.DataType.Int64)
    # one property per supported data type
    for typename in ("Int32", "Int64", "UInt32", "UInt64",
                     "Float", "Double", "String", "Bool"):
        sec.create_property("prop " + typename,
                            getattr(nix.DataType, typename))
    sec.props[0].mapping = "mapping"
    sec.props[1].definition = "def"

    self.check_compatibility()
    wsec = self.write_file.sections[0]
    rsec = self.read_file.sections[0]
    self.check_attributes(wsec, rsec)
    for wprop, rprop in zip(wsec.props, rsec.props):
        self.check_attributes(wprop, rprop)

    # mutate values and re-compare
    sec.props[0].values = [nix.Value(101)]
    for wprop, rprop in zip(wsec.props, rsec.props):
        self.check_attributes(wprop, rprop)

    sec.props[1].values = [nix.Value(s) for s in ("foo", "bar", "baz")]
    for wprop, rprop in zip(wsec.props, rsec.props):
        self.check_attributes(wprop, rprop)
def addBrianQuantity2Section(sec: nixio.pycore.Section, name: str,
                             qu: Quantity) -> nixio.pycore.Property:
    """
    Store a scalar or 1-D Brian Quantity as a property on ``sec``.

    The value(s) and unit are recovered by parsing the string produced by
    ``Quantity.in_best_unit()``.

    :param sec: target NIX section
    :param name: property name
    :param qu: Brian Quantity (scalar or 1-D)
    :return: the created property, with its unit set
    :raises ValueError: for quantities with more than one dimension
    """
    propStr = qu.in_best_unit()
    if qu.shape == ():
        # scalar rendering: "<value> <unit>"
        valueStr, propUnit = propStr.split(" ")
        pr = sec.create_property(name, [nixio.Value(float(valueStr))])
    elif len(qu.shape) == 1:
        # 1-D rendering: "[ v1 v2 ...] <unit>"
        valuesStr, propUnit = propStr.split("] ")
        values = [float(tok) for tok in valuesStr[2:].split()]
        pr = sec.create_property(name,
                                 [nixio.Value(v) for v in values])
    else:
        raise ValueError("Only scalar or 1D Brian Quantities as supported")
    pr.unit = propUnit
    return pr
def test_value_attrs(self):
    """Optional Value attributes are settable and read back unchanged."""
    value = nix.Value(0)
    # (attribute, value) pairs applied in the original order,
    # including the filename overwrite
    for attrname, attrval in (("reference", "a"),
                              ("filename", "b"),
                              ("filename", "c"),
                              ("checksum", "d"),
                              ("uncertainty", 0.5)):
        setattr(value, attrname, attrval)
        assert getattr(value, attrname) == attrval
def convert_value(v):
    """
    Convert an odML-style value object ``v`` into a ``nix.Value``.

    Binary values, values whose data is None, and data of types that
    ``nix.Value`` cannot hold are skipped: the corresponding counter in
    the global ``info`` dict is incremented and ``None`` is returned.
    Date/time data is first converted with ``convert_datetime``.

    :param v: source value object (has dtype, data, unit, uncertainty,
              reference attributes)
    :return: the converted nix.Value, or None if skipped
    """
    global info
    if v.dtype == "binary":
        info["skipped binary values"] += 1
        return None
    data = v.data
    if data is None:
        info["skipped none values"] += 1
        return None
    if v.dtype in ("date", "time", "datetime"):
        data = convert_datetime(v.data)
    try:
        nixv = nix.Value(data)
    except TypeError:
        # fix: message previously misspelled "Unsuported"
        print("Unsupported data type: {}".format(type(data)))
        info["type errors"] += 1
        return None
    # carry over the auxiliary attributes
    nixv.unit = v.unit
    nixv.uncertainty = v.uncertainty
    nixv.reference = v.reference
    return nixv
# Ajayrama Kumaraswamy, 2016
# Ginjang Project, LMU
'''
This file contains some functions useful when converting between objects
of neo and NIX
'''
import nixio as nix
import neo
import quantities as qu
import numpy as np


def qu2Val(x):
    """Convert a scalar quantity (or number) to a float nix.Value."""
    return nix.Value(float(x))


def quUnitStr(x):
    """Return the unit string of a quantity's dimensionality."""
    return x.dimensionality.string

# ***********************************************************************


def addAnalogSignal2Block(blk, analogSignal):
    '''
    Create a new data array in the block blk and add the data in
    analogSignal to it

    :param blk: nix.block
    :param analogSignal: neo.analogsignal
    :return: data, nix.data_array, the newly added data_array
    '''
    assert hasattr(analogSignal, 'name'), 'Analog signal has no name'
    data = blk.create_data_array(analogSignal.name, 'nix.regular_sampled',
                                 data=analogSignal.magnitude)
    data.unit = quUnitStr(analogSignal)
    # fix: the docstring (and callers such as saveNixFile) expect the
    # created data_array to be returned
    return data
def _write_property(self, section, name, v):
    """
    Create a metadata property with a given name and value on the
    provided metadata section.

    :param section: The metadata section to hold the new property
    :param name: The name of the property
    :param v: The value to write
    :return: The newly created property
    """
    # NOTE: branch order matters — strings and bytes are Iterable and
    # must be dispatched before the generic Iterable branch.
    if isinstance(v, pq.Quantity):
        if len(v.shape):
            section[name] = [nix.Value(mag) for mag in v.magnitude]
        else:
            section[name] = nix.Value(v.magnitude.item())
        section.props[name].unit = str(v.dimensionality)
    elif isinstance(v, datetime):
        section[name] = nix.Value(calculate_timestamp(v))
    elif isinstance(v, string_types):
        section[name] = nix.Value(v)
    elif isinstance(v, bytes):
        section[name] = nix.Value(v.decode())
    elif isinstance(v, Iterable):
        values = []
        unit = None
        definition = None
        if len(v) == 0:
            # empty list can't be saved in NIX property
            # but we can store an empty string and use the
            # definition to signify that it should be restored
            # as an iterable (list)
            values = ""
            definition = EMPTYANNOTATION
        elif hasattr(v, "ndim") and v.ndim == 0:
            # zero-dimensional array: unwrap the scalar it holds
            values = v.item()
            if isinstance(v, pq.Quantity):
                unit = str(v.dimensionality)
        else:
            for element in v:
                if isinstance(element, string_types):
                    element = nix.Value(element)
                elif isinstance(element, pq.Quantity):
                    unit = str(element.dimensionality)
                    element = nix.Value(element.magnitude.item())
                elif isinstance(element, Iterable):
                    self.logger.warn("Multidimensional arrays and nested "
                                     "containers are not currently "
                                     "supported when writing to NIX.")
                    return None
                else:
                    element = nix.Value(element)
                values.append(element)
        section[name] = values
        section.props[name].unit = unit
        if definition:
            section.props[name].definition = definition
    elif type(v).__module__ == "numpy":
        section[name] = nix.Value(v.item())
    else:
        section[name] = nix.Value(v)
    return section.props[name]
def test_full_file_read(tmpdir):
    """
    Read back a file written by the C++ library and verify every object,
    attribute, data block, and cross-reference it is expected to contain.

    Fix: in the Group-membership loop the two "not in" checks were bare
    expressions (no-op tuples) — they are now real assertions.
    """
    nixfilepath = os.path.join(str(tmpdir), "filetest-readpy.nix")
    runcpp("writefullfile", nixfilepath)
    nix_file = nix.File.open(nixfilepath, mode=nix.FileMode.ReadOnly,
                             backend="h5py")

    # Check object counts
    assert 4 == len(nix_file.blocks), "Block count mismatch"
    check_block_children_counts(nix_file.blocks[0], 2, 4, 1, 1)
    check_block_children_counts(nix_file.blocks[1], 2, 2, 0, 0)
    check_block_children_counts(nix_file.blocks[2], 2, 3, 1, 1)
    check_block_children_counts(nix_file.blocks[3], 0, 12, 0, 0)

    check_group_children_counts(nix_file.blocks[0].groups[0], 1, 1, 0)
    check_group_children_counts(nix_file.blocks[0].groups[1], 0, 0, 0)
    check_group_children_counts(nix_file.blocks[1].groups[0], 0, 0, 0)
    check_group_children_counts(nix_file.blocks[1].groups[1], 0, 0, 0)
    check_group_children_counts(nix_file.blocks[2].groups[0], 0, 1, 1)
    check_group_children_counts(nix_file.blocks[2].groups[1], 0, 0, 0)

    block = nix_file.blocks[0]
    # Check first block attrs before descending
    compare("blockyblock", block.name)
    compare("ablocktype of thing", block.type)
    compare("I am a test block", block.definition)

    block = nix_file.blocks[1]
    # Check second block attrs (no children)
    compare("I am another block", block.name)
    compare("void", block.type)
    compare("Void block of stuff", block.definition)

    for bidx, block in enumerate(nix_file.blocks):
        for gidx, group in enumerate(block.groups):
            compare("grp0{}{}".format(bidx, gidx), group.name)
            compare("grp", group.type)
            compare("group {}".format(gidx), group.definition)
            compare(block.created_at, group.created_at)

    # DataArray
    block = nix_file.blocks[0]
    group = block.groups[0]
    da = block.data_arrays[0]
    compare(da.id, group.data_arrays[0].id)
    compare("bunchodata", da.name)
    compare("recordings", da.type)
    compare("A silly little data array", da.definition)

    # Data
    compare([[1., 2., 10.], [9., 1., 3.]], da[:])
    compare([2, 3], da.shape)
    compare(nix.DataType.Double, da.data_type)

    # DataArray dimensions
    dim = da.dimensions[0]
    compare(nix.DimensionType.Sample, dim.dimension_type)
    compare(0.1, dim.sampling_interval)
    compare("ms", dim.unit)
    compare("time", dim.label)

    dim = da.dimensions[1]
    compare(nix.DimensionType.Set, dim.dimension_type)
    compare(["a", "b"], dim.labels)

    # Tag
    tag = block.tags[0]
    compare("tagu", tag.name)
    compare("tagging", tag.type)
    compare("tags ahoy", tag.definition)
    compare([1, 0], tag.position)
    compare([1, 10], tag.extent)
    compare(["mV", "s"], tag.units)
    compare(da.id, tag.references[0].id)
    compare(group.tags[0].id, tag.id)

    feature = tag.features["feat-da"]
    compare(nix.LinkType.Untagged, feature.link_type)
    compare(feature.data.id, block.data_arrays[1].id)
    compare("feat-da", feature.data.name)
    compare((6, ), feature.data.shape)
    compare([0.4, 0.41, 0.49, 0.1, 0.1, 0.1], feature.data[:])

    # MultiTag
    mtag = block.multi_tags[0]
    compare("mtagu", mtag.name)
    compare("multi tagging", mtag.type)
    compare(None, mtag.definition)

    posmt = mtag.positions
    extmt = mtag.extents
    compare(posmt.id, block.data_arrays[posmt.name].id)
    compare(extmt.id, block.data_arrays[extmt.name].id)

    # MultiTag data
    compare("tag-data", posmt.name)
    compare("multi-tagger", posmt.type)
    compare("tag-extents", extmt.name)
    compare("multi-tagger", extmt.type)

    compare([1, 3], posmt.shape)
    compare([[0, 0.1, 10.1]], posmt[:])
    compare(nix.DataType.Double, posmt.data_type)

    compare([1, 3], extmt.shape)
    compare([[0.5, 0.5, 0.5]], extmt[:])
    compare(nix.DataType.Double, extmt.data_type)

    # MultiTag Position and Extent dimensions
    compare(2, len(posmt.dimensions))
    dim = posmt.dimensions[1]
    compare(nix.DimensionType.Set, dim.dimension_type)
    dim = posmt.dimensions[0]
    compare(nix.DimensionType.Sample, dim.dimension_type)
    compare(0.01, dim.sampling_interval)
    compare("s", dim.unit)

    compare(2, len(extmt.dimensions))
    dim = extmt.dimensions[1]
    compare(nix.DimensionType.Set, dim.dimension_type)
    dim = extmt.dimensions[0]
    compare(nix.DimensionType.Sample, dim.dimension_type)
    compare(0.01, dim.sampling_interval)
    compare("s", dim.unit)

    # Tag and MultiTag Block and Group membership
    for idx in range(1, len(nix_file.blocks)):
        assert tag.id not in nix_file.blocks[idx].tags,\
            "Tag found in incorrect Block"
        assert mtag.id not in nix_file.blocks[idx].multi_tags,\
            "MultiTag found in incorrect Block"

    group = block.groups[0]
    assert mtag.id not in group.multi_tags,\
        "MultiTag found in incorrect Group"
    for idx in range(1, len(block.groups)):
        # fix: these two checks were bare expressions (no-ops)
        assert tag.id not in block.groups[idx].tags,\
            "Tag found in incorrect Group"
        assert mtag.id not in block.groups[idx].multi_tags,\
            "MultiTag found in incorrect Group"

    # Second block DataArray
    block = nix_file.blocks[1]
    da = block.data_arrays[0]
    compare("FA001", da.name)
    compare("Primary data", da.type)
    compare(nix.DataType.Int64, da.data_type)

    # Sources
    block = nix_file.blocks[0]
    compare(1, len(block.sources))
    src = block.sources["root-source"]
    compare("top-level-source", src.type)
    for da in block.data_arrays:
        compare(da.sources[0].id, src.id)

    compare(2, len(src.sources))
    compare("d1-source", src.sources[0].name)
    compare("d1-source-2", src.sources[1].name)
    compare("second-level-source", src.sources[0].type)
    compare("second-level-source", src.sources[1].type)
    for s in src.sources:
        compare(0, len(s.sources))

    da = block.data_arrays[0]
    compare(2, len(da.sources))
    compare(da.sources[1].id, block.sources[0].sources[0].id)

    # Metadata
    # 3 root sections
    compare(3, len(nix_file.sections))
    compare("mda", nix_file.sections[0].name)
    compare("mdb", nix_file.sections[1].name)
    compare("mdc", nix_file.sections[2].name)
    for s in nix_file.sections:
        compare("root-section", s.type)

    mdc = nix_file.sections[2]
    compare(6, len(mdc.sections))
    for idx in range(6):
        compare("d1-section", mdc.sections["{:03d}-md".format(idx)].type)

    mdb = nix_file.sections[1]
    compare(nix_file.blocks[0].metadata.id, mdb.id)
    compare(nix_file.blocks[2].metadata.id, mdb.id)

    compare(nix_file.blocks[1].data_arrays[0].metadata.id,
            nix_file.sections["mda"].id)
    compare(nix_file.blocks[0].tags[0].metadata.id,
            nix_file.sections["mdc"].sections[3].id)

    block = nix_file.blocks[2]
    tag = block.tags[0]
    compare("POI", tag.name)
    compare("TAG", tag.type)
    compare([0, 0], tag.position)
    compare([1920, 1080], tag.extent)
    compare(["mm", "mm"], tag.units)
    compare(tag.id, block.groups[0].tags[0].id)

    feature = tag.features["some-sort-of-image?"]
    compare(nix.LinkType.Indexed, feature.link_type)
    compare(feature.data.id, block.data_arrays[0].id)
    compare("some-sort-of-image?", feature.data.name)
    compare([3840, 2160], feature.data.shape)

    mtag = block.multi_tags[0]
    compare("nu-mt", mtag.name)
    compare("multi-tag (new)", mtag.type)

    posmt = mtag.positions
    compare("nu-pos", posmt.name)
    compare("multi-tag-positions", posmt.type)
    compare([10, 3], posmt.shape)
    compare(nix.DataType.Double, posmt.data_type)
    compare(posmt.id, block.data_arrays[1].id)
    compare(mtag.id, block.groups[0].multi_tags[0].id)

    # Data with range dimension
    block = nix_file.blocks[2]
    da = block.data_arrays["the ticker"]
    compare([0, 1, 23], da[:])
    compare([3], da.shape)
    compare("range-dim-array", da.type)
    compare("uA", da.unit)
    compare(nix.DataType.Int32, da.data_type)
    dim = da.dimensions[0]
    compare(nix.DimensionType.Range, dim.dimension_type)

    # Alias range dimension
    block = nix_file.blocks[1]
    da = block.data_arrays["alias da"]
    compare("dimticks", da.type)
    compare("F", da.unit)
    compare("alias dimension label", da.label)
    compare([24], da.shape)
    dim = da.dimensions[0]
    compare(nix.DimensionType.Range, dim.dimension_type)
    assert dim.is_alias
    compare(da[:], dim.ticks)

    # Metadata types
    mdb = nix_file.sections["mdb"]
    compare(1, len(mdb.sections))
    proptypesmd = mdb.sections["prop-test-parent"]
    compare("test metadata section", proptypesmd.type)
    compare(2, len(proptypesmd.sections))

    numbermd = proptypesmd.sections[0]
    compare("numerical metadata", numbermd.name)
    compare("test metadata section", numbermd.type)
    compare(4, len(numbermd.props))

    prop = numbermd.props["integer"]
    compare(1, len(prop.values))
    compare([nix.Value(42)], prop.values)

    prop = numbermd.props["float"]
    compare(1, len(prop.values))
    # TODO: Almost equal
    # compare([nix.Value(4.2)], prop.values)

    prop = numbermd.props["integers"]
    compare(6, len(prop.values))
    compare([nix.Value(40 + v) for v in range(6)], prop.values)

    prop = numbermd.props["floats"]
    compare(2, len(prop.values))
    # TODO: Almost equal

    othermd = proptypesmd.sections[1]
    compare("other metadata", othermd.name)
    compare("test metadata section", othermd.type)
    compare(5, len(othermd.props))

    prop = othermd.props["bool"]
    compare(1, len(prop.values))
    compare([nix.Value(True)], prop.values)

    prop = othermd.props["false bool"]
    compare(1, len(prop.values))
    compare([nix.Value(False)], prop.values)

    prop = othermd.props["bools"]
    compare(3, len(prop.values))
    compare([nix.Value(True), nix.Value(False), nix.Value(True)],
            prop.values)

    prop = othermd.props["string"]
    compare(1, len(prop.values))
    compare([nix.Value("I am a string. Rawr.")], prop.values)

    prop = othermd.props["strings"]
    compare(3, len(prop.values))
    compare([nix.Value(v) for v in ["one", "two", "twenty"]],
            prop.values)

    # TODO: Check type compatibilities
    # for idx in range(len(dtypes)):
    #     da = block.data_arrays[idx]
    #     dt = dtypes[idx]
    #     compare(dt, da.data_type)
    #     compare([1], da.shape)

    nix_file.close()
def test_property_values(self):
    """Setting, querying, and clearing int and string property values."""
    self.prop.values = [nix.Value(10)]
    assert self.prop.data_type == nix.DataType.Int64
    assert len(self.prop.values) == 1
    # equality and containment work with both Value wrappers and raw ints
    assert self.prop.values[0] == nix.Value(10)
    assert nix.Value(10) in self.prop.values
    assert self.prop.values[0] == 10
    assert 10 in self.prop.values
    assert self.prop.values[0] != nix.Value(1337)
    assert nix.Value(1337) not in self.prop.values
    assert self.prop.values[0] != 42
    assert 42 not in self.prop.values

    self.prop.delete_values()
    assert len(self.prop.values) == 0

    self.prop_s.values = [nix.Value("foo"), nix.Value("bar")]
    assert self.prop_s.data_type == nix.DataType.String
    assert len(self.prop_s.values) == 2
    # same checks for string-typed values
    assert self.prop_s.values[0] == nix.Value("foo")
    assert nix.Value("foo") in self.prop_s.values
    assert self.prop_s.values[0] == "foo"
    assert "foo" in self.prop_s.values
    assert self.prop_s.values[0] != nix.Value("bla")
    assert nix.Value("bla") not in self.prop_s.values
    assert self.prop_s.values[0] != "bla"
    assert "bla" not in self.prop_s.values
def saveNixFile(smrFile, nixFile, metaData, startStop=None,
                askShouldReplace=True, forceUnits=False):
    """
    Parse a Spike2 ``.smr`` file and save the extracted signals together
    with the stimulus metadata into a new NIX file.

    :param smrFile: path to the source Spike2 file
    :param nixFile: path of the NIX file to create
    :param metaData: dict of recording/stimulus metadata
    :param startStop: optional (start, stop) pair; parsed from
        ``metaData['recPeriod']`` when None
    :param askShouldReplace: prompt before overwriting an existing file
    :param forceUnits: forwarded to parseSpike2Data
    """
    if startStop is None:
        startStopStr = metaData['recPeriod']
        if startStopStr is None:
            startStop = None
        else:
            startStop = parseStartStopStr(startStopStr)

    calibStrings = {
        'voltageCalibStr': metaData['voltCalibStr'],
        'vibrationCalibStr': metaData['stimCalibStr'],
        'currentCalibStr': metaData['currCalibStr'],
    }
    ints2Exclude = metaData["int2Exclude"]
    voltageSignal, vibrationSignal, currentSignal = parseSpike2Data(
        smrFile, calibStrings, startStop, ints2Exclude, forceUnits)

    # refuse (or confirm) overwriting an existing output file
    if os.path.isfile(nixFile):
        if askShouldReplace:
            ch = raw_input('File Already Exists. Overwrite?(y/n):')
            if ch != 'y':
                exit('Aborted.')
        os.remove(nixFile)

    if not os.path.isfile(smrFile):
        print(
            'File not found {}\n Hence cannot import it. Ignoring it.'.format(
                smrFile))
        return

    nixFileO = nix.File.open(nixFile, nix.FileMode.Overwrite)

    vibStimSec = nixFileO.create_section('VibrationStimulii-Raw',
                                         'Recording')
    vibStimSec.create_property('NatureOfResponse',
                               [nix.Value(metaData['resp'])])
    vibStimSec.create_property('SpontaneousActivity',
                               [nix.Value(metaData['spont'])])

    contStimSec = vibStimSec.create_section('ContinuousStimulii',
                                            'Stimulii/Sine')
    if any(metaData["freqs"]):
        addQuantity2section(contStimSec, metaData['freqs'],
                            'FrequenciesUsed')
    # pulse metadata is only written when both durations and intervals
    # are present
    if all(map(len, metaData['pulse'])):
        pulseStimSec = vibStimSec.create_section('PulseStimulii',
                                                 'Stimulii/Pulse')
        addQuantity2section(pulseStimSec, 265 * qu.Hz, 'FrequenciesUsed')
        addQuantity2section(pulseStimSec, metaData['pulse'][0],
                            'PulseDurations')
        addQuantity2section(pulseStimSec, metaData['pulse'][1],
                            'PulseIntervals')

    rawDataBlk = nixFileO.create_block('RawDataTraces', 'RecordingData')
    vibSig = addAnalogSignal2Block(rawDataBlk, vibrationSignal)
    voltSig = addAnalogSignal2Block(rawDataBlk, voltageSignal)
    if currentSignal is not None:
        curSig = addAnalogSignal2Block(rawDataBlk, currentSignal)

    nixFileO.close()
def _write_data(self, nixobj, attr, path):
    """
    Write signal/event data attributes onto already-created NIX objects.

    :param nixobj: either a list of nix DataArrays (one per signal channel,
        for analog/irregularly-sampled signals) or a single nix MultiTag
        (for epochs/events/spiketrains)
    :param attr: dict of attributes to write (units, times, waveforms, ...)
    :param path: NIX path of the object; only the top-level block component
        is used here, to look up the parent block
    """
    if isinstance(nixobj, list):
        # Signal case: all DataArrays in the list share one metadata section.
        metadata = nixobj[0].metadata
        metadata["t_start.units"] = nix.Value(attr["t_start.units"])
        for obj in nixobj:
            obj.unit = attr["data.units"]
            if attr["type"] == "analogsignal":
                # regular sampling: sampled dimension at fixed interval
                timedim = obj.append_sampled_dimension(
                    attr["sampling_interval"])
                timedim.unit = attr["sampling_interval.units"]
            elif attr["type"] == "irregularlysampledsignal":
                # explicit time points: range dimension
                timedim = obj.append_range_dimension(attr["times"])
                timedim.unit = attr["times.units"]
            timedim.label = "time"
            timedim.offset = attr["t_start"]
    else:
        # MultiTag case (epoch / event / spiketrain).
        metadata = nixobj.metadata
        nixobj.positions.unit = attr["data.units"]
        # parent block is the first path component under the root
        blockpath = "/" + path.split("/")[1]
        parentblock = self._get_object_at(blockpath)
        if "extents" in attr:
            # epoch durations live in a separate DataArray; replace any
            # stale one from a previous write
            extname = nixobj.name + ".durations"
            exttype = nixobj.type + ".durations"
            if extname in parentblock.data_arrays:
                del parentblock.data_arrays[extname]
            extents = parentblock.create_data_array(extname, exttype,
                                                    data=attr["extents"])
            extents.unit = attr["extents.units"]
            nixobj.extents = extents
        if "labels" in attr:
            labeldim = nixobj.positions.append_set_dimension()
            labeldim.labels = attr["labels"]
        if "t_start" in attr:
            metadata["t_start"] = nix.Value(attr["t_start"])
            metadata["t_start.units"] = nix.Value(attr["t_start.units"])
        if "t_stop" in attr:
            metadata["t_stop"] = nix.Value(attr["t_stop"])
            metadata["t_stop.units"] = nix.Value(attr["t_stop.units"])
        if "waveforms" in attr:
            # spiketrain waveforms: replace any existing waveform array,
            # its metadata section and the stale feature link before
            # recreating them
            wfname = nixobj.name + ".waveforms"
            if wfname in parentblock.data_arrays:
                del metadata.sections[wfname]
                del parentblock.data_arrays[wfname]
                del nixobj.features[0]
            wfda = parentblock.create_data_array(wfname, "neo.waveforms",
                                                 data=attr["waveforms"])
            wfda.metadata = nixobj.metadata.create_section(
                wfda.name, "neo.waveforms.metadata")
            wfda.unit = attr["waveforms.units"]
            nixobj.create_feature(wfda, nix.LinkType.Indexed)
            # first two set dimensions: spike index and channel index
            wfda.append_set_dimension()
            wfda.append_set_dimension()
            wftime = wfda.append_sampled_dimension(
                attr["sampling_interval"])
            metadata["sampling_interval.units"] = \
                attr["sampling_interval.units"]
            # NOTE(review): the time dimension unit is taken from
            # "times.units", not "sampling_interval.units" — confirm this
            # asymmetry is intentional
            wftime.unit = attr["times.units"]
            wftime.label = "time"
            if "left_sweep" in attr:
                self._write_property(wfda.metadata, "left_sweep",
                                     attr["left_sweep"])
def create_full_nix_file(cls, filename):
    """
    Build a NIX file populated with randomly-generated neo-style structures
    (blocks, groups, signals, spiketrains, epochs, events and a channel
    index) for use as a test fixture.

    :param filename: path of the NIX file to create (overwritten)
    :return: the open nix File object
    """
    nixfile = nix.File.open(filename, nix.FileMode.Overwrite,
                            backend="h5py")

    # two blocks, each with metadata sections named after the block
    nix_block_a = nixfile.create_block(cls.rword(10), "neo.block")
    nix_block_a.definition = cls.rsentence(5, 10)
    nix_block_b = nixfile.create_block(cls.rword(10), "neo.block")
    nix_block_b.definition = cls.rsentence(3, 3)

    nix_block_a.metadata = nixfile.create_section(
        nix_block_a.name, nix_block_a.name + ".metadata")
    nix_block_b.metadata = nixfile.create_section(
        nix_block_b.name, nix_block_b.name + ".metadata")

    nix_blocks = [nix_block_a, nix_block_b]

    # three groups (neo segments) per block
    for blk in nix_blocks:
        for ind in range(3):
            group = blk.create_group(cls.rword(), "neo.segment")
            group.definition = cls.rsentence(10, 15)
            group_md = blk.metadata.create_section(
                group.name, group.name + ".metadata")
            group.metadata = group_md

    # everything below is attached to the first group of the first block
    blk = nix_blocks[0]
    group = blk.groups[0]
    allspiketrains = list()
    allsignalgroups = list()

    # analogsignals: 3 signals of 3 channels each; channels of one signal
    # share a definition and a metadata section
    for n in range(3):
        siggroup = list()
        asig_name = "{}_asig{}".format(cls.rword(10), n)
        asig_definition = cls.rsentence(5, 5)
        asig_md = group.metadata.create_section(asig_name,
                                                asig_name + ".metadata")
        for idx in range(3):
            da_asig = blk.create_data_array(
                "{}.{}".format(asig_name, idx),
                "neo.analogsignal",
                data=cls.rquant(100, 1))
            da_asig.definition = asig_definition
            da_asig.unit = "mV"
            da_asig.metadata = asig_md
            timedim = da_asig.append_sampled_dimension(0.01)
            timedim.unit = "ms"
            timedim.label = "time"
            timedim.offset = 10
            da_asig.append_set_dimension()
            group.data_arrays.append(da_asig)
            siggroup.append(da_asig)
        allsignalgroups.append(siggroup)

    # irregularlysampledsignals: 2 signals of 10 channels each, all
    # channels of a signal sharing the same random time points
    for n in range(2):
        siggroup = list()
        isig_name = "{}_isig{}".format(cls.rword(10), n)
        isig_definition = cls.rsentence(12, 12)
        isig_md = group.metadata.create_section(isig_name,
                                                isig_name + ".metadata")
        isig_times = cls.rquant(200, 1, True)
        for idx in range(10):
            da_isig = blk.create_data_array(
                "{}.{}".format(isig_name, idx),
                "neo.irregularlysampledsignal",
                data=cls.rquant(200, 1))
            da_isig.definition = isig_definition
            da_isig.unit = "mV"
            da_isig.metadata = isig_md
            timedim = da_isig.append_range_dimension(isig_times)
            timedim.unit = "s"
            timedim.label = "time"
            da_isig.append_set_dimension()
            group.data_arrays.append(da_isig)
            siggroup.append(da_isig)
        allsignalgroups.append(siggroup)

    # SpikeTrains with Waveforms: 4 multi-tags, each with a 10x8x5
    # waveform array attached as an Indexed feature
    for n in range(4):
        stname = "{}-st{}".format(cls.rword(20), n)
        times = cls.rquant(400, 1, True)
        times_da = blk.create_data_array("{}.times".format(stname),
                                         "neo.spiketrain.times",
                                         data=times)
        times_da.unit = "ms"
        mtag_st = blk.create_multi_tag(stname, "neo.spiketrain", times_da)
        group.multi_tags.append(mtag_st)
        mtag_st.definition = cls.rsentence(20, 30)
        mtag_st_md = group.metadata.create_section(
            mtag_st.name, mtag_st.name + ".metadata")
        mtag_st.metadata = mtag_st_md
        # t_stop is one unit past the largest spike time
        mtag_st_md.create_property("t_stop",
                                   nix.Value(max(times_da).item() + 1))

        waveforms = cls.rquant((10, 8, 5), 1)
        wfname = "{}.waveforms".format(mtag_st.name)
        wfda = blk.create_data_array(wfname, "neo.waveforms",
                                     data=waveforms)
        wfda.unit = "mV"
        mtag_st.create_feature(wfda, nix.LinkType.Indexed)
        wfda.append_set_dimension()  # spike dimension
        wfda.append_set_dimension()  # channel dimension
        wftimedim = wfda.append_sampled_dimension(0.1)
        wftimedim.unit = "ms"
        wftimedim.label = "time"
        wfda.metadata = mtag_st_md.create_section(
            wfname, "neo.waveforms.metadata")
        # one left_sweep value per channel (5 channels)
        wfda.metadata.create_property("left_sweep",
                                      [nix.Value(20)] * 5)
        allspiketrains.append(mtag_st)

    # Epochs: 3 multi-tags with extents (durations) and labels
    for n in range(3):
        epname = "{}-ep{}".format(cls.rword(5), n)
        times = cls.rquant(5, 1, True)
        times_da = blk.create_data_array("{}.times".format(epname),
                                         "neo.epoch.times",
                                         data=times)
        times_da.unit = "s"
        extents = cls.rquant(5, 1)
        extents_da = blk.create_data_array(
            "{}.durations".format(epname),
            "neo.epoch.durations",
            data=extents)
        extents_da.unit = "s"
        mtag_ep = blk.create_multi_tag(epname, "neo.epoch", times_da)
        group.multi_tags.append(mtag_ep)
        mtag_ep.definition = cls.rsentence(2)
        mtag_ep.extents = extents_da
        label_dim = mtag_ep.positions.append_set_dimension()
        label_dim.labels = cls.rsentence(5).split(" ")
        # reference all signals in the group
        for siggroup in allsignalgroups:
            mtag_ep.references.extend(siggroup)

    # Events: 2 multi-tags with labels but no extents
    for n in range(2):
        evname = "{}-ev{}".format(cls.rword(5), n)
        times = cls.rquant(5, 1, True)
        times_da = blk.create_data_array("{}.times".format(evname),
                                         "neo.event.times",
                                         data=times)
        times_da.unit = "s"
        mtag_ev = blk.create_multi_tag(evname, "neo.event", times_da)
        group.multi_tags.append(mtag_ev)
        mtag_ev.definition = cls.rsentence(2)
        label_dim = mtag_ev.positions.append_set_dimension()
        label_dim.labels = cls.rsentence(5).split(" ")
        # reference all signals in the group
        for siggroup in allsignalgroups:
            mtag_ev.references.extend(siggroup)

    # CHX: channel index as a source with one child source per channel
    nixchx = blk.create_source(cls.rword(10), "neo.channelindex")
    nixchx.metadata = nix_blocks[0].metadata.create_section(
        nixchx.name, "neo.channelindex.metadata")
    chantype = "neo.channelindex"
    # 3 channels
    for idx in [2, 5, 9]:
        channame = cls.rword(20)
        nixrc = nixchx.create_source(channame, chantype)
        nixrc.definition = cls.rsentence(13)
        nixrc.metadata = nixchx.metadata.create_section(
            nixrc.name, "neo.channelindex.metadata")
        nixrc.metadata.create_property("index", nix.Value(idx))
        # 3D coordinates in micrometres
        dims = tuple(map(nix.Value, cls.rquant(3, 1)))
        nixrc.metadata.create_property("coordinates", dims)
        nixrc.metadata.create_property("coordinates.units",
                                       nix.Value("um"))

    # units: distribute all spiketrains across nunits unit sources
    nunits = 1
    stsperunit = np.array_split(allspiketrains, nunits)
    for idx in range(nunits):
        unitname = "{}-unit{}".format(cls.rword(5), idx)
        nixunit = nixchx.create_source(unitname, "neo.unit")
        nixunit.definition = cls.rsentence(4, 10)
        for st in stsperunit[idx]:
            st.sources.append(nixchx)
            st.sources.append(nixunit)

    # pick a few signal groups to reference this CHX
    randsiggroups = np.random.choice(allsignalgroups, 5, False)
    for siggroup in randsiggroups:
        for sig in siggroup:
            sig.sources.append(nixchx)

    return nixfile
# NOTE(review): fragment — the enclosing function's `def` is not visible in
# this chunk. It writes model/input/simulation/synapse metadata sections to
# an open NIX file; it mirrors the section-writing tail of
# runJODLInt1DLInt2 and presumably belongs to a sibling run function.
for propName, propVal in DLInt2PropsDict.items():
    addBrianQuantity2Section(DLInt2PropsSec, propName, propVal)

# input stimulus parameters, plus the settling time prepended to every run
inputSec = nixFile.create_section("Input Parameters", "Sinusoidal Pulses")
for parName, parVal in inputPars.items():
    addBrianQuantity2Section(inputSec, parName, parVal)
addBrianQuantity2Section(inputSec, "simSettleTime", simSettleTime)

# Brian simulation settings (step size, total duration, integration method)
brianSimSettingsSec = nixFile.create_section("Simulation Parameters",
                                             "Brian Simulation")
addBrianQuantity2Section(brianSimSettingsSec, "simStepSize", simStepSize)
addBrianQuantity2Section(brianSimSettingsSec, "totalSimDuration",
                         totalSimDur)
brianSimSettingsSec.create_property("method", nixio.Value("euler"))

synPropsSec = nixFile.create_section("Synapse Models", "Model Parameters")
# excitatory JO -> DL-Int-1 synapse parameters (only when configured)
if DLInt1SynapsePropsE:
    JODLInt1SynESec = synPropsSec.create_section("JODLInt1Exi",
                                                 "DoubleExpSyn")
    JODLInt1SynEDict = getattr(synapsePropsList, DLInt1SynapsePropsE)
    for propName, propVal in JODLInt1SynEDict.items():
        addBrianQuantity2Section(JODLInt1SynESec, propName, propVal)
    JODLInt1SynESec.create_property("PreSynaptic Neuron",
                                    nixio.Value("JO"))
    JODLInt1SynESec.create_property("PostSynaptic Neuron",
                                    nixio.Value("DLInt1"))
def runJODLInt1DLInt2(simStepSize: Quantity, simDuration: Quantity,
                      simSettleTime: Quantity, inputParsName: str,
                      showBefore: Quantity, showAfter: Quantity,
                      DLInt1ModelProps: str, DLInt2ModelProps: str,
                      DLInt1SynapsePropsE: str, DLInt1SynapsePropsI: str,
                      DLInt2SynapseProps: str, DLInt1DLInt2SynProps: str,
                      askReplace=True):
    """
    Simulate the JO -> DL-Int-1 -> DL-Int-2 circuit in Brian2, plot the
    membrane-potential traces to ``Traces.png`` and write the traces and
    spike times to a NIX file ``SimResults.h5``.

    :param simStepSize: Brian integration step (time Quantity)
    :param simDuration: stimulus duration (time Quantity)
    :param simSettleTime: settling period prepended to the stimulus
    :param inputParsName: name of an attribute of ``inputParsList`` holding
        the sinusoidal-pulse input parameters
    :param showBefore: plotted margin before stimulus onset
    :param showAfter: plotted margin after stimulus end
    :param DLInt1ModelProps: name of AdExp parameter set for DL-Int-1
    :param DLInt2ModelProps: name of AdExp parameter set for DL-Int-2
    :param DLInt1SynapsePropsE: name of the excitatory JO->DL-Int-1
        synapse parameter set ('' disables the synapse)
    :param DLInt1SynapsePropsI: name of the inhibitory JO->DL-Int-1
        synapse parameter set ('' disables the synapse)
    :param DLInt2SynapseProps: name of the JO->DL-Int-2 synapse set
    :param DLInt1DLInt2SynProps: name of the DL-Int-1->DL-Int-2 synapse set
    :param askReplace: ask before deleting pre-existing results
    """
    sns.set(style="whitegrid", rc=mplPars)

    # output directory encodes the full parameter combination
    DLInt1SynapseProps = "".join((DLInt1SynapsePropsE, DLInt1SynapsePropsI))
    opDir = os.path.join(homeFolder, DLInt1ModelProps + DLInt2ModelProps,
                         DLInt1SynapseProps + DLInt2SynapseProps
                         + DLInt1DLInt2SynProps,
                         inputParsName)
    opFile = os.path.join(opDir, 'Traces.png')
    OPNixFile = os.path.join(opDir, 'SimResults.h5')

    # clear stale results (interactively or silently), create opDir if new
    if askReplace:
        if os.path.isfile(opFile):
            ch = input('Results already exist at {}. Delete?(y/n):'.format(
                opFile))
            if ch == 'y':
                os.remove(opFile)
                if os.path.isfile(OPNixFile):
                    os.remove(OPNixFile)
            else:
                sys.exit('User Abort!')
        elif not os.path.isdir(opDir):
            os.makedirs(opDir)
    else:
        if os.path.isfile(opFile):
            os.remove(opFile)
            if os.path.isfile(OPNixFile):
                os.remove(OPNixFile)
        elif not os.path.isdir(opDir):
            os.makedirs(opDir)

    # --- build the network ---
    inputPars = getattr(inputParsList, inputParsName)

    net = Network()
    JO = JOSpikes265(nOutputs=1, simSettleTime=simSettleTime, **inputPars)
    net.add(JO.JOSGG)

    DLInt1PropsDict = getattr(AdExpPars, DLInt1ModelProps)
    dlint1 = VSNeuron(**AdExp, inits=DLInt1PropsDict, name='dlint1')
    dlint1.recordSpikes()
    dlint1.recordMembraneV()

    if DLInt1SynapsePropsE:
        dlint1.addSynapse(synName="ExiJO", sourceNG=JO.JOSGG, **exp2Syn,
                          synParsInits=getattr(synapsePropsList,
                                               DLInt1SynapsePropsE),
                          synStateInits=exp2SynStateInits,
                          sourceInd=0, destInd=0
                          )
    if DLInt1SynapsePropsI:
        dlint1.addSynapse(synName="InhJO", sourceNG=JO.JOSGG, **exp2Syn,
                          synParsInits=getattr(synapsePropsList,
                                               DLInt1SynapsePropsI),
                          synStateInits=exp2SynStateInits,
                          sourceInd=0, destInd=0
                          )
    dlint1.addToNetwork(net)

    DLInt2PropsDict = getattr(AdExpPars, DLInt2ModelProps)
    dlint2 = VSNeuron(**AdExp, inits=DLInt2PropsDict, name='dlint2')
    dlint2.recordMembraneV()
    dlint2.recordSpikes()

    if DLInt2SynapseProps:
        dlint2.addSynapse(synName="JOExi", sourceNG=JO.JOSGG, **exp2Syn,
                          synParsInits=getattr(synapsePropsList,
                                               DLInt2SynapseProps),
                          synStateInits=exp2SynStateInits,
                          sourceInd=0, destInd=0
                          )
    if DLInt1DLInt2SynProps:
        dlint2.addSynapse(synName="DLInt1", sourceNG=dlint1.ng, **exp2Syn,
                          synParsInits=getattr(synapsePropsList,
                                               DLInt1DLInt2SynProps),
                          synStateInits=exp2SynStateInits,
                          sourceInd=0, destInd=0
                          )
    dlint2.addToNetwork(net)

    # --- run ---
    defaultclock.dt = simStepSize
    totalSimDur = simDuration + simSettleTime
    net.run(totalSimDur, report='text')

    # --- plot traces and spike rasters ---
    simT, DLInt1_memV = dlint1.getMemVTrace()
    DLInt1_spikeTimes = dlint1.getSpikes()
    fig, axs = plt.subplots(nrows=3, figsize=(10, 6.25), sharex='col')
    axs[0].plot(simT / units.ms, DLInt1_memV / units.mV)
    # spike markers sit 5% above the trace's range
    spikesY = DLInt1_memV.min() + 1.05 * (DLInt1_memV.max()
                                          - DLInt1_memV.min())
    axs[0].plot(DLInt1_spikeTimes / units.ms,
                [spikesY / units.mV] * DLInt1_spikeTimes.shape[0], 'k^')
    axs[0].set_ylabel('DLInt1 \nmemV (mV)')
    axs[0].set_xlim([(simSettleTime - showBefore) / units.ms,
                     (totalSimDur + showAfter) / units.ms])

    simT, DLInt2_memV = dlint2.getMemVTrace()
    DLInt2_spikeTimes = dlint2.getSpikes()
    axs[1].plot(simT / units.ms, DLInt2_memV / units.mV)
    spikesY = DLInt2_memV.min() + 1.05 * (DLInt2_memV.max()
                                          - DLInt2_memV.min())
    axs[1].plot(DLInt2_spikeTimes / units.ms,
                [spikesY / units.mV] * DLInt2_spikeTimes.shape[0], 'k^')
    axs[1].set_ylabel('DLInt2 \nmemV (mV)')

    sineInput = getSineInput(simDur=simDuration, simStepSize=simStepSize,
                             sinPulseDurs=inputPars['sinPulseDurs'],
                             sinPulseStarts=inputPars['sinPulseStarts'],
                             freq=265 * units.Hz,
                             simSettleTime=simSettleTime)
    axs[2].plot(simT / units.ms, sineInput, 'r-', label='Vibration Input')
    axs[2].plot(JO.spikeTimes / units.ms,
                [sineInput.max() * 1.05] * len(JO.spikeTimes), 'k^',
                label='JO Spikes')
    axs[2].legend(loc='upper right')
    axs[2].set_xlabel('time (ms)')
    axs[2].set_ylabel('Vibration \nInput/JO\n Spikes')
    fig.tight_layout()
    fig.canvas.draw()
    fig.savefig(opFile, dpi=150)
    plt.close(fig.number)
    del fig

    # --- package results as neo AnalogSignals ---
    # FIX: t_start was previously 0 * qu.mV (a voltage); AnalogSignal
    # requires a *time* quantity for t_start, so use 0 * qu.ms.
    dlint1MemVAS = AnalogSignal(signal=DLInt1_memV / units.mV,
                                sampling_period=(simStepSize / units.ms)
                                * qu.ms,
                                t_start=0 * qu.ms,
                                units="mV",
                                name="DLInt1 MemV")
    dlint2MemVAS = AnalogSignal(signal=DLInt2_memV / units.mV,
                                sampling_period=(simStepSize / units.ms)
                                * qu.ms,
                                t_start=0 * qu.ms,
                                units="mV",
                                name="DLInt2 MemV")
    inputAS = AnalogSignal(signal=sineInput,
                           sampling_period=(simStepSize / units.ms) * qu.ms,
                           t_start=0 * qu.ms,
                           units="um",
                           name="Input Vibration Signal")
    dlint1SpikesQU = (DLInt1_spikeTimes / units.ms) * qu.ms
    dlint2SpikesQU = (DLInt2_spikeTimes / units.ms) * qu.ms
    joSpikesQU = (JO.spikeTimes / units.ms) * qu.ms

    # --- write metadata and traces to the NIX file ---
    nixFile = nixio.File.open(OPNixFile, mode=nixio.FileMode.ReadWrite)

    neuronModels = nixFile.create_section("Neuron Models",
                                          "Model Parameters")
    DLInt1PropsSec = neuronModels.create_section("DL-Int-1", "AdExp")
    for propName, propVal in DLInt1PropsDict.items():
        addBrianQuantity2Section(DLInt1PropsSec, propName, propVal)
    DLInt2PropsSec = neuronModels.create_section("DL-Int-2", "AdExp")
    for propName, propVal in DLInt2PropsDict.items():
        addBrianQuantity2Section(DLInt2PropsSec, propName, propVal)

    inputSec = nixFile.create_section("Input Parameters",
                                      "Sinusoidal Pulses")
    for parName, parVal in inputPars.items():
        addBrianQuantity2Section(inputSec, parName, parVal)
    addBrianQuantity2Section(inputSec, "simSettleTime", simSettleTime)

    brianSimSettingsSec = nixFile.create_section("Simulation Parameters",
                                                 "Brian Simulation")
    addBrianQuantity2Section(brianSimSettingsSec, "simStepSize",
                             simStepSize)
    addBrianQuantity2Section(brianSimSettingsSec, "totalSimDuration",
                             totalSimDur)
    brianSimSettingsSec.create_property("method", nixio.Value("euler"))

    synPropsSec = nixFile.create_section("Synapse Models",
                                         "Model Parameters")
    if DLInt1SynapsePropsE:
        JODLInt1SynESec = synPropsSec.create_section("JODLInt1Exi",
                                                     "DoubleExpSyn")
        JODLInt1SynEDict = getattr(synapsePropsList, DLInt1SynapsePropsE)
        for propName, propVal in JODLInt1SynEDict.items():
            addBrianQuantity2Section(JODLInt1SynESec, propName, propVal)
        JODLInt1SynESec.create_property("PreSynaptic Neuron",
                                        nixio.Value("JO"))
        JODLInt1SynESec.create_property("PostSynaptic Neuron",
                                        nixio.Value("DLInt1"))
    if DLInt1SynapsePropsI:
        JODLInt1SynISec = synPropsSec.create_section("JODLInt1Inh",
                                                     "DoubleExpSyn")
        JODLInt1SynIDict = getattr(synapsePropsList, DLInt1SynapsePropsI)
        for propName, propVal in JODLInt1SynIDict.items():
            addBrianQuantity2Section(JODLInt1SynISec, propName, propVal)
        JODLInt1SynISec.create_property("PreSynaptic Neuron",
                                       nixio.Value("JO"))
        JODLInt1SynISec.create_property("PostSynaptic Neuron",
                                       nixio.Value("DLInt1"))
    if DLInt2SynapseProps:
        JODLInt2SynESec = synPropsSec.create_section("JODLInt2Exi",
                                                     "DoubleExpSyn")
        JODLInt2SynEDict = getattr(synapsePropsList, DLInt2SynapseProps)
        for propName, propVal in JODLInt2SynEDict.items():
            addBrianQuantity2Section(JODLInt2SynESec, propName, propVal)
        JODLInt2SynESec.create_property("PreSynaptic Neuron",
                                        nixio.Value("JO"))
        JODLInt2SynESec.create_property("PostSynaptic Neuron",
                                        nixio.Value("DLInt2"))
    if DLInt1DLInt2SynProps:
        DLInt1DLInt2SynSec = synPropsSec.create_section("DLInt1DLInt2Inh",
                                                        "DoubleExpSyn")
        DLInt1DLInt2SynDict = getattr(synapsePropsList,
                                      DLInt1DLInt2SynProps)
        for propName, propVal in DLInt1DLInt2SynDict.items():
            addBrianQuantity2Section(DLInt1DLInt2SynSec, propName, propVal)
        DLInt1DLInt2SynSec.create_property("PreSynaptic Neuron",
                                           nixio.Value("DLInt1"))
        DLInt1DLInt2SynSec.create_property("PostSynaptic Neuron",
                                           nixio.Value("DLInt2"))

    blk = nixFile.create_block("Simulation Traces", "Brian Output")
    DLInt1DA = addAnalogSignal2Block(blk, dlint1MemVAS)
    DLInt2DA = addAnalogSignal2Block(blk, dlint2MemVAS)
    inputDA = addAnalogSignal2Block(blk, inputAS)
    addMultiTag("DLInt1 Spikes", type="Spikes", positions=dlint1SpikesQU,
                blk=blk, refs=[DLInt1DA])
    addMultiTag("DLInt2 Spikes", type="Spikes", positions=dlint2SpikesQU,
                blk=blk, refs=[DLInt2DA])
    addMultiTag("JO Spikes", type="Spikes", positions=joSpikesQU,
                blk=blk, refs=[inputDA])

    nixFile.close()