Example #1
def metadata_from_file(file_name, options):
    # load the text of a metadata file, looking first in the directory named by
    # the NWB_DATA environment variable, then falling back to the directory
    # containing options.data_path
    if os.environ.get('NWB_DATA'):
        file_path = os.path.join(os.environ['NWB_DATA'], file_name)
    else:
        data_dir = os.path.dirname(options.data_path)
        file_path = os.path.join(data_dir, file_name)
    description = " "
    if os.path.isfile(file_path):
        description = nwb_utils.load_file(file_path)
    else:
        print("Warning: missing file " + file_name)
    return description
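# Usage sketch (illustrative, not from the original source; `f` is assumed to
# be an open nwb_file handle and `options.data_path` a path string): copy a
# few per-session metadata text files into the NWB file with the helper above.
def copy_general_metadata(f, options, file_names=("surgery.txt", "data_collection.txt")):
    # the file names above are hypothetical examples of metadata text files
    for file_name in file_names:
        dataset_name = file_name.rsplit(".", 1)[0]
        f.set_dataset(dataset_name, metadata_from_file(file_name, options))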
Example #2
def create_general_top(fname):
    settings = {}    
    # settings["filename"] = fname
    settings["file_name"] = fname
    # settings["identifier"] = nwb.create_identifier("general top test")
    settings["identifier"] = utils.create_identifier("general top test")
    # settings["overwrite"] = True
    settings["mode"] = "w"
    settings["description"] = "test top-level elements in /general"
    # neurodata = nwb.NWB(**settings)
    f = nwb_file.open(**settings)
    
    
    #
#     neurodata.set_metadata(DATA_COLLECTION, "DATA_COLLECTION")
#     neurodata.set_metadata(EXPERIMENT_DESCRIPTION, "EXPERIMENT_DESCRIPTION")
#     neurodata.set_metadata(EXPERIMENTER, "EXPERIMENTER")
#     neurodata.set_metadata(INSTITUTION, "INSTITUTION")
#     neurodata.set_metadata(LAB, "LAB")
#     neurodata.set_metadata(NOTES, "NOTES")
#     neurodata.set_metadata(PROTOCOL, "PROTOCOL")
#     neurodata.set_metadata(PHARMACOLOGY, "PHARMACOLOGY")
#     neurodata.set_metadata(RELATED_PUBLICATIONS, "RELATED_PUBLICATIONS")
#     neurodata.set_metadata(SESSION_ID, "SESSION_ID")
#     neurodata.set_metadata(SLICES, "SLICES")
#     neurodata.set_metadata(STIMULUS, "STIMULUS")
#     neurodata.set_metadata(SURGERY, "SURGERY")
#     neurodata.set_metadata(VIRUS, "VIRUS")
#     #
#     neurodata.set_metadata_from_file("source_script", __file__)
    #
    
    f.set_dataset("data_collection","DATA_COLLECTION")
    f.set_dataset("experiment_description","EXPERIMENT_DESCRIPTION")    
    f.set_dataset("experimenter","EXPERIMENTER")
    f.set_dataset("institution","INSTITUTION")     
    f.set_dataset("lab","LAB")
    f.set_dataset("notes","NOTES")    
    f.set_dataset("protocol","PROTOCOL")
    f.set_dataset("pharmacology","PHARMACOLOGY")
    f.set_dataset("related_publications", "RELATED_PUBLICATIONS")
    f.set_dataset("session_id","SESSION_ID")    
    f.set_dataset("slices","SLICES")
    f.set_dataset("stimulus","STIMULUS")     
    f.set_dataset("surgery","SURGERY")
    f.set_dataset("virus", "VIRUS")
    
    # f.neurodata.set_metadata_from_file("source_script", __file__)
    f.set_dataset("source_script", utils.load_file(__file__))

       
    # neurodata.close()
    f.close()
Example #3
def create_general_top(fname):
    settings = {}
    settings["file_name"] = fname
    settings["identifier"] = utils.create_identifier("general top test")
    settings["mode"] = "w"
    settings["description"] = "test top-level elements in /general"
    settings["verbosity"] = "none"
    f = nwb_file.open(**settings)

    #
    #     neurodata.set_metadata(DATA_COLLECTION, "DATA_COLLECTION")
    #     neurodata.set_metadata(EXPERIMENT_DESCRIPTION, "EXPERIMENT_DESCRIPTION")
    #     neurodata.set_metadata(EXPERIMENTER, "EXPERIMENTER")
    #     neurodata.set_metadata(INSTITUTION, "INSTITUTION")
    #     neurodata.set_metadata(LAB, "LAB")
    #     neurodata.set_metadata(NOTES, "NOTES")
    #     neurodata.set_metadata(PROTOCOL, "PROTOCOL")
    #     neurodata.set_metadata(PHARMACOLOGY, "PHARMACOLOGY")
    #     neurodata.set_metadata(RELATED_PUBLICATIONS, "RELATED_PUBLICATIONS")
    #     neurodata.set_metadata(SESSION_ID, "SESSION_ID")
    #     neurodata.set_metadata(SLICES, "SLICES")
    #     neurodata.set_metadata(STIMULUS, "STIMULUS")
    #     neurodata.set_metadata(SURGERY, "SURGERY")
    #     neurodata.set_metadata(VIRUS, "VIRUS")
    #     #
    #     neurodata.set_metadata_from_file("source_script", __file__)
    #

    f.set_dataset("data_collection", "DATA_COLLECTION")
    f.set_dataset("experiment_description", "EXPERIMENT_DESCRIPTION")
    f.set_dataset("experimenter", "EXPERIMENTER")
    f.set_dataset("institution", "INSTITUTION")
    f.set_dataset("lab", "LAB")
    f.set_dataset("notes", "NOTES")
    f.set_dataset("protocol", "PROTOCOL")
    f.set_dataset("pharmacology", "PHARMACOLOGY")
    f.set_dataset("related_publications", "RELATED_PUBLICATIONS")
    f.set_dataset("session_id", "SESSION_ID")
    f.set_dataset("slices", "SLICES")
    f.set_dataset("stimulus", "STIMULUS")
    f.set_dataset("surgery", "SURGERY")
    f.set_dataset("virus", "VIRUS")

    # f.neurodata.set_metadata_from_file("source_script", __file__)
    f.set_dataset("source_script", utils.load_file(__file__))

    # neurodata.close()
    f.close()
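# Quick read-back check (illustrative addition; assumes h5py is installed and
# that create_general_top() has been run on the same file): in the NWB 1.x
# layout these top-level metadata datasets are expected to appear under /general.
import h5py

def check_general_top(fname):
    # print a few of the /general datasets written above
    with h5py.File(fname, "r") as h:
        for name in ("lab", "institution", "session_id"):
            print("/general/%s = %r" % (name, h["general/" + name][()]))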
Example #4
interval = if1.make_group("<IntervalSeries>", "intervals")
interval.set_attr("description", "Sample interval series -- two series are overlaid here, one with a code '1' and another with the code '2'")
interval.set_attr("comments", "For example, '1' represents sound on(+1)/off(-1) and '2' represents light on(+2)/off(-2)")
# create the interval event codes (positive = event start, negative = event stop)
evts = [ 1, -1, 2, -2, 1, -1, 2, 1, -1, -2, 1, 2, -1, -2 ]
interval.set_dataset("data", evts)

# note: some timestamps will be duplicated if two different events start 
#   and/or stop at the same time
t = [ 1, 2, 2, 3, 5, 6, 6, 7, 8, 8, 10, 10, 11, 15 ]
interval.set_dataset("timestamps", t)

# Add additional information to the BehavioralEpochs interface.  This is defined in
# the extension "extensions/e-BehavioralEpochs.py"
if1.set_dataset("my_extra_info", "extra info added to 'BehavioralEpochs' interface",
    attrs={"eia": "attribute for extra info"})



########################################################################
# it can sometimes be useful to import documenting data from a file
# in this case, we'll store this script in the metadata section of the
#   file, for a record of how the file was created
script_name = sys.argv[0]
f.set_dataset("source_script", ut.load_file(script_name), attrs= {
    "file_name": script_name})

# when all data is entered, close the file
f.close()

Example #5
units = ["degrees", "Hz", "radians", "degrees"]
# store them
abs.set_dataset("features", features)
abs.set_dataset("feature_units", units)

# specify the source of the abstract features.  All TimeSeries types should have a
# source, description and comments specified; otherwise a warning is generated.
abs.set_attr(
    "source",
    "Simulated data. Normally this would be the device presenting stimulus")

# create some pretend data
data = np.arange(4000).reshape(1000, 4)

# add the data and timestamps to the time series
t = np.arange(1000) * 0.001
abs.set_dataset("data", data)
abs.set_dataset("timestamps", t)

########################################################################
# it can sometimes be useful to import documenting data from a file
# in this case, we'll store this script in the metadata section of the
#   file, for a record of how the file was created
script_name = sys.argv[0]
f.set_dataset("source_script",
              ut.load_file(script_name),
              attrs={"file_name": script_name})

# when all data is entered, close the file
f.close()
Example #6
experimenters = parse_h5_obj(meta_h5["experimenters"])[0]
experimenters = extract_encoded_str(experimenters)
nuo.set_dataset("experimenter", experimenters)
ref_atlas = parse_h5_obj(meta_h5["referenceAtlas"])[0]
ref_atlas = extract_encoded_str(ref_atlas)
nuo.set_custom_dataset("reference_atlas", ref_atlas)
sex = parse_h5_obj(meta_h5["sex"])[0]
sex = extract_encoded_str(sex)
print "sex='%s', type=%s" % (sex, type(sex))
s.set_dataset("sex", sex)
s.set_dataset("age", ">P60")
species = parse_h5_obj(meta_h5["species"])[0]
species = extract_encoded_str(species)
s.set_dataset("species", species)
# surgical_man = meta_h5["surgicalManipulation"]["surgicalManipulation"].value
gg.set_dataset("surgery", ut.load_file(SOURCE_DATA_DIR + "surgery.txt"))
gg.set_dataset("data_collection", ut.load_file(SOURCE_DATA_DIR + "data_collection.txt"))
gg.set_dataset("experiment_description", ut.load_file(SOURCE_DATA_DIR + "experiment_description.txt"))
# nuo.set_dataset("surgery", surgical_man)
# weight_after = meta_h5["weightAfter"].value
# weight_before = meta_h5["weightBefore"].value
# weight = "Weight after: " + weight_after +\
# "; weight before: " + weight_before
# metadata file incomplete here. pulled data from pdf file
s.set_dataset("weight", "Before: 20, After: 21")
whisker_config = parse_h5_obj(meta_h5["whiskerConfig"])[0]
whisker_config = extract_encoded_str(whisker_config)
nuo.set_custom_dataset("whisker_configuration", whisker_config)

probe = []
sites = parse_h5_obj(check_entry(meta_h5, "extracellular/siteLocations"))
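# Rough sketch (an assumption, not the project's actual implementation) of what
# the extract_encoded_str helper used above is expected to do: h5py typically
# returns byte strings, which need decoding before being written as NWB text.
def extract_encoded_str_sketch(value):
    if isinstance(value, bytes):
        return value.decode("utf-8")
    return str(value)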
Example #7
settings["file_name"] = OUTPUT_DIR + BASE + ".nwb"
settings["identifier"] = ut.create_identifier("buzsaki test")
settings["mode"] = "w"
settings["start_time"] = DATE
settings["description"] = "extracellular ephys CA1/MEC recordings in live behaving rat"
buz = nwb_file.open(**settings)
########################################################################
# metadata section

buz.set_dataset("session_id", BASE)
buz.set_dataset("experimenter", "Kenji Mizuseki")
buz.set_dataset("institution", "Rutgers University")
buz.set_dataset("lab", "Gyuri Buzsaki")
buz.set_dataset("related_publications",
                ut.load_file(SOURCE_DATA_DIR + "bz_files/publications.txt"))
buz.set_dataset(
    "notes",
    "Rat running on linear track. Electrode impedances between 1 and 3 MOhm")
buz.set_dataset("data_collection",
                ut.load_file(SOURCE_DATA_DIR + "bz_files/data_collection.txt"))

buz.set_dataset("pharmacology", "----")
buz.set_dataset("surgery",
                ut.load_file(SOURCE_DATA_DIR + "bz_files/surgery.txt"))
buz.set_dataset("protocol", "----")
buz.set_dataset("subject_id", "ec013")
sg = buz.make_group("subject", abort=False)
sg.set_dataset("description", "----")
sg.set_dataset("species", "Long Evans rat")
sg.set_dataset("genotype", "wt")
Example #8
# create nwb file
settings = {}
settings["file_name"] = OUTPUT_DIR + BASE + ".nwb"
settings["identifier"] = ut.create_identifier("buzsaki test")
settings["mode"] = "w"
settings["start_time"] = DATE
settings["description"] = "extracellular ephys CA1/MEC recordings in live behaving rat"
buz = nwb_file.open(**settings)
########################################################################
# metadata section

buz.set_dataset("session_id", BASE)
buz.set_dataset("experimenter", "Kenji Mizuseki")
buz.set_dataset("institution", "Rutgers University")
buz.set_dataset("lab", "Gyuri Buzsaki")
buz.set_dataset("related_publications", ut.load_file(SOURCE_DATA_DIR + "bz_files/publications.txt"))
buz.set_dataset("notes", "Rat running on linear track. Electrode impedances between 1 and 3 MOhm")
buz.set_dataset("data_collection", ut.load_file(SOURCE_DATA_DIR + "bz_files/data_collection.txt"))

buz.set_dataset("pharmacology", "----")
buz.set_dataset("surgery", ut.load_file(SOURCE_DATA_DIR + "bz_files/surgery.txt"))
buz.set_dataset("protocol", "----")
buz.set_dataset("subject_id", "ec013")
sg = buz.make_group("subject", abort=False)
sg.set_dataset("description", "----")
sg.set_dataset("species", "Long Evans rat")
sg.set_dataset("genotype", "wt")
sg.set_dataset("sex", "male")
sg.set_dataset("age", "----")
sg.set_dataset("weight", "250-400 g")
buz.set_dataset("virus", "n/a")
Example #9
experimenters = parse_h5_obj(meta_h5["experimenters"])[0]
experimenters = extract_encoded_str(experimenters)
nuo.set_dataset('experimenter', experimenters)
ref_atlas = parse_h5_obj(meta_h5["referenceAtlas"])[0]
ref_atlas = extract_encoded_str(ref_atlas)
nuo.set_custom_dataset("reference_atlas", ref_atlas)
sex = parse_h5_obj(meta_h5["sex"])[0]
sex = extract_encoded_str(sex)
print "sex='%s', type=%s" % (sex, type(sex))
s.set_dataset("sex", sex)
s.set_dataset("age", ">P60")
species = parse_h5_obj(meta_h5["species"])[0]
species = extract_encoded_str(species)
s.set_dataset("species", species)
#surgical_man = meta_h5["surgicalManipulation"]["surgicalManipulation"].value
gg.set_dataset("surgery", ut.load_file(SOURCE_DATA_DIR + "surgery.txt"))
gg.set_dataset("data_collection", ut.load_file(SOURCE_DATA_DIR + "data_collection.txt"))
gg.set_dataset("experiment_description", ut.load_file(SOURCE_DATA_DIR + "experiment_description.txt"))
#nuo.set_dataset("surgery", surgical_man)
#weight_after = meta_h5["weightAfter"].value
#weight_before = meta_h5["weightBefore"].value
#weight = "Weight after: " + weight_after +\
# "; weight before: " + weight_before
# metadata file incomplete here. pulled data from pdf file
s.set_dataset("weight", "Before: 20, After: 21")
whisker_config = parse_h5_obj(meta_h5["whiskerConfig"])[0]
whisker_config = extract_encoded_str(whisker_config)
nuo.set_custom_dataset("whisker_configuration", whisker_config)

probe = []
sites = parse_h5_obj(check_entry(meta_h5, "extracellular/siteLocations"))
Example #10
    # an_gene_mod1 = orig_h5["metaDataHash/value/5/5"].value[0,0]
    # an_gene_mod2 = orig_h5["metaDataHash/value/8/8"].value[0,0]
    an_gene_mod = ("animalGeneModification1: " + an_gene_mod1 +
                   "; animalGeneModification2: " + an_gene_mod2)
    sg.set_dataset("genotype", an_gene_mod)
    an_id = meta[8]
    # an_id = orig_h5["metaDataHash/value/9/9"].value[0,0]
    sg.set_dataset("subject_id", an_id)
    sex = meta[9]
    # sex = orig_h5["metaDataHash/value/10/10"].value[0,0]
    sg.set_dataset("sex", sex)
    experimenters = meta[12]
    # experimenters = orig_h5["metaDataHash/value/13/13"].value[0,0]
    gg.set_dataset("experimenter", experimenters)
    gg.set_dataset("lab", "Svoboda lab")
    gg.set_dataset("institution", "Janelia Farm")
    gg.set_dataset("experiment_description", ut.load_file(path + "svoboda_files/experiment_description.txt"))
    gg.set_dataset("surgery", ut.load_file(path + "svoboda_files/surgery.txt"))
    gg.set_dataset("data_collection", ut.load_file(path + "svoboda_files/data_collection.txt"))


    print "Reading time series"
    read_whisker(orig_h5, simon)
    read_licks(orig_h5, simon)
    
    pole_pos_path = "trialPropertiesHash/value/3/3"
    pole_pos = parse_h5_obj(orig_h5[pole_pos_path])[0]
    trial_t = orig_h5["trialStartTimes/trialStartTimes"].value * 0.001
    rate = (trial_t[-1] - trial_t[0])/(len(trial_t)-1)
    comments = parse_h5_obj(orig_h5["trialPropertiesHash/keyNames"])[0][2]
    descr = parse_h5_obj(orig_h5["trialPropertiesHash/descr"])[0][2]
    zts = simon.make_group("<TimeSeries>", "zaber_motor_pos", path="/stimulus/presentation")
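    # Continuation sketch (an assumption about the next steps, following the
    # TimeSeries pattern used elsewhere in these examples; not from the
    # original source): attach data, timestamps and documentation strings to
    # the zaber_motor_pos series created above.
    zts.set_dataset("data", pole_pos)
    zts.set_dataset("timestamps", trial_t)
    zts.set_attr("description", descr)
    zts.set_attr("comments", comments)
    zts.set_attr("source", "trialPropertiesHash in the original data file")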
Example #11
# interval data is stored in an interval time series -- IntervalSeries
# create it
interval = if1.make_group("<IntervalSeries>", "intervals")
interval.set_attr("description", "Sample interval series -- two series are overlaid here, one with a code '1' and another with the code '2'")
interval.set_attr("comments", "For example, '1' represents sound on(+1)/off(-1) and '2' represents light on(+2)/off(-2)")
# create the interval event codes (positive = event start, negative = event stop)
evts = [ 1, -1, 2, -2, 1, -1, 2, 1, -1, -2, 1, 2, -1, -2 ]
interval.set_dataset("data", evts)

# note: some timestamps will be duplicated if two different events start 
#   and/or stop at the same time
t = [ 1, 2, 2, 3, 5, 6, 6, 7, 8, 8, 10, 10, 11, 15 ]
interval.set_dataset("timestamps", t)

# multiple interfaces can be added to a module, and multiple time series
#   can be added to an interface using the same approach. this example
#   only imports one
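# Sketch of that approach (illustrative only; the group name and values below
# are hypothetical): a second IntervalSeries can be added to the same
# BehavioralEpochs interface by making another group under it.
interval2 = if1.make_group("<IntervalSeries>", "more_intervals")
interval2.set_attr("description", "second interval series, added to show multiple series per interface")
interval2.set_attr("comments", "values are placeholders")
interval2.set_dataset("data", [1, -1])
interval2.set_dataset("timestamps", [20, 21])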


########################################################################
# it can sometimes be useful to import documenting data from a file
# in this case, we'll store this script in the metadata section of the
#   file, for a record of how the file was created
script_name = sys.argv[0]
f.set_dataset("source_script", ut.load_file(script_name), attrs= {
    "file_name": script_name})

# when all data is entered, close the file
f.close()