Example #1
def test_dict_deep_update():
    a = dict(a=1, b='hello', c=dict(a=2), d=[1, 2, 3])
    b = dict(a=3, b='goodbye', c=dict(b=1), d=[4, 5, 6])

    result = dict_deep_update(a, b)
    correct_result = {
        'a': 3,
        'b': 'goodbye',
        'c': {
            'a': 2,
            'b': 1
        },
        'd': [1, 2, 3, 4, 5, 6]
    }
    compare_dicts(result, correct_result)

    result2 = dict_deep_update(a, b, append_list=False)
    correct_result2 = {
        'a': 3,
        'b': 'goodbye',
        'c': {
            'a': 2,
            'b': 1
        },
        'd': [4, 5, 6]
    }
    compare_dicts(result2, correct_result2)
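The test above pins down the merge semantics: values from the second dict win for scalar keys, nested dicts are merged recursively, and lists are concatenated by default but replaced when append_list=False. As a rough illustration only (not the library's actual implementation; the name dict_deep_update_sketch is made up here), a minimal sketch with those semantics could look like this:

from copy import deepcopy


def dict_deep_update_sketch(d, u, append_list=True):
    """Minimal sketch of a recursive dict merge matching the behavior tested above."""
    out = deepcopy(d)
    for key, value in u.items():
        if isinstance(value, dict) and isinstance(out.get(key), dict):
            # nested dicts are merged recursively
            out[key] = dict_deep_update_sketch(out[key], value, append_list=append_list)
        elif isinstance(value, list) and isinstance(out.get(key), list) and append_list:
            # lists are concatenated by default
            out[key] = out[key] + value
        else:
            # scalars (and lists when append_list=False) are simply overwritten
            out[key] = value
    return out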
Example #2
def main(output_file, dir_oephys, file_expo, file_ttl, file_metadata):
    # Source data
    source_data = dict(
        OpenEphysRecordingExtractorInterface=dict(folder_path=str(dir_oephys)),
        ExpoDataInterface=dict(expo_file=str(file_expo),
                               ttl_file=str(file_ttl)))

    # Initialize converter
    converter = MovshonOpenEphysNWBConverter(source_data=source_data)

    # Get metadata from source data and modify any values you want
    metadata = converter.get_metadata()
    if file_metadata:
        m = load_metadata_from_file(file_metadata)
        metadata = dict_deep_update(metadata, m)
        converter.validate_metadata(metadata)

    # Get conversion options and modify any values you want
    conversion_options = converter.get_conversion_options()

    # Run conversion
    converter.run_conversion(metadata=metadata,
                             nwbfile_path=output_file,
                             save_to_file=True,
                             overwrite=True,
                             conversion_options=conversion_options)
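Examples #2 through #5 all call a load_metadata_from_file helper that is not shown on this page. Assuming it only reads a JSON or YAML metadata file into a plain dict (an assumption; load_metadata_from_file_sketch below is a hypothetical stand-in, not the project's code), a compatible sketch could be:

import json
from pathlib import Path

import yaml  # PyYAML


def load_metadata_from_file_sketch(file_path):
    """Hypothetical loader: read a JSON or YAML metadata file into a dict."""
    file_path = Path(file_path)
    with open(file_path, "r") as f:
        if file_path.suffix.lower() == ".json":
            return json.load(f)
        # default to YAML, matching the .yaml metadata files used in these examples
        return yaml.safe_load(f)

The resulting dict is then merged on top of the converter's auto-extracted metadata with dict_deep_update, so the file only overrides the keys it actually provides.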
Example #3
def main(output_file, file_recording_raw, file_recording_processed, file_sorting, file_metadata):
    # Source data
    source_data = dict(
        BlackrockRaw=dict(filename=file_recording_raw),
        BlackrockProcessed=dict(filename=file_recording_processed),
        BlackrockSorting=dict(
            filename=file_sorting,
            nsx_to_load=6
        )
    )

    # Initialize converter
    converter = MovshonBlackrockNWBConverter(source_data=source_data)

    # Get metadata from source data and modify any values you want
    metadata = converter.get_metadata()
    if file_metadata:
        m = load_metadata_from_file(file_metadata)
        metadata = dict_deep_update(metadata, m)
        converter.validate_metadata(metadata)

    # Get conversion options and modify any values you want
    conversion_options = converter.get_conversion_options()

    # Run conversion
    converter.run_conversion(
        metadata=metadata, 
        nwbfile_path=output_file, 
        save_to_file=True,
        overwrite=True,
        conversion_options=conversion_options
    )
Example #4
def main(output_file, file_raw, file_lfp, file_expo, file_ttl, file_metadata):
    # Source data
    source_data = dict(SpikeGLXRaw=dict(file_path=str(file_raw)),
                       SpikeGLXLFP=dict(file_path=str(file_lfp)),
                       ExpoDataInterface=dict(expo_file=str(file_expo),
                                              ttl_file=str(file_ttl)))

    # Initialize converter
    converter = MovshonSpikeglxNWBConverter(source_data=source_data)

    # Get metadata from source data and modify any values you want
    metadata = converter.get_metadata()
    if file_metadata:
        m = load_metadata_from_file(file_metadata)
        metadata = dict_deep_update(metadata, m)
        converter.validate_metadata(metadata)

    # Get conversion options and modify any values you want
    conversion_options = converter.get_conversion_options()

    # Run conversion
    converter.run_conversion(metadata=metadata,
                             nwbfile_path=output_file,
                             save_to_file=True,
                             overwrite=True,
                             conversion_options=conversion_options)
Example #5
def convert_session(session_path: FolderPathType, nwbfile_path: FilePathType):
    """Wrap converter for Parallel use."""
    print(f"Processsing {session_path}...")
    session_name = session_path.stem
    subject_name = session_path.parent.name
    dat_file_path = session_path / f"{session_name}.dat"
    eeg_file_path = session_path / f"{session_name}.eeg"

    source_data = dict(
        NeuroscopeRecording=dict(file_path=str(dat_file_path),
                                 gain=conversion_factor),
        NeuroscopeLFP=dict(file_path=str(eeg_file_path),
                           gain=conversion_factor),
        YutaVCBehavior=dict(folder_path=str(session_path)),
        PhySorting=dict(folder_path=str(session_path),
                        exclude_cluster_groups=["noise", "mua"]),
    )
    converter = YutaVCNWBConverter(source_data=source_data)
    conversion_options = dict(
        NeuroscopeRecording=dict(stub_test=stub_test,
                                 es_key="ElectricalSeries_raw"),
        NeuroscopeLFP=dict(stub_test=stub_test),
        PhySorting=dict(stub_test=stub_test),
    )
    metadata = converter.get_metadata()
    metadata["Subject"].update(genotype=subject_genotypes[subject_name])
    metadata_from_yaml = load_metadata_from_file(metadata_path)
    metadata = dict_deep_update(metadata, metadata_from_yaml)
    converter.run_conversion(nwbfile_path=str(nwbfile_path),
                             conversion_options=conversion_options,
                             metadata=metadata,
                             overwrite=True)
    sys.stdout.flush()  # Needed for verbosity in Parallel
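convert_session above is written to be wrapped by a parallel runner, which is why it ends with sys.stdout.flush(). The driver code is not shown here; one plausible sketch, assuming joblib is the backend and inventing the session_paths and output_dir variables, could look like this:

from pathlib import Path

from joblib import Parallel, delayed  # assumed parallel backend

# hypothetical layout: one folder per session under each subject folder
session_paths = [p for p in Path("/data/subjects").glob("*/*") if p.is_dir()]
output_dir = Path("/data/nwb")
output_dir.mkdir(parents=True, exist_ok=True)

Parallel(n_jobs=4)(
    delayed(convert_session)(
        session_path=session_path,
        nwbfile_path=output_dir / f"{session_path.parent.name}_{session_path.stem}.nwb",
    )
    for session_path in session_paths
)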
Example #6
        def trigger_conversion(trigger, pause, output_nwbfile, alert_is_open):
            ctx = dash.callback_context
            trigger_source = ctx.triggered[0]['prop_id'].split('.')[0]

            if trigger_source == 'metadata-output-update-finished-verification' and self.convert_controller:
                # run conversion
                alerts, metadata_form_data = self.metadata_forms.data_to_nested()

                # use this metadata for the conversion
                _, conversion_options_data = self.conversion_options_forms.data_to_nested()

                metadata = dict_deep_update(self.start_metadata_data,
                                            metadata_form_data)

                if alerts is not None:
                    return 0, True, alerts

                nwbfile_path = output_nwbfile

                self.msg_buffer.truncate(0)

                self.t = threading.Thread(target=self.conversion,
                                          daemon=True,
                                          args=(metadata, nwbfile_path,
                                                conversion_options_data))
                self.t.start()

                self.conversion_msg_controller = True
                return -1, False, []  # run loop

            elif trigger_source == 'pause_loop' and pause is not None:
                # Pause the interval component that reads conversion messages and terminate the conversion thread
                if self.t.is_alive():
                    # NOTE: a plain threading.Thread has no terminate(); this assumes a custom, stoppable thread object
                    self.t.terminate()
                return 0, False, []

            return dash.no_update
Example #7
        def export_metadata(trigger, fileoption_is_open, req_is_open):
            """
            Export Metadata Form data to JSON and YAML file
            This function is triggered when metadata internal dict is updated
            and export controller is setted to true.
            If export controller is not setted to true but the metadata internal dict was updated
            the function will return the current application state
            """
            # Prevent default
            if not self.export_controller or not trigger:
                return fileoption_is_open, req_is_open, []

            if self.export_controller and fileoption_is_open:
                self.export_controller = False
                return not fileoption_is_open, req_is_open, []

            alerts, output = self.metadata_forms.data_to_nested()

            # If required fields missing return alert
            if alerts is not None:
                return fileoption_is_open, not req_is_open, alerts

            updated_data = dict_deep_update(self.start_metadata_data, output)

            # Make temporary files on server side
            # JSON
            exported_file_path = self.downloads_path / 'exported_metadata.json'
            with open(exported_file_path, 'w') as outfile:
                json.dump(updated_data, outfile, indent=4)

            # YAML
            exported_file_path = self.downloads_path / 'exported_metadata.yaml'
            with open(exported_file_path, 'w') as outfile:
                yaml.dump(updated_data, outfile, default_flow_style=False)

            return not fileoption_is_open, req_is_open, []
Example #8
def convert_session(session_path, nwbfile_path):
    print("----------------")
    print(session_path)
    print(nwbfile_path)

    session_id = session_path.name
    lfp_file_path = session_path / f"{session_path.name}.lfp"
    raw_file_path = session_path / f"{session_id}.dat"
    xml_file_path = session_path / f"{session_id}.xml"
    spikes_matfile_path = session_path / f"{session_id}.spikes.cellinfo.mat"
    behavior_matfile_path = session_path / f"{session_id}.behavior.mat"

    print("raw file available", raw_file_path.is_file())
    print("lfp file available", lfp_file_path.is_file())
    print("behavior / position mat file available",
          behavior_matfile_path.is_file())
    conversion_options = dict()
    source_data = dict(
        NeuroscopeLFP=dict(
            file_path=str(lfp_file_path),
            gain=conversion_factor,
            xml_file_path=str(xml_file_path),
        )
    )
    conversion_options.update(NeuroscopeLFP=dict(stub_test=stub_test))
    # conversion_options.update(NeuroscopeLFP=dict(stub_test=stub_test, es_key="lfp"))

    if raw_file_path.is_file():
        source_data.update(
            NeuroscopeRecording=dict(file_path=str(raw_file_path),
                                     gain=conversion_factor,
                                     xml_file_path=str(xml_file_path)))
        conversion_options.update(NeuroscopeRecording=dict(
            stub_test=stub_test, es_key="ElectricalSeries_raw"))

    clu_matches_in_session = len(list(session_path.glob("*.clu*")))
    res_matches_in_session = len(list(session_path.glob("*.res*")))

    if spikes_matfile_path.is_file():
        print("cell explorer spiking data is used")
        source_data.update(CellExplorerSorting=dict(
            file_path=str(spikes_matfile_path)))
    else:
        if clu_matches_in_session > 0 and res_matches_in_session > 0:
            print("neuroscope spiking data is used")
            source_data.update(
                NeuroscopeSorting=dict(folder_path=str(session_path),
                                       keep_mua_units=False,
                                       xml_file_path=str(xml_file_path)))
            conversion_options.update(NeuroscopeSorting=dict(
                stub_test=stub_test))
        else:
            print("not spiking data available")

    if behavior_matfile_path.is_file():
        source_data.update(TingleySeptalBehavior=dict(
            folder_path=str(session_path)))

    converter = TingleySeptalNWBConverter(source_data)

    metadata = converter.get_metadata()
    metadata_from_yaml = load_dict_from_file(metadata_path)
    metadata = dict_deep_update(metadata, metadata_from_yaml)

    converter.run_conversion(
        nwbfile_path=str(nwbfile_path),
        metadata=metadata,
        conversion_options=conversion_options,
        overwrite=True,
    )
    print("Done with conversion")
    sys.stdout.flush()  # Needed for verbosity in Parallel