Example 1
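    # Launch the given pipeline module for one recording: build its input/output
    # JSON paths, create the input JSON, and start the module; any exception is
    # logged and returned in `failed`.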
    def initiate_next_module(next_module, npx_directory, json_directory):
        logger_dict[npx_directory].info('initiating {} for {}'.format(
            next_module, npx_directory))
        now = datetime.datetime.now()
        this_module_info = module_info(None, None, None, now, None, None)
        info_dict[npx_directory][next_module] = this_module_info
        failed = 0
        try:
            session_id = os.path.basename(npx_directory)
            input_json = os.path.join(
                json_directory, session_id + '_' + next_module + '-input.json')
            output_json = os.path.join(
                json_directory,
                session_id + '_' + next_module + '-output.json')
            dir_name, file_name = os.path.split(npx_directory)
            file_name = file_name + '_sorted'

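            # source is the recording's backup1 location; destination is '<npx_directory>_sorted'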
            extract_from = npx_directories[npx_directory].backup1
            extract_to = os.path.join(dir_name, file_name)
            info = createInputJson(input_json,
                                   npx_directory=extract_from,
                                   extracted_data_directory=extract_to)
            command_string = [
                "python", "-m", "ecephys_spike_sorting.modules." + next_module,
                "--input_json", input_json, "--output_json", output_json
            ]
            logger_dict[npx_directory].info(command_string)
            start_module(npx_directory, next_module, command_string, info)
        except Exception as E:
            logger_dict[npx_directory].error("Error initiating " +
                                             next_module + " for " +
                                             npx_directory)
            logger_dict[npx_directory].exception(E)
            failed = E
        return next_module, this_module_info, failed
Example 2
        continuous_file = os.path.join(input_data_directory, fileName)
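        # SpikeGLX metadata file name: <run>_g<gate>_t<first_trigger>.imec.ap.meta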
        metaName = runName + '_g' + spec[2] + '_t' + repr(
            first_trig) + '.imec.ap.meta'
        input_meta_fullpath = os.path.join(input_data_directory, metaName)

        print(input_meta_fullpath)

        info = createInputJson(catGT_input_json[i],
                               npx_directory=input_data_directory,
                               continuous_file=continuous_file,
                               kilosort_output_directory=catGT_dest,
                               spikeGLX_data=True,
                               input_meta_path=input_meta_fullpath,
                               catGT_run_name=spec[0],
                               gate_string=spec[2],
                               trigger_string=trigger_str,
                               probe_string='0',
                               catGT_stream_string='-ap',
                               catGT_car_mode=car_mode,
                               catGT_loccar_min_um=loccar_min,
                               catGT_loccar_max_um=loccar_max,
                               catGT_cmd_string=catGT_cmd_string + ' ' +
                               extract_string,
                               extracted_data_directory=catGT_sub)

        #create json files for the other modules
        session_id.append(spec[0] + '_imec' + prb)

        module_input_json.append(
            os.path.join(json_directory, session_id[i] + '-input.json'))
Example 3
    prb_list = SpikeGLX_utils.ParseProbeStr(spec[3])

    # build path to the first probe folder
    run_folder_name = spec[0] + '_g' + spec[1]
    prb0_fld_name = run_folder_name + '_imec' + prb_list[0]
    prb0_fld = os.path.join(npx_directory, run_folder_name, prb0_fld_name)
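    # parse the trigger spec into first/last trigger indices for this run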
    first_trig, last_trig = SpikeGLX_utils.ParseTrigStr(spec[2], prb0_fld)
    trigger_str = repr(first_trig) + ',' + repr(last_trig)

    print('Creating json file for preprocessing')
    info = createInputJson(input_json,
                           npx_directory=npx_directory,
                           continuous_file=None,
                           spikeGLX_data='True',
                           kilosort_output_directory=catGT_dest,
                           catGT_run_name=session_id,
                           gate_string=spec[1],
                           trigger_string=trigger_str,
                           probe_string=spec[3],
                           catGT_stream_string=catGT_stream_string,
                           catGT_cmd_string=catGT_cmd_string,
                           extracted_data_directory=catGT_dest)

    # CatGT operates on whole runs with multiple probes, so it is called just
    # once per run_spec
    if run_CatGT:
        command = "python -W ignore -m ecephys_spike_sorting.modules." + 'catGT_helper' + " --input_json " + input_json \
            + " --output_json " + output_json
        subprocess.check_call(command.split(' '))

        # parse the CatGT log and write results to command line
        logPath = os.getcwd()
    # get region specific parameters
    ks_Th = ksTh_dict.get(spec[1][0])
    refPerMS = refPerMS_dict.get(spec[1][0])

    info = createInputJson(module_input_json[i],
                           npx_directory=npx_directory,
                           continuous_file=continuous_file,
                           spikeGLX_data=True,
                           input_meta_path=input_meta_fullpath,
                           kilosort_output_directory=kilosort_output_dir,
                           ks_make_copy=ks_make_copy,
                           noise_template_use_rf=False,
                           catGT_run_name=session_id[i],
                           ks_remDup=ks_remDup,
                           ks_finalSplits=1,
                           ks_labelGood=1,
                           ks_saveRez=ks_saveRez,
                           ks_copy_fproc=ks_copy_fproc,
                           ks_minfr_goodchannels=ks_minfr_goodchannels,
                           ks_whiteningRadius_um=ks_whiteningRadius_um,
                           ks_Th=ks_Th,
                           ks_CSBseed=1,
                           ks_LTseed=1,
                           ks_templateRadius_um=ks_templateRadius_um,
                           extracted_data_directory=npx_directory,
                           c_Waves_snr_um=c_Waves_snr_um,
                           qm_isi_thresh=refPerMS / 1000)

    # copy json file to data directory as record of the input parameters

# loop over files again for processing.
    input_json = os.path.join(json_directory, session_id + '-input.json')

    # kilosort_postprocessing and noise_templates modules alter the files
    # that are input to phy. If using these modules, keep a copy of the
    # original phy output
    if ('kilosort_postprocessing' in modules) or ('noise_templates' in modules):
        ks_make_copy = True
    else:
        ks_make_copy = False

    print('Creating json file for KS2 and postprocessing')
    print('npx_directory:', npx_directory)
    print('continuous file: ', os.path.join(npx_directory, name))
    info = createInputJson(input_json,
                           npx_directory=npx_directory,
                           continuous_file=os.path.join(npx_directory, name),
                           spikeGLX_data='True',
                           kilosort_output_directory=kilosort_output_dir,
                           ks_make_copy=ks_make_copy,
                           extracted_data_directory=npx_directory,
                           noise_template_use_rf=False,
                           CSBseed=run_seed)

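    # run each selected module as a subprocess, writing a per-module output json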
    for module in modules:
        output_json = os.path.join(json_directory,
                                   session_id + '-' + module + '-output.json')
        command = "python -W ignore -m ecephys_spike_sorting.modules." + module + " --input_json " + input_json \
            + " --output_json " + output_json
        subprocess.check_call(command.split(' '))
#'/mnt/md0/data/mouse412804/766640955_412804_20181022_probeC_sorted/continuous/Neuropix-3a-100.0']
#npx_directories = [r'L:\766640955_412804_20181022_probeC']

probe_type = '3A'

json_directory = r'C:\Users\svc_neuropix\Documents\json_files'
#json_directory = r'C:\Users\svc_neuropix\Documents\json_files'

for directory in sorted_directories:

    session_id = os.path.basename(directory)

    input_json = os.path.join(json_directory, session_id + '-input.json')
    output_json = os.path.join(json_directory, session_id + '-output.json')

    info = createInputJson(input_json, npx_directory=directory)

    modules = [  #'extract_from_npx',
        #'depth_estimation'
        #'median_subtraction',
        #'kilosort_helper',
        #'kilosort_postprocessing'
        #'noise_templates',
        'mean_waveforms',
        'quality_metrics'
    ]

    for module in modules:

        command = "python -W ignore -m ecephys_spike_sorting.modules." + module + " --input_json " + input_json \
                  + " --output_json " + output_json

		session_id = os.path.basename(local_sorting_directory)

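		# destination on the network share where the re-sorted output will be copied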
		target_directory = os.path.join(r'\\10.128.50.77',
										'sd5.3',
										'RE-SORT',
										session_id[:-7], 
										session_id, 
										'continuous', 
										'Neuropix-3a-100.0')

		input_json = os.path.join(json_directory, session_id + '_resort-input.json')
		output_json = os.path.join(json_directory, session_id + '_resort-output.json')

		info = createInputJson(input_json, kilosort_output_directory=local_sorting_directory,
											extracted_data_directory=local_sorting_directory)

		modules = [ 'kilosort_helper',
					'kilosort_postprocessing',
					'noise_templates',
					'mean_waveforms',
					'quality_metrics'] 

		for module in modules:

			command = "python -W ignore -m ecephys_spike_sorting.modules." + module + " --input_json " + input_json \
			          + " --output_json " + output_json

			subprocess.check_call(command.split(' '))

		copy_sorted_files(local_sorting_directory, target_directory)
#npx_directories = [r'L:\766640955_412804_20181022_probeC']

probe_type = '3A'

json_directory = '/mnt/md0/data/json_files'
#json_directory = r'C:\Users\svc_neuropix\Documents\json_files'

for directory in sorted_directories:

    session_id = os.path.basename(directory)

    input_json = os.path.join(json_directory, session_id + '-input.json')
    output_json = os.path.join(json_directory, session_id + '-output.json')

    info = createInputJson(input_json, kilosort_output_directory=directory)

    modules = [  #'extract_from_npx',
        #'depth_estimation',
        ##'median_subtraction',
        #'kilosort_helper',
        #'kilosort_postprocessing',
        #'noise_templates',
        #'mean_waveforms',
        'quality_metrics'
    ]

    for module in modules:

        command = "python -W ignore -m ecephys_spike_sorting.modules." + module + " --input_json " + input_json \
                  + " --output_json " + output_json
Example 9



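    # per-session JSON paths for the classifier run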
    input_json = os.path.join(fi, session_id +'-inputClassifier.json')
    output_json = os.path.join(fi, session_id +'-outputClassifier.json')

    print('Creating json file for postprocessing')
    info = createInputJson(input_json,
                           npx_directory=npx_directory,
                           continuous_file=npx_file[0],
                           kilosort_output_directory=fi,
                           ks_make_copy=False,
                           noise_template_use_rf=False,
                           cluster_group_file_name='classifier_cluster_heuristic.txt',
                           extracted_data_directory=npx_directory)
  

    print(info['noise_waveform_params']['use_preclustered'])
    for module in modules:
        command = "python -W ignore -m ecephys_spike_sorting.modules." + module + " --input_json " + input_json \
		          + " --output_json " + output_json
        subprocess.check_call(command.split(' '))
Example 10

probe_type = 'NP1'

json_directory = r'D:\ecephys_fork\json_files'

for directory in sorted_directories:

	session_id = os.path.basename(directory)

	input_json = os.path.join(json_directory, session_id + '-input.json')
	output_json = os.path.join(json_directory, session_id + '-output.json')

	info = createInputJson(input_json, npx_directory=None, 
	                                   continuous_file = None,
									   kilosort_output_directory=directory,
									   kilosort_output_tmp=directory,
									   probe_type=probe_type)

	modules = [
				'kilosort_postprocessing'
				]


	for module in modules:

		command = "python -W ignore -m ecephys_spike_sorting.modules." + module + " --input_json " + input_json \
		          + " --output_json " + output_json

		subprocess.check_call(command.split(' '))
        local_probe_directory = os.path.join(local_data_path, session_id)
        destination_directory = os.path.join(dest_path, os.path.basename(remote_probe_directory), session_id)
        
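        # make a local working directory for this probe's sorting output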
        if not os.path.exists(local_probe_directory):
            os.mkdir(local_probe_directory)

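        # locate the raw continuous .dat file for this probe on the remote directory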
        continuous_file = glob.glob(os.path.join(remote_probe_directory, 'continuous','Neuropix-*-100.0','*.dat'))[0]

        if os.path.exists(continuous_file):

            input_json = os.path.join(json_directory, session_id + '-input.json')
            output_json = os.path.join(json_directory, session_id + '-output.json')

            info = createInputJson(input_json, 
                continuous_file=continuous_file,
                extracted_data_directory=remote_probe_directory,
                kilosort_output_directory=local_probe_directory)

            modules = [ #'extract_from_npx',
                        #'depth_estimation',
                        ##'median_subtraction',
                        'kilosort_helper',
                        'kilosort_postprocessing',
                        'noise_templates',
                        'mean_waveforms',
                        'quality_metrics']

            for module in modules:

                command = "python -W ignore -m ecephys_spike_sorting.modules." + module + " --input_json " + input_json \
                          + " --output_json " + output_json
Example 12
    session_id = os.path.basename(local_directory)

    extracted_data_directory = local_directory + '_sorted'
    target_directory = os.path.join(r'\\sd5', 'sd5.3', session_id[:-7],
                                    session_id)

    print(remote_directory)
    print(local_directory)
    print(extracted_data_directory)
    print(target_directory)

    input_json = os.path.join(json_directory, session_id + '-input.json')
    output_json = os.path.join(json_directory, session_id + '-output.json')

    info = createInputJson(input_json,
                           npx_directory=local_directory,
                           extracted_data_directory=extracted_data_directory)

    modules = ['extract_from_npx', 'depth_estimation', 'median_subtraction']

    for module in modules:

        command = "python -W ignore -m ecephys_spike_sorting.modules." + module + " --input_json " + input_json \
                  + " --output_json " + output_json

        subprocess.check_call(command.split(' '))

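    # copy the extracted data to the network share, then free local disk space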
    copy_to_remote(extracted_data_directory, target_directory)

    shutil.rmtree(local_directory)
    shutil.rmtree(extracted_data_directory)
Example 13
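        # derive the session id from the stimulus .stim.pkl file name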
        pkl_file = glob.glob(mouse_directory + '/*.stim.pkl')[0]
        session_id = os.path.basename(pkl_file).split('.')[0]

        print(session_id)

        print(mouse_directory)

        for module in modules:

            input_json = os.path.join(
                json_directory, session_id + '-' + module + '-input.json')
            output_json = os.path.join(
                json_directory, session_id + '-' + module + '-output.json')

            info, last_unit_id = createInputJson(mouse_directory,
                                                 probe_data_directory, module,
                                                 input_json, last_unit_id)

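            # skip the module if its output already exists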
            if not os.path.exists(info['output_path']):

                print('Running ' + module)

                command_string = [
                    "python", "-W", "ignore", "-m", module, "--input_json",
                    input_json, "--output_json", output_json
                ]

                subprocess.check_call(command_string)
#except:
#    print('Error processing')