def main(outputfilename, outputmetafilename, tempdir):
    """Merge per-window fragility results in ``tempdir`` into one .npz file.

    :param outputfilename: (os.PathLike) destination .npz file for merged data.
    :param outputmetafilename: (os.PathLike) metadata file produced by the
        windowed analysis; must exist and agree with the temp-file count.
    :param tempdir: (os.PathLike) directory holding one temp .npz per window.
    :raises RuntimeError: if the metadata file is missing, or if its recorded
        window count disagrees with the number of files in ``tempdir``.
    """
    # hidden files (e.g. .DS_Store) are not per-window results
    tempfiles = [f for f in os.listdir(tempdir) if not f.startswith('.')]
    numwins = len(tempfiles)

    def _remove_success_flag():
        # The success flag lives two levels above tempdir and mirrors the
        # output filename; removing it forces the pipeline to re-run.
        outfile = os.path.basename(outputfilename)
        success_flag_name = outfile.replace('fragmodel.npz',
                                            'frag_success.txt')
        success_flag_name = os.path.join(walk_up_folder(tempdir, 2),
                                         success_flag_name)
        os.remove(success_flag_name)
        sys.stdout.write("File Removed {}!".format(success_flag_name))

    if not os.path.exists(outputmetafilename):
        # no metadata -> invalid run; drop the first temp file and the
        # success flag so the pipeline recomputes from scratch
        temp0file = os.path.join(tempdir, "temp0.npz")
        os.remove(temp0file)
        # remove success file
        _remove_success_flag()
        # note: trailing space added — implicit string concatenation
        # previously produced "number offiles"
        raise RuntimeError(
            "Number of windows is not the same as the number of "
            "files in the temp dir. Should be {}".format(numwins))

    # run merge
    merger = RunMergeModel(tempdir, numwins)
    merger.loadmetafile(outputmetafilename)
    metadata = merger.metadata
    if metadata['numwins'] != numwins:
        # remove success file so the mismatched run is redone
        _remove_success_flag()
        raise RuntimeError(
            "Number of windows is not the same as the number of "
            "files in the temp dir. Should be {} vs #files: {}".format(
                metadata['numwins'], numwins))

    merger.loadmetadata(metadata)
    merger.mergefragilitydata(outputfilename=outputfilename)
    merger.writemetadata_tofile(outputmetafilename=outputmetafilename)
# ----- Example #2 (scraped-snippet separator) -----
    # NOTE(review): this span is a scraped fragment — `args`, `rawdatadir` and
    # the helper functions are defined outside the visible chunk, and the final
    # statement is cut off mid-call.
    outputjson_filepath = args.outputjson_filepath
    modality = args.datatype
    excelfilepath = args.exceldatafilepath

    # time the program
    start = time.time()

    # the output JSON path mirrors the raw .fif name with a .json suffix
    outputjson_filepath = outputjson_filepath.replace('_raw.fif', '.json')

    print("saving to ", outputjson_filepath)
    ''' EXTRACT USEFUL INFORMATION FROM THE OUTPUTJSON FILEPATH '''
    # extract the clinical center from the path name
    clinical_center = os.path.basename(os.path.normpath(rawdatadir))

    # extract root directory
    # patient id assumed to be the directory 4 levels up — TODO confirm
    pat_id = os.path.basename(walk_up_folder(outputjson_filepath, 4))
    # dataset id: strip '_raw' and the extension, drop the leading token, then
    # fuse the remaining tokens (joined with '' here but with '_' in the
    # __main__ block below — NOTE(review): verify this is intentional)
    dataset_id = ''.join(
        os.path.basename(outputjson_filepath.replace(
            '_raw', '')).split('.')[0].split('_')[1:])

    # load in metadata and clinical df object
    metadata = loadmetadata(outputjson_filepath)
    clindf = loadclinicaldf(excelfilepath)

    # 'la' patient ids carry extra modality/seizure tokens in the dataset id
    if 'la' in pat_id:
        dataset_id = dataset_id.replace("scalp", '').replace("sz", '')

    print("looking at: ", pat_id, dataset_id)

    # merge in data based on patient id and dataset id
    # NOTE(review): call truncated in this chunk — remaining args not visible
    metadata = merge_metadata(metadata,
# ----- Example #3 (scraped-snippet separator) -----
def pair_name_edffiles(config, ignore_interictal=False, ignore_ictal=False):
    """Map each desired output .fif filepath to its source .edf filepath.

    Globs ``<rawdatadir>/<center>/*/<modality>/edf/*.edf``; if no files are
    found for the configured modality, retries with modality ``'ieeg'``.

    :param config: (dict) needs keys 'rawdatadir', 'center' and 'modality'.
    :param ignore_interictal: (bool) skip datasets whose name contains 'ii'.
    :param ignore_ictal: (bool) skip datasets whose name contains 'sz'.
    :return: (dict) str output .fif filepath -> str input .edf filepath.
    """
    # hack inserted to make sure all modalities are for ieeg
    modality = config['modality']

    center_dir = Path(os.path.join(config['rawdatadir'], config['center']))
    # BUGFIX: materialize the glob — Path.glob returns a generator, which is
    # always truthy, so the original `if not edf_path:` fallback never fired
    edf_files = list(center_dir.glob("*/{}/edf/*.edf".format(modality)))

    if not edf_files:
        # fall back to the 'ieeg' directory layout
        modality = 'ieeg'
        edf_files = list(center_dir.glob("*/{}/edf/*.edf".format(modality)))

    # pair edf name to infile path using a dictionary
    edf_infiles_dict = {}
    for f in edf_files:
        # get the actual dataset name; skip hidden files
        edf_name = f.name
        if edf_name.startswith('.'):
            continue

        # create the new filepath we want the output of this pipeline to be
        fifdir = os.path.dirname(str(f)).replace('edf', 'fif')
        edfdir = fifdir.replace('fif', 'edf')

        edf_filepath = rename_file(os.path.join(edfdir, f.name))
        edf_name = os.path.basename(edf_filepath)

        # use preformatter filename mapping - TRIMMING DATASET FILENAME
        fif_name = BaseMeta.map_filename(edf_name)

        # extract root directory; patient id assumed 4 levels up — TODO confirm
        pat_id = os.path.basename(walk_up_folder(edf_filepath, 4))
        if pat_id not in fif_name:
            fif_name = pat_id + fif_name

        # drop redundant modality tokens from the target name
        if modality == 'scalp':
            fif_name = fif_name.replace('_scalp', '')
        elif modality == 'seeg':
            fif_name = fif_name.replace("_seeg", "")

        if ignore_interictal and 'ii' in fif_name:
            continue
        if ignore_ictal and 'sz' in fif_name:
            continue

        # set the dictionary
        edf_infiles_dict[str(os.path.join(fifdir,
                                          fif_name))] = str(edf_filepath)

    return edf_infiles_dict
# ----- Example #4 (scraped-snippet separator) -----
# NOTE(review): `parser` and the helper functions are defined outside this
# visible chunk; the script body may continue past the end of this span.
if __name__ == '__main__':
    args = parser.parse_args()
    rawdatadir = args.rawdatadir
    inputedf_filepath = args.inputedf_filepath
    outputjson_filepath = args.outputjson_filepath
    datatype = args.datatype

    # time the program
    start = time.time()
    ''' EXTRACT USEFUL INFORMATION FROM THE OUTPUTJSON FILEPATH '''
    # extract the clinical center from the path name
    clinical_center = os.path.basename(os.path.normpath(rawdatadir))

    # extract root directory
    # patient id assumed to be the directory 4 levels up — TODO confirm
    pat_id = os.path.basename(walk_up_folder(outputjson_filepath, 4))
    # dataset id: strip '_raw' and the extension, drop the leading token,
    # rejoin the rest with '_'
    dataset_id = '_'.join(
        os.path.basename(outputjson_filepath.replace(
            '_raw', '')).split('.')[0].split('_')[1:])
    patdatadir = os.path.join(rawdatadir, pat_id)

    # extract the actual json filename
    outputjson_filename = os.path.basename(outputjson_filepath)

    # set the directories according to how we decided:
    # edf/ and fif/ are siblings three levels up from the output json
    seegdir = os.path.join(walk_up_folder(outputjson_filepath, 3), 'edf')
    fifdir = seegdir.replace('edf', 'fif')
    if not os.path.exists(fifdir):
        os.makedirs(fifdir)

    print("seegdir ", seegdir)
# ----- Example #5 (scraped-snippet separator) -----
    def mergefragilitydata(self, outputfilename):
        """
        Function to merge fragility computed data (i.e. LTVN and Perturbation model)
        Performs a loop over files in the self.tempdir

        :param outputfilename: (os.PathLike) output filepath to save the resulting data
        as .npz file type.
        :return:
        :raises ValueError: if a temp file's window number does not match its
        position in ``self.alltempfiles``.
        """
        saveflag = True

        def _discard_window(tempfilename):
            # A corrupt/partial window invalidates the whole merge: drop the
            # bad temp file and the success flag so the pipeline re-runs.
            os.remove(tempfilename)
            print("File Removed {}!".format(tempfilename))

            # remove success file (lives two levels above the temp dir)
            outfile = os.path.basename(outputfilename)
            success_flag_name = os.path.join(
                walk_up_folder(self.tempdir, 2),
                outfile.replace('fragmodel.npz', 'frag_success.txt'))
            try:
                os.remove(success_flag_name)
                print("File Removed {}!".format(success_flag_name))
            except Exception as e:
                print(e)
                print("Success flag file already removed!")

        numfiles = len(self.alltempfiles)
        # merged output buffers; allocated lazily once the channel count is
        # known from the first successfully loaded window (the original
        # `if idx == 0` guard raised NameError if window 0 failed to load)
        pertmats = delvecs_array = adjmats = None

        for idx, tempfile in enumerate(self.alltempfiles):
            # get the window number of this file just to check ordering
            buff = tempfile.split('_')
            winnum = int(buff[-1].split('.')[0])
            if winnum != idx:
                raise ValueError("Win num {} should match idx {}".format(
                    winnum, idx))

            tempfilename = os.path.join(self.tempdir, tempfile)
            # load result data; `except Exception` (not bare `except:`) so
            # KeyboardInterrupt / SystemExit still propagate
            try:
                data = super(RunMergeModel, self)._loadnpzfile(tempfilename)
                pertmat = data['pertmat']
                # delfreqs = data['delfreqs']
                delvecs = data['delvecs']
                adjmat = data['adjmat']
            except Exception:
                saveflag = False
                _discard_window(tempfilename)
                continue

            if pertmats is None:
                numchans, _ = pertmat.shape
                # initialize merged matrices over time
                pertmats = np.zeros((numchans, numfiles))
                delvecs_array = np.zeros((numchans, numchans, numfiles),
                                         dtype='complex')
                adjmats = np.zeros((numfiles, numchans, numchans))

            pertmats[:, idx] = pertmat.ravel()
            delvecs_array[:, :, idx] = delvecs
            adjmats[idx, :, :] = adjmat

        if saveflag:
            # save adjmats, pertmats and delvecs array along with metadata
            super(RunMergeModel, self)._writenpzfile(outputfilename,
                                                     adjmats=adjmats,
                                                     pertmats=pertmats,
                                                     delvecs=delvecs_array)
def get_patientdir(rawdatasetpath):
    """Return the patient directory: four levels above the raw dataset path."""
    return walk_up_folder(rawdatasetpath, 4)
    # NOTE(review): scraped fragment — indented so it sits inside
    # `get_patientdir` after its `return`, making it unreachable as written;
    # `args`, `root_dir`, `jsonfilepath` and `load_raw_data` come from
    # outside the visible chunk.
    reference = args.reference
    modality = args.modality

    gnuind = args.gnuind
    gnusize = args.gnusize

    print("\n\nArguments are: {}".format(args), flush=True)
    print(root_dir, modality, flush=True)
    print(jsonfilepath, flush=True)

    # resolve the json filename to a full path by walking the tree;
    # if the name occurs more than once, the last match wins
    for dirpath, dirnames, filenames in os.walk(root_dir):
        for filename in [f for f in filenames if f == jsonfilepath]:
            jsonfilepath = os.path.join(dirpath, filename)

    # reassign root dir: 4 levels up is the root, 3 levels up the modality dir
    root_dir = walk_up_folder(jsonfilepath, 4)
    modality = os.path.basename(walk_up_folder(jsonfilepath, 3))

    root_dir = os.path.join(root_dir, modality, "fif")
    print("\nInside run frag...\n", flush=True)
    print(root_dir, modality, flush=True)
    print(jsonfilepath, flush=True)

    # load in the data
    eegts = load_raw_data(root_dir,
                          jsonfilepath,
                          reference,
                          apply_mask=True,
                          remove_wm_contacts=True,
                          modality=modality)