Example #1
def test_readDicom():
    dicomDir = os.path.join(os.path.dirname(__file__), 'test_input')
    dicomFile = os.path.join(dicomDir, test_dicomFile)
    dicomImg1 = rd.readDicomFromFile(dicomFile)
    vol1 = rd.parseDicomVolume(dicomImg1, 64)
    assert vol1 is not None

    with open(dicomFile, 'rb') as fp:
        data = fp.read()
    dicomImg2 = rd.readDicomFromBuffer(data)
    vol2 = rd.parseDicomVolume(dicomImg2, 64)
    assert vol2 is not None
    assert (vol1 == vol2).all()

    fileInterface = FileInterface()
    fileInterface.initWatch(dicomDir, '*.dcm', 0)
    dicomImg3 = rd.readRetryDicomFromFileInterface(fileInterface, dicomFile)
    vol3 = rd.parseDicomVolume(dicomImg3, 64)
    assert vol3 is not None
    assert (vol1 == vol3).all()

    # read in a truncated file, should fail and return None.
    truncatedDicomFile = os.path.join(dicomDir, test_dicomTruncFile)
    dicomImg4 = rd.readRetryDicomFromFileInterface(fileInterface,
                                                   truncatedDicomFile)
    assert dicomImg4 is None

    # Test convert to nifti
    niftiObject = dicomreaders.mosaic_to_nii(dicomImg3)
    assert niftiObject is not None

    fileInterface.fileWatcher.__del__()
    fileInterface.fileWatcher = None
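
For reference, the same read-and-convert flow stripped of the test assertions. The import lines are assumptions, since the test's imports are not shown (the test refers to the reader module only as 'rd'), and the file name is hypothetical:

# Minimal sketch of the read-and-convert flow exercised above.
import os
import rtCommon.readDicom as rd                  # assumed module providing readDicomFromFile et al.
from nibabel.nicom import dicomreaders           # assumed source of mosaic_to_nii

dicomFile = os.path.join('test_input', 'example.dcm')    # hypothetical DICOM path
dicomImg = rd.readDicomFromFile(dicomFile)               # parse the DICOM from disk
vol = rd.parseDicomVolume(dicomImg, 64)                  # 64 = mosaic slice dimension used in the test
niftiObject = dicomreaders.mosaic_to_nii(dicomImg)       # convert the mosaic image to NIfTI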
Example #2
def main(argv=None):
    """
    This is the main function that is called when you run 'intialize.py'.
    
    Here, you will load the configuration settings specified in the toml configuration 
    file, initiate the class fileInterface, and set up some directories and other 
    important things through 'initialize()'
    """

    # define the parameters that will be recognized later on to set up fileInterface
    argParser = argparse.ArgumentParser()
    argParser.add_argument('--config', '-c', default=defaultConfig, type=str,
                           help='experiment config file (.json or .toml)')
    # This parameter is used for projectInterface
    argParser.add_argument('--commpipe', '-q', default=None, type=str,
                           help='Named pipe to communicate with projectInterface')
    argParser.add_argument('--filesremote', '-x', default=False, action='store_true',
                           help='retrieve files from the remote server')
    argParser.add_argument('--addr', '-a', default='localhost', type=str,
                           help='server ip address')
    argParser.add_argument('--runs', '-r', default='', type=str,
                           help='Comma separated list of run numbers')
    argParser.add_argument('--scans', '-s', default='', type=str,
                           help='Comma separated list of scan numbers')
    args = argParser.parse_args(argv)

    # load the experiment configuration file
    cfg = utils.loadConfigFile(args.config)
    cfg = initialize(cfg, args)

    # build subject folders on server
    if args.filesremote:
        buildSubjectFoldersOnServer(cfg)

        # open up the communication pipe using 'projectInterface'
        projectComm = projUtils.initProjectComm(args.commpipe, args.filesremote)

        # initiate the 'fileInterface' class, which will allow you to read and write 
        #   files and many other things using functions found in 'fileClient.py'
        #   INPUT:
        #       [1] args.filesremote (to retrieve dicom files from the remote server)
        #       [2] projectComm (communication pipe that is set up above)
        fileInterface = FileInterface(filesremote=args.filesremote, commPipes=projectComm)

        # next, transfer transformation files from local --> server for online processing
        projUtils.uploadFolderToCloud(fileInterface, cfg.local.wf_dir, cfg.server.wf_dir)

        # upload ROI folder to cloud server - we would need to do this if we were using
        # a standard mask, but we're not in this case
        #projUtils.uploadFolderToCloud(fileInterface,cfg.local.maskDir,cfg.server.maskDir)

        # upload all transformed masks to the cloud
        projUtils.uploadFilesFromList(fileInterface, cfg.local_MASK_transformed, cfg.subject_reg_dir)
    return 0
Example #3
def main(argv=None):
    """
    This is the main function that is called when you run 'finalize.py'.
    
    Here, you will load the configuration settings specified in the toml configuration
    file, instantiate the FileInterface class, and set up some directories and other
    important things through 'finalize()'.
    """

    # define the parameters that will be recognized later on to set up fileInterface
    argParser = argparse.ArgumentParser()
    argParser.add_argument('--config',
                           '-c',
                           default=defaultConfig,
                           type=str,
                           help='experiment config file (.json or .toml)')
    # This parameter is used for projectInterface
    argParser.add_argument(
        '--commpipe',
        '-q',
        default=None,
        type=str,
        help='Named pipe to communicate with projectInterface')
    argParser.add_argument('--filesremote',
                           '-x',
                           default=False,
                           action='store_true',
                           help='retrieve files from the remote server')
    args = argParser.parse_args(argv)

    # load the experiment configuration file
    cfg = utils.loadConfigFile(args.config)

    # open up the communication pipe using 'projectInterface'
    projectComm = projUtils.initProjectComm(args.commpipe, args.filesremote)

    # initiate the 'fileInterface' class, which will allow you to read and write
    #   files and many other things using functions found in 'fileClient.py'
    #   INPUT:
    #       [1] args.filesremote (to retrieve dicom files from the remote server)
    #       [2] projectComm (communication pipe that is set up above)
    fileInterface = FileInterface(filesremote=args.filesremote,
                                  commPipes=projectComm)

    # now that we have the necessary variables, call the function 'finalize' in
    #   order to actually start reading dicoms and doing your analyses of interest!
    #   INPUT:
    #       [1] cfg (configuration file with important variables)
    #       [2] fileInterface (this will allow a script from the cloud to access files
    #               from the stimulus computer)
    #       [3] projectComm (communication pipe to talk with projectInterface)
    finalize(cfg, fileInterface, projectComm)
    return 0
Example #4
def main():
    logger = logging.getLogger()
    logger.setLevel(logLevel)
    logging.info('GREEN EYES: first log message!')
    argParser = argparse.ArgumentParser()
    argParser.add_argument('--config', '-c', default=defaultConfig, type=str,
                           help='experiment config file (.json or .toml)')
    argParser.add_argument('--runs', '-r', default='', type=str,
                           help='Comma separated list of run numbers')
    argParser.add_argument('--scans', '-s', default='', type=str,
                           help='Comma separated list of scan numbers')
    argParser.add_argument('--deleteTmpNifti', '-d', default='1', type=str,
                           help='Set to 0 if rerunning during a single scanning session after an error')
    # creates pipe communication link to send/request responses through pipe
    argParser.add_argument('--commpipe', '-q', default=None, type=str,
                           help='Named pipe to communicate with projectInterface')
    argParser.add_argument('--filesremote', '-x', default=False, action='store_true',
                           help='dicom files retrieved from remote server')

    args = argParser.parse_args()
    print(args)
    cfg = initializeGreenEyes(args.config, args)

    # DELETE ALL FILES IF FLAGGED TO # 
    if args.deleteTmpNifti == '1':
        deleteTmpFiles(cfg)
    else:
        print('!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!')
        print('NOT DELETING NIFTIS IN tmp/convertedNiftis')
        print('!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!')
    # DELETE ALL FILES IF FLAGGED TO # 

    # comm pipe
    projComm = projUtils.initProjectComm(args.commpipe, args.filesremote)
    # initialize file interface class -- for now only local
    fileInterface = FileInterface(filesremote=args.filesremote, commPipes=projComm)
    # initialize watching in particular directory
    fileInterface.initWatch(cfg.dicomDir, cfg.dicomNamePattern, cfg.minExpectedDicomSize)
    story_TRs = cfg.story_TR_2 - cfg.story_TR_1 + 1
    #### MAIN PROCESSING ###
    nRuns = len(cfg.runNum)
    for runIndex in np.arange(nRuns):
        runData = StructDict()
        runData.cheating_probability = np.zeros((cfg.nStations,))
        runData.zTransferred = np.zeros((cfg.nStations,))
        runData.correct_prob = np.zeros((cfg.nStations,))
        runData.interpretation = getSubjectInterpretation(cfg)
        runData.badVoxels = {}
        runData.dataForClassification = {}
        all_data = np.zeros((cfg.nVox,cfg.nTR_run + 1)) # adding 1 because we're not starting at 0 with the indexing
        runData.story_data = np.zeros((cfg.nVox,story_TRs))

        makeRunHeader(cfg,runIndex)
        run = cfg.runNum[runIndex]
        scanNum = cfg.scanNum[runIndex]
        storyTRCount = 0
        stationInd = 0
        for TRFilenum in np.arange(cfg.nTR_skip+1,cfg.nTR_run+1):
        # for TRFilenum in np.arange(11,54):
            if TRFilenum == cfg.nTR_skip+1: # wait until run starts
                timeout_file = 180
            else:
                timeout_file = 5
            A = time.time()
            dicomData = readRetryDicomFromFileInterface(fileInterface, getDicomFileName(cfg, scanNum, TRFilenum), timeout=timeout_file)
            full_nifti_name = convertToNifti(TRFilenum,scanNum,cfg,dicomData)
            registeredFileName = registerNewNiftiToMNI(cfg,full_nifti_name)
            maskedData = apply_mask(registeredFileName,cfg.mask_filename)
            all_data[:,TRFilenum] = maskedData
            B = time.time()
            print('read to mask time: {:5f}'.format(B-A))
            if TRFilenum >= cfg.fileNum_story_TR_1 and TRFilenum <= cfg.fileNum_story_TR_2: # we're at a story TR now
                runData.story_data[:,storyTRCount] = maskedData
                if np.any(storyTRCount == cfg.last_tr_in_station.astype(int)):
                    # NOW PREPROCESS AND CLASSIFY
                    stationInd = np.argwhere(storyTRCount == cfg.last_tr_in_station.astype(int))[0][0]
                    A = time.time()
                    runData = preprocessAndPredict(cfg,runData,storyTRCount)
                    B = time.time()
                    print('preprocessAndPredict time: {:5f}'.format(B-A))
                    text_to_save = '{0:05f}'.format(runData.correct_prob[stationInd])
                    file_name_to_save = getStationClassoutputFilename(run, stationInd)
                    if cfg.mode == 'cloud':
                        full_filename_to_save = os.path.join(cfg.intelrt.subject_full_day_path,file_name_to_save) 
                    else:
                        full_filename_to_save = os.path.join(cfg.subject_full_day_path,file_name_to_save) 
                    fileInterface.putTextFile(full_filename_to_save,text_to_save)
                    
                    if args.commpipe:    
                        # JUST TO PLOT ON WEB SERVER

                        projUtils.sendResultToWeb(projComm, run, int(stationInd), runData.correct_prob[stationInd])
                storyTRCount += 1
            TRheader = makeTRHeader(cfg,runIndex,TRFilenum,storyTRCount-1,stationInd,runData.correct_prob[stationInd])

        # SAVE OVER RUN NP FILE
        runData.scanNum = scanNum # save scanning number
        runData.subjectName = cfg.subjectName
        runData.dicomDir = cfg.dicomDir
        run_filename = getRunFilename(cfg.sessionId, run)
        full_run_filename_to_save = os.path.join(cfg.subject_full_day_path,run_filename)
        #try:
        sio.savemat(full_run_filename_to_save, runData, appendmat=False)
        #except Exception as err:
        #    errorReply = self.createReplyMessage(msg, MsgResult.Errsor)
        #    errorReply.data = "Error: Unable to save blkGrpFile %s: %r" % (blkGrpFilename, err)
        #    return errorReply

    # DELETE ALL FILES IF FLAGGED TO # 
    # REPEAT AT THE END OF THE RUN AS WELL
    if args.deleteTmpNifti == '1':
        deleteTmpFiles(cfg)
    else:
        print('!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!')
        print('NOT DELETING NIFTIS IN tmp/convertedNiftis')
        print('!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!')
    # DELETE ALL FILES IF FLAGGED TO # 
    sys.exit(0)
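
The per-TR loop above follows the same polling pattern as the other real-time scripts: watch the DICOM directory, allow a long timeout for the first expected volume and a short one afterwards, and read each file with readRetryDicomFromFileInterface. A condensed sketch of just that pattern, reusing the cfg fields and helpers from the example above (the None guard is an addition here, based on the truncated-file behavior shown in Example #1):

# Condensed sketch of the DICOM polling pattern above; names match the example.
fileInterface.initWatch(cfg.dicomDir, cfg.dicomNamePattern, cfg.minExpectedDicomSize)
for TRFilenum in np.arange(cfg.nTR_skip + 1, cfg.nTR_run + 1):
    # wait up to 3 minutes for the first volume (the run may not have started yet),
    # then only a few seconds per file once data is streaming
    timeout_file = 180 if TRFilenum == cfg.nTR_skip + 1 else 5
    dicomData = readRetryDicomFromFileInterface(fileInterface,
                                                getDicomFileName(cfg, scanNum, TRFilenum),
                                                timeout=timeout_file)
    if dicomData is None:
        break  # file never arrived or was truncated; stop this run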
Example #5
File: sample.py  Project: Chibee/rt-cloud
def main(argv=None):
    """
    This is the main function that is called when you run 'sample.py'.

    Here, you will load the configuration settings specified in the toml configuration
    file, instantiate the FileInterface class, and then call the function 'doRuns' to
    actually start doing the experiment.
    """

    # define the parameters that will be recognized later on to set up fileInterface
    argParser = argparse.ArgumentParser()
    argParser.add_argument('--config',
                           '-c',
                           default=defaultConfig,
                           type=str,
                           help='experiment config file (.json or .toml)')
    argParser.add_argument('--runs',
                           '-r',
                           default='',
                           type=str,
                           help='Comma separated list of run numbers')
    argParser.add_argument('--scans',
                           '-s',
                           default='',
                           type=str,
                           help='Comma separated list of scan numbers')
    # This parameter is used for projectInterface
    argParser.add_argument(
        '--commpipe',
        '-q',
        default=None,
        type=str,
        help='Named pipe to communicate with projectInterface')
    argParser.add_argument('--filesremote',
                           '-x',
                           default=False,
                           action='store_true',
                           help='retrieve dicom files from the remote server')
    args = argParser.parse_args(argv)

    # load the experiment configuration file
    cfg = loadConfigFile(args.config)

    # obtain paths for important directories (e.g. location of dicom files)
    if cfg.imgDir is None:
        cfg.imgDir = os.path.join(currPath, 'dicomDir')
    cfg.codeDir = currPath

    # open up the communication pipe using 'projectInterface'
    projectComm = projUtils.initProjectComm(args.commpipe, args.filesremote)

    # initiate the 'fileInterface' class, which will allow you to read and write
    #   files and many other things using functions found in 'fileClient.py'
    #   INPUT:
    #       [1] args.filesremote (to retrieve dicom files from the remote server)
    #       [2] projectComm (communication pipe that is set up above)
    fileInterface = FileInterface(filesremote=args.filesremote,
                                  commPipes=projectComm)

    # now that we have the necessary variables, call the function 'doRuns' in order
    #   to actually start reading dicoms and doing your analyses of interest!
    #   INPUT:
    #       [1] cfg (configuration file with important variables)
    #       [2] fileInterface (this will allow a script from the cloud to access files
    #               from the stimulus computer that receives dicoms from the Siemens
    #               console computer)
    #       [3] projectComm (communication pipe to talk with projectInterface)
    doRuns(cfg, fileInterface, projectComm)

    return 0
Example #6
    def test_fileInterface(self, bigTestfile):
        projectComm = projUtils.initProjectComm(None, True)
        fileInterface = FileInterface(filesremote=True, commPipes=projectComm)

        # Read in original data
        with open(bigTestfile, 'rb') as fp:
            data = fp.read()

        # Read via fileClient
        startTime = time.time()
        try:
            responseData = fileInterface.getFile(bigTestfile)
        except Exception as err:
            assert False, str(err)
        assert responseData == data
        print('Read Bigfile time: {}'.format(time.time() - startTime))

        # Write bigFile
        startTime = time.time()
        try:
            fileInterface.putBinaryFile(bigTestfile, data)
        except Exception as err:
            assert False, str(err)
        print('Write Bigfile time: {}'.format(time.time() - startTime))
        # Read back written data and compare to original
        writtenPath = os.path.join(CommonOutputDir, bigTestfile)
        with open(writtenPath, 'rb') as fp:
            writtenData = fp.read()
        assert writtenData == data

        # test get allowedFileTypes
        allowedTypes = fileInterface.allowedFileTypes()
        assert allowedTypes == fileTypeList

        # test list files
        filepattern = os.path.join(testDir, 'test_input', '*.dcm')
        try:
            filelist = fileInterface.listFiles(filepattern)
        except Exception as err:
            assert False, str(err)
        # get list locally
        filelist2 = [x for x in glob.iglob(filepattern)]
        filelist.sort()
        filelist2.sort()
        assert filelist == filelist2

        # test downloadFilesFromCloud and uploadFilesToCloud
        # 0. remove any previous test directories
        shutil.rmtree('/tmp/d2', ignore_errors=True)
        shutil.rmtree('/tmp/d3', ignore_errors=True)
        # 1. create a tmp sub-dir with some files in it
        text1 = 'test file 1'
        text2 = 'test file 2'
        bindata1 = b'\xFE\xED\x01\x23'
        bindata2 = b'\xAA\xBB\xCC\xDD'
        utils.writeFile('/tmp/d1/test1.txt', text1, binary=False)
        utils.writeFile('/tmp/d1/test2.txt', text2, binary=False)
        utils.writeFile('/tmp/d1/test3.bin', bindata1)
        utils.writeFile('/tmp/d1/test4.bin', bindata2)
        # 2. download files from cloud
        projUtils.downloadFilesFromCloud(fileInterface, '/tmp/d1/test*.txt',
                                         '/tmp/d2')
        projUtils.downloadFilesFromCloud(fileInterface, '/tmp/d1/test*.bin',
                                         '/tmp/d2')
        # 3. upload files to cloud
        projUtils.uploadFilesToCloud(fileInterface, '/tmp/d2/test*.txt',
                                     '/tmp/d3')
        projUtils.uploadFilesToCloud(fileInterface, '/tmp/d2/test*.bin',
                                     '/tmp/d3')
        # check that all files in d1 are same as files in d3
        d3text1 = utils.readFile('/tmp/d3/test1.txt', binary=False)
        d3text2 = utils.readFile('/tmp/d3/test2.txt', binary=False)
        d3bin1 = utils.readFile('/tmp/d3/test3.bin')
        d3bin2 = utils.readFile('/tmp/d3/test4.bin')
        assert d3text1 == text1
        assert d3text2 == text2
        assert d3bin1 == bindata1
        assert d3bin2 == bindata2
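
The getFile call tested here pairs with putTextFile, which the experiment scripts above use to publish per-station results. A minimal text round trip, assuming a FileInterface instance like the one constructed in this test and that getFile returns raw bytes (as the binary comparison above suggests); the path is hypothetical:

# Minimal text round trip through FileInterface (hypothetical path; assumes
# a FileInterface instance like the one constructed above).
outPath = '/tmp/d1/station_00_result.txt'
fileInterface.putTextFile(outPath, '0.73125')    # write a small text result
echoed = fileInterface.getFile(outPath)          # read the raw bytes back
assert echoed.decode() == '0.73125'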
Example #7
def getLocalDicomData(cfg, fullpath):
    projComm = projUtils.initProjectComm(None, False)
    fileInterface = FileInterface(filesremote=False, commPipes=projComm)
    fileInterface.initWatch(cfg.dicomDir, cfg.dicomNamePattern, 300000)
    dicomData = readRetryDicomFromFileInterface(fileInterface, fullpath)
    return dicomData
Example #8
    def setup_class(cls):
        TestFileInterface.fileWatcher = FileInterface()
Example #9
def main():
    logger = logging.getLogger()
    logger.setLevel(logLevel)
    logging.info('Face matching: first log message!')
    argParser = argparse.ArgumentParser()
    argParser.add_argument('--config',
                           '-c',
                           default=defaultConfig,
                           type=str,
                           help='experiment config file (.json or .toml)')
    argParser.add_argument('--runs',
                           '-r',
                           default='',
                           type=str,
                           help='Comma separated list of run numbers')
    argParser.add_argument('--scans',
                           '-s',
                           default='',
                           type=str,
                           help='Comma separated list of scan numbers')
    argParser.add_argument(
        '--deleteTmpNifti',
        '-d',
        default='1',
        type=str,
        help='Set to 0 if rerunning during a single scanning session after an error')
    # creates pipe communication link to send/request responses through web pipe
    argParser.add_argument(
        '--commpipe',
        '-q',
        default=None,
        type=str,
        help='Named pipe to communicate with projectInterface')
    argParser.add_argument('--filesremote',
                           '-x',
                           default=False,
                           action='store_true',
                           help='dicom files retrieved from remote server')

    args = argParser.parse_args()
    print(args)
    cfg = initializeFaceMatching(args.config, args)

    # DELETE ALL FILES IF FLAGGED TO #
    if args.deleteTmpNifti == '1':
        deleteTmpFiles(cfg)
    else:
        print('!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!')
        print('NOT DELETING NIFTIS IN tmp/convertedNiftis')
        print('!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!')
    # DELETE ALL FILES IF FLAGGED TO #

    # comm pipe
    projComm = projUtils.initProjectComm(args.commpipe, args.filesremote)
    fileInterface = FileInterface(filesremote=args.filesremote,
                                  commPipes=projComm)
    # initialize watching in particular directory
    fileInterface.initWatch(cfg.dicomDir, cfg.dicomNamePattern,
                            cfg.minExpectedDicomSize)
    #### MAIN PROCESSING ###
    nRuns = len(cfg.runNum)
    for runIndex in np.arange(nRuns):
        # Steps that we have to do:
        # 1. load run regressor X
        # 2. find the emotional face trials (happy) X
        # 3. find the rest TRs right before each one  X
        # At every TR --> register to MNI, mask, etc
        # 4. zscore previous rest data (convert + register like before)
        # 5. calculate percent signal change over ROI
        # 6. save as a text file (Every TR-- display can smooth it)
        # LOAD RUN REGRESSOR
        runNum = runIndex + 1
        regressor = getRegressorMatrix(cfg, runNum)
        happy_TRs = findConditionTR(regressor,
                                    int(cfg.HAPPY))  # 3 blocks 12 TRs each
        happy_TRs_shifted = happy_TRs + cfg.nTR_shift
        happy_TRs_shifted_filenum = happy_TRs_shifted + cfg.nTR_skip  # to account for first 2 files that we're skipping
        neutral_TRs = findConditionTR(regressor, int(cfg.NEUTRAL))
        neutral_TRs_shifted = neutral_TRs + cfg.nTR_shift
        object_TRs = findConditionTR(regressor, int(cfg.OBJECT))
        object_TRs_shifted = object_TRs + cfg.nTR_shift
        nBlocks = np.shape(happy_TRs)[0]
        nTR_per_block = np.shape(happy_TRs)[1]
        fixation_TRs, fixation_blocks = findFixationTR(regressor)
        fixation_TRs_shifted = fixation_TRs + cfg.nTR_shift
        fixation_blocks_shifted = fixation_blocks + cfg.nTR_shift
        all_other_categories_shifted = np.concatenate(
            (neutral_TRs_shifted, object_TRs_shifted, fixation_blocks_shifted),
            axis=0).flatten()

        runData = StructDict()
        runData.all_data = np.zeros((cfg.nVox, cfg.nTR_run - cfg.nTR_skip))
        runData.percent_change = np.zeros((cfg.nTR_run - cfg.nTR_skip, ))
        runData.percent_change[:] = np.nan
        runData.badVoxels = np.array([])

        makeRunHeader(cfg, runIndex)
        run = cfg.runNum[runIndex]
        scanNum = cfg.scanNum[runIndex]
        TRindex = 0
        for TRFilenum in np.arange(cfg.nTR_skip + 1,
                                   cfg.nTR_run + 1):  # iterate through all TRs
            if TRFilenum == cfg.nTR_skip + 1:  # wait until run starts
                timeout_file = 180
            else:
                timeout_file = 5
            A = time.time()
            dicomData = readRetryDicomFromFileInterface(fileInterface,
                                                        getDicomFileName(
                                                            cfg, scanNum,
                                                            TRFilenum),
                                                        timeout=timeout_file)
            full_nifti_name = convertToNifti(TRFilenum, scanNum, cfg,
                                             dicomData)
            registeredFileName = registerNewNiftiToMNI(cfg, full_nifti_name)
            maskedData = apply_mask(registeredFileName, cfg.mask_filename)
            runData.all_data[:, TRindex] = maskedData
            B = time.time()
            print('read to mask time: {:5f}'.format(B - A))

            if TRindex in happy_TRs_shifted:  # we're at a happy block
                # now get TRs to use for zscoring
                TRs_to_use_other_categories = np.sort(
                    all_other_categories_shifted[
                        all_other_categories_shifted < TRindex])
                avg_activity, runData = getAvgSignal(
                    TRs_to_use_other_categories, runData, TRindex, cfg)
                runData.percent_change[TRindex] = calculatePercentChange(
                    avg_activity, runData.all_data[:, TRindex])

                text_to_save = '{0:05f}'.format(
                    runData.percent_change[TRindex])
                file_name_to_save = getOutputFilename(run, TRindex)
                if cfg.mode == 'cloud':
                    full_filename_to_save = os.path.join(
                        cfg.intelrt.subject_full_day_path, file_name_to_save)
                else:
                    full_filename_to_save = os.path.join(
                        cfg.subject_full_day_path, file_name_to_save)
                fileInterface.putTextFile(full_filename_to_save, text_to_save)
                if args.commpipe:
                    # JUST TO PLOT ON WEB SERVER
                    projUtils.sendResultToWeb(projComm, run, int(TRindex),
                                              runData.percent_change[TRindex])
            TRheader = makeTRHeader(cfg, runIndex, TRFilenum, TRindex,
                                    runData.percent_change[TRindex])
            TRindex += 1
        # SAVE OVER RUN NP FILE
        runData.scanNum = scanNum  # save scanning number
        runData.subjectName = cfg.subjectName
        runData.dicomDir = cfg.dicomDir
        run_filename = getRunFilename(cfg.sessionId, run)
        full_run_filename_to_save = os.path.join(cfg.subject_full_day_path,
                                                 run_filename)
        #try:
        sio.savemat(full_run_filename_to_save, runData, appendmat=False)
        #except Exception as err:
        #    errorReply = self.createReplyMessage(msg, MsgResult.Errsor)
        #    errorReply.data = "Error: Unable to save blkGrpFile %s: %r" % (blkGrpFilename, err)
        #    return errorReply
    sys.exit(0)
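
Step 5 of the comment list at the top of the loop above relies on calculatePercentChange, which is not shown in this example. A hypothetical stand-in (not the project's actual implementation) illustrating one common definition: the mean voxelwise difference from the baseline average, expressed as a percentage of that average:

# Hypothetical stand-in for calculatePercentChange (not the project's actual code).
import numpy as np

def calculatePercentChangeSketch(avg_activity, current_activity):
    """Both inputs are 1-D voxel vectors over the same ROI mask."""
    ok = np.abs(avg_activity) > 0            # skip voxels with a zero baseline
    change = 100.0 * (current_activity[ok] - avg_activity[ok]) / avg_activity[ok]
    return np.mean(change)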
Example #10
def main(argv=None):
    """
	This is the main function that is called when you run 'finialize.py'.

	Here, you will load the configuration settings specified in the toml configuration 
	file, initiate the class fileInterface, and set up some directories and other 
	important things through 'finalize()'
	"""

    # define the parameters that will be recognized later on to set up fileInterface
    argParser = argparse.ArgumentParser()
    argParser.add_argument('--config',
                           '-c',
                           default=defaultConfig,
                           type=str,
                           help='experiment config file (.json or .toml)')
    # This parameter is used for projectInterface
    argParser.add_argument(
        '--commpipe',
        '-q',
        default=None,
        type=str,
        help='Named pipe to communicate with projectInterface')
    argParser.add_argument('--filesremote',
                           '-x',
                           default=False,
                           action='store_true',
                           help='retrieve files from the remote server')
    argParser.add_argument('--addr',
                           '-a',
                           default='localhost',
                           type=str,
                           help='server ip address')
    argParser.add_argument('--runs',
                           '-r',
                           default='',
                           type=str,
                           help='Comma separated list of run numbers')
    argParser.add_argument('--scans',
                           '-s',
                           default='',
                           type=str,
                           help='Comma separated list of scan numbers')
    args = argParser.parse_args(argv)

    # load the experiment configuration file
    cfg = utils.loadConfigFile(args.config)
    cfg = initialize(cfg, args)
    print(args.config)
    nRunsCompleted = finalize(cfg, args)
    # copy subject folders from server to local
    # subject-specific folder
    # everything in temp/convertedNiftis
    if args.filesremote:

        # open up the communication pipe using 'projectInterface'
        projectComm = projUtils.initProjectComm(args.commpipe,
                                                args.filesremote)

        # initiate the 'fileInterface' class, which will allow you to read and write
        #   files and many other things using functions found in 'fileClient.py'
        #   INPUT:
        #       [1] args.filesremote (to retrieve dicom files from the remote server)
        #       [2] projectComm (communication pipe that is set up above)
        fileInterface = FileInterface(filesremote=args.filesremote,
                                      commPipes=projectComm)

        # we don't need the tmp/convertedNiftis so first remove those
        tempNiftiDir = os.path.join(cfg.server.dataDir, 'tmp/convertedNiftis/')
        if os.path.exists(tempNiftiDir):
            projUtils.deleteFolder(tempNiftiDir)
            print(
                '!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!'
            )
            print('deleting temporary convertedNifti folder: ', tempNiftiDir)
            print(
                '!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!'
            )
        # next, go through each run and put each run data into local run folder
        for r in np.arange(nRunsCompleted):
            runNum = r + 1  # run numbers start at 1
            runId = 'run-{0:02d}'.format(runNum)
            runFolder = os.path.join(cfg.server.subject_full_day_path, runId,
                                     '*')
            listOfFiles = glob.glob(runFolder)
            runFolder_local = os.path.join(cfg.local.subject_full_day_path,
                                           runId)
            projUtils.downloadFilesFromList(fileInterface, listOfFiles,
                                            runFolder_local)
            print(
                '!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!'
            )
            print('downloading data to local computer: ', runFolder)
        # next delete the entire subject folder on the cloud
        # MAKE SURE THIS IS CORRECT FOR YOUR EXPERIMENT BEFORE YOU RUN
        subject_dir = os.path.join(cfg.server.dataDir, cfg.bids_id)
        print('FOLDER TO DELETE ON CLOUD SERVER: ', subject_dir)
        print(
            'IF THIS IS CORRECT, GO BACK TO THE CONFIG FILE USED ON THE WEB SERVER COMPUTER AND CHANGE THE FLAG FROM false --> true IN [server] deleteAfter'
        )
        if cfg.server.deleteAfter:
            print(
                '!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!'
            )
            print('DELETING SUBJECT FOLDER ON CLOUD SERVER: ', subject_dir)
            print(
                '!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!'
            )
            if os.path.exists(subject_dir):
                projUtils.deleteFolder(subject_dir)

    return 0