def testSimple(self):
    dictObj = {"clef1": "val1", "clef2": 2}
    sXml = xmlio.toXML(dictObj)
    dictObj2 = xmlio.fromXML(sXml)
    assert dictObj2 == dictObj

    obj = ["ggg", 5, (677, 56), "ff"]
    sXml = xmlio.toXML(obj)
    obj2 = xmlio.fromXML(sXml)
    assert obj2 == obj

def testNumpy(self): obj = {"array1": np.random.randn(10), "val1": np.int(5), "array2D": np.ones((4, 4))} sXml = xmlio.toXML(obj, handler=xmlio.xmlnumpy.NumpyXMLHandler()) obj2 = xmlio.fromXML(sXml) assert obj2.keys() == obj.keys() assert np.allclose(obj2["array1"], obj["array1"])
def setDummyInputData(self, xmlFile):
    f = open(xmlFile, "r")
    xml = f.read()
    f.close()
    t = xmlio.fromXML(xml)
    if t.data.data_type == "volume":
        dataFn = pyhrf.get_data_file_name("dummySmallBOLD.nii.gz")
        maskFn = pyhrf.get_data_file_name("dummySmallMask.nii.gz")
        sd = FMRISessionVolumicData(bold_file=dataFn)
        t.set_init_param("fmri_data",
                         FmriData.from_vol_ui(mask_file=maskFn,
                                              sessions_data=[sd]))
    elif t.data.data_type == "surface":
        fn = "real_data_surf_tiny_bold.gii"
        dataFn = pyhrf.get_data_file_name(fn)
        fn = "real_data_surf_tiny_parcellation.gii"
        maskFn = pyhrf.get_data_file_name(fn)
        fn = "real_data_surf_tiny_mesh.gii"
        meshFn = pyhrf.get_data_file_name(fn)
        sd = FMRISessionSurfacicData(bold_file=dataFn)
        t.set_init_param("fmri_data",
                         FmriData.from_surf_ui(mask_file=maskFn,
                                               mesh_file=meshFn,
                                               sessions_data=[sd]))
    else:
        raise Exception("Unsupported data type: %s" % t.data.data_type)
    f = open(xmlFile, "w")
    f.write(xmlio.toXML(t, handler=NumpyXMLHandler()))
    f.close()

def testSimple(self):
    ## print 'Simple ..'
    obj = Dummy()
    sXml = xmlio.toXML(obj)
    ## print 'xml:'
    ## print sXml
    xmlio.fromXML(sXml)

def testNumpy(self):
    ## print 'Numpy ..'
    obj = DummyNumpy()
    sXml = xmlio.toXML(obj)
    ## print 'Xml:'
    ## print sXml
    xmlio.fromXML(sXml)

def test_value_override(self):
    a = A()
    a.override_init("obj_t", T.from_param_c)
    # print 'after override ...'
    # print a._init_parameters
    xml = xmlio.toXML(a, handler=xmlio.xmlnumpy.NumpyXMLHandler(),
                      pretty=True)
    # print xml
    new_a = xmlio.fromXML(xml)

def testNested(self):
    ## print 'Nested objects ..'
    obj = DummyNested()
    sXml = xmlio.toXML(obj)
    ## print 'Xml:'
    ## print sXml
    xmlio.fromXML(sXml)

def test_xml_from_init(self):
    b = B()
    # print 'b.obj:', b.obj
    xml = xmlio.toXML(b, handler=xmlio.xmlnumpy.NumpyXMLHandler(),
                      pretty=True)
    # print 'xml:'
    # print xml
    b2 = xmlio.fromXML(xml)

def makeQuietOutputs(self, xmlFile):
    from pyhrf import xmlio
    from pyhrf.xmlio.xmlnumpy import NumpyXMLHandler

    t = xmlio.fromXML(file(xmlFile).read())
    t.set_init_param('output_dir', None)
    f = open(xmlFile, 'w')
    f.write(xmlio.toXML(t, handler=NumpyXMLHandler()))
    f.close()

def makeQuietOutputs(self, xmlFile):
    # print 'makeQuietOutputs ...'
    # print 'xmlFile:', xmlFile
    t = xmlio.fromXML(file(xmlFile).read())
    t.set_init_param("output_dir", None)
    f = open(xmlFile, "w")
    f.write(xmlio.toXML(t, handler=NumpyXMLHandler()))
    f.close()

def test_xml_from_classmethod_init(self):
    b = B.from_stuff(a=4, b=3)
    # print 'b.obj:', b.obj
    np.testing.assert_equal(b.obj, [4, 3])
    xml = xmlio.toXML(b, handler=xmlio.xmlnumpy.NumpyXMLHandler(),
                      pretty=True)
    # print 'xml:'
    # print xml
    b2 = xmlio.fromXML(xml)
    # print 'b2.obj:', b2.obj
    np.testing.assert_equal(b2.obj, [4, 3])

def test_JDEMCMCAnalyzerXML(self):
    from pyhrf.ui.jde import JDEMCMCAnalyser
    from pyhrf.jde.models import BOLDGibbsSampler

    a = JDEMCMCAnalyser(sampler=BOLDGibbsSampler({"nbIterations": 42}))
    # print 'a -- nbIterations:', a.sampler.nbIterations
    axml = xmlio.toXML(a, handler=xmlio.xmlnumpy.NumpyXMLHandler(),
                       pretty=True)
    # print 'axml:'
    # print axml
    a2 = xmlio.fromXML(axml)
    # print 'a2 -- nbIterations:', a2.sampler.nbIterations
    self.assertEqual(a2.sampler.nbIterations, 42)

def setSimulationData(self, xmlFile, simu_file):
    f = open(xmlFile, "r")
    xml = f.read()
    f.close()
    t = xmlio.fromXML(xml)
    sd = FMRISessionSimulationData(simulation_file=simu_file)
    t.set_init_param("fmri_data", FmriData.from_simu_ui(sessions_data=[sd]))
    f = open(xmlFile, "w")
    sxml = xmlio.toXML(t, handler=NumpyXMLHandler())
    # print 'sxml:'
    # print sxml
    f.write(sxml)
    f.close()

def test_ordered_dict(self):
    try:
        from collections import OrderedDict
    except ImportError:
        from pyhrf.tools.backports import OrderedDict

    d = OrderedDict([("a", 1), ("b", 2)])
    sXml = xmlio.toXML(d)
    # print 'sXml:'
    # print sXml
    d2 = xmlio.fromXML(sXml)
    self.assertEqual(d, d2)

def test_TreatmentXML(self):
    # pyhrf.verbose.set_verbosity(6)
    from pyhrf.ui.jde import JDEMCMCAnalyser
    from pyhrf.jde.models import BOLDGibbsSampler
    from pyhrf.ui.treatment import FMRITreatment

    a = JDEMCMCAnalyser(sampler=BOLDGibbsSampler({"nbIterations": 42}))
    t = FMRITreatment(analyser=a)
    # print 't -- nbIterations:', t.analyser.sampler.nbIterations
    txml = xmlio.toXML(t, handler=xmlio.xmlnumpy.NumpyXMLHandler(),
                       pretty=True)
    # print 'txml:'
    # print txml
    t2 = xmlio.fromXML(txml)
    # print 't2 -- nbIterations:', t2.analyser.sampler.nbIterations
    self.assertEqual(t2.analyser.sampler.nbIterations, 42)

def run_pyhrf_cmd_treatment(cfg_cmd, exec_cmd, default_cfg_file,
                            default_profile_file, label_for_cluster):

    usage = 'usage: %prog [options]'
    description = 'Manage a joint detection-estimation treatment of fMRI ' \
        'data. This command runs the treatment defined in an XML parameter ' \
        'file. See the pyhrf_jde_buildcfg command to build a template of ' \
        'such a file. If no XML file is found, a default example analysis ' \
        'is run.'

    parser = OptionParser(usage=usage, description=description)
    parser.add_option('-c', '--input-cfg-file', metavar='XMLFILE',
                      dest='cfgFile', default=default_cfg_file,
                      help='Configuration file: XML file containing '
                      'parameters defining input data and analysis to '
                      'perform.')
    parser.add_option('-r', '--roi-data', metavar='PICKLEFILE',
                      dest='roidata', default=None,
                      help='Input fMRI ROI data. The data definition part '
                      'in the config file is ignored.')
    parser.add_option('-t', '--treatment_pck', metavar='PICKLEFILE',
                      dest='treatment_pck', default=None,
                      help='Input treatment as a pickle dump. '
                      'The XML cfg file is ignored.')
    parser.add_option('-s', '--stop-on-error', dest='stop_on_error',
                      action='store_true', default=False,
                      help='For debug: do not continue if an error occurs '
                      'during one ROI analysis.')
    parser.add_option('-v', '--verbose', dest='verbose', metavar='INTEGER',
                      type='int', default=0,
                      help=dictToString(pyhrf.verboseLevels))
    parser.add_option('-p', '--profile', action='store_true', default=False,
                      help='Enable profiling of treatment. Store profile '
                      'data in %s. NOTE: not available in parallel mode.'
                      % default_profile_file)

    parallel_choices = ['LAN', 'local', 'cluster']
    parser.add_option('-x', '--parallel', choices=parallel_choices,
                      help='Parallel processing. Choices are %s'
                      % string.join(parallel_choices, ', '))

    (options, args) = parser.parse_args()
    pyhrf.verbose.set_verbosity(options.verbose)

    t0 = time.time()

    if options.treatment_pck is not None:
        f = open(options.treatment_pck)
        treatment = cPickle.load(f)
        f.close()
    else:
        if not os.path.exists(options.cfgFile):
            print 'Error: could not find default configuration file "%s"\n' \
                'Consider running "%s" to generate it.' \
                % (options.cfgFile, cfg_cmd)
            sys.exit(1)
        else:
            pyhrf.verbose(1, 'Loading configuration from: "%s" ...'
                          % options.cfgFile)
            f = open(options.cfgFile, 'r')
            sXml = string.join(f.readlines())
            f.close()
            treatment = xmlio.fromXML(sXml)
            if 0:
                sXml = xmlio.toXML(treatment,
                                   handler=xmlio.xmlnumpy.NumpyXMLHandler())
                f = './treatment_cmd.xml'
                fOut = open(f, 'w')
                fOut.write(sXml)
                fOut.close()
            #f = open(fOut, 'w')
            #cPickle.dump(treatment, f)
            #f.close()

    treatment.analyser.set_pass_errors(not options.stop_on_error)

    if options.parallel is not None:
        # tmpDir = tempfile.mkdtemp(prefix='pyhrf',
        #                           dir=pyhrf.cfg['global']['tmp_path'])
        # pyhrf.verbose(1, 'Tmpdir: %s' %tmpDir)
        treatment.run(parallel=options.parallel)
    else:
        if options.roidata is not None:
            #treatment.set_roidata(options.roidata)
            pyhrf.verbose(1, 'Loading ROI data from: "%s" ...'
                          % options.roidata)
            roidata = cPickle.load(open(options.roidata))
            roidata.verbosity = pyhrf.verbose.verbosity
            if pyhrf.verbose > 1:
                print roidata.getSummary()
            #TODO: enable profiling
            pyhrf.verbose(1, 'Launching analysis ...')
            if options.profile:
                cProfile.runctx("result = treatment.analyser(roidata)",
                                globals(),
                                {'treatment': treatment, 'roidata': roidata},
                                default_profile_file)
            else:
                result = treatment.analyser(roidata)
                outPath = op.dirname(op.abspath(options.roidata))
                fOut = op.join(outPath,
                               "result_%04d.pck" % roidata.get_roi_id())
                pyhrf.verbose(1, 'Dumping results to %s ...' % fOut)
                f = open(fOut, 'w')
                cPickle.dump(result, f)
                f.close()
        else:
            pyhrf.verbose(1, 'ROI data is none')
            if options.profile:
                cProfile.runctx("treatment.run()", globals(),
                                {'treatment': treatment},
                                default_profile_file)
            else:
                #print 'treatment:', treatment
                treatment.run()

    pyhrf.verbose(1, 'Estimation done, took %s'
                  % format_duration(time.time() - t0))

            newOutputs[pname] = ov
    outputs = newOutputs

    if output_dir is not None:
        pyhrf.verbose(1, 'Writing outputs ...')
        pyhrf.verbose(1, 'Output directory: ' + output_dir)
        oxml = {}
        for on, ov in outputs.iteritems():
            oxml[on] = xndarrayXml2.fromxndarray(ov, label=on,
                                                 outDir=output_dir,
                                                 relativePath=False,
                                                 meta_data=meta_data)
        if len(target_shape) == 3:  # volumic data
            so = xmlio.toXML(oxml, handler=NumpyXMLHandler())
            out_file = op.join(output_dir, self.outFile)
            f = open(out_file, 'w')
            f.write(so)
            f.close()
        else:  # surfacic data
            #TODO: xml output ... ?
            for name, out in outputs.items():
                fn = op.join(output_dir, name + '.gii')
                writexndarrayToTex(out, fn)
    else:
        pyhrf.verbose(1, "No output (file not specified)")

    return outputs

def joinOutputs(self, cuboids, roiIds, mappers):
def create_treatment_surf(boldFiles, parcelFile, meshFile, dt, tr,
                          paradigmFile, nbIterations=4000,
                          writeXmlSetup=True, parallelize=False,
                          outputDir=None, outputSuffix=None,
                          outputPrefix=None, contrasts=';', beta=.6,
                          estimBeta=True, pfMethod='ps', estimHrf=True,
                          hrfVar=.01, roiIds=None, nbClasses=2,
                          gzip_rdump=False, simulation_file=None,
                          make_outputs=True):

    if roiIds is None:
        roiIds = np.array([], dtype=int)

    outFile = make_outfile(DEFAULT_OUTPUT_FILE_JDE, outputDir,
                           outputPrefix, outputSuffix)
    outDump = make_outfile(DEFAULT_DUMP_FILE, outputDir,
                           outputPrefix, outputSuffix)
    if gzip_rdump:
        outDump += '.gz'

    if contrasts is not None:
        # split the contrast definition string on ';'
        cons = dict(("con_%d" % i, ce)
                    for i, ce in enumerate(contrasts.split(";")))
    else:
        cons = {}

    if nbClasses == 2:
        sampler = BG({
            BG.P_NB_ITERATIONS: nbIterations,
            # level of spatial correlation = beta
            BG.P_BETA: BS({
                BS.P_VAL_INI: np.array([beta]),
                BS.P_SAMPLE_FLAG: estimBeta,
                BS.P_PARTITION_FUNCTION_METH: pfMethod,
            }),
            # HRF
            BG.P_HRF: HS({
                HS.P_SAMPLE_FLAG: estimHrf,
            }),
            # HRF variance
            BG.P_RH: HVS({
                HVS.P_SAMPLE_FLAG: False,
                HVS.P_VAL_INI: np.array([hrfVar]),
            }),
            # neural response levels (stimulus-induced effects)
            BG.P_NRLS: NS({
                NS.P_CONTRASTS: cons,
            }),
        })
    elif nbClasses == 3:
        sampler = BG3({
            BG.P_NB_ITERATIONS: nbIterations,
            # level of spatial correlation = beta
            BG.P_BETA: BS({
                BS.P_VAL_INI: np.array([beta]),
                BS.P_SAMPLE_FLAG: estimBeta,
                BS.P_PARTITION_FUNCTION_METH: pfMethod,
            }),
            # HRF
            BG.P_HRF: HS({
                HS.P_SAMPLE_FLAG: estimHrf,
            }),
            # HRF variance
            BG.P_RH: HVS({
                HVS.P_SAMPLE_FLAG: False,
                HVS.P_VAL_INI: np.array([hrfVar]),
            }),
            # neural response levels (stimulus-induced effects)
            BG.P_NRLS: NS3({
                NS.P_CONTRASTS: cons,
            }),
        })
    else:
        raise ValueError("Unsupported number of classes: %d" % nbClasses)

    analyser = JDEMCMCAnalyser(sampler, dt=dt)

    fmri_data = FmriData.from_surf_files(paradigmFile, boldFiles, tr,
                                         meshFile, parcelFile)

    if simulation_file is not None:
        f_simu = open(simulation_file)
        simulation = cPickle.load(f_simu)
        f_simu.close()
        fmri_data.simulation = simulation

    tjde = FMRITreatment(fmri_data, analyser, outputDir)
    #print 'make_outputs:', make_outputs

    sxml = xmlio.toXML(tjde, handler=NumpyXMLHandler())
    if writeXmlSetup is not None and outputDir is not None:
        outSetupXml = make_outfile(DEFAULT_CFG_FILE_JDE, outputDir,
                                   outputPrefix, outputSuffix)
        pyhrf.verbose(1, "Writing XML setup to: " + outSetupXml)
        f = open(outSetupXml, 'w')
        f.write(sxml)
        f.close()
    else:
        outSetupXml = None

    return tjde, outSetupXml

def save_treatment(t, f):
    sXml = xmlio.toXML(t, handler=xmlio.xmlnumpy.NumpyXMLHandler())
    fOut = open(f, "w")
    fOut.write(sXml)
    fOut.close()

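# A minimal counterpart to save_treatment, sketched from the same
# xmlio.fromXML(<xml string>) round-trip used throughout this file.
# The name load_treatment is illustrative here, not a confirmed pyhrf API.
def load_treatment(f):
    fIn = open(f, "r")
    sXml = fIn.read()
    fIn.close()
    return xmlio.fromXML(sXml)
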
def test_write_callback(self):
    a = A()
    handler = xmlio.xmlnumpy.NumpyXMLHandler(write_callback=hack_a)
    xml = xmlio.toXML(a, handler=handler, pretty=True)