def getListOfAvailableFLVs(self):
    """Return list of .flv files that can be streamed."""
    scope = Red5.getConnectionLocal().getScope()
    serverRoot = System.getProperty('red5.root')
    filesMap = HashMap()
    try:
        print 'Getting the FLV files'
        flvs = scope.getResources("streams/*.flv")
        for file in flvs:
            fso = File(serverRoot + '/webapps/oflaDemo' + file.path)
            flvName = fso.getName()
            flvBytes = 0
            if hasattr(fso, 'length'):
                flvBytes = fso.length()
            else:
                print 'Length not found'
            lastMod = '0'
            if hasattr(fso, 'lastModified'):
                lastMod = self.formatDate(Date(fso.lastModified()))
            else:
                log.debug('Last modified not found')
            print 'FLV Name:', flvName
            print 'Last modified date:', lastMod
            print 'Size:', flvBytes
            print '-------'
            fileInfo = HashMap(3)
            fileInfo["name"] = flvName
            fileInfo["lastModified"] = lastMod
            fileInfo["size"] = flvBytes
            filesMap[flvName] = fileInfo
    except Exception, e:
        print 'Error in getListOfAvailableFLVs:', e
    return filesMap
def loadRasterLayer(rasterfile, mode="r"):
    ## Load a Raster file in a Layer
    sourceFileName[0] = rasterfile
    if not isinstance(rasterfile, File):
        rasterfile = File(rasterfile)
    name, ext = splitext(rasterfile.getName())
    view = currentView()
    # Get the managers to use
    dalManager = DALLocator.getDataManager()
    mapContextManager = MapContextLocator.getMapContextManager()
    if ext.lower() == ".ecw" or ext.lower() == ".jp2":
        # FIXME
        pass
    elif ext.lower() == ".mrsid":
        # FIXME
        pass
    else:
        # Create the parameters to open the raster store based on GDAL
        params = dalManager.createStoreParameters("Gdal Store")
        params.setFile(rasterfile)
        # Create the raster store
        dataStore = dalManager.createStore(params)
        # Create a raster layer based on this raster store
        layer = mapContextManager.createLayer(name, dataStore)
        view.addLayer(layer)
        return layer
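# A minimal usage sketch of loadRasterLayer(), assuming it runs inside the
# gvSIG scripting console where currentView() is available; the GeoTIFF
# path is hypothetical.
layer = loadRasterLayer("/data/rasters/srtm_dem.tif")
print layer.getName()   # should echo "srtm_dem"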
def batch_export():
    from Experiment import config
    dss = __DATASOURCE__.getSelectedDatasets()
    if dss is None or len(dss) == 0:
        print 'Please select one or more files to export.'
        return
    path = selectSaveFolder()
    if path == None:
        return
    fi = File(path)
    if not fi.exists():
        if not fi.mkdir():
            print 'Error: failed to make directory: ' + path
            return
    eid = int(experiment_id.value)
    exp_folder = path + '/exp' + str(eid)
    fi = File(exp_folder)
    if not fi.exists():
        if not fi.mkdir():
            print 'Error: failed to make directory: ' + exp_folder
            return
    ILL_folder = exp_folder + '/ILLfiles'
    exp_folder += '/Datafiles'
    fi = File(exp_folder)
    if not fi.exists():
        if not fi.mkdir():
            print 'Error: failed to make directory: ' + exp_folder
            return
    fi = File(ILL_folder)
    if not fi.exists():
        if not fi.mkdir():
            print 'Error: failed to make directory: ' + ILL_folder
            return
    count = 0
    for dinfo in dss:
        loc = dinfo.getLocation()
        f = File(loc)
        fsn = f.getName()
        # count = int(fsn[3:10])
        # new_fname = 'TAIPAN_exp' + ('%(value)04d' % {'value':eid}) + '_scan' + ('%(value)04d' % {'value':count}) + '.dat'
        df.datasets.clear()
        try:
            export.graffiti_export(df, loc, exp_folder, eid, get_prof_value)
        except:
            traceback.print_exc()
            traceback.print_exc(file=__buffer_logger__)
        df.datasets.clear()
        try:
            export.ILL_export(df, loc, ILL_folder, eid, get_prof_value)
        except:
            traceback.print_exc()
            traceback.print_exc(file=__buffer_logger__)
        # export.graffiti_export(df, loc, exp_folder, eid, get_prof_value)
        # df.datasets.clear()
        # export.ILL_export(df, loc, ILL_folder, eid, get_prof_value)
    print 'done'
def run(self, ctx):
    prj = ctx.getMainProject()
    assert prj, 'Need a project'
    artifactFile = File(self.path)
    a = Artifact(artifactFile.getName(), FileInput(artifactFile))
    print(a)
    la = prj.processArtifact(a)
    print(la)
def updated(controller, newValue):
    global Plot1
    global data
    global axis
    global fileId
    print 'updated'
    newCount = int(newValue.getStringData())
    scanpoint = scanpoint_node.getValue().getIntData()
    variable_value = scan_variable_value_node.getValue().getFloatData()
    if scanpoint == 1:
        data = [newCount]
        axis = [variable_value]
    else:
        data.append(newCount)
        axis.append(variable_value)
    variable_name = scan_variable_node.getValue().getStringData()
    axis_name.value = variable_name
    try:
        checkFile = File(filenameNode.getValue().getStringData())
        dataPath = System.getProperty('sics.data.path')
        checkFile = File(dataPath + "/" + checkFile.getName())
        filePath = checkFile.getAbsolutePath()
        if not checkFile.exists():
            print "The target file " + checkFile.getAbsolutePath() + " cannot be found"
        dataSourceViewer.addDataset(filePath, False)
        fileId = checkFile.getName()
        if fileId.__contains__('TPN') and fileId.__contains__('.nx.hdf'):
            fileId = str(int(fileId[3:10]))
    except:
        print 'failed to load the file'
        fileId = variable_name
    print fileId
    ds = Dataset(data, axes=[axis], title=fileId)
    print ds
    Plot1.set_dataset(ds)
    Plot1.x_label = variable_name
    Plot1.y_label = 'counts'
    Plot1.title = 'counts vs ' + variable_name
def split(path):
    if sys.platform.startswith('java'):
        from java.io import File
        f = File(path)
        d = f.getParent()
        if not d:
            if f.isAbsolute():
                d = path
            else:
                d = ""
        return (d, f.getName())
    else:
        import os
        return os.path.split(path)
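# Quick sanity checks for split() — a sketch assuming POSIX-style paths; on
# Jython the java.io.File branch runs, on CPython the os.path branch, and
# both should agree on these cases.
print split('/var/log/syslog')   # ('/var/log', 'syslog')
print split('syslog')            # ('', 'syslog') -- bare name, empty dirname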
def batch_export():
    from Experiment import config
    dss = __get_selected_files__()
    if dss is None or len(dss) == 0:
        print 'Please select one or more files to export.'
        return
    path = selectSaveFolder()
    if path == None:
        return
    fi = File(path)
    if not fi.exists():
        if not fi.mkdir():
            print 'Error: failed to make directory: ' + path
            return
    eid = int(experiment_id.value)
    exp_folder = path + '/exp' + str(eid)
    fi = File(exp_folder)
    if not fi.exists():
        if not fi.mkdir():
            print 'Error: failed to make directory: ' + exp_folder
            return
    ILL_folder = exp_folder + '/ILLfiles'
    exp_folder += '/Datafiles'
    fi = File(exp_folder)
    if not fi.exists():
        if not fi.mkdir():
            print 'Error: failed to make directory: ' + exp_folder
            return
    fi = File(ILL_folder)
    if not fi.exists():
        if not fi.mkdir():
            print 'Error: failed to make directory: ' + ILL_folder
            return
    count = 0
    flist = []
    for loc in dss:
        f = File(loc)
        fsn = f.getName()
        # count = int(fsn[3:10])
        # new_fname = 'TAIPAN_exp' + ('%(value)04d' % {'value':eid}) + '_scan' + ('%(value)04d' % {'value':count}) + '.dat'
        df.datasets.clear()
        fn = export.graffiti_export(df, loc, exp_folder, eid, get_pref_value)
        flist.append(fn)
        df.datasets.clear()
        fn = export.ILL_export(df, loc, ILL_folder, eid, get_pref_value)
        flist.append(fn)
    if len(flist) > 0:
        zip_files(flist, 'TAIPAN_rd_' + str(int(time.time()))[2:] + '.zip')
    print 'done'
def main():
    print "main"
    prefs()
    projection()
    importer = OsmImporter()
    fileObj = File('/home/mdupont/experiments/josm/topeka/noto.osm')
    # fileObj = File('/home/mdupont/experiments/josm/topeka/topeka.osm')
    inobj = FileInputStream(fileObj)
    data = importer.loadLayer(inobj, fileObj, fileObj.getName(), NullProgressMonitor.INSTANCE)
    s = data.toString()
    # print s
    primitives = data.getLayer().data.allPrimitives()
    # print primitives
    objs = primitives.toArray()
    # DisplayTable(objs)
    streetlist(objs)
def actionPerformed(self, actionEvent):
    fc = JFileChooser(constants_lib.const_path_dict["LINAC_WIZARD_FILES_DIR_PATH"])
    fc.setDialogTitle("Save data into the file ...")
    fc.setApproveButtonText("Save")
    fl_filter = FileNameExtensionFilter("SCL Wizard", ["sclw",])
    fc.setFileFilter(fl_filter)
    returnVal = fc.showOpenDialog(self.linac_wizard_document.linac_wizard_window.frame)
    if returnVal == JFileChooser.APPROVE_OPTION:
        fl_out = fc.getSelectedFile()
        fl_path = fl_out.getPath()
        # append the extension if the chosen name does not already end in ".sclw"
        if fl_path.rfind(".sclw") != (len(fl_path) - 5):
            fl_out = File(fl_out.getPath() + ".sclw")
        io_controller = self.linac_wizard_document.getIO_Controller()
        io_controller.writeData(fl_out.toURI().toURL())
        io_controller.old_fl_out_name = fl_out.getName()
        self.linac_wizard_document.linac_wizard_window.setTitle(io_controller.old_fl_out_name)
def valueChanged(self, controller, newValue):
    global __file_to_add__
    newCount = int(newValue.getStringData())
    if newCount != self.saveCount:
        self.saveCount = newCount
        try:
            checkFile = File(__file_name_node__.getValue().getStringData())
            checkFile = File(__data_folder__ + "/" + checkFile.getName())
            __file_to_add__ = checkFile.getAbsolutePath()
            if not checkFile.exists():
                print "The target file " + __file_to_add__ + " cannot be found"
                return
            runnable = __Display_Runnable__()
            runnable.run = add_dataset
            Display.getDefault().asyncExec(runnable)
        except:
            print 'failed to add dataset ' + __file_to_add__
def run(self, ctx):
    engctx = ctx.getEnginesContext()
    if not engctx:
        print('Back-end engines not initialized')
        return
    projects = engctx.getProjects()
    if not projects:
        print('There is no opened project')
        return
    prj = projects[0]
    artifactFile = File(self.path)
    a = Artifact(artifactFile.getName(), FileInput(artifactFile))
    print(a)
    la = prj.processArtifact(a)
    print(la)
def lstat(path):
    """lstat(path) -> stat result

    Like stat(path), but do not follow symbolic links.
    """
    abs_path = sys.getPath(path)
    try:
        return stat_result.from_jnastat(_posix.lstat(abs_path))
    except NotImplementedError:
        pass
    except:
        raise
    f = File(sys.getPath(path))
    # XXX: jna-posix implements similar link detection in
    # JavaFileStat.calculateSymlink, fallback to that instead when not
    # native
    abs_parent = f.getAbsoluteFile().getParentFile()
    if not abs_parent:
        # root isn't a link
        return stat(path)
    can_parent = abs_parent.getCanonicalFile()

    if can_parent.getAbsolutePath() == abs_parent.getAbsolutePath():
        # The parent directory's absolute path is canonical..
        if f.getAbsolutePath() != f.getCanonicalPath():
            # but the file's absolute and canonical paths differ (a
            # link)
            return stat_result((_stat.S_IFLNK, 0, 0, 0, 0, 0, 0, 0, 0, 0))
    else:
        # The parent directory's path is not canonical (one of the parent
        # directories is a symlink). Build a new path with the parent's
        # canonical path and compare the files
        f = File(_path.join(can_parent.getAbsolutePath(), f.getName()))
        if f.getAbsolutePath() != f.getCanonicalPath():
            return stat_result((_stat.S_IFLNK, 0, 0, 0, 0, 0, 0, 0, 0, 0))

    # Not a link, only now can we determine if it exists (because
    # File.exists() returns False for dead links)
    if not f.exists():
        raise OSError(errno.ENOENT, strerror(errno.ENOENT), path)
    return stat(path)
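# A short sketch of how a caller distinguishes links via the fallback above;
# the path is hypothetical (e.g. created with `ln -s target /tmp/mylink`).
import stat as _stat_module
st = lstat('/tmp/mylink')
print _stat_module.S_ISLNK(st.st_mode)   # True for a symlink, False otherwise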
def saved(controller, newValue):
    global __UI__
    if pause.value:
        return
    newCount = int(newValue.getStringData())
    axis_name.value = scan_variable_node.getValue().getStringData()
    # __UI__.getScriptExecutor().runScript(cmd)
    log('scanpoint=' + str(scanpoint_node.getValue().getIntData() + 1), __writer__)
    if newCount > 0:
        try:
            checkFile = File(filenameNode.getValue().getStringData())
            dataPath = System.getProperty('sics.data.path')
            checkFile = File(dataPath + "/" + checkFile.getName())
            filePath = checkFile.getAbsolutePath()
            if not checkFile.exists():
                log("The target file " + checkFile.getAbsolutePath() + " cannot be found")
            dataSourceViewer.addDataset(filePath, True)
        except:
            log('failed to load the file')
def loadStore(self, rasterfile, mode="r"):
    if not isinstance(rasterfile, File):
        rasterfile = File(rasterfile)
    name, ext = splitext(rasterfile.getName())
    dalManager = DALLocator.getDataManager()
    if ext.lower() == ".ecw" or ext.lower() == ".jp2":
        # FIXME
        pass
    elif ext.lower() == ".mrsid":
        # FIXME
        pass
    else:
        # Create the parameters to open the raster store based on GDAL
        params = dalManager.createStoreParameters("Gdal Store")
        params.setFile(rasterfile)
        # Create the raster store
        dataStore = dalManager.createStore(params)
        return dataStore
def openNewItem(self, itempath):
    itempath = self.normalizeItemFileName(itempath)
    # Construct a sane default instance of an item
    defaultcontent = self.defaultItemContent
    # Add the item to the current model
    # ... use xom to create a new document model object with default content,
    # then open the new item from the model
    fileitem = File(itempath)
    unparsedcontent = defaultcontent
    try:
        self.view.status("Examining file " + fileitem.name)
        doc = Builder().build(unparsedcontent, "")
        summary = XQueryUtil.xquery(doc, "/story/@title").get(0).getValue()
        content = doc.toXML()
        self.model.addItemToProject(fileitem, summary, content)
    except XMLException, e:
        ndebug(unparsedcontent)
        self.view.status("Can't find an XML parser: " + e.toString())
        ndebug("Can't find an XML parser: " + e.toString())
        self.model.addItemToProject(
            fileitem,
            fileitem.getName() + " (Warning: not well-formed: ``" + e.toString() + "'')",
            unparsedcontent)
class CommandHeap(Commands):

    def __init__(self):
        pass

    def buildCommands(self, s):
        self.path = File(s)
        files = []
        if self.path.isDirectory():
            self.addCommands(self.path, files)
        self.heap = Heap(files)

    def addCommands(self, path, files):
        commands = path.listFiles()
        for x in commands:
            if x.isDirectory():
                self.addCommands(x, files)
            else:
                files.append(x)

    def exists(self, command):
        return self.heap.exists(command)

    def update(self):
        # Rebuild from the stored path; getName() would yield only the
        # base name and point at the wrong directory.
        self.buildCommands(self.path.getPath())

    def getAllCommands(self):
        return self.heap.getAll()
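# A minimal usage sketch, assuming the Heap class used by CommandHeap exposes
# exists()/getAll() as called above; the directory is hypothetical.
commands = CommandHeap()
commands.buildCommands('/opt/app/commands')
print commands.exists('deploy')
for f in commands.getAllCommands():
    print f.getName()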
def ILL_export(df, input_path, exp_folder, eid, get_prof_value):
    from Experiment import config
    f = File(input_path)
    fsn = f.getName()
    fid = int(fsn[3:10])
    # index = fsn.rindex('.')
    # if index > 0 and index <= len(fsn) - 2 :
    #     fsn = fsn[:index]
    new_fname = fsn[:fsn.index('.')] + '.ILL'
    nfn = exp_folder + '/' + new_fname
    ds = df[str(input_path)]
    if len(ds.axes) > 0:
        axes = ds.axes
    else:
        axes = None
    ds.y_title = 'detector'
    nf = open(nfn, 'w')
    try:
        text = []
        text.append('R' * 80 + '\n')
        text.append(' ' + str(fid) + ' 1 0' + '\n')
        text.append('TAIPAN TAS data in the new ASCII format follow after the line VV...V' + '\n')
        text.append('A' * 80 + '\n')
        text.append(' 80 0' + '\n')
        text.append('TAIPAN\t' + str(ds['user_name']) + '\t' + str(ds['start_time']) + '\n')
        text.append('V' * 80 + '\n')
        text.append('INSTR: TAIPAN' + '\n')
        text.append('EXPNO: ' + str(eid) + '\n')
        text.append('USER_: ' + str(ds['user_name']) + '\n')
        text.append('LOCAL: ' + get_prof_value(LOCAL_CONTACT_PNAME) + '\n')
        text.append('FILE_: ' + fsn + '\n')
        text.append('DATE_: ' + str(ds['start_time']) + '\n')
        text.append('TITLE: ' + str(ds['experiment_title']) + '\n')
        text.append('COMND: ' + '' + '\n')
        if ds.size > 1:
            text.append('POSQE: QH=' + format(ds['qh'][0], 4, 8) + ', QK=' + format(ds['qk'][0], 4, 8) + ', QL=' + format(ds['ql'][0], 4, 8) + ', EN=' + format(ds['en'][0], 4, 9) + ', UN=meV' + '\n')
        else:
            try:
                text.append('POSQE: QH=' + format(ds['qh'], 4, 8) + ', QK=' + format(ds['qk'], 4, 8) + ', QL=' + format(ds['ql'], 4, 8) + ', EN=' + format(ds['en'], 4, 9) + ', UN=meV' + '\n')
            except:
                pass
        step = 0
        axis = None
        if axes != None:
            axis = axes[-1]
        else:
            axis = ds['suid']
        if axis.size > 1:
            step = (axis[-1] - axis[0]) / (axis.size - 1)
        text.append('STEPS: ' + axis.title + '=' + format(step, 2, 6) + '\n')
        sense = get_prof_value(SENSE_PNAME)
        sm = ''
        ss = ''
        sa = ''
        try:
            sm = sense[0] + '1'
            ss = sense[1] + '1'
            sa = sense[2] + '1'
        except:
            pass
        text.append('PARAM: DM=3.355, \tDA=3.355, \tSM=' + sm + ', \tSS=' + ss + ', \tSA=' + sa + '\n')
        text.append('PARAM: FX=2, \tKFIX=14.87' + '\n')
        alf1 = ''
        alf2 = ''
        alf3 = ''
        alf4 = ''
        hcoll = get_prof_value(HORIZONTALCOLLIMATION_PNAME)
        try:
            sep = ' '
            if hcoll.__contains__(','):
                sep = ','
            llist = hcoll.split(sep)
            for i in xrange(len(llist)):
                if llist[i] == ' ':
                    del llist[i]
            alf1 = llist[0].strip()
            alf2 = llist[1].strip()
            alf3 = llist[2].strip()
            alf4 = llist[3].strip()
        except:
            pass
        bet1 = ''
        bet2 = ''
        bet3 = ''
        bet4 = ''
        vcoll = get_prof_value(VERTICALCOLLIMATION_PNAME)
        try:
            sep = ' '
            if vcoll.__contains__(','):
                sep = ','
            llist = vcoll.split(sep)
            for i in xrange(len(llist)):
                if llist[i] == ' ':
                    del llist[i]
            bet1 = llist[0].strip()
            bet2 = llist[1].strip()
            bet3 = llist[2].strip()
            bet4 = llist[3].strip()
        except:
            pass
        text.append('PARAM: ALF1=' + alf1 + '\t, ALF2=' + alf2 + '\t, ALF3=' + alf3 + '\t, ALF4=' + alf4 + '\t' + '\n')
        text.append('PARAM: BET1=' + bet1 + '\t, BET2=' + bet2 + '\t, BET3=' + bet3 + '\t, BET4=' + bet4 + '\t' + '\n')
        text.append('PARAM: ETAM=30, \tETAA=30' + '\n')
        las = ''
        lbs = ''
        lcs = ''
        laa = ''
        lbb = ''
        lcc = ''
        try:
            ub_cell = sicsext.runCommand('tasub cell').strip()
            llist = ub_cell.split(' ')
            las = llist[0].strip()
            lbs = llist[1].strip()
            lcs = llist[2].strip()
            laa = llist[3].strip()
            lbb = llist[4].strip()
            lcc = llist[5].strip()
        except:
            lattice = get_prof_value(LATTICECONSTANTS_PNAME).strip()
            try:
                sep = ' '
                if lattice.__contains__(','):
                    sep = ','
                llist = lattice.split(sep)
                for i in xrange(len(llist)):
                    if llist[i] == ' ':
                        del llist[i]
                las = llist[0].strip()
                lbs = llist[1].strip()
                lcs = llist[2].strip()
                laa = llist[3].strip()
                lbb = llist[4].strip()
                lcc = llist[5].strip()
            except:
                pass
        etas = ''
        smosaic = get_prof_value(SAMPLEMOSAIC_PNAME)
        if smosaic != None:
            etas = smosaic
        text.append('PARAM: AS=' + las + '\t, BS=' + lbs + '\t, CS=' + lcs + '\t' + '\n')
        text.append('PARAM: AA=' + laa + '\t, BB=' + lbb + '\t, CC=' + lcc + '\t, ETAS=' + etas + '\t' + '\n')
        sax = ''
        say = ''
        saz = ''
        sbx = ''
        sby = ''
        sbz = ''
        try:
            ub_matrix = sicsext.runCommand('tasub listub')
            if ub_matrix.__contains__('UB generated'):
                val = ub_matrix[:ub_matrix.index('UB generated')].strip()
                llist = val.split('\n')
                llist0 = llist[0].strip().split(' ')
                sax = llist0[0].strip()
                say = llist0[1].strip()
                saz = llist0[2].strip()
                llist1 = llist[1].strip().split(' ')
                sbx = llist1[0].strip()
                sby = llist1[1].strip()
                sbz = llist1[2].strip()
        except:
            splane = get_prof_value(SCATTERINGPLANE_PNAME).strip()
            try:
                sep = ' '
                if splane.__contains__(','):
                    sep = ','
                llist = splane.split(sep)
                for i in xrange(len(llist)):
                    if llist[i] == ' ':
                        del llist[i]
                sax = llist[0].strip()
                say = llist[1].strip()
                saz = llist[2].strip()
                sbx = llist[3].strip()
                sby = llist[4].strip()
                sbz = llist[5].strip()
            except:
                pass
        text.append('PARAM: AX=' + sax + '\t, AY=' + say + '\t, AZ=' + saz + '\t' + '\n')
        text.append('PARAM: BX=' + sbx + '\t, BY=' + sby + '\t, BZ=' + sbz + '\t' + '\n')
        if ds.size > 1:
            text.append('VARIA: M1=' + format(ds['m1'][0], 2, 7) + ', M2=' + format(ds['m2'][0], 2, 7) + ', S1=' + format(ds['s1'][0], 2, 7) + ', S2=' + format(ds['s2'][0], 2, 7) + '\n')
            text.append('VARIA: A1=' + format(ds['a1'][0], 2, 7) + ', A2=' + format(ds['a2'][0], 2, 7) + ', RM=\t, RA=\t' + '\n')
        else:
            try:
                text.append('VARIA: M1=' + format(ds['m1'], 2, 7) + ', M2=' + format(ds['m2'], 2, 7) + ', S1=' + format(ds['s1'], 2, 7) + ', S2=' + format(ds['s2'], 2, 7) + '\n')
                text.append('VARIA: A1=' + format(ds['a1'], 2, 7) + ', A2=' + format(ds['a2'], 2, 7) + ', RM=\t, RA=\t' + '\n')
            except:
                pass
        text.append('VARIA: TD=\t' + '\n')
        try:
            m10 = float(sicsext.runCommand('m1 softzero'))
            m20 = float(sicsext.runCommand('m2 softzero'))
            s10 = float(sicsext.runCommand('s1 softzero'))
            s20 = float(sicsext.runCommand('s2 softzero'))
            a10 = float(sicsext.runCommand('a1 softzero'))
            a20 = float(sicsext.runCommand('a2 softzero'))
            text.append('ZEROS: A1=' + format(m10, 2, 7) + ', A2=' + format(m20, 2, 7) + ', A3=' + format(s10, 2, 7) + ', A4=' + format(s20, 2, 7) + '\n')
            text.append('ZEROS: A5=' + format(a10, 2, 7) + ', A6=' + format(a20, 2, 7) + ', RM=\t, RA=\t' + '\n')
        except:
            text.append('ZEROS: A1=\t0.0, A2=\t0.0, A3=\t0.0, A4=0.0\n')
            text.append('ZEROS: A5=\t0.0, A6=\t0.0, RM=\t, RA=\t' + '\n')
        text.append('ZEROS: TD=\t' + '\n')
        mode = ds['bm_mode']
        if str(mode) == 'Timer':
            mode = 'TI'
        else:
            mode = 'MO'
        preset = ds['bm_preset']
        text.append('PARAM: ' + mode + '=' + format(preset, 2, 10) + '\n')
        try:
            text.append('PARAM: TT=' + format(ds['tc1_ctrl1'][0], 2, 7) + ', RT=\t, TT2=\t, RT2=\t, MAG=\t' + '\n')
        except:
            text.append('PARAM: TT=\t, RT=\t, TT2=\t, RT2=\t, MAG=\t' + '\n')
        text.append('FORMT: (I4,1X,F9.4,2(1X,F8.0),1X,F9.2,1x,F,6(1X,F9.4))\n')
        text.append('DATA_: ' + '\n')
        titles = ' PNT' + ('%(item)10s' % {'item': axis.title})
        # for item in config.ILL_ITEMS:
        #     titles += ('%(item)10s' % {'item': item[0]})
        # text.append(titles + '\n')
        # for i in xrange(axis.size):
        #     line = '%(value)4d' % {'value': i + 1} + ' '
        #     line += format(axis[i], 4, 10)
        #     for item in config.ILL_ITEMS:
        #         par = ds[item[1]]
        #         if par:
        #             line += format(par[i], item[2], 10)
        #         else:
        #             line += format(0, item[2], 10)
        #     text.append(line + '\n')
        # text.append('Finished at ' + str(ds['end_time']))
        titles = ' PNT'
        axes_titles = []
        to_skip = []
        if axes != None:
            for axis in axes:
                at = axis.title
                axes_titles.append(at)
                titles += ('%(item)12s' % {'item': at})
            if len(axes) > 0 and axes[0] != None:
                axis0 = axes[0]
                for i in xrange(axis0.size):
                    if math.fabs(axis0[i]) > 1e8:
                        to_skip.append(i)
        for tit in config.MULTI_ITEMS:
            if not axes_titles.__contains__(tit[0]):
                titles += ('%(item)12s' % {'item': tit[0]})
        comps = []
        items = ds.__iNXdata__.getDataItemList()
        for item in items:
            target = item.getAttribute('target')
            if target:
                if target.getStringValue().__contains__('sample'):
                    comps.append(item)
                    sn = item.getShortName()
                    if len(sn) > 2:
                        if sn.__contains__('SP'):
                            sn = sn[:2] + sn[2:].replace('SP', '_Setpoint')
                        elif sn[2:].__contains__('S'):
                            sn = sn[:2] + sn[2:].replace('S', '_Sensor')
                    titles += ('%(item)16s' % {'item': sn})
        text.append(titles + '\n')
        for i in xrange(ds.size):
            if to_skip.__contains__(i):
                continue
            line = ('%(item)5s' % {'item': str(i + 1)})
            if axes != None:
                for axis in axes:
                    fmt = '.4f'
                    try:
                        at = str(axis.title)
                        for item in config.MULTI_ITEMS:
                            if item[1] == at:
                                fmt = item[2]
                                break
                    except:
                        pass
                    line += ('%(item)12s' % {'item': (('%(value)' + fmt) % {'value': axis[i]})})
            for tit in config.MULTI_ITEMS:
                if not axes_titles.__contains__(tit[0]):
                    if tit[0].endswith('_err'):
                        try:
                            if ds.size == 1:
                                line += ('%(item)12s' % {'item': (('%(value)' + tit[2]) % {'value': math.sqrt(ds[tit[1]])})})
                            else:
                                line += ('%(item)12s' % {'item': (('%(value)' + tit[2]) % {'value': math.sqrt(ds[tit[1]][i])})})
                        except:
                            line += ('%(item)12s' % {'item': '0.0000'})
                    else:
                        try:
                            if ds.size == 1:
                                line += ('%(item)12s' % {'item': (('%(value)' + tit[2]) % {'value': ds[tit[1]]})})
                            else:
                                line += ('%(item)12s' % {'item': (('%(value)' + tit[2]) % {'value': ds[tit[1]][i]})})
                        except:
                            line += ('%(item)12s' % {'item': '0.0000'})
            for comp in comps:
                data = comp.getData()
                line += ('%(item)16.4f' % {'item': data.getFloat(data.getIndex().set(i))})
            text.append(line + '\n')
        nf.writelines(text)
        nf.flush()
    # except:
    #     print 'failed to process: ' + nfn
    #     traceback.print_exc(file=sys.stdout)
    # finally:
    #     nf.close()
    #     ds.close()
    # except:
    #     nf.close()
    #     ds.close()
    #     print 'failed to process: ' + nfn
    #     traceback.print_exc(file=sys.stdout)
    # except:
    #     print 'failed to process: ' + nfn
    #     traceback.print_exc(file=sys.stdout)
    finally:
        nf.close()
        ds.close()
    print nfn + ' exported'
def graffiti_export(df, input_path, exp_folder, eid, get_prof_value):
    from Experiment import config
    from Experiment.lib.common import __get_axis_name__
    f = File(input_path)
    fsn = f.getName()
    fid = int(fsn[3:10])
    # index = fsn.rindex('.')
    # if index > 0 and index <= len(fsn) - 2 :
    #     fsn = fsn[:index]
    new_fname = 'TAIPAN_exp' + ('%(value)04d' % {'value': eid}) + '_scan' + ('%(value)04d' % {'value': fid}) + '.dat'
    nfn = exp_folder + '/' + new_fname
    ds = df[str(input_path)]
    if len(ds.axes) > 0:
        axes = ds.axes
    else:
        axes = None
    ds.y_title = 'detector'
    nf = open(nfn, 'w')
    try:
        text = '# raw_file = ' + fsn + '\n'
        nf.write(text)
        for item in config.SINGLE_ITEMS:
            text = '# ' + item[0] + ' = '
            if item[0] == 'proposal':
                text += get_prof_value(PROPOSAL_ID_PNAME)
            elif item[0] == 'experiment_number':
                text += str(eid)
            elif item[0] == 'local_contact':
                text += get_prof_value(LOCAL_CONTACT_PNAME)
            elif item[0] == 'scan':
                text += str(fid)
            elif item[0] == 'experiment':
                text += get_prof_value(EXPERIMENT_TITLE_PNAME)
            elif item[0] == 'scan_title':
                text += str(ds['experiment_title'])
            elif item[0] == 'users':
                text += get_prof_value(USER_NAME_PNAME)
            elif item[0] == 'samplename':
                text += get_prof_value(SAMPLENAME_PNAME)
            elif item[0] == 'sampletype':
                text += get_prof_value(SAMPLETYPE_PNAME)
            elif item[0] == 'samplemosaic':
                text += get_prof_value(SAMPLEMOSAIC_PNAME)
            elif item[0] == 'latticeconstants':
                try:
                    ub_cell = sicsext.runCommand('tasub cell')
                    text += ub_cell
                except:
                    text += get_prof_value(LATTICECONSTANTS_PNAME)
            elif item[0] == 'ubmatrix':
                try:
                    ub_matrix = sicsext.runCommand('tasub listub')
                    if ub_matrix.__contains__('UB generated'):
                        val = ub_matrix[:ub_matrix.index('UB generated')]
                        val = val.replace('\n', '')
                        text += val
                    else:
                        text += ub_matrix
                except:
                    text += get_prof_value(UBMATRIX_PNAME)
            elif item[0] == 'mode':
                text += '0'
            elif item[0] == 'plane_normal':
                text += get_prof_value(SCATTERINGPLANE_PNAME)
            elif item[0] == 'monochromator':
                text += get_prof_value(MONOCHROMATOR_PNAME)
            elif item[0] == 'analyzer':
                text += get_prof_value(ANALYZER_PNAME)
            elif item[0] == 'sense':
                text += get_prof_value(SENSE_PNAME)
            elif item[0] == 'collimation':
                text += get_prof_value(COLLIMATION_PNAME)
            elif item[0] == 'preset_type':
                text += 'normal '
            elif item[0] == 'preset_channel':
                value = str(ds[item[1]])
                if value == 'Timer':
                    text += 'time'
                elif value == 'Monitor':
                    text += 'monitor'
                else:
                    text += ''
            elif item[0] == 'def_x':
                try:
                    text += __get_axis_name__(axes)
                except:
                    traceback.print_exc()
                    text += ''
            else:
                if item[1] != None:
                    try:
                        text += str(ds[item[1]])
                    except:
                        text += item[1]
                else:
                    text += ''
            text += '\n'
            nf.write(text)
        # if len(ds) > 1:
        #     text = '# Sum_of_Counts = ' + str(ds['bm2_counts'].sum()) + '\n'
        # else:
        #     text = '# Sum_of_Counts = ' + str(ds['bm2_counts']) + '\n'
        # nf.write(text)
        text = '#' + ('%(item)6s' % {'item': 'Pt.'})
        axes_titles = []
        to_skip = []
        if axes != None:
            for axis in axes:
                at = axis.title
                axes_titles.append(at)
                text += ('%(item)12s' % {'item': at})
            if len(axes) > 0 and axes[0] != None:
                axis0 = axes[0]
                for i in xrange(axis0.size):
                    if math.fabs(axis0[i]) > 1e8:
                        to_skip.append(i)
        for tit in config.MULTI_ITEMS:
            if not axes_titles.__contains__(tit[0]):
                text += ('%(item)12s' % {'item': tit[0]})
        pol_enabled = False
        try:
            pol_field = ds['pol_field']
            if pol_field != None:
                pol_enabled = True
        except:
            pass
        if pol_enabled:
            for tit in config.POLARISER_ITEMS:
                text += ('%(item)12s' % {'item': tit[0]})
        comps = []
        items = ds.__iNXdata__.getDataItemList()
        for item in items:
            target = item.getAttribute('target')
            if target:
                if target.getStringValue().__contains__('sample'):
                    comps.append(item)
                    sn = item.getShortName()
                    if len(sn) > 2:
                        if sn.__contains__('SP'):
                            sn = sn[:2] + sn[2:].replace('SP', '_Setpoint')
                        elif sn[2:].__contains__('S'):
                            sn = sn[:2] + sn[2:].replace('S', '_Sensor')
                    text += ('%(item)16s' % {'item': sn})
        nf.write(text + '\n')
        for i in xrange(ds.size):
            if to_skip.__contains__(i):
                continue
            text = ('%(item)7s' % {'item': str(i + 1)})
            if axes != None:
                for axis in axes:
                    fmt = '.4f'
                    try:
                        at = str(axis.title)
                        for item in config.MULTI_ITEMS:
                            if item[1] == at:
                                fmt = item[2]
                                break
                    except:
                        pass
                    text += ('%(item)12s' % {'item': (('%(value)' + fmt) % {'value': axis[i]})})
            for tit in config.MULTI_ITEMS:
                if not axes_titles.__contains__(tit[0]):
                    if str(tit[0]).endswith('_err'):
                        try:
                            if ds.size == 1:
                                print tit[0]
                                text += ('%(item)12s' % {'item': (('%(value)' + tit[2]) % {'value': math.sqrt(ds[tit[1]])})})
                            else:
                                text += ('%(item)12s' % {'item': (('%(value)' + tit[2]) % {'value': math.sqrt(ds[tit[1]][i])})})
                        except:
                            text += ('%(item)12s' % {'item': '0.0000'})
                    else:
                        try:
                            if ds.size == 1:
                                text += ('%(item)12s' % {'item': (('%(value)' + tit[2]) % {'value': ds[tit[1]]})})
                            else:
                                text += ('%(item)12s' % {'item': (('%(value)' + tit[2]) % {'value': ds[tit[1]][i]})})
                        except:
                            text += ('%(item)12s' % {'item': '0.0000'})
            if pol_enabled:
                for tit in config.POLARISER_ITEMS:
                    if not axes_titles.__contains__(tit[0]):
                        try:
                            if ds.size == 1:
                                text += ('%(item)12s' % {'item': (('%(value)' + tit[2]) % {'value': ds[tit[1]]})})
                            else:
                                text += ('%(item)12s' % {'item': (('%(value)' + tit[2]) % {'value': ds[tit[1]][i]})})
                        except:
                            text += ('%(item)12s' % {'item': '0.0000'})
            for comp in comps:
                data = comp.getData()
                text += ('%(item)16.4f' % {'item': data.getFloat(data.getIndex().set(i))})
            nf.write(text + '\n')
    # except:
    #     traceback.print_exc(file=sys.stdout)
    #     print 'failed to process: ' + input_path
    finally:
        nf.close()
        ds.close()
    print nfn + ' exported'
def batch_export():
    from Experiment import config
    dss = __DATASOURCE__.getSelectedDatasets()
    path = selectSaveFolder()
    if path == None:
        return
    fi = File(path)
    if not fi.exists():
        if not fi.mkdir():
            print 'Error: failed to make directory: ' + path
            return
    if str(fi.getName()) != 'taipan':
        path += '/taipan'
        fi = File(path)
        if not fi.exists():
            if not fi.mkdir():
                print 'Error: failed to make directory: ' + path
                return
    eid = experiment_id.value
    exp_folder = path + '/exp' + str(eid)
    fi = File(exp_folder)
    if not fi.exists():
        if not fi.mkdir():
            print 'Error: failed to make directory: ' + exp_folder
            return
    exp_folder += '/Datafiles'
    fi = File(exp_folder)
    if not fi.exists():
        if not fi.mkdir():
            print 'Error: failed to make directory: ' + exp_folder
            return
    count = 0
    for dinfo in dss:
        count += 1
        loc = dinfo.getLocation()
        f = File(loc)
        fsn = f.getName()
        # index = fsn.rindex('.')
        # if index > 0 and index <= len(fsn) - 2 :
        #     fsn = fsn[:index]
        new_fname = 'TAIPAN_exp' + ('%(value)04d' % {'value': eid}) + '_scan' + ('%(value)04d' % {'value': count}) + '.dat'
        nfn = exp_folder + '/' + new_fname
        ds = df[str(loc)]
        if len(ds.axes) > 0:
            axes = ds.axes
        else:
            axes = None
        ds.y_title = 'detector'
        nf = open(nfn, 'w')
        try:
            text = '# raw_file = ' + fsn + '\n'
            nf.write(text)
            for item in config.SINGLE_ITEMS:
                text = '# ' + item[0] + ' = '
                if item[0] == 'proposal':
                    text += proposal.value
                elif item[0] == 'local_contact':
                    text += local_contact.value
                elif item[0] == 'monochromator':
                    text += monochromator.value
                elif item[0] == 'analyzer':
                    text += analyzer.value
                elif item[0] == 'sense':
                    text += sense.value
                elif item[0] == 'collimation':
                    text += collimation.value
                elif item[0] == 'preset_type':
                    text += preset_type.value
                elif item[0] == 'preset_channel':
                    value = str(ds[item[1]])
                    if value == 'Timer':
                        text += 'time'
                    elif value == 'Monitor':
                        text += 'monitor'
                    else:
                        text += ''
                elif item[0] == 'def_x':
                    try:
                        text += ds.axes[0].title
                    except:
                        text += ''
                else:
                    if item[1] != None:
                        try:
                            text += str(ds[item[1]])
                        except:
                            text += item[1]
                    else:
                        text += ''
                text += '\n'
                nf.write(text)
            # if len(ds) > 1:
            #     text = '# Sum_of_Counts = ' + str(ds['bm2_counts'].sum()) + '\n'
            # else:
            #     text = '# Sum_of_Counts = ' + str(ds['bm2_counts']) + '\n'
            # nf.write(text)
            text = '#' + ('%(item)6s' % {'item': 'Pt.'})
            axes_titles = []
            if axes != None:
                for axis in axes:
                    at = axis.title
                    axes_titles.append(at)
                    text += ('%(item)12s' % {'item': at})
            for tit in config.MULTI_ITEMS:
                if not axes_titles.__contains__(tit[0]):
                    text += ('%(item)12s' % {'item': tit[0]})
            nf.write(text + '\n')
            for i in xrange(ds.size):
                text = ('%(item)7s' % {'item': str(i + 1)})
                if axes != None:
                    for axis in axes:
                        fmt = '.4f'
                        try:
                            at = str(axis.title)
                            for item in config.MULTI_ITEMS:
                                if item[1] == at:
                                    fmt = item[2]
                                    break
                        except:
                            pass
                        text += ('%(item)12s' % {'item': (('%(value)' + fmt) % {'value': axis[i]})})
                for tit in config.MULTI_ITEMS:
                    if not axes_titles.__contains__(tit[0]):
                        try:
                            if ds.size == 1:
                                text += ('%(item)12s' % {'item': (('%(value)' + tit[2]) % {'value': ds[tit[1]]})})
                            else:
                                text += ('%(item)12s' % {'item': (('%(value)' + tit[2]) % {'value': ds[tit[1]][i]})})
                        except:
                            text += ('%(item)12s' % {'item': '0.0000'})
                nf.write(text + '\n')
            nf.close()
            ds.close()
        except:
            nf.close()
            ds.close()
            print 'failed to process: ' + loc
    print 'done'
def loadArtifacts(self, artifactFilePath):
    artifactFile = File(artifactFilePath)
    return Artifact(artifactFile.getName(), FileInput(artifactFile))
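# A minimal usage sketch, assuming a JEB scripting context like the run()
# methods above (prj obtained from ctx.getMainProject()); the .apk path is
# hypothetical.
artifact = self.loadArtifacts('/samples/app.apk')
unit = prj.processArtifact(artifact)
print(unit)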
dark = float(options.dark or 0)
if dark > 0:
    exposure = 0

# Connect to subsystems
raftsub = CCS.attachSubsystem("ats-wreb")
powersub = CCS.attachSubsystem("ats-power")
bonnsub = CCS.attachSubsystem("bonn-shutter")

# Load the sequencer
if options.sequencer:
    compiler = FPGA2ModelBuilder()
    file = File(options.sequencer)
    model = compiler.compileFile(file)
    raftsub.sendSynchCommand("loadCompiledSequencer", model, file.getName())
    print "Loaded %s" % file

# Sanity checks
biasOn = raftsub.sendSynchCommand("isBackBiasOn")
if not biasOn:
    print "WARNING: Back bias is not on for WREB"
state = raftsub.sendSynchCommand("getState")
alert = state.getState(AlertState)
if alert != AlertState.NOMINAL:
    print "WARNING: focal-plane subsystem is in alert state %s" % alert
# alerts = raftsub.sendSynchCommand("getRaisedAlertSummary")
# if alerts.alertState != AlertState.NOMINAL:
def batch_export():
    from Experiment import config
    dss = __DATASOURCE__.getSelectedDatasets()
    path = selectSaveFolder()
    if path == None:
        return
    for dinfo in dss:
        loc = dinfo.getLocation()
        f = File(loc)
        fsn = f.getName()
        index = fsn.rindex('.')
        if index > 0 and index <= len(fsn) - 2:
            fsn = fsn[:index]
        nfn = path + '/' + fsn + '.dat'
        ds = df[str(loc)]
        if len(ds.axes) > 0:
            axes = ds.axes
        else:
            axes = None
        nf = open(nfn, 'w')
        try:
            for item in config.SINGLE_ITEMS:
                text = '# ' + item[0] + ' = '
                if item[1] != None:
                    text += str(ds[item[1]])
                text += '\n'
                nf.write(text)
            if len(ds) > 1:
                text = '# Sum of Counts = ' + str(ds['bm2_counts'].sum()) + '\n'
            else:
                text = '# Sum of Counts = ' + str(ds['bm2_counts']) + '\n'
            nf.write(text)
            text = '#' + ('%(item)6s' % {'item': 'Pt.'})
            axes_titles = []
            if axes != None:
                for axis in axes:
                    at = axis.title
                    axes_titles.append(at)
                    text += ('%(item)12s' % {'item': at})
            for tit in config.MULTI_ITEMS:
                if not axes_titles.__contains__(tit[0]):
                    text += ('%(item)12s' % {'item': tit[0]})
            nf.write(text + '\n')
            for i in xrange(ds.size):
                text = ('%(item)7s' % {'item': str(i)})
                if axes != None:
                    for axis in axes:
                        fmt = '.1f'
                        try:
                            at = str(axis.title)
                            for item in config.MULTI_ITEMS:
                                if item[1] == at:
                                    fmt = item[2]
                                    break
                        except:
                            pass
                        text += ('%(item)12s' % {'item': (('%(value)' + fmt) % {'value': axis[i]})})
                for tit in config.MULTI_ITEMS:
                    if not axes_titles.__contains__(tit[0]):
                        try:
                            text += ('%(item)12s' % {'item': (('%(value)' + tit[2]) % {'value': ds[tit[1]][i]})})
                        except:
                            text += ('%(item)12s' % {'item': '0.0000'})
                nf.write(text + '\n')
            nf.close()
            ds.close()
        except:
            nf.close()
            ds.close()
            print 'failed to process: ' + loc
    print 'done'
'.*DGC LUNZ.*', output):
    try:
        # EMC inquiry tool can be downloaded from ftp://ftp.emc.com/pub/symm3000/inquiry/
        # but we require an older version for it to work. The tested version:
        # Inquiry utility, Version V7.3-1305 (Rev 1.0) (SIL Version V7.3.1.0 (Edit Level 1305))
        # copy inq to targets
        inq = 'inq.exe'
        path = coll_home + "/etc/templates/commands/extension-scripts/" + inq
        remotePath = os_handle.executeCommand('cmd.exe /C echo %TEMP%').strip()
        if not remotePath.endswith('\\'):
            remotePath = remotePath + '\\'
        remotePath = remotePath + inq
        srcInq = File(path)
        tmpInq = File.createTempFile(
            srcInq.getName() + '-' + os_handle.getSession().getHost() + '-' + str(System.currentTimeMillis()),
            None)
        if not tmpInq.exists():
            tmpInq.createNewFile()
        source = None
        destination = None
        try:
            source = FileInputStream(srcInq).getChannel()
            destination = FileOutputStream(tmpInq).getChannel()
            destination.transferFrom(source, 0, source.size())
        finally:
            if source != None:
                source.close()
            if destination != None:
                destination.close()
        try:
)
fileToImport = os.path.join(fDialog.getDirectory(), fDialog.getFile())
if not os.path.exists(fileToImport) or not os.path.isfile(fileToImport):
    raise Exception(
        "IMPORT: Sorry, file selected to import either does not exist or is not a file")
fileToImport = File(fileToImport)

# set the parameters
newAccountSet = False
contextAccount = moneydance_ui.firstMainFrame.getSelectedAccount()
filename = fileToImport.getName()
extension = os.path.splitext(filename)[1].upper()
if moneydance_data is None:
    raise Exception("ERROR - No data")
wrapper = moneydance_ui.getCurrentAccounts()  # type: AccountBookWrapper
book = moneydance_data
importWasSuccessful = True
dirName = fileToImport.getParent()
try:
    fPath = fileToImport.getAbsolutePath()  # type: str
    fPath = fPath.upper().strip()
    if not moneydance_ui.saveCurrentAccount():
        raise Exception("ERROR Save Failed")
    importer = moneydance_ui.getFileImporter(
from java.nio.file import Paths
from java.nio.file import Path
from java.io import File

p = Paths.get("home", "pi", "Desktop", "jython-prac-prog", "stram_jython", "file2.txt")
x = p.toFile()
x = File(x.getName())
print (x.isFile())

f = File("image1.png")
print (f.getName())
print ("length", f.length())
print ("Execute", f.canExecute())
print ("read", f.canRead())
print ("write", f.canWrite())
print ("path", f.getPath())
print ("Directory", f.isDirectory())
print ("parent", f.getParent())
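# Note that File(x.getName()) above keeps only the base name, so the File it
# builds resolves against the current working directory rather than the
# original location -- a sketch of the difference (paths hypothetical):
print (p.toFile().getPath())           # home/pi/.../file2.txt (full path)
print (File(p.toFile().getName()))     # file2.txt, relative to the CWD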
def CheckFileLists(lista, listb):
    return

originalParent = ""
imFileNames = MultiFileDialog("Open Image Files")
tempFiles = ArrayList()
ui = nrims.UI()
ui.show()
verbose = 1
IJ.log("Starting combine nrrds.")
for i in range(len(imFileNames)):
    IJ.log("Summing file: " + imFileNames[i])
    imFile = File(imFileNames[i])
    if i == 0:
        # remember the parent directory of the first file
        originalParent = imFile.getParent()
    directory = imFile.getParent()
    name = imFile.getName()
    ui.openFile(imFile)
    mimStack = ui.getmimsStackEditing()
    imp = ArrayList()
    images = ui.getOpenMassImages()
    # compress the planes
    blockSize = images[0].getNSlices()
    done = mimStack.compressPlanes(blockSize)
    # force to 32bit
    massCorrection = nrimsData.massCorrection(ui)
    massCorrection.forceFloatImages(images)
    if done:
        nw = nrimsData.Nrrd_Writer(ui)
        dataFile = nw.save(images, System.getProperty("java.io.tmpdir"), "comp_" + name)