def batch_export(): from Experiment import config dss = __DATASOURCE__.getSelectedDatasets() if dss is None or len(dss) == 0: print 'Please select one or more files to export.' return path = selectSaveFolder() if path == None: return fi = File(path) if not fi.exists(): if not fi.mkdir(): print 'Error: failed to make directory: ' + path return eid = int(experiment_id.value) exp_folder = path + '/exp' + str(eid) fi = File(exp_folder) if not fi.exists(): if not fi.mkdir(): print 'Error: failed to make directory: ' + exp_folder return ILL_folder = exp_folder + '/ILLfiles' exp_folder += '/Datafiles' fi = File(exp_folder) if not fi.exists(): if not fi.mkdir(): print 'Error: failed to make directory: ' + exp_folder return fi = File(ILL_folder) if not fi.exists(): if not fi.mkdir(): print 'Error: failed to make directory: ' + ILL_folder return count = 0 for dinfo in dss: loc = dinfo.getLocation() f = File(loc) fsn = f.getName() # count = int(fsn[3:10]) # new_fname = 'TAIPAN_exp' + ('%(value)04d' % {'value':eid}) + '_scan' + ('%(value)04d' % {'value':count}) + '.dat' df.datasets.clear() try: export.graffiti_export(df, loc, exp_folder, eid, get_prof_value) except: traceback.print_exc() traceback.print_exc(file = __buffer_logger__) df.datasets.clear() try: export.ILL_export(df, loc, ILL_folder, eid, get_prof_value) except: traceback.print_exc() traceback.print_exc(file = __buffer_logger__) # export.graffiti_export(df, loc, exp_folder, eid, get_prof_value) # df.datasets.clear() # export.ILL_export(df, loc, ILL_folder, eid, get_prof_value) print 'done'
def batch_export(): from Experiment import config dss = __get_selected_files__() if dss is None or len(dss) == 0: print 'Please select one or more files to export.' return path = selectSaveFolder() if path == None: return fi = File(path) if not fi.exists(): if not fi.mkdir(): print 'Error: failed to make directory: ' + path return eid = int(experiment_id.value) exp_folder = path + '/exp' + str(eid) fi = File(exp_folder) if not fi.exists(): if not fi.mkdir(): print 'Error: failed to make directory: ' + exp_folder return HMM_folder = exp_folder + '/HMMfiles' fi = File(HMM_folder) if not fi.exists(): if not fi.mkdir(): print 'Error: failed to make directory: ' + HMM_folder return count = 0 for loc in dss: ds = df[str(loc)] if len(__exc_masks__) > 0: res = copy(ds.get_reduced()) for mask in __exc_masks__: res[:, mask[2]:mask[3], mask[0]:mask[1]] = 0 else : res = ds.get_reduced(1) if len(__inc_masks__) > 0: r = dataset.instance(res.shape, dtype=int) for mask in __inc_masks__: r[:, mask[2]:mask[3], mask[0]:mask[1]] = res[:, mask[2]:mask[3], mask[0]:mask[1]] else : r = res data = r.sum(0) if not ds.axes is None and len(ds.axes) > 0: if not axis_lock.value: axis_name.value = ds.axes[0].name if len(data == 1) : ds2 = Dataset(data, axes=[[ds[str(axis_name.value)]]]) else : axis = ds[str(axis_name.value)] ds2 = Dataset(data, axes=[axis]) ds2.title = ds.title # count = int(fsn[3:10]) # new_fname = 'TAIPAN_exp' + ('%(value)04d' % {'value':eid}) + '_scan' + ('%(value)04d' % {'value':count}) + '.dat' export.HMM_intensity_export(ds2, ds.bm1_counts, HMM_folder, eid, get_pref_value, reg_list.value) print 'done'
def batch_export(): from Experiment import config dss = __DATASOURCE__.getSelectedDatasets() if dss is None or len(dss) == 0: print "Please select one or more files to export." return path = selectSaveFolder() if path == None: return fi = File(path) if not fi.exists(): if not fi.mkdir(): print "Error: failed to make directory: " + path return eid = int(get_pref_value(EXPERIMENT_ID_PNAME)) exp_folder = path + "/exp" + str(eid) fi = File(exp_folder) if not fi.exists(): if not fi.mkdir(): print "Error: failed to make directory: " + exp_folder return HMM_folder = exp_folder + "/HMMfiles" fi = File(HMM_folder) if not fi.exists(): if not fi.mkdir(): print "Error: failed to make directory: " + HMM_folder return count = 0 for dinfo in dss: loc = dinfo.getLocation() ds = df[str(loc)] if len(__exc_masks__) > 0: res = copy(ds.get_reduced()) for mask in __exc_masks__: res[:, mask[2] : mask[3], mask[0] : mask[1]] = 0 else: res = ds.get_reduced() if len(__inc_masks__) > 0: r = dataset.instance(res.shape, dtype=int) for mask in __inc_masks__: r[:, mask[2] : mask[3], mask[0] : mask[1]] = res[:, mask[2] : mask[3], mask[0] : mask[1]] else: r = res data = r.sum(0) if not ds.axes is None and len(ds.axes) > 0: if not axis_lock.value: axis_name.value = ds.axes[0].name axis = ds[str(axis_name.value)] ds2 = Dataset(data, axes=[axis]) ds2.title = ds.title # count = int(fsn[3:10]) # new_fname = 'TAIPAN_exp' + ('%(value)04d' % {'value':eid}) + '_scan' + ('%(value)04d' % {'value':count}) + '.dat' export.HMM_intensity_export(ds2, ds.bm1_counts, HMM_folder, eid, get_prof_value, reg_list.value) print "done"
def checkDiscoveryResources(mapingFilesListFileName, userExtDir, localFramework, intermediatesDir):
    """Validate the resources needed for an ARIS discovery run.

    Checks that the mapping-file list file exists and names at least one
    readable mapping XML under <userExtDir>/data, and that the
    intermediates directory exists and is writable (it is also cleaned).

    Returns the list of usable mapping file names (bare names, no path or
    extension), or None when any required resource is missing.
    """
    try:
        initialMappingFileNameList = []
        mappingFileNameList = []
        ## Get mapping file list
        mappingFilesListFile = File(mapingFilesListFileName)
        if mappingFilesListFile.exists() and mappingFilesListFile.canRead():
            initialMappingFileNameList = getMappingFileNames(mapingFilesListFileName)
            if initialMappingFileNameList == None or len(initialMappingFileNameList) < 1:
                excInfo = ('No mapping files found in <%s>' % mapingFilesListFileName)
                localFramework.reportError(excInfo)
                logger.error(excInfo)
                return None
        else:
            excInfo = ('Error reading file <%s>' % mapingFilesListFileName)
            localFramework.reportError(excInfo)
            logger.error(excInfo)
            return None
        ## Make sure that at least one of the mapping files in the list above
        ## exists and is readable
        mappingFileExists = 0  # was the string 'false'; a plain truth flag is clearer
        for mappingFileName in initialMappingFileNameList:
            mappingFileAbsolutePath = userExtDir + 'data\\' + mappingFileName + '.xml'
            mappingFile = File(mappingFileAbsolutePath)
            if mappingFile.exists() and mappingFile.canRead():
                debugPrint(4, '[' + SCRIPT_NAME + ':checkDiscoveryResources] Mapping file <%s> found!' % mappingFileAbsolutePath)
                mappingFileExists = 1
                ## Only the bare name is recorded; callers rebuild the path
                mappingFileNameList.append(mappingFileName)
            else:
                logger.info('Mapping file <%s> NOT found!' % mappingFileAbsolutePath)
        if not mappingFileExists:
            excInfo = 'Error reading mapping file(s)!'
            localFramework.reportError(excInfo)
            logger.warn(excInfo)
            return None
        ## Make sure intermediates directory exists and is writable
        intermediatesDirectory = File(intermediatesDir)
        if intermediatesDirectory.exists() and intermediatesDirectory.canRead() and intermediatesDirectory.canWrite():
            debugPrint(5, '[' + SCRIPT_NAME + ':checkDiscoveryResources] Intermediates directory <%s> present and writable!' % intermediatesDir)
            ## Clean up intermediate XML directory
            ## TODO remove
            cleanUpDirectory(intermediatesDir)
        else:
            excInfo = ('Intermediates directory <%s> not found or is read-only' % intermediatesDir)
            localFramework.reportError(excInfo)
            logger.warn(excInfo)
            return None
        ## If we made it this far, all resources are good
        return mappingFileNameList
    except:
        ## Broad catch: log the Jython stack trace and fall through (returns None)
        excInfo = logger.prepareJythonStackTrace('')
        debugPrint('[' + SCRIPT_NAME + ':checkDiscoveryResources] Exception: <%s>' % excInfo)
        pass
def batch_export(): from Experiment import config dss = __get_selected_files__() if dss is None or len(dss) == 0: print 'Please select one or more files to export.' return path = selectSaveFolder() if path == None: return fi = File(path) if not fi.exists(): if not fi.mkdir(): print 'Error: failed to make directory: ' + path return eid = int(experiment_id.value) exp_folder = path + '/exp' + str(eid) fi = File(exp_folder) if not fi.exists(): if not fi.mkdir(): print 'Error: failed to make directory: ' + exp_folder return ILL_folder = exp_folder + '/ILLfiles' exp_folder += '/Datafiles' fi = File(exp_folder) if not fi.exists(): if not fi.mkdir(): print 'Error: failed to make directory: ' + exp_folder return fi = File(ILL_folder) if not fi.exists(): if not fi.mkdir(): print 'Error: failed to make directory: ' + ILL_folder return count = 0 flist = [] for loc in dss: f = File(loc) fsn = f.getName() # count = int(fsn[3:10]) # new_fname = 'TAIPAN_exp' + ('%(value)04d' % {'value':eid}) + '_scan' + ('%(value)04d' % {'value':count}) + '.dat' df.datasets.clear() fn = export.graffiti_export(df, loc, exp_folder, eid, get_pref_value) flist.append(fn) df.datasets.clear() fn = export.ILL_export(df, loc, ILL_folder, eid, get_pref_value) flist.append(fn) if len(flist) > 0: zip_files(flist, 'TAIPAN_rd_' + str(int(time.time()))[2:] + '.zip') print 'done'
def updateSimsRunning(self):
    """Drop finished simulations from self.simsRunning.

    A NEURON run is considered done once time.dat appears in its
    simulation directory; a GENESIS run once the 'finished' marker appears.
    """
    done = []
    projDir = self.myProject.getProjectMainDirectory()
    for sim in self.simsRunning:
        timeFile = File(projDir, "simulations/" + sim + "/time.dat")
        finishedFile = File(projDir, "simulations/" + sim + "/finished")
        #print "Checking file: "+timeFile.getAbsolutePath() +", exists: "+ str(timeFile.exists())
        neuronDone = (self.simulator == "NEURON") and timeFile.exists()
        genesisDone = (self.simulator == "GENESIS") and finishedFile.exists()
        if neuronDone or genesisDone:
            done.append(sim)
    for sim in done:
        self.simsRunning.remove(sim)
def __export__(fn): if fn.__contains__('/'): items = fn.split('/') fn = items[-1] input_path = __data_folder__ + '/' + fn exp_id = get_prof_value('taipan.experiment.id') exp_folder = __export_folder__ + '/exp' + exp_id fi = File(exp_folder) if not fi.exists(): if not fi.mkdirs(): print 'Error: failed to make directory: ' + exp_folder return ILL_folder = exp_folder + '/ILLfiles' exp_folder += '/Datafiles' fi = File(exp_folder) if not fi.exists(): if not fi.mkdirs(): print 'Error: failed to make directory: ' + exp_folder return fi = File(ILL_folder) if not fi.exists(): if not fi.mkdirs(): print 'Error: failed to make directory: ' + ILL_folder return # df.datasets.clear() # export.graffiti_export(df, input_path, exp_folder, int(exp_id), get_prof_value) # df.datasets.clear() # export.ILL_export(df, input_path, ILL_folder, int(exp_id), get_prof_value) df.datasets.clear() try: export.graffiti_export(df, input_path, exp_folder, int(exp_id), get_prof_value) except: try: export.graffiti_export(df, input_path, exp_folder, int(exp_id), get_prof_value) except: traceback.print_exc() traceback.print_exc(file = __buffer_logger__) df.datasets.clear() try: export.ILL_export(df, input_path, ILL_folder, int(exp_id), get_prof_value) except: try: export.ILL_export(df, input_path, ILL_folder, int(exp_id), get_prof_value) except: traceback.print_exc() traceback.print_exc(file = __buffer_logger__)
def createFileChooserDialog(filters, filename, prefs, prefkey, multiselect):
    """Creates a file chooser dialog that remembers its last directory.

    filters may be None, a single file filter, or an iterable of filters;
    the first filter becomes the active one.  When prefs/prefkey are given,
    the chooser starts in the directory stored under that key.
    """
    fileChooser = JFileChooser()
    # Normalise to a sequence; a single filter may be passed bare.
    # BUGFIX: a None `filters` used to be wrapped as (None,) and registered
    # as a filter; now it is treated as "no filters".
    if filters is None:
        filters = ()
    elif not hasattr(filters, '__iter__'):
        filters = (filters,)
    for fileFilter in filters:  # avoid shadowing the builtin 'filter'
        fileChooser.addChoosableFileFilter(fileFilter)
    if filters:
        fileChooser.fileFilter = filters[0]
    # Enable/disable multiple file select
    fileChooser.setMultiSelectionEnabled(multiselect)
    # Restore the last directory
    if prefs and prefkey:
        defaultDirName = prefs.get(prefkey, None)
        if defaultDirName:
            defaultDirectory = File(defaultDirName)
            if defaultDirectory.exists():
                fileChooser.currentDirectory = defaultDirectory
    # Preset the file name
    if filename:
        fileChooser.selectedFile = File(fileChooser.currentDirectory, filename)
    return fileChooser
def updateCmdForDeltaScanning(commandLine, Framework):
    """Add '-oldscanid'/'-oldscansize' options to the scanner command line
    so the scanner can delta-scan against the previously saved scan file.

    When the saved original scan file is gzip-compressed, its trailing
    8 bytes (CRC32 then uncompressed size, per the gzip format) are used
    as the old scan's id and size.  Returns the (possibly updated)
    command line.
    """
    originalScanFileFolderPath = CollectorsParameters.PROBE_MGR_INVENTORY_XMLENRICHER_FILES_FOLDER + XmlEnricherConstants.ORIGINAL_FOLDER_NAME
    originalScanFile = File(originalScanFileFolderPath, InventoryUtils.generateScanFileName(Framework))
    if originalScanFile.exists():
        scan = None
        try:
            try:
                buffer = jarray.zeros(0x24, 'b')
                fileSize = originalScanFile.length()
                if fileSize > 0x24:
                    scan = RandomAccessFile(originalScanFile, "r")
                    scan.readFully(buffer)
                    # 1F 8B 08 is the gzip magic number plus 'deflate' method byte
                    if (buffer[0] == 0x1F) and ((buffer[1] & 0xFF) == 0x8B) and (buffer[2] == 0x08):
                        # gzip trailer: last 8 bytes are CRC32 then ISIZE
                        scan.seek(fileSize - 8)
                        scan.readFully(buffer, 0, 8)
                        crc32 = getInt(buffer, 0)
                        size = getInt(buffer, 4)
                        deltaParams = ' -oldscanid:' + str(crc32) + ' -oldscansize:' + str(size) + ' '
                        # splice the delta options in right after the ENTERPRISE_MODE flag
                        index = String(commandLine).indexOf(ENTERPRISE_MODE) + String(ENTERPRISE_MODE).length()
                        commandLine = commandLine[0:index] + deltaParams + commandLine[index + 1:]
                        logger.debug('Scanner execution command updated to ', commandLine)
            except:
                logger.debugException("Failed to calculate CRC32 and size of zipped scan file " + originalScanFile.getAbsolutePath())
        finally:
            # best-effort close; ignore errors on cleanup
            if scan is not None:
                try:
                    scan.close()
                except:
                    pass
    return commandLine
def validateDirectory(Framework):
    """Return 1 when the 'Export Directory' trigger CI value names an
    existing directory, otherwise 0."""
    exportDirectory = Framework.getTriggerCIData("Export Directory")
    if exportDirectory is None or exportDirectory == "":
        return 0
    candidate = File(exportDirectory)
    if candidate.exists() and candidate.isDirectory():
        return 1
    return 0
def load_from_file(file_package, expected_class):
    """Load class `expected_class` from package directory `file_package`.

    Deletes a stale compiled $py.class file (so Jython recompiles the
    source), imports the .py source as a fresh module, and returns the
    class object — or None when the module does not define it.
    """
    print("loading %s from %s" % (expected_class, file_package))
    base = file_package.replace(".", "/") + "/" + expected_class
    compiled_file = File(base + "$py.class")
    source_file = File(base + ".py")
    # BUGFIX: an unconditional "Compiled file outdated." print ran before
    # the staleness check and was misleading; removed.
    if compiled_file.exists():
        if compiled_file.lastModified() < source_file.lastModified():
            print("get request for controller %s. Compiled file outdated." % expected_class)
            compiled_file.delete()
        else:
            print("get request for controller %s. Compiled file is up-to-date." % expected_class)
    else:
        print("get request for controller %s. Compiled file does not exists." % expected_class)
    py_mod = imp.load_source("module_" + expected_class, source_file.getAbsolutePath())
    if hasattr(py_mod, expected_class):
        class_inst = getattr(py_mod, expected_class)
        print(class_inst.__doc__)
        print(class_inst.__name__)
    else:
        # BUGFIX: previously fell through and dereferenced None via
        # class_inst.__doc__, raising AttributeError.
        class_inst = None
    return class_inst
def access(path, mode):
    """access(path, mode) -> True if granted, False otherwise

    Use the real uid/gid to test for access to a path.  Note that most
    operations will use the effective uid/gid, therefore this routine can
    be used in a suid/sgid environment to test if the invoking user has
    the specified access to the path.  The mode argument can be F_OK to
    test existence, or the inclusive-OR of R_OK, W_OK, and X_OK.
    """
    if not isinstance(mode, (int, long)):
        raise TypeError('an integer is required')
    f = File(sys.getPath(path))
    granted = f.exists()
    if (mode & R_OK) and not f.canRead():
        granted = False
    if (mode & W_OK) and not f.canWrite():
        granted = False
    if mode & X_OK:
        # NOTE: always False without jna-posix stat
        try:
            granted = (stat(path).st_mode & _stat.S_IEXEC) != 0
        except OSError:
            granted = False
    return granted
def registerExtenderCallbacks(self, callbacks):
    """Burp extender entry point.

    When the last CLI argument is not an option flag and names an existing
    state file, that state is restored and both auto-save and the active
    scanner's custom scope are switched off for the session (the previous
    auto-save setting is remembered in self.autosave).  Otherwise auto-save
    is (re-)enabled and the custom scope is still disabled.
    """
    self.autosave = None
    self.callbacks = callbacks
    # last CLI argument is treated as a state file unless it is an option flag
    if len(self.args) > 1 and not self.args[-1].startswith('-'):
        fname = self.args[1].strip("'")
        f = File(fname)
        if f.exists():
            callbacks.issueAlert('Loading %s..' % fname)
            callbacks.restoreState(f)
            # remember the previous auto-save setting, then switch it off
            conf = callbacks.saveConfig()
            self.autosave = True if conf['suite.doAutoSave'] else False
            conf['suite.doAutoSave'] = 'false'
            # also disable the active scanner's custom scope
            active_type = conf['scanner.activecustomscopetype']
            conf['scanner.activecustomscopetype'] = '0'
            callbacks.loadConfig(conf)
            print "Deactivate doAutoSave (%s -> false)" % (self.autosave,)
            print "Deactivate active scanner (%s -> 0)" % (active_type,)
    if self.autosave is None:
        # no state was restored: turn auto-save back on, custom scope off
        conf = callbacks.saveConfig()
        print "Activate doAutoSave (%s -> true)" % (conf['suite.doAutoSave'],)
        conf['suite.doAutoSave'] = 'true'
        active_type = conf['scanner.activecustomscopetype']
        conf['scanner.activecustomscopetype'] = '0'
        print "Deactivate active scanner (%s -> 0)" % (active_type,)
        callbacks.loadConfig(conf)
def setup_wls_cp():
    """Create the setenv script when the configured WebLogic install
    directory (wls.oracle.home/wls.name) exists; otherwise do nothing."""
    home_dir = rb_config.getProperty('wls.oracle.home')
    wls_name = rb_config.getProperty('wls.name')
    if not (home_dir and wls_name):
        return
    if File(home_dir + '/' + wls_name).exists():
        return create_setenv(home_dir)
def stat(path):
    """stat(path) -> stat result

    Perform a stat system call on the given path.

    The Java stat implementation only returns a small subset of the
    standard fields: size, modification time and change time.
    """
    abs_path = sys.getPath(path)
    # Prefer the native (jna-posix) stat when the platform provides it.
    try:
        return stat_result.from_jnastat(_posix.stat(abs_path))
    except NotImplementedError:
        pass
    except:
        raise
    # Fall back to java.io.File, which can only report a few fields.
    f = File(abs_path)
    if not f.exists():
        raise OSError(errno.ENOENT, strerror(errno.ENOENT), path)
    size = f.length()
    mtime = f.lastModified() / 1000.0
    if f.isDirectory():
        mode = _stat.S_IFDIR
    elif f.isFile():
        mode = _stat.S_IFREG
    else:
        mode = 0
    if f.canRead():
        mode = mode | _stat.S_IREAD
    if f.canWrite():
        mode = mode | _stat.S_IWRITE
    return stat_result((mode, 0, 0, 0, 0, 0, size, mtime, mtime, 0))
def __getFile(self, packageDir, filename):
    """Return File(packageDir, filename), materialising it from the bundled
    /workflows resource on first use."""
    target = File(packageDir, filename)
    if not target.exists():
        out = FileOutputStream(target)
        try:
            IOUtils.copy(Services.getClass().getResourceAsStream("/workflows/" + filename), out)
        finally:
            # BUGFIX: close the stream even when the copy fails
            out.close()
    return target
def check(fn='test.txt'):
    """Fail with support.TestError unless fn exists and is non-empty;
    removes fn on success."""
    target = File(fn)
    if not target.exists():
        raise support.TestError('"%s" should exist' % fn)
    if target.length() == 0:
        raise support.TestError('"%s" should have contents' % fn)
    os.remove(fn)
def getWatcherConfig(self):
    """Return a JsonConfigHelper for the watcher's app/config.json, or
    None when that file does not exist."""
    json = JsonConfig()
    # BUGFIX: the default path contained a stray ')' ("...watcher)")
    watcherPath = json.get("watcher/path", "${fascinator.home}/watcher")
    configFile = File("%s/app/config.json" % watcherPath)
    if configFile.exists():
        return JsonConfigHelper(configFile)
    return None
def __activate__(self, context):
    """Entry point: run the configured processing stream sets.

    Reads process/processConfig.json; when a 'processingSet' request
    parameter is supplied only the matching set runs, otherwise all sets
    run.  The outcome message is left in self.procMsg.
    """
    self.log = context["log"]
    self.request = context["request"]
    self.sessionState = context["sessionState"]
    self.sessionState.set("username","admin")
    processingSet = self.request.getParameter("processingSet")
    self.procMsg = None
    # read configuration and trigger processing stream sets
    # storing the return object on the map
    configFilePath = FascinatorHome.getPath("process")+"/processConfig.json"
    procConfigFile = File(configFilePath)
    if procConfigFile.exists():
        self.dataMap = HashMap()
        self.dataMap.put("indexer", context['Services'].getIndexer())
        self.procConfigJson = JsonSimple(procConfigFile)
        for configObj in self.procConfigJson.getJsonArray():
            configJson = JsonSimple(configObj)
            procId = configJson.getString("", "id")
            if processingSet is not None:
                # run only the requested set
                if procId == processingSet:
                    self.execProcSet(procId, configJson)
            else:
                self.execProcSet(procId, configJson)
        # execProcSet may have set an error message; only report success
        # when nothing was recorded
        if self.procMsg is None:
            self.procMsg = "Processing complete!"
    else:
        self.procMsg = "Configuration file does not exist: " + configFilePath
def DiscoveryMain(Framework):
    """Phase 1 of the ARIS integration: pull data from an ARIS XML export.

    For each configured TQL mapping file, the requested object and
    relationship types are extracted from the ARIS XML and written as an
    intermediate XML file under the 'inter' directory for the next phase.
    Returns None on any fatal validation error.
    """
    logger.info('Start Phase 1 ... Pull from ARIS')
    # Set global framework
    global theFramework
    theFramework = Framework
    ## Make sure we have an input data file from ARIS
    ARISfileName = Framework.getParameter('ARIS_XML_file') or None
    ARISfile = File(ARISfileName)
    if not (ARISfile and ARISfile.exists() and ARISfile.canRead()):
        excInfo = ('ARIS XML input file is not specified or is invalid!')
        Framework.reportError(excInfo)
        logger.error(excInfo)
        return None
    ## Check that the language parameter is set - default to US English
    requestedLocaleID = Framework.getParameter('ARISLocaleId') or '&LocaleId.USen;'
    if not requestedLocaleID:
        logger.warn('ARIS LocaleID parameter is not set...defaulting to US English')
        requestedLocaleID = '&LocaleId.USen;'
    # File and directory names
    userExtDir = CollectorsParameters.BASE_PROBE_MGR_DIR + CollectorsParameters.getDiscoveryResourceFolder() + '\\TQLExport\\ARIS\\'
    intermediatesDir = userExtDir + 'inter\\'
    mapingFilesListFileName = userExtDir + 'tqls.txt'
    mappingFileNameList = checkDiscoveryResources(mapingFilesListFileName, userExtDir, Framework, intermediatesDir)
    if not mappingFileNameList:
        return None
    ## Get attribute names from mapping file(s)
    ## This is a list of extended attributes to be retrieved from ARIS
    for mappingFileName in mappingFileNameList:
        (requestedSourceObjectTypeList, requestedSourceRelationshipTypeList) = getMapping(userExtDir + 'data\\' + mappingFileName + '.xml')
        if requestedSourceObjectTypeList and requestedSourceRelationshipTypeList:
            arisObjectMap = processARISXML(ARISfile, requestedSourceObjectTypeList, requestedSourceRelationshipTypeList, requestedLocaleID)
            intermediateXmlDoc = None
            if arisObjectMap:
                intermediateXmlDoc = buildIntermediateXML(arisObjectMap)
                intermediateXmlLocation = intermediatesDir + mappingFileName + '.xml'
            else:
                Framework.reportWarning('No CIs found in the ARIS XML file')
            if intermediateXmlDoc:
                try:
                    xmlOutputter = XMLOutputter()
                    xmlOutputter.output(intermediateXmlDoc, FileOutputStream(intermediateXmlLocation))
                except:
                    excInfo = logger.prepareJythonStackTrace('')
                    Framework.reportError('Error writing intermediate file: <%s>' % intermediateXmlLocation)
                    logger.warn('[' + SCRIPT_NAME + ':DiscoveryMain] Exception: <%s>' % excInfo)
                    pass
            else:
                Framework.reportWarning('Error creating intermediate XML')
        else:
            logger.warn('[' + SCRIPT_NAME + ':DiscoveryMain] Unable to process mapping file: <%s>' % mappingFileName)
            Framework.reportError(' Unable to process mapping file: <%s>' % mappingFileName)
    logger.info('End Phase 1.... Pull from ARIS')
def getsize(path):
    """Return the size of path in bytes; raise OSError when it does not exist."""
    jfile = File(path)
    length = jfile.length()
    # A zero length is ambiguous: it may mean an empty file or no file at
    # all, so only then do we pay for an explicit existence check.
    if length == 0 and not jfile.exists():
        raise OSError(0, 'No such file or directory', path)
    return length
def __checkIfScriptFileIsValid(self, scriptFile):
    """Validate that scriptFile is a non-empty path to an existing file.

    Failures are reported through self.throw_error.
    """
    if scriptFile == "":
        self.throw_error("Invalid script file")
        # NOTE(review): assumes throw_error raises; otherwise validation
        # continues with the empty path, as the original code did
    script = File(scriptFile)
    if not script.exists():
        self.throw_error("Script file is not exist")
def canOpenFile(self):
    """Return True when the object's id names an existing file on disk."""
    #HACK check if mimetypes match between index and real file
    #dcFormat = self.__json.get("response/docs/dc_format", "")
    #if dcFormat is not None:
    #    dcFormat = dcFormat[1:-1]
    #return dcFormat == self.__mimeType
    target = File(self.getObject().getId())
    return target.exists()
def getatime(path):
    """Return the access time of path.

    Java cannot report access time, so modification time is returned
    instead — matching the behaviour of os.stat().
    """
    path = _tostr(path, "getatime")
    jfile = File(path)
    if not jfile.exists():
        raise OSError(0, 'No such file or directory', path)
    return jfile.lastModified() / 1000.0
def getsize(path):
    """Return the size of path in bytes; raise OSError when it does not exist."""
    path = _tostr(path, "getsize")
    jfile = File(path)
    length = jfile.length()
    # File.length() is 0 both for an empty file and a missing one, so only
    # in that case do we disambiguate with an existence check.
    if length == 0 and not jfile.exists():
        raise OSError(0, 'No such file or directory', path)
    return length
def mkjar():
    """Ensure a .jar containing the compiled test class exists.

    Returns (jar path, package, class name).
    """
    jarfile = File(jardir, jarfn)
    # create a .jar file containing a .class file (only on first use)
    if not jarfile.exists():
        support.compileJava("%s/%s/%s.java" % (jardir, package, clazz))
        packer = support.JarPacker(jarfile, bufsize=128)
        packer.addFile(clazzfile, parentDirName=package)
        packer.close()
    return jardir + '/' + jarfn, package, clazz
def getsize(path):
    """Return the size of path in bytes; raise OSError when it does not exist."""
    path = _tostr(path, "getsize")
    jfile = File(sys.getPath(path))
    length = jfile.length()
    # Zero is ambiguous (empty file vs. missing file); only then do the
    # explicit existence check.
    if length == 0 and not jfile.exists():
        raise OSError(0, "No such file or directory", path)
    return length
def savePreviousArguments(managedServerName): from java.io import File from java.io import FileOutputStream from java.util import Properties from java.util import Date from java.text import SimpleDateFormat import string startToEdit() # parameter on the wsdl ant task call fileLocation = sys.argv[1].replace("\\", "/") print "The backup file location is" print fileLocation try: dateFormat = SimpleDateFormat('_d_MMM_yyyy_HH_mm_ss') date = Date() formattedDate = dateFormat.format(date) print formattedDate except: print "The date cannot be created/formatted" try: propsFile = File(fileLocation + managedServerName + formattedDate + "_config.bkp") print propsFile.exists() if (propsFile.exists() == 0): propsFile.createNewFile() except: print "The file cannot be created on:" print propsFile.getAbsoluteFile() dumpStack() previousProperties = Properties() print '===> Saving the previous arguments - ' + managedServerName cd('/Servers/' + managedServerName) print "Getting the Classpath" classPath = cmo.getServerStart().getClassPath() print classPath if classPath == None: classPath = "" previousProperties.setProperty("classPath", classPath) print "Saving Arguments to file" previousProperties.store(FileOutputStream(propsFile), None) print '===> Saved arguments! Please verify the file on:' + fileLocation + "in" + managedServerName
def recordDataReqForToMatrixScript(min, inc, max, exciterList, outputPath):
    """Write the toMatrix script's inputs (min/inc/max and the exciter
    count) to <outputPath>toMatrixVariables.txt, creating the output
    folder if needed."""
    outputFolder = File(outputPath)
    if not outputFolder.exists():
        outputFolder.mkdir()  # ensure folder for .txt file exists
    varFile = open(outputPath + "toMatrixVariables.txt", "w")
    try:
        varFile.write(str(min) + "\n")
        varFile.write(str(inc) + "\n")
        varFile.write(str(max) + "\n")
        varFile.write(str(exciterList.size()))
    finally:
        # BUGFIX: close the file even when a write fails
        varFile.close()
def updateSimsRunning(self):
    """Remove from self.simsRunning every simulation whose time.dat has
    appeared under the project's simulations directory."""
    projDir = self.myProject.getProjectMainDirectory()
    finished = [sim for sim in self.simsRunning
                if File(projDir, "simulations/" + sim + "/time.dat").exists()]
    for sim in finished:
        self.simsRunning.remove(sim)
def checkPullsims(simList, projectFileName): slashparts = projectFileName.split('/') basefolder = '/'.join(slashparts[:-1]) + '/' for sim in simList: projectFullDir = basefolder + "/simulations/" + sim pullFileName = projectFullDir + "/pullsim.sh" pullFile = File(basefolder, "simulations/" + sim + "/pullsim.sh") # and not self.tempSimConfig.getMpiConf().isRemotelyExecuted() if (pullFile.exists()): print "Warning, found file from a previous parallel execution - please delete:" + pullFileName
def updateSimsRunning(): for sim in allRunningSims: timeFile = File(project.getProjectMainDirectory(), "simulations/"+sim+"/time.dat") timeFile2 = File(project.getProjectMainDirectory(), "simulations/"+sim+"/time.txt") # for PSICS... print "Checking file: "+timeFile.getAbsolutePath() +", exists: "+ str(timeFile.exists()) if (timeFile.exists()): allFinishedSims.append(sim) allRunningSims.remove(sim) else: print "Checking file: "+timeFile2.getAbsolutePath() +", exists: "+ str(timeFile2.exists()) if (timeFile2.exists()): allFinishedSims.append(sim) allRunningSims.remove(sim) print "allFinishedSims: "+str(allFinishedSims) print "allRunningSims: "+str(allRunningSims)
def getUniqueValueFileByField(name, path, formatName):
    """Return a File under `path` named <name>.<ext> (ext is formatName
    lower-cased), appending an integer suffix until the name is unused."""
    ext = "." + formatName.lower()
    candidate = File(os.path.join(path, str(name) + ext))
    suffix = 0
    while candidate.exists():
        candidate = File(os.path.join(path, str(name) + str(suffix) + ext))
        suffix += 1
    return candidate
def savePreviousArguments(managedServerName): from java.io import File from java.io import FileOutputStream from java.util import Properties from java.util import Date from java.text import SimpleDateFormat import string startToEdit() # parameter on the wsdl ant task call fileLocation = sys.argv[1].replace("\\","/") print "The backup file location is" print fileLocation try: dateFormat = SimpleDateFormat('_d_MMM_yyyy_HH_mm_ss') date = Date() formattedDate = dateFormat.format(date) print formattedDate except: print "The date cannot be created/formatted" try: propsFile = File(fileLocation+ managedServerName + formattedDate+"_config.bkp"); print propsFile.exists() if(propsFile.exists() == 0): propsFile.createNewFile() except: print "The file cannot be created on:" print propsFile.getAbsoluteFile() dumpStack() previousProperties = Properties() print '===> Saving the previous arguments - ' + managedServerName cd('/Servers/'+managedServerName) print "Getting the VMArgs" vmArgs = cmo.getServerStart().getArguments() print vmArgs if vmArgs == None: vmArgs = "" previousProperties.setProperty("vmArgs", vmArgs) print "Saving Arguments to file" previousProperties.store(FileOutputStream(propsFile),None) print '===> Saved arguments! Please verify the file on:'+ fileLocation + "in" + managedServerName
def EzDirectoryOpenDialog(initialDirectory, stage=None):
    """Show a JavaFX directory chooser.

    When initialDirectory names an existing directory it seeds the dialog.
    Returns the chosen java.io.File, or None when cancelled.
    """
    from javafx.stage import DirectoryChooser
    from java.io import File
    chooser = DirectoryChooser()
    if initialDirectory:
        seed = File(initialDirectory)
        if seed.exists() and seed.isDirectory():
            chooser.setInitialDirectory(seed)
    chooser.setTitle("Select Folder")
    return chooser.showDialog(stage)
def updateSimsRunning():
    """Remove from simsRunning every simulation whose time.dat exists."""
    projDir = myProject.getProjectMainDirectory()
    #print "Checking file: "+timeFile.getAbsolutePath() +", exists: "+ str(timeFile.exists())
    finished = [sim for sim in simsRunning
                if File(projDir, "simulations/" + sim + "/time.dat").exists()]
    for sim in finished:
        simsRunning.remove(sim)
def writeResponseToStatusResponseCache(self, jobId, jobStatus):
    """Cache a curation job's status JSON as
    <FascinatorHome>/curation-status-responses/<jobId>.json."""
    responsesDir = File(FascinatorHome.getPath() + "/curation-status-responses")
    # BUGFIX: the directory must be created when it does NOT exist —
    # the original condition was inverted.
    if not responsesDir.exists():
        FileUtils.forceMkdir(responsesDir)
    FileUtils.writeStringToFile(
        File(responsesDir.getPath() + "/" + Integer(jobId).toString() + ".json"),
        jobStatus.toString(True))
def empty_user(self):
    """Delete the per-user HTML cache directory under <webroot>/html/user/,
    leaving the outcome message in the request's 'errorMessage' attribute."""
    strRootDir = request.getServletContext().getRealPath("/")
    strUserHtmlDir = strRootDir + "html" + File.separator + "user" + File.separator
    dirFile = File(strUserHtmlDir)
    if not dirFile.exists() or not dirFile.isDirectory():
        # cache directory missing — report and bail out
        request.setAttribute("errorMessage", u"用户缓存文件夹不存在!")
        return
    FileUtils.deleteQuietly(dirFile)
    request.setAttribute("errorMessage", u"删除所有用户缓存完毕!")
def stat(path):
    """The Java stat implementation only returns a small subset of the
    standard fields: size and modification time (reported for both the
    mtime and ctime slots)."""
    jfile = File(path)
    size = jfile.length()
    # length() is 0 for both an empty and a missing file, so disambiguate
    # with an explicit existence check.
    if size == 0 and not jfile.exists():
        raise OSError(0, 'No such file or directory', path)
    mtime = jfile.lastModified() / 1000.0
    return (0, 0, 0, 0, 0, 0, size, mtime, mtime, 0)
def CreateFromSample():
    """Seed content/index.xml from the bundled sample and restart the node.

    The sample is "embedded" (and updated) with each version of Nodel.
    """
    sample = File(os.path.join(Nodel.getHostPath(), '.nodel', 'webui_cache', 'index-sample.xml'))
    targetDir = File(_node.getRoot(), 'content')
    target = File(targetDir, 'index.xml')
    if target.exists():
        return console.warn('index.xml file already exists!')
    if not targetDir.exists() and not targetDir.mkdirs():
        return console.error('Could not create directory %s' % targetDir)
    Stream.writeFully(target, Stream.readFully(sample))
    console.info('"content/index.xml" created successfully from sample. Please edit to suit your needs. Restarting node...')
    _node.restart()
def rmdir(path):
    """rmdir(path)

    Remove a directory."""
    target = File(sys.getPath(path))
    if not target.exists():
        raise OSError(errno.ENOENT, errno.strerror(errno.ENOENT), path)
    if not target.isDirectory():
        raise OSError(errno.ENOTDIR, errno.strerror(errno.ENOTDIR), path)
    if not target.delete():
        raise OSError(0, "couldn't delete directory", path)
def execute(self):
    """Admin action: view or update the site-maintenance notice.

    On POST the submitted 'updateInfo' HTML is written to
    <webroot>/html/updateinfo.htm and the matching site index part is
    created, updated or deleted; otherwise the current notice is loaded
    for display.  Requires a logged-in system administrator.
    """
    if self.loginUser == None:
        return ActionResult.LOGIN
    accessControlService = __spring__.getBean("accessControlService")
    if accessControlService.isSystemAdmin(self.loginUser) == False:
        # not a super admin — reject with a (Chinese) permission error
        self.addActionError(u"没有管理站点配置的权限,只有超级管理员才能进行管理。")
        return ActionResult.ERROR
    strFile = request.getServletContext().getRealPath("/")
    strFile = strFile + "html" + File.separator
    strFile = URLDecoder.decode(strFile, "utf-8")
    file = File(strFile)
    if file.exists() == False:
        file.mkdirs()
    # create the subject-navigation (html cache) file location
    strFile = strFile + "updateinfo.htm"
    file = File(strFile)
    if request.getMethod() == "POST":
        params = ParamUtil(request)
        html = params.safeGetStringParam("updateInfo")
        # persist the submitted notice HTML as UTF-8
        fw = OutputStreamWriter(FileOutputStream(file), "utf-8")
        fw.flush()
        fw.write(html)
        fw.close()
        siteIndexPartService = __spring__.getBean("siteIndexPartService")
        if html == "":
            # empty notice: remove the "system maintenance notice" part
            siteIndexPart = siteIndexPartService.getSiteIndexPartByModuleName(u"系统维护通知")
            if siteIndexPart != None:
                siteIndexPartService.deleteSiteIndexPart(siteIndexPart)
        else:
            # create the part on first use, then update its content
            siteIndexPart = siteIndexPartService.getSiteIndexPartByModuleName(u"系统维护通知")
            if siteIndexPart == None:
                siteIndexPart = SiteIndexPart()
                siteIndexPart.setModuleName(u"系统维护通知")
                siteIndexPart.setModuleZone(1)
                siteIndexPart.setModuleOrder(0)
                siteIndexPart.setModuleDisplay(1)
                siteIndexPart.setModuleHeight(0)
            siteIndexPart.setContent(html)
            siteIndexPart.setPartType(100)
            siteIndexPart.setShowType(0)
            siteIndexPart.setShowBorder(0)
            siteIndexPartService.saveOrUpdateSiteIndexPart(siteIndexPart)
        request.setAttribute("deleteCache", "1")
    else:
        # GET: load the current notice for display
        html = CommonUtil.readFile(file.getCanonicalPath(), "UTF-8")
        request.setAttribute("deleteCache", "0")
    request.setAttribute("updateInfo", html)
    return "/WEB-INF/ftl/admin/admin_site_update.ftl"
def test_listdir(self):
    """Verify os.listdir, glob and Java File interop with non-ASCII paths."""
    # It is hard to avoid Unicode paths on systems like OS X. Use
    # relative paths from a temp CWD to work around this
    with test_support.temp_cwd() as new_cwd:
        # A str path joined with unicode components yields unicode results.
        unicode_path = os.path.join(".", "unicode")
        self.assertIs(type(unicode_path), str)
        chinese_path = os.path.join(unicode_path, u"中文")
        self.assertIs(type(chinese_path), unicode)
        home_path = os.path.join(chinese_path, u"首页")
        os.makedirs(home_path)
        with open(os.path.join(home_path, "test.txt"), "w") as test_file:
            test_file.write("42\n")
        # Verify works with str paths, returning Unicode as necessary
        entries = os.listdir(unicode_path)
        self.assertIn(u"中文", entries)
        # Verify works with Unicode paths
        entries = os.listdir(chinese_path)
        self.assertIn(u"首页", entries)
        # glob.glob builds on os.listdir; note that we don't use
        # Unicode paths in the arg to glob
        self.assertEqual(
            glob.glob(os.path.join("unicode", "*")),
            [os.path.join(u"unicode", u"中文")])
        self.assertEqual(
            glob.glob(os.path.join("unicode", "*", "*")),
            [os.path.join(u"unicode", u"中文", u"首页")])
        self.assertEqual(
            glob.glob(os.path.join("unicode", "*", "*", "*")),
            [os.path.join(u"unicode", u"中文", u"首页", "test.txt")])
        # Now use a Unicode path as well as in the glob arg
        self.assertEqual(
            glob.glob(os.path.join(u"unicode", "*")),
            [os.path.join(u"unicode", u"中文")])
        self.assertEqual(
            glob.glob(os.path.join(u"unicode", "*", "*")),
            [os.path.join(u"unicode", u"中文", u"首页")])
        self.assertEqual(
            glob.glob(os.path.join(u"unicode", "*", "*", "*")),
            [os.path.join(u"unicode", u"中文", u"首页", "test.txt")])
        # Verify Java integration. But we will need to construct
        # an absolute path since chdir doesn't work with Java
        # (except for subprocesses, like below in test_env)
        for entry in entries:
            entry_path = os.path.join(new_cwd, chinese_path, entry)
            f = File(entry_path)
            self.assertTrue(f.exists(),
                            "File %r (%r) should be testable for existence" % (
                                f, entry_path))
def EzFileDialog(initialFile, save, stage=None):
    """Show a JavaFX file chooser and return the selected File (or None).

    A directory passed as initialFile seeds the starting folder; a plain
    file seeds the suggested file name. `save` selects the save dialog.
    """
    from javafx.stage import FileChooser
    from java.io import File
    chooser = FileChooser()
    if initialFile:
        seed = File(initialFile)
        if seed.exists():
            if seed.isDirectory():
                chooser.setInitialDirectory(seed)
            if seed.isFile():
                chooser.setInitialFileName(seed.getAbsolutePath())
    chooser.setTitle("Select File")
    return chooser.showSaveDialog(stage) if save else chooser.showOpenDialog(stage)
def checkEnrichedFileExisted(Framework):
    """Mark the workflow step SUCCESS once the XML-Enricher has produced
    the processed scan file; otherwise report not-ready and FAILURE."""
    scanFileName = InventoryUtils.generateScanFileName(Framework)
    sendingFolder = CollectorsParameters.PROBE_MGR_INVENTORY_XMLENRICHER_FILES_FOLDER + XmlEnricherConstants.SENDING_FOLDER_NAME
    enrichedFile = File(sendingFolder, scanFileName)
    if enrichedFile.exists():
        logger.debug('find processed scan file, goto next step')
        Framework.setStepExecutionStatus(WorkflowStepStatus.SUCCESS)
    else:
        logger.debug('No processed scan file yet. XML-Enricher is still running.')
        Framework.reportError(inventoryerrorcodes.INVENTORY_DISCOVERY_ENRICHED_SCANFILE_NOTREADY, [scanFileName])
        Framework.setStepExecutionStatus(WorkflowStepStatus.FAILURE)
def getVersions(self, storedObj):
    """Return the stored object's version index, ordered latest-to-oldest.

    Returns an empty list when no Version_Index.json exists yet.
    """
    indexFile = File(storedObj.getPath() + "/Version_Index.json")
    if not indexFile.exists():
        return []
    stored = JsonSimple(indexFile).getJsonArray()
    # The on-disk index is oldest-first; build a reversed JSONArray copy.
    latestFirst = JSONArray()
    for entry in reversed(stored):
        latestFirst.add(entry)
    return latestFirst
def sanityCheckInstall(domainProperties):
    """Validate the WebLogic install properties before domain creation.

    Checks wls.oracle.home, wls.name, wls.domain.javahome and the domain
    templates; raises for a missing wls.oracle.home and calls sys.exit()
    on any other failed check.
    """
    beahome = domainProperties.getProperty('wls.oracle.home')
    helper.printHeader('[VALIDATING] wls.oracle.home property')
    if beahome is None or len(beahome)==0:
        raise Exception("Required property wls.oracle.home does not exist.")
    else:
        homeDir = File(beahome)
        if not homeDir.exists():
            log.error("Property wls.oracle.home refers to an installation directory " + beahome + " that does not exist.")
            sys.exit()
        else:
            log.debug('wls.oracle.home directory [' + str(beahome) + '] exists.')
    helper.printHeader('[VALIDATING] WebLogic directory property')
    wls = domainProperties.getProperty('wls.name')
    # The product directory must live directly under the Oracle home.
    wlsDir = File(str(beahome) + File.separator + str(wls))
    if not wlsDir.exists():
        log.error('WebLogic directory does not exist in wls.oracle.home directory, please verify wls.name and wls.oracle.home property.')
        sys.exit()
    else:
        log.debug('WebLogic directory [' + str(wls) + '] exists in ' + beahome)
    helper.printHeader('[VALIDATING] wls.domain.javahome property')
    javahome = domainProperties.getProperty('wls.domain.javahome')
    javahomeDir = File(str(javahome))
    if not javahomeDir.exists():
        log.error('JAVA_HOME directory ' + str(javahomeDir) + ' does not exist, please verify wls.domain.javahome property.')
        sys.exit()
    else:
        log.debug('JAVA_HOME directory [' + str(javahome) + '] exists.')
    # Template validation has its own reporting; a False result is fatal.
    validTemplates=validateTemplates(domainProperties)
    if not validTemplates:
        sys.exit()
def forAllMuscleExcitmentConfig(exciterList, tongueNodeList, min, cur, increment, max, excitePath, asciiOutputStartDir, wpOutputStartDir, makeAsciiAlso):
    """Recursively sweep every combination of muscle excitation levels.

    Exciter `cur` is stepped from min to max by increment; for each value
    the function recurses on the next exciter, and at the last exciter it
    runs the simulation (unless results already exist) and records results.
    excitePath accumulates one "value/" path segment per exciter level and
    is used both as the output folder path and the results key.
    """
    if cur == exciterList.size() - 1:
        # Base case: last slider — run a simulation per excitation value.
        for i in CLessEqualLoop(min, max, increment):
            exciterList[cur].setExcitation(i)
            lastExciteValueBeforeRun = exciterList[cur].getExcitation()
            # Skip combinations whose output already exists (resumable sweeps).
            if not doSimResultsAlreadyExist(makeAsciiAlso, asciiOutputStartDir, wpOutputStartDir, excitePath, lastExciteValueBeforeRun):
                origExciterLevels = []
                for excitIdx in range(0, exciterList.size(
                )):  # save excitation levels before run() & reset()
                    origExciterLevels.append(
                        exciterList[excitIdx].getExcitation())
                run(lengthPerSimulation)
                waitForStop()
                # Only record when the model settled or timed out cleanly.
                if Main.getRootModel().getMonitors().get(
                        "RunUntilSettled").isSettled(
                ) or hasReachedMaxDuration():
                    System.out.println(
                        "\nScript :: recording results now because tongue has settled or reached max duration.\n"
                    )
                    recordResults(tongueNodeList, asciiOutputStartDir,
                                  wpOutputStartDir, excitePath,
                                  lastExciteValueBeforeRun, makeAsciiAlso)
                else:
                    System.out.println(
                        "\nScript :: exception detected. Will not record results.\n"
                    )
                reset()
                for excitIdx in range(0, exciterList.size(
                )):  # restore excitation levels after reset()
                    exciterList[excitIdx].setExcitation(
                        origExciterLevels[excitIdx])
    else:
        for i in CLessEqualLoop(
                min, max,
                increment):  # for THIS excitation slider, iterate through
            exciterList[cur].setExcitation(i)
            if makeAsciiAlso:
                # Pre-create the per-level ASCII output folder.
                asciiNextFolder = File(asciiOutputStartDir + excitePath +
                                       str(exciterList[cur].getExcitation()))
                if not asciiNextFolder.exists():
                    asciiNextFolder.mkdir()
            forAllMuscleExcitmentConfig(
                exciterList, tongueNodeList, min, cur + 1, increment, max,
                excitePath + str(exciterList[cur].getExcitation()) + "/",
                asciiOutputStartDir, wpOutputStartDir,
                makeAsciiAlso)  #recursively handle others
def updateSimsRunning():
    """Remove from simsRunning every simulation whose time.dat has appeared,
    i.e. every simulation that has finished writing its results."""
    finished = [
        sim for sim in simsRunning
        if File(myProject.getProjectMainDirectory(),
                "simulations/" + sim + "/time.dat").exists()
    ]
    # Collected first so we never mutate simsRunning while iterating it.
    for sim in finished:
        simsRunning.remove(sim)
class HomesData:
    """Web endpoint returning home-institution names as a JSON array,
    optionally filtered by a ?term= substring (case-insensitive)."""

    def __init__(self):
        pass

    def __activate__(self, context):
        # Entry point called by the framework with the request context.
        self.velocityContext = context
        self.request = context["request"]
        self.response = context["response"]
        self.sessionState = context["sessionState"]
        self.errorMsg = ""
        action = self.request.getParameter("action")
        homeConfigFilePath = FascinatorHome.getPath(
            "process") + "/notification/homeInstitutions.json"
        self.homeConfigFile = File(homeConfigFilePath)
        if self.homeConfigFile.exists() == False:
            self.errorMsg = "Configuration path does not exist: %s" % homeConfigFilePath
            return
        # Default (missing) action behaves the same as "list".
        if action is None or action == "list":
            self.listHomes()
        else:
            self.errorMsg = "Invalid action."

    def getErrorMsg(self):
        # Empty string means the last activation succeeded.
        return self.errorMsg

    def listHomes(self):
        """Stream matching institution names to the response as JSON."""
        term = self.request.getParameter("term")
        writer = self.response.getPrintWriter(
            "application/json; charset=UTF-8")
        homeJsonBlock = JsonSimple(self.homeConfigFile)
        writer.println("[")
        count = 0
        for homeObj in homeJsonBlock.getArray("institutions"):
            if term is not None:
                # Case-insensitive substring match against the name.
                if homeObj.get("name").lower().find(term.lower()) > -1:
                    count = count + 1
                    self.printHome(writer, homeObj, count)
            else:
                count = count + 1
                self.printHome(writer, homeObj, count)
        writer.println("]")
        writer.close()

    def printHome(self, writer, homeObj, count):
        # Comma-separate entries after the first one.
        # NOTE(review): the name is not JSON-escaped; quotes in a name
        # would break the output — confirm names are controlled input.
        if count > 1:
            writer.print(",")
        writer.println("\"%s\"" % homeObj.get("name"))
def process(self, dataSource, progressBar):
    """Run the bundled img_stat EXE against the data source's disk image
    and attach its output to the case as a report.

    Skips (with a log entry) when not on Windows or when the data source
    is not a disk image. Always returns IngestModule.ProcessResult.OK.
    """
    # Set the progress bar to an Indeterminate state for now
    progressBar.switchToIndeterminate()

    # Return if we're not running on a Windows system
    if not PlatformUtil.isWindowsOS():
        self.log(Level.INFO, "Ignoring data source. Not running on Windows")
        return IngestModule.ProcessResult.OK

    # Verify we have a disk image and not a folder of files
    if not isinstance(dataSource, Image):
        self.log(Level.INFO, "Ignoring data source. Not an image")
        return IngestModule.ProcessResult.OK

    # Get disk image paths
    imagePaths = dataSource.getPaths()

    # Save our output to a file in the reports folder
    # named based on EXE and data source ID
    reportFile = File(Case.getCurrentCase().getCaseDirectory() +
                      "\\Reports" + "\\img_stat-" + str(dataSource.getId()) + ".txt")

    # Run the EXE, saving output to the report.
    # The terminator aborts the child process if the ingest is cancelled.
    self.log(Level.INFO, "Running program on data source")
    cmd = ArrayList()
    cmd.add(self.pathToEXE.toString())
    cmd.add(imagePaths[0])
    processBuilder = ProcessBuilder(cmd)
    processBuilder.redirectOutput(reportFile)
    ExecUtil.execute(processBuilder, DataSourceIngestModuleProcessTerminator(self.context))

    # Add the report to the case, so it shows up in the tree
    if not self.context.dataSourceIngestIsCancelled():
        Case.getCurrentCase().addReport(reportFile.toString(), "Run EXE", "img_stat output")
    else:
        # Cancelled: remove the partial report instead of attaching it.
        if reportFile.exists():
            if not reportFile.delete():
                # Bug fix: original called self.log(LEVEL.warning, ...) with
                # the undefined name LEVEL, raising NameError on this path;
                # Level.WARNING matches the enum used elsewhere in the module.
                self.log(Level.WARNING, "Error deleting the incomplete report file")
    return IngestModule.ProcessResult.OK
def shouldInstallScanner(scannerPlatformConfig, Framework, shell):
    """Decide whether the UD scanner should be (re)installed on the remote
    machine.

    Returns 1 (install) when: both upgrade and downgrade are allowed; no
    remote version is detectable; the local installer is newer and upgrade
    is allowed; older and downgrade is allowed; versions are equal; or the
    local version file is missing while upgrade is allowed. Otherwise 0.
    """
    shouldInstall = 0
    #staring to check scanner version on remote machine
    isUpgradeAllowed = Boolean.parseBoolean(Framework.getParameter('IsScannerUpgradeAllowed'))
    logger.debug('Parameter isUpgradeAllowed:', isUpgradeAllowed)
    IsDowngradeAllowed = Boolean.parseBoolean(Framework.getParameter('IsScannerDowngradeAllowed'))
    logger.debug('Parameter IsDowngradeAllowed:', IsDowngradeAllowed)
    if isUpgradeAllowed and IsDowngradeAllowed:
        logger.debug('Upgrade and Downgrade allowed, installing scanner in any case')
        shouldInstall = 1
    else:
        remoteScannerVersion = Framework.getDestinationAttribute('scannerVersion')
        if (remoteScannerVersion is None) or (len(str(remoteScannerVersion)) == 0) or (str(remoteScannerVersion) == 'NA'):
            logger.debug('Remote scanner version is unavailable, going to execute scanner upgrade')
            shouldInstall = 1
        else:
            logger.debug('Scanner already found on remote machine')
            installerFileName = scannerPlatformConfig.getScannerExecutable()
            # Local installer version lives in <resources>/ud_scanners/<exe>-version.xml
            installerVersioninstallerXmlFilePath = CollectorsParameters.PROBE_MGR_RESOURCES_DIR + 'ud_scanners' + str(File.separator) + installerFileName + '-version.xml'
            logger.debug('Checking installer version in file ', installerVersioninstallerXmlFilePath)
            installerXmlFile = File(installerVersioninstallerXmlFilePath)
            if installerXmlFile.exists() and installerXmlFile.isFile():
                installerVersion = getInstallerVersion(installerXmlFile, Framework)
                logger.debug('Current scanner version ', installerVersion)
                # Normalize "X.Y build N" reported remotely to "X.Y.N".
                m = re.search('([\d\.]+) build ([\d]+)', remoteScannerVersion)
                if m:
                    remoteScannerVersion = m.group(1)+'.'+m.group(2)
                logger.debug('Remote scanner version ', remoteScannerVersion)
                if compareVersions(installerVersion, remoteScannerVersion) > 0:
                    # Local installer is newer than the remote scanner.
                    if isUpgradeAllowed:
                        logger.debug('Upgrade should be perfomed')
                        shouldInstall = 1
                    else:
                        logger.debug('Upgrade is not allowed')
                elif compareVersions(installerVersion, remoteScannerVersion) < 0:
                    # Local installer is older than the remote scanner.
                    if IsDowngradeAllowed:
                        logger.debug('Downgrade should be perfomed')
                        shouldInstall = 1
                    else:
                        logger.debug('Downgrade is not allowed')
                else:
                    # Equal versions: reinstall unconditionally.
                    logger.debug('Scanner should be installed')
                    shouldInstall = 1
            else:
                # No local version file to compare against.
                if isUpgradeAllowed:
                    logger.debug('Going to upgrade scanner, version file not exists:', installerVersioninstallerXmlFilePath)
                    shouldInstall = 1
    return shouldInstall
def update_buffer_log_folder(): global __buffer_log_file__, __export_folder__, __buffer_logger__, __history_log_file__, __history_logger__ __buffer_log_file__ = __export_folder__ fi = File(__buffer_log_file__) if not fi.exists(): if not fi.mkdirs(): print 'Error: failed to make directory: ' + __buffer_log_file__ __history_log_file__ = __buffer_log_file__ + '/History.txt' __buffer_log_file__ += '/LogFile.txt' if __buffer_logger__: __buffer_logger__.close() __buffer_logger__ = open(__buffer_log_file__, 'a') if __history_logger__: __history_logger__.close() __history_logger__ = open(__history_log_file__, 'a')
def process(self, dataSource, progressBar):
    """Run the module's Windows EXE against the data source's disk image,
    capture its output into the Reports folder, and register it as a case
    report unless the ingest was cancelled.

    Always returns IngestModule.ProcessResult.OK (skips are logged).
    """
    # we don't know how much work there will be
    progressBar.switchToIndeterminate()

    # Example has only a Windows EXE, so bail if we aren't on Windows
    if not PlatformUtil.isWindowsOS():
        self.log(Level.INFO, "Ignoring data source. Not running on Windows")
        return IngestModule.ProcessResult.OK

    # Verify we have a disk image and not a folder of files
    if not isinstance(dataSource, Image):
        self.log(Level.INFO, "Ignoring data source. Not an image")
        return IngestModule.ProcessResult.OK

    # Get disk image paths
    imagePaths = dataSource.getPaths()

    # We'll save our output to a file in the reports folder, named based on EXE and data source ID
    reportFile = File(Case.getCurrentCase().getCaseDirectory() +
                      "\\Reports" + "\\img_stat-" + str(dataSource.getId()) + ".txt")

    # Run the EXE, saving output to the report
    # Check if the ingest is terminated and delete the incomplete report file
    # Do not add report to the case tree if the ingest is cancelled before finish.
    # This can be done by using IngestJobContext.dataSourceIngestIsCancelled
    # See: http://sleuthkit.org/autopsy/docs/api-docs/4.7.0/_ingest_job_context_8java.html
    self.log(Level.INFO, "Running program on data source")
    cmd = ArrayList()
    cmd.add(self.pathToEXE.toString())
    cmd.add(imagePaths[0])
    processBuilder = ProcessBuilder(cmd)
    processBuilder.redirectOutput(reportFile)
    ExecUtil.execute(processBuilder, DataSourceIngestModuleProcessTerminator(self.context))

    # Add the report to the case, so it shows up in the tree
    if not self.context.dataSourceIngestIsCancelled():
        Case.getCurrentCase().addReport(reportFile.toString(), "Run EXE", "img_stat output")
    else:
        if reportFile.exists():
            if not reportFile.delete():
                # Bug fix: original used the undefined name LEVEL.warning,
                # which raises NameError on this path; Level.WARNING matches
                # the logging enum used elsewhere in this module.
                self.log(Level.WARNING, "Error deleting the incomplete report file")
    return IngestModule.ProcessResult.OK
def validateFilestoresProperty(domainProperties):
    """Validate every filestore listed in persistent.filestores.

    For each filestore, checks that Name is set, that Target (if given)
    names a server from wls.servers, that Migratable (if given) is
    true/false, and warns about absolute Location paths that don't exist
    yet. Returns 1 if any check failed, else 0.
    """
    error = 0
    filestores = domainProperties.getProperty('persistent.filestores')
    if not filestores is None and len(filestores)>0:
        filestoreList = filestores.split(',')
        for filestore in filestoreList:
            helper.printHeader('[VALIDATING] filestore ' + str(filestore) + ' properties')
            # Name is mandatory for every listed filestore.
            filestoreName = domainProperties.getProperty('persistent.filestore.' + str(filestore) + '.Name')
            if filestoreName is None or len(filestoreName)==0:
                error = 1
                log.error('Please verify persistent.filestore.' + str(filestore) + '.Name property if it exists in configuration.')
            else:
                log.debug('Filestore [' + str(filestore) + '] name property [' + str(filestoreName) + '] is valid.')
            # Target, when present, must be one of the configured servers.
            filestoreTarget = domainProperties.getProperty('persistent.filestore.' + str(filestore) + '.Target')
            if not filestoreTarget is None and len(filestoreTarget)>0:
                servers = domainProperties.getProperty('wls.servers')
                if not servers is None and len(servers)>0:
                    serverList = servers.split(',')
                    exist = 0
                    for server in serverList:
                        if server==filestoreTarget:
                            exist = 1
                            break
                    if not exist:
                        error = 1
                        log.error('persistent.filestore.' + str(filestore) + '.Target property refers to server that does not exist within wls.servers property.')
                    else:
                        log.debug('Filestore [' + str(filestore) + '] target property [' + str(filestoreTarget) + '] is valid.')
            # Migratable, when present, must be exactly true or false.
            filestoreMigratable = domainProperties.getProperty('persistent.filestore.' + str(filestore) + '.Migratable')
            if not filestoreMigratable is None and len(filestoreMigratable)>0:
                if not filestoreMigratable.upper()=='TRUE' and not filestoreMigratable.upper()=='FALSE':
                    error = 1
                    log.error('The persistent.filestore.' + str(filestore) + '.Migratable property supports only [true,false, or leave blank to use default].')
                else:
                    log.debug('Filestore [' + str(filestore) + '] migratable property [' + str(filestoreMigratable) + '] is valid.')
            # A missing absolute Location is only a note, not an error.
            location = domainProperties.getProperty('persistent.filestore.'
                + str(filestore) + '.Location')
            if not location is None and len(location)>0:
                file = File(location)
                if file.isAbsolute():
                    if not file.exists():
                        log.debug('[NOTE] Please make sure the user running this script has permission to create directory and file [' + str(location) + '].')
    return error
def stat(path):
    """stat(path) -> stat result

    Perform a stat system call on the given path.

    The Java stat implementation only returns a small subset of
    the standard fields: size, modification time and change time.
    """
    jfile = File(path)
    length = jfile.length()
    if length == 0:
        # File.length() reports 0 both for an empty file and for a missing
        # one, so a second existence probe is needed to tell them apart.
        if not jfile.exists():
            raise OSError(0, 'No such file or directory', path)
    modified = jfile.lastModified() / 1000.0
    return stat_result((0, 0, 0, 0, 0, 0, length, modified, modified, 0))
def __get_manifest(self, source_path, from_archive):
    """
    Returns the manifest object for the specified path.
    The source path may be a jar, or an exploded path.
    :param source_path: the source path to be checked
    :param from_archive: if True, use the manifest from the archive, otherwise from the file system
    :return: the manifest, or None if it is not present
    :raises: IOException: if there are problems reading an existing manifest
    """
    manifest = None
    if string_utils.is_empty(source_path):
        return manifest
    # Resolve any model tokens (@@DOMAIN_HOME@@ etc.) before touching disk.
    source_path = self.model_context.replace_token_string(source_path)
    if from_archive and deployer_utils.is_path_into_archive(source_path):
        return self.archive_helper.get_manifest(source_path)
    else:
        if not os.path.isabs(source_path):
            # if this was in archive, it has been expanded under domain home.
            # or it may be a relative file intentionally placed under domain home.
            source_file = File(File(self.model_context.get_domain_home()), source_path)
        else:
            source_file = File(source_path)
        if source_file.isDirectory():
            # read the manifest directly from the file system
            manifest_file = File(source_file, MANIFEST_NAME)
            if manifest_file.exists():
                stream = None
                try:
                    stream = FileInputStream(manifest_file)
                    manifest = Manifest(stream)
                finally:
                    # Close best-effort; a failed close must not mask the
                    # manifest we already parsed.
                    if stream is not None:
                        try:
                            stream.close()
                        except IOException:
                            # nothing to report
                            pass
        else:
            # read the manifest from the deployable ear/jar/war on the file system
            # NOTE(review): this JarFile is never closed — confirm whether the
            # file handle leak matters for callers.
            archive = JarFile(source_file.getAbsolutePath())
            manifest = archive.getManifest()
    return manifest
def valueChanged(self, controller, newValue):
    """Node-listener callback: when the save counter advances, queue the
    newly saved data file to be added to the dataset on the UI thread.

    Side effects: updates self.saveCount and the module-global
    __file_to_add__; schedules add_dataset via Display.asyncExec.
    """
    global __file_to_add__
    newCount = int(newValue.getStringData())
    # Only react when the counter actually changed (avoids duplicate adds).
    if newCount != self.saveCount:
        self.saveCount = newCount
        try:
            # Resolve the reported file name against the local data folder.
            checkFile = File(__file_name_node__.getValue().getStringData())
            checkFile = File(__data_folder__ + "/" + checkFile.getName())
            __file_to_add__ = checkFile.getAbsolutePath()
            if not checkFile.exists():
                print "The target file :" + __file_to_add__ + " can not be found"
                return
            # Hand off to the SWT display thread; add_dataset reads
            # __file_to_add__ when it runs.
            runnable = __Display_Runnable__()
            runnable.run = add_dataset
            Display.getDefault().asyncExec(runnable)
        except:
            # Deliberate best-effort: a bad notification must not kill the listener.
            print 'failed to add dataset ' + __file_to_add__
def _loadFontsFromFileSystem(): gEnv = GraphicsEnvironment.getLocalGraphicsEnvironment() for fontsDir in _localFontDirectories: for dirpath, dirnames, filenames in os.walk(fontsDir): for filename in filenames: if filename.lower().endswith('.ttf'): p = os.path.join(dirpath, filename) # Load the font from a file fontFile = File(p) if not fontFile.exists(): raise RuntimeError, 'Could not get font file for {0}'.format( p) else: font = Font.createFont(Font.TRUETYPE_FONT, fontFile) gEnv.registerFont(font)
def deleteDirectory(self, sPath):
    """Recursively delete the directory at sPath and all of its contents.

    Silently does nothing when sPath is missing or not a directory.
    """
    # (translated) If sPath does not end with the file separator, one is
    # appended automatically.
    dirFile = File(sPath)
    # (translated) If the path does not exist or is not a directory, return.
    if dirFile.exists() == False or dirFile.isDirectory() == False:
        return
    # (translated) Delete everything inside the folder, including subdirectories.
    files = dirFile.listFiles()
    for f in files:
        # (translated) Delete plain files directly; recurse into directories.
        if f.isFile():
            f.delete()
        else:
            self.deleteDirectory(f.getAbsolutePath())
    # (translated) Finally delete the now-empty directory itself.
    dirFile.delete()
    dirFile = None