def syncNmapPortConfigFile(agentPath):
    ''' Sync nmap port config with global probe's "port number to port name" mapping '''
    logger.debug('synchronizing nmap port config file')
    # Source: the probe's XML mapping of port numbers to service names.
    portConfigFilename = agentPath + CollectorsParameters.getDiscoveryConfigFolder() + CollectorsParameters.FILE_SEPARATOR + 'portNumberToPortName.xml'
    mamservice = File(portConfigFilename)
    # Target: the nmap-services file regenerated from that mapping.
    nmapservice = File(agentPath + CollectorsParameters.getDiscoveryResourceFolder() + CollectorsParameters.FILE_SEPARATOR + 'nmap-services')
    # Skip regeneration when nmap-services is already newer than the XML.
    if nmapservice.lastModified() > mamservice.lastModified():
        return
    nmapFile = FileOutputStream(nmapservice)
    document = SAXBuilder(0).build(mamservice)
    # document = parse(portConfigFilename)
    ports = XmlWrapper(document.getRootElement().getChildren('portInfo'))
    for port in ports:
        # Export only entries whose "discover" attribute parses as nonzero.
        if int(port.getAttributeValue("discover")):
            portNumber = port.getAttributeValue("portNumber")
            portName = port.getAttributeValue("portName")
            portProtocol = port.getAttributeValue("portProtocol")
            # nmap-services line format: "<name>\t<number>/<protocol>".
            nmapFile.write("%s\t%s/%s\r\n" % (portName, portNumber, portProtocol))
    nmapFile.close()
def load_from_file(file_package, expected_class):
    """Load the class named ``expected_class`` from package directory ``file_package``.

    Deletes a stale compiled ``$py.class`` file when the ``.py`` source is
    newer, imports the source module, and returns the requested attribute
    (or None when the module does not define it).
    """
    print("loading %s from %s" % (expected_class, file_package))
    base_path = file_package.replace(".", "/") + "/" + expected_class
    compiled_file = File(base_path + "$py.class")
    source_file = File(base_path + ".py")
    # BUG FIX: removed the unconditional "Compiled file outdated." print that
    # ran before the staleness check and produced a misleading log line.
    if compiled_file.exists():
        # Compiled file older than its source is stale: delete it so the
        # interpreter recompiles from the fresh source.
        if compiled_file.lastModified() < source_file.lastModified():
            print("get request for controller %s. Compiled file outdated." % expected_class)
            compiled_file.delete()
        else:
            print("get request for controller %s. Compiled file is up-to-date." % expected_class)
    else:
        print("get request for controller %s. Compiled file does not exists." % expected_class)
    py_mod = imp.load_source("module_" + expected_class, source_file.getAbsolutePath())
    if hasattr(py_mod, expected_class):
        class_inst = getattr(py_mod, expected_class)
        print(class_inst.__doc__)
        print(class_inst.__name__)
    else:
        # BUG FIX: __doc__/__name__ were previously printed unconditionally,
        # raising AttributeError (None.__name__) when the class was missing.
        class_inst = None
    return class_inst
def stat(path):
    """stat(path) -> stat result

    Perform a stat system call on the given path.

    The Java stat implementation only returns a small subset of the
    standard fields: size, modification time and change time.
    """
    abs_path = sys.getPath(path)
    # Prefer the native (JNA/posix) stat when the platform provides one.
    try:
        return stat_result.from_jnastat(_posix.stat(abs_path))
    except NotImplementedError:
        pass
    except:
        raise
    # Fallback: synthesize a stat result from java.io.File attributes.
    entry = File(abs_path)
    if not entry.exists():
        raise OSError(errno.ENOENT, strerror(errno.ENOENT), path)
    byte_count = entry.length()
    modified = entry.lastModified() / 1000.0
    if entry.isDirectory():
        perm = _stat.S_IFDIR
    elif entry.isFile():
        perm = _stat.S_IFREG
    else:
        perm = 0
    if entry.canRead():
        perm |= _stat.S_IREAD
    if entry.canWrite():
        perm |= _stat.S_IWRITE
    return stat_result((perm, 0, 0, 0, 0, 0, byte_count, modified, modified, 0))
def getListOfAvailableFLVs(self): """Return list of .flv files that can be streamed.""" scope = Red5.getConnectionLocal().getScope() serverRoot = System.getProperty('red5.root') filesMap = HashMap() try: print 'Getting the FLV files' flvs = scope.getResources("streams/*.flv") for file in flvs: fso = File(serverRoot + '/webapps/oflaDemo' + file.path) flvName = fso.getName() flvBytes = 0 if hasattr(fso, 'length'): flvBytes = fso.length() else: print 'Length not found' lastMod = '0' if hasattr(fso, 'lastModified'): lastMod = self.formatDate(Date(fso.lastModified())) else: log.debug('Last modified not found') print 'FLV Name:', flvName print 'Last modified date:', lastMod print 'Size:', flvBytes print '-------' fileInfo = HashMap(3) fileInfo["name"] = flvName fileInfo["lastModified"] = lastMod fileInfo["size"] = flvBytes filesMap[flvName] = fileInfo except Exception, e: print 'Error in getListOfAvailableFLVs:', e
def getListOfAvailableFLVs(self): """Return list of .flv files that can be streamed.""" scope = Red5.getConnectionLocal().getScope() serverRoot = System.getProperty('red5.root') filesMap = HashMap() try: print 'Getting the FLV files' flvs = scope.getResources("streams/*.flv") for file in flvs: fso = File(serverRoot + '/webapps/oflaDemo' + file.path) flvName = fso.getName() flvBytes = 0 if hasattr(fso, 'length'): flvBytes = fso.length() else: print 'Length not found' lastMod = '0' if hasattr(fso, 'lastModified'): lastMod = self.formatDate(Date(fso.lastModified())) else: log.debug('Last modified not found') print 'FLV Name:', flvName print 'Last modified date:', lastMod print 'Size:', flvBytes print '-------' fileInfo = HashMap(3); fileInfo["name"] = flvName fileInfo["lastModified"] = lastMod fileInfo["size"] = flvBytes filesMap[flvName] = fileInfo except Exception, e: print 'Error in getListOfAvailableFLVs:', e
def stat(path):
    """stat(path) -> stat result

    Perform a stat system call on the given path.

    The Java stat implementation only returns a small subset of the
    standard fields: size, modification time and change time.
    """
    abs_path = sys.getPath(path)
    # Try the native posix stat first; fall back to java.io.File when the
    # platform layer does not implement it.
    try:
        return stat_result.from_jnastat(_posix.stat(abs_path))
    except NotImplementedError:
        pass
    except:
        raise
    target = File(abs_path)
    if not target.exists():
        raise OSError(errno.ENOENT, errno.strerror(errno.ENOENT), path)
    length = target.length()
    mod_seconds = target.lastModified() / 1000.0
    # Build the permission/type bits from what java.io.File can report.
    bits = 0
    if target.isDirectory():
        bits = _stat.S_IFDIR
    elif target.isFile():
        bits = _stat.S_IFREG
    if target.canRead():
        bits = bits | _stat.S_IREAD
    if target.canWrite():
        bits = bits | _stat.S_IWRITE
    return stat_result((bits, 0, 0, 0, 0, 0, length, mod_seconds, mod_seconds, 0))
def getatime(path):
    # java.io.File exposes no access-time field, so the modification time
    # is reported instead — consistent with os.stat() in this module.
    path = _tostr(path, "getatime")
    entry = File(sys.getPath(path))
    if not entry.exists():
        raise OSError(0, 'No such file or directory', path)
    return entry.lastModified() / 1000.0
def getatime(path):
    # Access time is not available through java.io.File; mirror os.stat()
    # and hand back the modification time instead.
    path = _tostr(path, "getatime")
    target = File(path)
    if not target.exists():
        raise OSError(0, 'No such file or directory', path)
    return target.lastModified() / 1000.0
def load_object(self, path, callable_name): try: app_ns = {} ; execfile(path, app_ns) app_callable = app_ns[callable_name] f = File(path) self.cache[ (path, callable_name) ] = (app_callable, f.lastModified()) return app_callable except IOError, ioe: self.raise_exc(ApplicationNotFound, "Application filename not found: %s" % path)
def stat(path):
    """The Java stat implementation only returns a small subset of the standard fields"""
    entry = File(path)
    n_bytes = entry.length()
    # A zero length is ambiguous in java.io.File: the file may be empty or
    # missing entirely, so only then pay for an explicit existence check.
    if n_bytes == 0 and not entry.exists():
        raise OSError(0, 'No such file or directory', path)
    modified = entry.lastModified() / 1000.0
    return (0, 0, 0, 0, 0, 0, n_bytes, modified, modified, 0)
def syncNmapPortConfigFile(agentPath):
    ''' Sync nmap port config with global probe's "port number to port name" mapping '''
    logger.debug('synchronizing nmap port config file')
    # Source: the probe's XML mapping of port numbers to service names.
    portConfigFilename = agentPath + CollectorsParameters.getDiscoveryConfigFolder() + CollectorsParameters.FILE_SEPARATOR + 'portNumberToPortName.xml'
    mamservice = File(portConfigFilename)
    # Target: the nmap-services file regenerated from that mapping.
    nmapservice = File(agentPath + CollectorsParameters.getDiscoveryResourceFolder() + CollectorsParameters.FILE_SEPARATOR + 'nmap-services')
    # Skip regeneration when nmap-services is already newer than the XML.
    if nmapservice.lastModified() > mamservice.lastModified():
        return
    nmapFile = FileOutputStream(nmapservice)
    # BUG FIX: close the output stream even when XML parsing or writing
    # fails, so the Java file handle is not leaked on error.
    try:
        document = SAXBuilder(0).build(mamservice)
        # document = parse(portConfigFilename)
        ports = XmlWrapper(document.getRootElement().getChildren('portInfo'))
        for port in ports:
            # Export only entries whose "discover" attribute parses as nonzero.
            if int(port.getAttributeValue("discover")):
                portNumber = port.getAttributeValue("portNumber")
                portName = port.getAttributeValue("portName")
                portProtocol = port.getAttributeValue("portProtocol")
                # nmap-services line format: "<name>\t<number>/<protocol>".
                nmapFile.write("%s\t%s/%s\r\n" % (portName, portNumber, portProtocol))
    finally:
        nmapFile.close()
def load_object(self, path, callable_name):
    """Execute the script at ``path`` and return the callable named
    ``callable_name`` from its namespace, caching it together with the
    file's modification time.

    Raises (via self.raise_exc): ApplicationNotFound when the file cannot
    be read, NoCallable when the name is missing or loading fails.
    """
    try:
        app_ns = {}
        # BUG FIX: read the source through a context manager so the file
        # handle is closed deterministically instead of leaking until GC.
        with open(path) as src:
            code = compile(src.read(), path, 'exec')
        exec(code, app_ns)
        app_callable = app_ns[callable_name]
        f = File(path)
        self.cache[(path, callable_name)] = (app_callable, f.lastModified())
        return app_callable
    except IOError as ioe:
        self.raise_exc(ApplicationNotFound, "Application filename not found: %s" % path)
    except KeyError as k:
        self.raise_exc(NoCallable, "No callable named '%s' in %s" % (callable_name, path))
    except Exception as x:
        self.raise_exc(NoCallable, "Error loading jython callable '%s': %s" % (callable_name, str(x)))
def stat(path):
    """stat(path) -> stat result

    Perform a stat system call on the given path.

    The Java stat implementation only returns a small subset of the
    standard fields: size, modification time and change time.
    """
    target = File(path)
    length = target.length()
    # java.io.File reports length 0 both for an empty file and for a
    # missing one, so disambiguate with an existence check in that case.
    if length == 0 and not target.exists():
        raise OSError(0, 'No such file or directory', path)
    mod_seconds = target.lastModified() / 1000.0
    return stat_result((0, 0, 0, 0, 0, 0, length, mod_seconds, mod_seconds, 0))
def get_app_object_old_style(self, req, environ):
    """Resolve the WSGI callable for a request, honoring the callable cache.

    Maps the request to a (source file, callable name) pair, returns the
    cached callable when caching is enabled and the entry is fresh, and
    (re)loads from source otherwise.
    """
    source_uri, callable_name = self.map_uri(req, environ)
    source_filename = source_uri
    if not self.params['cache_callables']:
        self.log.debug("Caching of callables disabled")
        return self.load_object(source_filename, callable_name)
    # IMPROVED: single dict lookup via get() instead of the Python 2-only
    # has_key() followed by a second get() for the same key.
    entry = self.cache.get((source_filename, callable_name))
    if entry is None:
        self.log.debug("Callable object not in cache: %s#%s" % (source_filename, callable_name))
        return self.load_object(source_filename, callable_name)
    app_callable, last_mod = entry
    self.log.debug("Callable object was in cache: %s#%s" % (source_filename, callable_name))
    if self.params['reload_on_mod']:
        # Reload when the source file is newer than the cached entry.
        f = File(source_filename)
        if f.lastModified() > last_mod:
            self.log.info("Source file '%s' has been modified: reloading" % source_filename)
            return self.load_object(source_filename, callable_name)
    return app_callable
def load_object(self, path, callable_name):
    """Execute the script at ``path``, cache and return the callable bound
    to ``callable_name`` in its namespace.

    The cache value pairs the callable with the file's modification time
    so staleness can be detected later.  Raises (via self.raise_exc):
    ApplicationNotFound for unreadable files, NoCallable for a missing
    name or any other load failure.
    """
    try:
        app_ns = {}
        # BUG FIX: open the source inside a with-block so the handle is
        # closed promptly rather than leaking until garbage collection.
        with open(path) as source:
            compiled = compile(source.read(), path, 'exec')
        exec(compiled, app_ns)
        app_callable = app_ns[callable_name]
        f = File(path)
        self.cache[(path, callable_name)] = (app_callable, f.lastModified())
        return app_callable
    except IOError as ioe:
        self.raise_exc(ApplicationNotFound,
                       "Application filename not found: %s" % path)
    except KeyError as k:
        self.raise_exc(
            NoCallable,
            "No callable named '%s' in %s" % (callable_name, path))
    except Exception as x:
        self.raise_exc(
            NoCallable,
            "Error loading jython callable '%s': %s" % (callable_name, str(x)))
def collectJDBCSQL():
    """Interactively enable WebLogic JDBC SQL debug logging, collect the log,
    and restore the original settings.

    Flow: connect to the domain, let the user pick a managed server, enlarge
    the server log / normalise its date format, switch on DebugJDBCSQL, wait
    while the user exercises the application, copy the flushed log aside,
    then revert every changed setting.  Returns the path of the copied log.

    NOTE(review): relies on WLST built-ins (connect/edit/startEdit/cd/get/
    set/save/activate/disconnect/cmo) and script-level helpers/globals
    (info, showProgress, flushed, dfLogTimestamp, __prompt_cur, __fileSize,
    __dt_format, __sleep, __script_name) defined elsewhere in this script.
    """
    global wlsInstance
    global wlsVersion
    global rptStart
    global rptEnd
    try:
        wlsVersion = version
        info('Weblogic Version:', wlsVersion)
        info('Note:', 'You must run this script on the Weblogic Managed Server which you connect.')
        info('', '')
        # Open an edit session against the domain.
        connect()
        edit()
        startEdit()
        serverNames=cmo.getServers()
        allServers = []
        for name in serverNames:
            curServerName = name.getName()
            allServers.append(curServerName)
        #allServers.append('agile-server2')
        #allServers.append('agile-server3')
        #allServers.append('agile-server4')
        info('Find following Weblogic instance(s):', len(allServers))
        info('', '')
        # Present the numbered server list and read the user's selection.
        for i in range(len(allServers)):
            srv = allServers[i]
            info(' ' + str(i) + ':', srv)
        info('', '')
        info('Type the number to select the correct Weblogic instance to connect, or type x to exit.', '')
        user_sel = ''
        while user_sel == '':
            user_sel = raw_input(__prompt_cur + ' Your choice: ')
        if user_sel.lower()=='x':
            # User abandons: close the edit session cleanly before exiting.
            save()
            activate()
            disconnect()
            info('User quits.', 'Bye')
            exit()
        wlsInstance = allServers[int(user_sel)]
        # Remember the current log settings so they can be restored later.
        cd('/Servers/'+ wlsInstance + '/Log/' + wlsInstance)
        #ls()
        sqlLogFile = get('FileName')
        info('Get log file:', sqlLogFile)
        sqlLogOrigSize = get('FileMinSize')
        info('Get log size:', str(sqlLogOrigSize))
        logDTFormatStr = get('DateFormatPattern')
        info('Get log date format:', str(logDTFormatStr))
        # Enlarge the log and normalise its timestamp format for parsing.
        set('FileMinSize', __fileSize)
        info('Set log size:', str(__fileSize))
        set('DateFormatPattern', __dt_format)
        info('Set log date format:', __dt_format)
        cd('/Servers/' + wlsInstance + '/ServerDebug/' + wlsInstance)
        set('DebugJDBCSQL','true')
        info('Set DebugJDBCSQL:', 'true')
        save()
        activate()
        sqlLogFilePath = os.getcwd() + '/../servers/' + wlsInstance + '/' + sqlLogFile
        rptStart = dfLogTimestamp.format(Date())
        info('It is collecting SQL data. \nPress Enter after collected.', '')
        raw_input(__prompt_cur + ' ')
        dtRpt = Date()
        rptEnd = dfLogTimestamp.format(dtRpt)
        ##
        info(__script_name + ' is waiting for Weblogic to flush log, please hold on...', '')
        #pytime.sleep(__sleep)
        # Poll the log file's mtime until flushed() reports that WebLogic has
        # written past the report end time, printing a progress dot per poll.
        jfile = File(sqlLogFilePath)
        showProgress(__prompt_cur + ' ')
        while True:
            jfmodifiedUnix = jfile.lastModified()
            rpt_endtime_unix = dtRpt.getTime()
            dtCurrent = Date()
            if (flushed(rpt_endtime_unix, jfmodifiedUnix)):
                break
            showProgress('.')
            Thread.sleep(__sleep * 1000)
        showProgress('\n')
        sqlLogFilePathCopy = sqlLogFilePath + '.' + __script_name
        shutil.copyfile(sqlLogFilePath, sqlLogFilePathCopy)  # copy jdbc log file
        info('Copy ' + sqlLogFile + ' to', sqlLogFilePathCopy)
        ##
        ## revert back to original setting
        edit()
        startEdit()
        info('Get server:', wlsInstance)
        cd('/Servers/'+ wlsInstance + '/Log/' + wlsInstance)
        set('FileMinSize', sqlLogOrigSize)
        info('Reset log size:', str(sqlLogOrigSize))
        set('DateFormatPattern', logDTFormatStr)
        info('Reset log date format:', str(logDTFormatStr))
        cd('/Servers/' + wlsInstance + '/ServerDebug/' + wlsInstance)
        set('DebugJDBCSQL','false')
        info('Reset DebugJDBCSQL:', 'false')
        save()
        activate()
        disconnect()
        return sqlLogFilePathCopy
        #rpt_endtime = pytime.strftime(__dt_format, pytime.localtime())
    except Exception:
        # Best effort: close the edit session and disconnect before
        # re-raising, so the domain is not left holding an edit lock.
        save()
        activate()
        disconnect()
        raise
def getmtime(path):
    """Return the last-modification time of path in seconds since the epoch."""
    path = _tostr(path, "getmtime")
    target = File(path)
    if not target.exists():
        raise OSError(0, 'No such file or directory', path)
    # java.io.File reports milliseconds; convert to float seconds.
    return target.lastModified() / 1000.0
# Script section: open a Moneydance test data folder through the Java API
# and navigate to its root account.
print("Importing useful Java Classes...")
from java.io import File
#%%
print("Importing useful Investment Reports Classes...")
from com.moneydance.modules.features.invextension import ReportConfig
from com.moneydance.modules.features.invextension import BulkSecInfo
from com.moneydance.modules.features.invextension import TotalFromToReport
from com.moneydance.modules.features.invextension import TransactionValues
#%%
print("prove moneydance data file exists, load it into java File object")
# Windows-style relative path to the test dataset (note escaped backslash).
mdTestFolder = "resources\\testMD02.moneydance"
print("Moneydance Data File exists? {0}, in {1}".format(
    os.path.exists(mdTestFolder), mdTestFolder))
mdFileJava = File(mdTestFolder)
last_modded_long = mdFileJava.lastModified()  # type is java class 'JLong'
# Convert Java epoch-milliseconds to a pandas Timestamp in US/Central.
last_modded_ts = pd.Timestamp(int(last_modded_long) / 1000, unit='s',
                              tz='US/Central')
print("data folder last modified: {0}".format(last_modded_ts.isoformat()))
#%%
print("now get AccountBookWrapper, accountBook, and rootAccount")
wrapper = AccountBookWrapper.wrapperForFolder(
    mdFileJava)  # wrapper is of java type 'AccountBookWrapper'
wrapper.loadDataModel(None)
accountBook = wrapper.getBook()
root_account = accountBook.getRootAccount()
#%%
print(
    "call up a Report Configuration object from investment reports suitable for testing"
)
def getatime(path):
    # Access time cannot be detected via java.io.File, so the modification
    # time is returned instead — the same behaviour as os.stat() here.
    return File(_tostr(path, "getatime")).lastModified() / 1000.0
def getmtime(path):
    # Last-modification time in seconds (java.io.File reports milliseconds).
    return File(_tostr(path, "getmtime")).lastModified() / 1000.0
def getmtime(path):
    """Return the modification time of path in seconds since the epoch."""
    path = _tostr(path, "getmtime")
    resolved = File(sys.getPath(path))
    if not resolved.exists():
        raise OSError(0, 'No such file or directory', path)
    return resolved.lastModified() / 1000.0
# from com.infinitekind.moneydance.model import CurrencyUtil # from com.infinitekind.moneydance.model import MoneydanceSyncableItem print("Importing useful Java Classes...") from java.io import File # from java.lang import System print("prove moneydance data file exists, load it into java File object") print("Moneydance Data File exists? {0}, in {1}".format( os.path.exists(mdDataFolder), mdDataFolder)) if not os.path.exists(mdDataFolder): raise Exception("!!!! ERROR: Datafile: %s does NOT exist! Aborting...." % (mdDataFolder)) mdFileJava = File(mdDataFolder) last_modded_long = mdFileJava.lastModified() print("data folder last modified: %s" % (last_modded_long)) ################################################################### # The 'magic' that holds/returns the encryption key.... class MySecret(SecretKeyCallback): def __init__(self, theKey): self.theKey = theKey def getPassphrase(self, arg1, arg2=None): print("@getPassphrase(%s,%s) will return %s" % (arg1, arg2, self.theKey)) return self.theKey
def getCacheEntryAge(self, theKey):
    """Return the age in ms of the cached zip for theKey; when no cache
    entry exists, return the current time (a very large age)."""
    cached = File(self.theCacheDirectory, theKey + ".zip")
    now = System.currentTimeMillis()
    if not cached.exists():
        return now
    return now - cached.lastModified()
class CloudGameRepository(GameRepository):
    """ generated source for class CloudGameRepository

    Game repository backed by a remote server with a local on-disk cache
    under ~/.ggpserver-gamecache.

    NOTE(review): this is machine-generated (java2python-style) code and is
    visibly broken — __init__ references names that are never defined here
    (theDigest, i, refreshThread, length).  It will raise NameError at
    runtime; reconcile against the original Java CloudGameRepository.
    """
    theRepoURL = str()
    theCacheDirectory = File()
    needsRefresh = True

    def __init__(self, theURL):
        """ generated source for method __init__ """
        super(CloudGameRepository, self).__init__()
        self.theRepoURL = RemoteGameRepository.properlyFormatURL(theURL)
        # Generate a unique hash of the repository URL, to use as the
        # local directory for files for the offline cache.
        theCacheHash = StringBuilder()
        try:
            # NOTE(review): theDigest and i are undefined — generated code.
            while len(theDigest):
                theCacheHash.append(Math.abs(theDigest[i]))
                i += 1
        except Exception as e:
            theCacheHash = None
        theCachesDirectory = File(System.getProperty("user.home"), ".ggpserver-gamecache")
        theCachesDirectory.mkdir()
        self.theCacheDirectory = File(theCachesDirectory, "repoHash" + theCacheHash)
        if self.theCacheDirectory.exists():
            # For existing caches, only force a full refresh at most once per day
            self.needsRefresh = (System.currentTimeMillis() - self.theCacheDirectory.lastModified()) > 86400000
        else:
            self.theCacheDirectory.mkdir()
            self.needsRefresh = True
        if self.needsRefresh:
            # NOTE(review): refreshThread and length are undefined here.
            refreshThread.start()
            # Update the game cache asynchronously if there are already games.
            # Otherwise, force a blocking update.
            if len(length):
                try:
                    refreshThread.join()
                except InterruptedException as e:
                    # NOTE(review): reconstructed indentation — in the source
                    # Java these two statements run unconditionally after the
                    # refresh, not only on interruption; confirm and fix.
                    self.theCacheDirectory.setLastModified(System.currentTimeMillis())
                    self.needsRefresh = False

    def getUncachedGameKeys(self):
        """ generated source for method getUncachedGameKeys

        Lists cache-directory zip files as game keys (strips ".zip")."""
        theKeys = HashSet()
        for game in theCacheDirectory.listFiles():
            theKeys.add(game.__name__.replace(".zip", ""))
        return theKeys

    def getUncachedGame(self, theKey):
        """ generated source for method getUncachedGame

        Returns the cached game when present, otherwise fetches it
        directly from the remote repository."""
        cachedGame = loadGameFromCache(theKey)
        if cachedGame != None:
            return cachedGame
        # Request the game directly on a cache miss.
        return RemoteGameRepository(self.theRepoURL).getGame(theKey)

    # ================================================================
    # Games are cached asynchronously in their own threads.
class RefreshCacheForGameThread(Thread):
    """ generated source for class RefreshCacheForGameThread

    Worker thread that refreshes a single game's cache entry.

    NOTE(review): generated code — run() uses names never defined here
    (myGameVersion, versionedRepoURL, theGame, saveGameToCache); it would
    raise NameError as written."""
    theRepository = RemoteGameRepository()
    theKey = str()

    def __init__(self, a, b):
        """ generated source for method __init__ """
        super(RefreshCacheForGameThread, self).__init__()
        self.theRepository = a
        self.theKey = b

    def run(self):
        """ generated source for method run """
        try:
            if myGameVersion != None:
                myVersionedRepoURL = myGameVersion.getRepositoryURL()
                if not versionedRepoURL == myVersionedRepoURL:
                    # Cache miss: we don't have the current version for
                    # this game, and so we need to load it from the web.
                    saveGameToCache(self.theKey, theGame)
        except Exception as e:
            e.printStackTrace()


class RefreshCacheThread(Thread):
    """ generated source for class RefreshCacheThread

    Background thread that refreshes the whole local game cache from the
    remote repository shortly after startup.

    NOTE(review): generated code — run() references undefined names
    (myGameVersion, remoteVersionedGameURL, unchangedKeys, t)."""
    theRepoURL = str()

    def __init__(self, theRepoURL):
        """ generated source for method __init__ """
        super(RefreshCacheThread, self).__init__()
        self.theRepoURL = theRepoURL

    def run(self):
        """ generated source for method run """
        try:
            # Sleep for the first two seconds after which the cache is loaded,
            # so that we don't interfere with the user interface startup.
            Thread.sleep(2000)
        except InterruptedException as e:
            e.printStackTrace()
            return
        remoteRepository = RemoteGameRepository(self.theRepoURL)
        print "Updating the game cache..."
        beginTime = System.currentTimeMillis()
        # Since games are immutable, we can guarantee that the games listed
        # by the repository server includes the games in the local cache, so
        # we can be happy just updating/refreshing the listed games.
        theGameKeys = remoteRepository.getGameKeys()
        if theGameKeys == None:
            return
        # If the server offers a single combined metadata file, download that
        # and use it to avoid checking games that haven't gotten new versions.
        bundledMetadata = remoteRepository.getBundledMetadata()
        if bundledMetadata != None:
            for theKey in theGameKeys:
                try:
                    if myGameVersion == None:
                        continue
                    # Skip updating the game cache entry if the version is the same
                    # and the cache entry was written less than a week ago.
                    if myGameVersion.getRepositoryURL() == remoteVersionedGameURL and getCacheEntryAge(theKey) < 604800000:
                        unchangedKeys.add(theKey)
                except Exception as e:
                    continue
            theGameKeys.removeAll(unchangedKeys)
        # Start threads to update every entry in the cache (or at least verify
        # that the entry doesn't need to be updated).
        theThreads = HashSet()
        for gameKey in theGameKeys:
            t.start()
            theThreads.add(t)
        # Wait until we've updated the cache before continuing.
        for t in theThreads:
            try:
                t.join()
            except InterruptedException as e:
                # NOTE(review): reconstructed indentation — the timing
                # statements below likely belong after the loop, run
                # unconditionally; confirm against the original Java.
                endTime = System.currentTimeMillis()
        print "Updating the game cache took: " + (endTime - beginTime) + "ms."


# ================================================================
# NOTE(review): the following @synchronized methods use self.theCacheDirectory
# and appear detached here; they presumably belong to CloudGameRepository.
@synchronized
def saveGameToCache(self, theKey, theGame):
    """ generated source for method saveGameToCache

    Serializes theGame to JSON into <cache>/<theKey>.zip.
    NOTE(review): pw/gOut/fOut stream setup is missing from this
    generated body — they are used but never created."""
    if theGame == None:
        return
    theGameFile = File(self.theCacheDirectory, theKey + ".zip")
    try:
        theGameFile.createNewFile()
        pw.print_(theGame.serializeToJSON())
        pw.flush()
        pw.close()
        gOut.close()
        fOut.close()
    except Exception as e:
        e.printStackTrace()


@synchronized
def loadGameFromCache(self, theKey):
    """ generated source for method loadGameFromCache

    Reads one JSON line from <cache>/<theKey>.zip and deserializes it;
    returns None when nothing could be read.
    NOTE(review): br/ir/gIn/fIn stream setup is missing from this
    generated body — they are used but never created."""
    theGameFile = File(self.theCacheDirectory, theKey + ".zip")
    theLine = None
    try:
        theLine = br.readLine()
        br.close()
        ir.close()
        gIn.close()
        fIn.close()
    except Exception as e:
        if theLine == None:
            return None
    return Game.loadFromJSON(theLine)


@synchronized
def getCacheEntryAge(self, theKey):
    """ generated source for method getCacheEntryAge

    Age in ms of the cached zip for theKey; current time when absent."""
    theGameFile = File(self.theCacheDirectory, theKey + ".zip")
    if theGameFile.exists():
        return System.currentTimeMillis() - theGameFile.lastModified()
    return System.currentTimeMillis()
# ================================================================
# NOTE(review): generated code — @classmethod outside a class body is
# invalid placement; this main presumably belongs to CloudGameRepository.
# The `print "..." + len(...)` / `+ (endTime - beginTime)` expressions
# concatenate str with int, which raises TypeError in CPython — presumably
# tolerated only under the original Jython/Java translation; verify.
@classmethod
def main(cls, args):
    """ generated source for method main

    Smoke test: loads every game from the base repository and reports
    the count and elapsed time."""
    theRepository = CloudGameRepository("games.ggp.org/base")
    beginTime = System.currentTimeMillis()
    theGames = HashMap()
    for gameKey in theRepository.getGameKeys():
        theGames.put(gameKey, theRepository.getGame(gameKey))
    print "Games: " + len(theGames)
    endTime = System.currentTimeMillis()
    print "Time: " + (endTime - beginTime) + "ms."


if __name__ == '__main__':
    import sys
    CloudGameRepository.main(sys.argv)