def scanApplications():
    #app plists
    appPlists = []
    #dbg msg
    utils.logMessage(utils.MODE_INFO, 'generating list of all installed apps')
    #get all installed apps
    installedApps = utils.getInstalledApps()
    #now, get Info.plist for each app
    for app in installedApps:
        #skip apps that don't have a path
        if 'path' not in app:
            #skip
            continue
        #get/load app's Info.plist
        plist = utils.loadInfoPlist(app['path'])
        #skip apps that don't have Info.plist
        if not plist:
            #skip
            continue
        #save plist for processing
        appPlists.append(plist)
    #check all plists for DYLD_INSERT_LIBRARIES
    # ->for each found, creates file object
    return scanPlists(appPlists, APPLICATION_DYLD_KEY, isLoaded=True)

def scan(self):
    #dbg msg
    utils.logMessage(utils.MODE_INFO, 'running scan')
    #init results dictionary
    results = self.initResults(STARTUP_ITEM_NAME, STARTUP_ITEM_DESCRIPTION)
    #iterate over all base startup item directories
    # ->look for startup items
    for startupItemBaseDirectory in STARTUP_ITEM_BASE_DIRECTORIES:
        #get sub-directories
        # ->these are the actual startup items
        startupItemDirectories = glob.glob(startupItemBaseDirectory + '*')
        #check each sub-directory (which is likely a startup item)
        # ->there should be a file (script) which matches the name of the sub-directory
        for startupItemDirectory in startupItemDirectories:
            #init the startup item
            startupItem = startupItemDirectory + '/' + os.path.split(startupItemDirectory)[1]
            #check if it exists
            if os.path.exists(startupItem):
                #save
                results['items'].append(file.File(startupItem))
    return results

def scanLaunchItems(directories):
    #launch items
    launchItems = []
    #results
    results = []
    #expand directories
    # ->ensures '~'s are expanded for all users
    directories = utils.expandPaths(directories)
    #get all files (plists) in launch daemon/agent directories
    for directory in directories:
        #dbg msg
        utils.logMessage(utils.MODE_INFO, 'scanning %s' % directory)
        #get launch daemons/agents
        launchItems.extend(glob.glob(directory + '*'))
    #process
    # ->get all auto-run launch services
    autoRunItems = autoRunBinaries(launchItems)
    #iterate over all auto-run items (list of the plist and the binary)
    # ->create file object and add to results
    for autoRunItem in autoRunItems:
        #create and append
        results.append(file.File(autoRunItem[0], autoRunItem[1]))
    return results

def scan(self):
    #dbg msg
    utils.logMessage(utils.MODE_INFO, 'running scan')
    #init results
    results = self.initResults(INSERTED_DYNAMIC_LIBRARIES_NAME, INSERTED_DYNAMIC_LIBRARIES_DESCRIPTION)
    #scan launch items for inserted dylibs
    launchItems = scanLaunchItems(LAUNCH_ITEMS_DIRECTORIES)
    if launchItems:
        #save
        results['items'].extend(launchItems)
    #scan all installed applications for inserted dylibs
    applications = scanApplications()
    if applications:
        #save
        results['items'].extend(applications)
    return results

def kh_dbLoadRecords(self, arg0, tokens, ref):
    local_macros = macros.Macros(**self.env.db)
    full_line = reconstruct_line(tokens).strip()
    tokenLog = TokenLog()
    tokenLog.tokenList = tokens
    tokenLog.token_pointer = 1
    args = parse_bracketed_macro_definitions(tokenLog)
    nargs = len(args)
    if nargs not in (1, 2, 3):
        msg = str(ref) + full_line
        raise UnhandledTokenPattern, msg
    utils.logMessage(arg0 + full_line, utils.LOGGING_DETAIL__NOISY)
    dbFileName = local_macros.replace(utils.strip_quotes(args[0]))
    if nargs in (2, 3):
        # accumulate additional macro definitions
        local_macros = self.parse_macro_args(args[1], ref, tokens, local_macros)
    if nargs == 3:
        path = args[2]
        # msg = str(ref) + full_line
        if self.symbols.exists(path):
            # substitute from symbol table
            path = self.symbols.get(path).value
        if os.path.exists(path):
            dbFileName = os.path.join(path, dbFileName)
    try:
        obj = database.Database(self, dbFileName, ref, **local_macros.db)
        self.database_list.append(obj)
        self.kh_shell_command(arg0, tokens, ref)
    except text_file.FileNotFound, _exc:
        msg = 'Could not find database file: ' + dbFileName
        utils.detailedExceptionLog(msg)
    return

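# For reference, the handler above accepts one to three arguments; typical
# st.cmd lines it would parse look like these (illustrative examples, not
# taken from a real IOC):
#
#   dbLoadRecords("db/scaler.db")
#   dbLoadRecords("db/scaler.db", "P=ioc:,S=scaler1")
#   dbLoadRecords("db/scaler.db", "P=ioc:,S=scaler1", TOP)
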
def scan(self):
    #dbg msg
    utils.logMessage(utils.MODE_INFO, 'running scan')
    #init results dictionary
    results = self.initResults(LAUNCHD_CONF_NAME, LAUNCHD_CONF_DESCRIPTION)
    #get all commands in launchd.conf
    # ->note, commands in functions will be ignored...
    commands = utils.parseBashFile(LAUNCHD_CONF_FILE)
    #iterate over all commands
    # ->instantiate command obj and save into results
    for extractedCommand in commands:
        #TODO: could probably do some more advanced processing (e.g. look for bsexec, etc.)
        #instantiate and save
        results['items'].append(command.Command(extractedCommand))
    return results

def parseCoordMatchFile(coordFileName):
    '''
    A method to parse the coord file; returns a list of utils.match objects.
    '''
    returnValue = []
    #throw if the file doesn't exist
    if not os.path.exists(coordFileName):
        raise utils.NoFileFoundException(coordFileName)
    #read the nucmer file into memory
    lines = open(coordFileName).readlines()
    #skip forward to the start of the matches
    i = 0
    while lines[i][0] != '=':
        i += 1
    matchLines = lines[i + 1:]
    #parse each line for match start, end and sequenceID
    for matchLine in matchLines:
        returnValue.append(parseCoordMatchLine(matchLine))
    utils.logMessage(
        "NucmerParser::parseCoordMatchFile( )",
        "Parsed {0}, found {1} matches".format(coordFileName, len(returnValue)))
    return returnValue

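# parseCoordMatchLine() is referenced above but not shown; here is a minimal
# sketch of what it might look like, assuming the standard nucmer show-coords
# layout where each match row starts with the reference start/end positions
# and ends with the sequence ID (the utils.match constructor signature is
# also an assumption, inferred from the docstring above):
def parseCoordMatchLine(matchLine):
    #drop the '|' column separators, then split on whitespace
    fields = matchLine.replace('|', ' ').split()
    #first two columns: match start/end; last column: sequence ID
    return utils.match(int(fields[0]), int(fields[1]), fields[-1])
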
def scan(self):
    #cron job files
    cronJobFiles = []
    #init results dictionary
    results = self.initResults(CRON_JOBS_NAME, CRON_JOBS_DESCRIPTION)
    #dbg
    utils.logMessage(utils.MODE_INFO, 'running scan')
    #get all files in the cron job directory
    cronJobFiles.extend(glob.glob(CRON_JOB_DIRECTORY + '*'))
    #process
    # ->open file and read each line
    for cronJobFile in cronJobFiles:
        #open file
        # ->read each line (for now, assume 1 cron job per line)
        with open(cronJobFile, 'r') as f:
            #read each line
            for cronJobData in f:
                #skip comment lines
                if cronJobData.lstrip().startswith('#'):
                    #skip
                    continue
                #create and append job
                results['items'].append(command.Command(cronJobData.strip()))
    return results

def scan(self):
    #results
    results = []
    #dbg msg
    utils.logMessage(utils.MODE_INFO, 'running scan')
    #init results
    # ->for launch daemons
    results.append(self.initResults(LAUNCH_DAEMON_NAME, LAUNCH_DAEMON_DESCRIPTION))
    #init results
    # ->for launch agents
    results.append(self.initResults(LAUNCH_AGENT_NAME, LAUNCH_AGENT_DESCRIPTION))
    #init overridden items
    # ->scans overrides plists, and populates 'overriddenItems' class variable
    self.getOverriddenItems()
    #scan for auto-run launch daemons
    # ->save in first index of array
    results[0]['items'] = self.scanLaunchItems(LAUNCH_DAEMON_DIRECTORIES)
    #scan for auto-run launch agents
    # ->save in second index of array
    results[1]['items'] = self.scanLaunchItems(LAUNCH_AGENTS_DIRECTORIES)
    return results

def scan(self):
    #kexts
    kexts = []
    #dbg
    utils.logMessage(utils.MODE_INFO, 'running scan')
    #init results dictionary
    results = self.initResults(KEXT_NAME, KEXT_DESCRIPTION)
    #get all files in kext directories
    for kextDir in KEXT_DIRECTORIES:
        #dbg
        utils.logMessage(utils.MODE_INFO, 'scanning %s' % kextDir)
        #get kexts
        kexts.extend(glob.glob(kextDir + '*'))
    #process
    # ->gets kext's binary, then create file object and add to results
    for kextBundle in kexts:
        #skip kext bundles that don't contain a binary
        if not utils.getBinaryFromBundle(kextBundle):
            #next!
            continue
        #create and append
        # ->pass bundle, since want to access Info.plist, etc.
        results['items'].append(file.File(kextBundle))
    return results

def scanLaunchItems(self, directories):
    #launch items
    launchItems = []
    #results
    results = []
    #expand directories
    # ->ensures '~'s are expanded for all users
    directories = utils.expandPaths(directories)
    #get all files (plists) in launch daemon/agent directories
    for directory in directories:
        #dbg msg
        utils.logMessage(utils.MODE_INFO, 'scanning %s' % directory)
        #get launch daemons/agents
        launchItems.extend(glob.glob(directory + '*'))
    #process
    # ->get all auto-run launch services
    autoRunItems = self.autoRunBinaries(launchItems)
    #iterate over all auto-run items (list of the plist and the binary)
    # ->create file object and add to results
    for autoRunItem in autoRunItems:
        #create and append
        results.append(file.File(autoRunItem[0], autoRunItem[1]))
    return results

def scan(self):
    #results
    results = []
    #dbg msg
    utils.logMessage(utils.MODE_INFO, 'running scan')
    #init results
    # ->for launch daemons
    results.append(self.initResults(LAUNCH_DAEMON_NAME, LAUNCH_DAEMON_DESCRIPTION))
    #init results
    # ->for launch agents
    results.append(self.initResults(LAUNCH_AGENT_NAME, LAUNCH_AGENT_DESCRIPTION))
    #scan for auto-run launch daemons
    # ->save in first index of array
    results[0]['items'] = scanLaunchItems(LAUNCH_DAEMON_DIRECTORIES)
    #scan for auto-run launch agents
    # ->save in second index of array
    results[1]['items'] = scanLaunchItems(LAUNCH_AGENTS_DIRECTORIES)
    return results

def crossValidatePrimers( self, primers, excludeFile ):
    excludeSequences = fastaparser.parseFastaFile( excludeFile )
    #write a primer search input file using the primers argument
    primerInputFileName = utils.getTemporaryDirectory( ) + "/tmpinputprimers.ps"
    primerOutputFileName = utils.getTemporaryDirectory( ) + "/tmpoutputprimers.ps"
    primersearchutils.writePrimerSearchInputFile( primers, primerInputFileName )
    utils.logMessage( "PrimerManager::crossValidatePrimers", "finding primers that are not in the supplied exclude file" )
    #run primer search to identify the primers
    self.primersearch.execute( [ excludeFile, primerInputFileName, primerOutputFileName, "0" ] )
    #read the found primers from the file
    commonPrimers = primersearchutils.parsePrimerSearchFile( primerOutputFileName )
    #compose a list of primers that are not found in the exclude file...
    returnValue = [ ]
    for primer in primers:
        if primer.id not in commonPrimers:
            returnValue.append( primer )
    utils.logMessage( "PrimerManager::crossValidatePrimers", "{0} unique primers identified out of {1}".format( len( returnValue ), len( primers ) ) )
    if len( returnValue ) == 0:
        raise utils.NoPrimersExistException( )
    return returnValue

def scan(self):
    #results
    results = []
    #dbg
    utils.logMessage(utils.MODE_INFO, 'running scan')
    #init results
    # ->for login hook
    results.append(self.initResults(LOGIN_HOOK_NAME, LOGIN_HOOK_DESCRIPTION))
    #init results
    # ->for logout hook
    results.append(self.initResults(LOGOUT_HOOK_NAME, LOGOUT_HOOK_DESCRIPTION))
    #load plist
    plistData = utils.loadPlist(LOGIN_WINDOW_FILE)
    #make sure plist loaded
    if plistData:
        #grab login hook
        if 'LoginHook' in plistData:
            #save into first index of result
            results[0]['items'].append(command.Command(plistData['LoginHook']))
        #grab logout hook
        if 'LogoutHook' in plistData:
            #save into second index of result
            results[1]['items'].append(command.Command(plistData['LogoutHook']))
    return results

def storeApiResponse(db, response):
    cur = db.cursor()
    logMessage('psql', 'Inserting response of \'%s\' api call' % (response.method))
    cur.execute(
        'INSERT INTO flickr_api (hash, api, params, main_arg, response) VALUES(%s, %s, %s, %s, %s)',
        (response.digest(), response.method, json.dumps(response.params),
         response.main_arg, json.dumps(response.result)))
    db.commit()
    cur.close()

def scan(self):
    #dbg msg
    utils.logMessage(utils.MODE_INFO, 'running scan')
    #init results dictionary
    results = self.initResults(RC_SCRIPT_NAME, RC_SCRIPT_DESCRIPTION)
    #scan/parse all rc files
    for rcScript in RC_SCRIPTS:
        #get all commands in script file
        # ->note, commands in functions will be ignored...
        #   of course, if the function is invoked, this invocation will be displayed
        commands = utils.parseBashFile(os.path.join('/etc', rcScript))
        #iterate over all commands
        # ->instantiate command obj and save into results
        for extractedCommand in commands:
            #instantiate and save
            results['items'].append(command.Command(extractedCommand, rcScript))
    return results

def add_pv(self, mne, pv, desc, fmt, as_string):
    '''Connect to an EPICS (PyEpics) process variable'''
    if pv in self.pvdb:
        msg = "key '%s' already defined by id=%s" % (pv, self.pvdb[pv]['id'])
        raise KeyError(msg)
    ch = epics.PV(pv)
    entry = {
        'name': pv,                # EPICS PV name
        'id': mne,                 # symbolic name used in the python code
        'description': desc,       # text description for humans
        'timestamp': None,         # client time last monitor was received
        'counter': 0,              # number of monitor events received
        'units': "",               # engineering units
        'ch': ch,                  # EPICS PV channel
        'format': fmt,             # format for display
        'value': None,             # formatted value
        'raw_value': None,         # unformatted value
        'char_value': None,        # string value
        'as_string': as_string,    # whether to return the string representation of the value
    }
    self.pvdb[pv] = entry
    # mne is the local mnemonic; the actual PV is defined in pvlist.xml
    self.xref[mne] = pv
    ch.add_callback(self.EPICS_monitor_receiver)  # start callbacks now
    cv = ch.get_ctrlvars()
    unit_renames = {  # handle some non-SI unit names
        # old        new
        'millime': 'mm',
        'millira': 'mr',
        'degrees': 'deg',
        'Volts': 'V',
        'VDC': 'V',
        'eng': '',
    }
    if cv is not None and 'units' in cv:
        units = cv['units']
        if units in unit_renames:
            units = unit_renames[units]
        entry['units'] = units
    # report the RTYP (record type, if available)
    basename = pv.split('.')[0]
    field = pv[len(basename):]
    rtyp_pv = epics.PV(basename + '.RTYP')
    rtyp = rtyp_pv.get() or 'unknown'
    if basename == pv or field == '.VAL':
        entry['record_type'] = rtyp
    else:
        # field of record
        entry['record_type'] = rtyp + field
    # FIXME: what to do if PV did not connect? (ch.connected == False)
    if not ch.connected:
        utils.logMessage('PV not connected yet: ' + pv)
    self.update_pvdb(pv, ch.get())  # initialize the cache

def writeFastaFile(sequences, fileName):
    '''
    write a set of sequences to a fasta file. returns the name of the new file
    '''
    primerSequenceIdent = "primer_sequences"
    utils.logMessage(
        "PrimerManager::writeFastaFile( )",
        "Writing {0} sequences to fasta file".format(len(sequences)))
    seqRecords = []
    i = 0
    for sequence in sequences:
        seqStr = str(reduce(lambda x, y: str(x) + str(y), sequence))
        seqRecord = SeqIO.SeqRecord(Seq.Seq(seqStr, Alphabet.IUPAC.extended_dna),
                                    id="seq_{0}".format(i))
        seqRecords.append(seqRecord)
        i += 1
    SeqIO.write(seqRecords, open(fileName, "w"), "fasta")
    utils.logMessage("PrimerManager::writeFastaFile( )", "writing fasta file complete")
    return fileName

def buildOutputFileName(self):
    """
    build a unique file name to store the combined output sequences to
    """
    self.outputFile = utils.getTemporaryDirectory( ) + "/combined_exlude.ffn"
    utils.logMessage("ExcludeFileManager::buildOutputFileName( )",
                     " exclude file: {0}".format(self.outputFile))

def setExcludeFile( self, excludeFileName ):
    """
    A function to set the exclude file that will be used when nucmer is called
    """
    utils.logMessage( "IncludeFileManager::setExcludeFile( )",
                      "fileName {0}".format( excludeFileName ) )
    self.excludeFileName = excludeFileName
    self.isExcludeFileInitialized = True

def __init__(self, parent, command, path, args, ref, **env):
    self.parent = parent
    self.command = command
    self.path = path
    self.args = args
    self.env = macros.Macros(**env)
    self.reference = ref
    utils.logMessage('command: ' + command + ' ' + args,
                     utils.LOGGING_DETAIL__MEDIUM)

def get(self, filename, alternative=None):
    ''' get a reference to a file from the cache '''
    if self.exists(filename):
        utils.logMessage('cached file: ' + filename, utils.LOGGING_DETAIL__NOISY)
        self.cache[filename].requested()
    return self.cache.get(filename, alternative)

def parseFastaFile(fileName):
    ''' parse a fasta file and return an iterator of Bio.SeqRecord objects '''
    utils.logMessage("fastaparser::parseFastaFile( )",
                     "parsing fasta file {0}".format(fileName))
    #note: SeqIO.parse is lazy; records are read as the result is iterated
    sequences = SeqIO.parse(open(fileName), "fasta")
    return sequences

def getCachedResultsFor(db, digest):
    cur = db.cursor()
    cur.execute('SELECT response FROM flickr_api WHERE hash = %s', [digest])
    t = cur.fetchone()
    db.commit()
    cur.close()
    if t is not None:
        logMessage('psql', 'Fetched cached response')
        return t[0]
    else:
        return None

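# A minimal sketch of a look-aside cache built from the two helpers above.
# 'buildApiRequest' and 'executeApiCall' are hypothetical names; the response
# object's digest()/result fields are inferred from storeApiResponse():
def cachedApiCall(db, method, **params):
    #build the request object (hypothetical), whose digest keys the cache
    request = buildApiRequest(method, **params)
    #cache hit: return the stored JSON response
    cached = getCachedResultsFor(db, request.digest())
    if cached is not None:
        return cached
    #cache miss: call the API (hypothetical), persist, then return the result
    response = executeApiCall(request)
    storeApiResponse(db, response)
    return response.result
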
def processFile(self, filename):
    ''' process just one file '''
    f = text_file.read(filename)  # use the file cache
    try:
        tokenize.tokenize(f.iterator().readline, self.tokenReceiver)
    except Exception, _exc:
        msg = 'trouble understanding: ' + f.absolute_filename
        msg += '\n' + str(_exc)
        utils.logMessage(msg, utils.LOGGING_DETAIL__NOISY)
        raise RuntimeError(msg)

def kh_cd(self, arg0, tokens, ref):
    path = reconstruct_line(tokens).strip()
    path = utils.strip_quotes(path)  # strip double-quotes
    if self.symbols.exists(path):
        # symbol substitution
        path = self.symbols.get(path).value
    path = self.env.replace(path)  # macro substitution
    if len(path) == 0:
        path = self.startup_directory
    if len(path) > 0 and os.path.exists(path):
        # only cd if it is really different
        if os.path.abspath(path) != os.getcwd():
            os.chdir(path)
    self.kh_shell_command(arg0, tokens, ref)
    utils.logMessage(arg0 + ' ' + path, utils.LOGGING_DETAIL__MEDIUM)

def addExcludeFile(self, excludeFile):
    """
    add a file to be managed by the ExcludeFileManager
    """
    if not os.path.exists(excludeFile):
        utils.logMessage("ExcludeFileManager::addExcludeFile( )",
                         "exclude file not found: {0}".format(excludeFile))
        raise utils.NoFileFoundException(excludeFile)
    utils.logMessage("ExcludeFileManager::addExcludeFile( )",
                     "adding exclude file {0}".format(excludeFile))
    self.excludeFiles.append(excludeFile)

def listPlugins():
    #dbg msg
    utils.logMessage(utils.MODE_INFO, 'listing plugins')
    #iterate over all plugins
    for plugin in pluginManagerObj.getAllPlugins():
        #dbg msg
        # ->always use print, since -v might not have been used
        print '%s -> %s' % (os.path.split(plugin.path)[1], plugin.name)
    return

def getMatchedSubSequences(self, minLength=100):
    utils.logMessage(
        "PrimerSequence::getMatchedSubSequences( )",
        "finding valid sub sequences for {0}".format(self.seqID))
    returnValue = []
    for match in self.matchedSubSequences:
        subSequence = self.sequence[match[0]:match[1]]
        if len(subSequence) >= minLength:
            returnValue.append(subSequence)
    return returnValue

def scan(self):
    #reported paths
    reportedPaths = []
    #dbg msg
    utils.logMessage(utils.MODE_INFO, 'running scan')
    #init results
    results = self.initResults(UNCLASSIFIED_NAME, UNCLASSIFIED_DESCRIPTION)
    #get all running processes
    processes = utils.getProcessList()
    #set processes' top parent
    # ->well, besides launchd (pid: 0x1)
    utils.setFirstParent(processes)
    #add process type (dock or not)
    utils.setProcessType(processes)
    #get all procs that don't have a dock icon
    # ->assume these aren't started by the user
    nonDockProcs = self.getNonDockProcs(processes)
    #save all non-dock procs
    for pid in nonDockProcs:
        #extract path
        path = nonDockProcs[pid]['path']
        #ignore dups
        if path in reportedPaths:
            #skip
            continue
        #ignore things in /opt/X11/
        # ->owned by r00t, so this should be ok....
        if path.startswith('/opt/X11/'):
            #skip
            continue
        #save
        results['items'].append(file.File(path))
        #record
        reportedPaths.append(path)
    return results

def get_pvlist(self):
    '''get the PVs from the XML file'''
    pvlist_file = self.configuration['PVLIST_FILE']
    if not os.path.exists(pvlist_file):
        utils.logMessage('could not find file: ' + pvlist_file)
        return
    try:
        tree = etree.parse(pvlist_file)
    except:
        msg = 'could not parse file: ' + pvlist_file
        utils.logMessage(msg)
        raise CouldNotParseXml(msg)
    utils.validate(tree, XML_SCHEMA_FILE)
    msg = 'validated file: ' + pvlist_file
    utils.logMessage(msg)
    for key in tree.findall(".//EPICS_PV"):
        if key.get("_ignore_", "false").lower() == "false":
            mne = key.get("mne")
            pv = key.get("PV")
            desc = key.get("description")
            fmt = key.get("display_format", "%s")  # default format
            # :see: http://cars9.uchicago.edu/software/python/pyepics3/pv.html?highlight=as_string#pv.get
            as_string = key.get("as_string", False)  # default: return the native value
            try:
                self.add_pv(mne, pv, desc, fmt, as_string)
            except:
                msg = "%s: problem connecting: %s" % (pvlist_file, etree.tostring(key))
                utils.logException(msg)
    utils.logMessage('all PVs added')

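# Based on the attributes read above, an EPICS_PV entry in the pvlist.xml
# file presumably looks something like this (an assumed example, shown only
# to document the expected attributes):
#
#   <EPICS_PV mne="temp1" PV="ioc:temperature1" description="sample temperature"
#             display_format="%.3f" as_string="false" _ignore_="false"/>
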
def findPrimers( self, sequences, outputFile, primerpairs = 20, returnPrimers = False ):
    '''
    A method to find a set of primers based on the given sequences
    '''
    utils.logMessage( "PrimerManager::findPrimers( )", "writing sequences to a fasta file" )
    #eliminate all sequences that are less than the desired amplification size...
    if len( sequences ) == 4:
        print sequences
    sequences = filter( lambda x: len( x ) >= 200, sequences )
    primerFastaFile = utils.getTemporaryDirectory( ) + "/sequenceForEprimer.fasta"
    fastaparser.writeFastaFile( sequences, primerFastaFile )
    utils.logMessage( "PrimerManager::findPrimers( )", "executing eprimer3 program" )
    self.eprimer.execute( [ primerFastaFile, outputFile ] )
    utils.logMessage( "PrimerManager::findPrimers( )", "eprimer3 file {0} created. Parsing for primers.".format( outputFile ) )
    primers = eprimerparser.parsePrimerSequences( outputFile )
    utils.logMessage( "PrimerManager::findPrimers( )", "parsing for sequences complete" )
    if returnPrimers:
        return primers

def interactWithPhotos(driver, interactionsTillNow, blackListedUsers, archiveUsers):
    interactionCount = 0
    # Find all photos in the current page
    # photolist = driver.find_elements_by_css_selector(articleClassSelector)
    photolist = driver.find_elements_by_tag_name("article")
    # Find like and archive button classes
    optionList = webcrawl(photolist[0].get_attribute('innerHTML'),
                          "html.parser").contents[2].contents[0].findAll("a")
    likeButtonClass = str(optionList[0].attrs["class"])
    archiveButtonClass = str(optionList[2].attrs["class"])
    likeButtonClassSelector = parser.concatenate2(likeButtonClass)
    archiveButtonClassSelector = parser.concatenate2(archiveButtonClass)
    likeButtons = driver.find_elements_by_css_selector(likeButtonClassSelector)
    archiveButtons = driver.find_elements_by_css_selector(
        archiveButtonClassSelector)
    for i in range(0, len(photolist)):
        # Return, if we reached max interactions allowed
        if (interactionsTillNow + interactionCount) == CONSTANTS.MAX_INTERACTIONS:
            break
        # Find the username to which the photo belongs
        photoContainer = photolist[i].get_attribute('innerHTML')
        usernameOfPhoto = str(
            webcrawl(photoContainer, "html.parser").header.contents[1].
            contents[0].contents[0].contents[0].contents[0])
        # Don't interact with black listed users
        if usernameOfPhoto in blackListedUsers:
            utils.logMessage("User: " + usernameOfPhoto + " is blacklisted")
            continue
        shouldLike = shouldlikePhoto(photoContainer)
        shouldArchive = shouldArchiveThePhoto(photoContainer, usernameOfPhoto,
                                              archiveUsers)
        if shouldLike:
            likeButtons[i].click()
            utils.logMessage("Liked photo of user: " + usernameOfPhoto)
            interactionCount += 1
        if shouldArchive:
            archiveButtons[i].click()
            utils.logMessage("Saved photo of user: " + usernameOfPhoto)
            interactionCount += 1
        utils.logMessage("Interaction Count: " +
                         str(interactionCount + interactionsTillNow))
    return interactionCount

def __init__(self, configuration):
    self.configuration = configuration  # from XML configuration file
    self.pvdb = {}  # cache of last known good values
    self.xref = {}  # cross-reference between mnemonics and PV names: {mne:pvname}
    self.monitor_counter = 0
    self.upload_patterns = configuration['PATTERNS']
    self.get_pvlist()
    utils.logMessage('read list of PVs to monitor')
    pv_conn = [pv['ch'].connected for pv in self.pvdb.values()]
    numConnected = numpy.count_nonzero(pv_conn)
    utils.logMessage("Connected %d of total %d EPICS PVs" % (numConnected, len(self.pvdb)))

def parse_macro_args(self, arg, ref, tokens, parent_macros):
    local_macros = macros.Macros(**parent_macros.db)
    for definition in utils.strip_quotes(arg).split(','):
        if definition.find('=') < 0:
            # if self.symbols.get(definition, None)
            # such as: iocSubString=asdCreateSubstitutionString("IOC",iocprefix)
            msg = str(ref) + reconstruct_line(tokens).strip()
            utils.logMessage(msg, utils.LOGGING_DETAIL__IMPORTANT)
            #raise UnhandledTokenPattern, msg
        else:
            k, v = [_.strip() for _ in definition.split('=')]
            # expand macros now to avoid possible infinite loop while replacing
            # example: PORT=IP_$(PORT)
            v = local_macros.replace(v)
            local_macros.set(k, v, self, ref)
    return local_macros

class ProgramBase( object ):
    def __init__( self ):
        self.programName = None
        self.proc = None

    def getProcessArgs( self, args ):
        #abstract: subclasses must override this
        raise NotImplementedError( "getProcessArgs( ) is abstract" )

    def execute( self, args, async = False ):
        '''
        run the program with the given arguments
        '''
        utils.logMessage( "ProgramBase::Execute( )", "Running the {0} program.".format( self.programName ) )
        args, outputFile = self.getProcessArgs( args )
        print "*** Running {0} ***".format( self.programName )
        utils.logList( "ProgramBase::Execute( )", args )
        proc = subprocess.Popen( args )
        if not async:
            #wait for the process to finish
            proc.wait( )
            print "*** Running {0} Complete ***".format( self.programName )

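# A minimal sketch of a concrete ProgramBase subclass, assuming
# getProcessArgs( ) is meant to return the (argument list, output file) pair
# that execute( ) unpacks above; the nucmer command line is illustrative:
class NucmerProgram( ProgramBase ):
    def __init__( self ):
        ProgramBase.__init__( self )
        self.programName = "nucmer"

    def getProcessArgs( self, args ):
        #args assumed to be [ referenceFile, queryFile, outputPrefix ]
        outputFile = args[2] + ".coords"
        return [ "nucmer", "--coords", "-p", args[2], args[0], args[1] ], outputFile
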
def __init__(self, parent, filename, ref, env=None, symbols=None):
    self.parent = parent
    self.filename = filename
    self.reference = ref
    self.pwd = os.getcwd()
    self.env = env or macros.Macros()
    self.symbols = symbols or macros.Macros()
    self.database_list = []
    self.commands = []
    self.template_list = []
    self.includedCommandFile_list = []
    self.pv_dict = {}
    # filename is a relative or absolute path to command file, no macros in the name
    self.filename_absolute = os.path.abspath(filename)
    self.dirname_absolute = os.path.dirname(self.filename_absolute)
    utils.logMessage('command file: ' + self.filename, utils.LOGGING_DETAIL__IMPORTANT)
    # self.source = text_file.read(filename)
    self.source = text_file.read(self.filename_absolute)
    if parent is None:
        self.startup_directory = self.dirname_absolute
    else:
        self.startup_directory = parent.startup_directory
    self.knownHandlers = {
        '<': self.kh_loadCommandFile,
        'cd': self.kh_cd,
        # 'dbLoadDatabase': self.kh_dbLoadDatabase,
        'dbLoadRecords': self.kh_dbLoadRecords,
        'dbLoadTemplate': self.kh_dbLoadTemplate,
        'epicsEnvSet': self.kh_epicsEnvSet,
        'putenv': self.kh_putenv,
        # 'seq': self.kh_seq,
        'strcpy': self.kh_strcpy,
        # 'nfsMount': self.kh_nfsMount,
        # 'nfs2Mount': self.kh_nfsMount,
        #------ overrides -----------
        'dbLoadDatabase': self.kh_shell_command,
        'seq': self.kh_shell_command,
        'nfsMount': self.kh_shell_command,
        'nfs2Mount': self.kh_shell_command,
    }
    self.parse()
    utils.logMessage('end of command file: ' + self.filename, utils.LOGGING_DETAIL__IMPORTANT)

def getPrimers( self, sequences ):
    utils.logMessage( "PrimerManager::getPrimers( )", "finding primers that are common to all include files" )
    if len( sequences ) == 0:
        raise utils.NoPrimersExistException( )
    referenceEPrimerFile = utils.getTemporaryDirectory( ) + "/referenceprimers.ep3"
    #run eprimer to find primers in the reference file
    primers = self.findPrimers( sequences, referenceEPrimerFile, 20, True )
    if len( primers ) == 0:
        raise utils.NoPrimersExistException( )
    return primers

def queryVT(items):
    #headers
    requestHeaders = {}
    #set content type
    requestHeaders['Content-Type'] = 'application/json'
    #set user agent
    requestHeaders['User-Agent'] = 'VirusTotal'
    #wrap
    try:
        #build request
        request = urllib2.Request(VT_URL + VT_API_KEY, json.dumps(items, indent=4),
                                  headers=requestHeaders)
        #make request
        response = urllib2.urlopen(request)
        #convert response to JSON
        vtResponse = json.loads(response.read())
        #process response
        # ->should be a list of items, within the 'data' key
        if 'data' in vtResponse:
            #process/parse all
            for item in vtResponse['data']:
                #process
                parseResult(item)
    #exceptions
    # ->ignore (likely network related)
    except Exception, e:
        #dbg msg
        utils.logMessage(utils.MODE_ERROR,
                         '\n EXCEPTION, %s() threw: %s' % (sys._getframe().f_code.co_name, e))
        #ignore
        pass

def runBot(webPageData, driver):
    # Each image is wrapped with <article> (Get its class)
    scrollHeight = driver.find_elements_by_tag_name(
        "article")[0].get_attribute("scrollHeight")
    # For achieving the scrolling effect (so that new images get loaded),
    # we need to run javascript with the help of selenium
    scrollJSScript = "window.scrollBy(0, " + scrollHeight + ");"
    # Get the users whom you don't want to interact with
    blackListedUsers = []
    blackListFileExists = os.path.isfile(CONSTANTS.BLACK_LIST_FILE)
    if blackListFileExists:
        blackListedUsers = utils.getDataFromFile(CONSTANTS.BLACK_LIST_FILE)
    archiveUsers = []
    archiveListFileExists = os.path.isfile(CONSTANTS.ARCHIVE_LIST_FILE)
    if archiveListFileExists:
        archiveUsers = utils.getDataFromFile(CONSTANTS.ARCHIVE_LIST_FILE)
    # At max, interacts with top k photos (where k = CONSTANTS.MAX_INTERACTIONS)
    totalInteractionCount = 0
    # Scroll only 100 times at max
    for iterations in range(0, 100):
        interactionCount = interactWithPhotos(driver, totalInteractionCount,
                                              blackListedUsers, archiveUsers)
        totalInteractionCount = totalInteractionCount + interactionCount
        if totalInteractionCount == CONSTANTS.MAX_INTERACTIONS:
            break
        else:
            # Scroll the page to get new photos
            driver.execute_script(scrollJSScript)
            # Wait for the page to load
            time.sleep(3 * CONSTANTS.WAIT_TIME)
    # Watch the stories now
    utils.logMessage("Started watching stories")
    watchStories(driver)
    time.sleep(10 * CONSTANTS.WAIT_TIME)
    utils.logMessage("Stopped watching stories")

def parseFastaFileAsPrimerSequence(fileName):
    utils.logMessage("fastaparser::parseFastaFileAsPrimerSequence( )",
                     "parsing fasta file {0}".format(fileName))
    returnValue = {}
    sequences = SeqIO.parse(open(fileName), "fasta")
    for sequence in sequences:
        seqdata = primersequence.PrimerSequence(sequence.id, len(sequence), sequence.seq)
        returnValue[sequence.id] = seqdata
    utils.logMessage("fastaparser::parseFastaFileAsPrimerSequence( )",
                     "read {0} sequences".format(len(returnValue.keys())))
    return returnValue

def scanApplications():
    #app plists
    appPlists = []
    #dbg msg
    utils.logMessage(utils.MODE_INFO,
                     'generating list of all installed apps (this may take some time)')
    #get all installed apps
    installedApps = utils.getInstalledApps()
    #sanity check
    # ->using system_profiler (to get installed apps) can timeout/throw exception, etc.
    if not installedApps:
        #bail
        return None
    #now, get Info.plist for each app
    for app in installedApps:
        #skip apps that don't have a path
        if 'path' not in app:
            #skip
            continue
        #get/load app's Info.plist
        plist = utils.loadInfoPlist(app['path'])
        #skip apps that don't have Info.plist
        if not plist:
            #skip
            continue
        #save plist for processing
        appPlists.append(plist)
    #check all plists for DYLD_INSERT_LIBRARIES
    # ->for each found, creates file object
    return scanPlists(appPlists, APPLICATION_DYLD_KEY, isLoaded=True)

def scan(self):
    #results
    results = []
    #dbg msg
    utils.logMessage(utils.MODE_INFO, 'running scan')
    #get list of installed browsers
    browsers = self.getInstalledBrowsers()
    #iterate over all browsers
    # ->scan each
    for browser in browsers:
        #scan Safari extensions
        if 'Safari.app' in browser:
            #dbg msg
            utils.logMessage(utils.MODE_INFO, 'safari installed, scanning for extensions')
            #init results
            results.append(self.initResults(SAFARI_EXTENSIONS_NAME, SAFARI_EXTENSIONS_DESCRIPTION))
            #scan
            results[len(results)-1]['items'] = self.scanExtensionsSafari()
        #scan Chrome extensions
        if 'Google Chrome.app' in browser:
            #dbg msg
            utils.logMessage(utils.MODE_INFO, 'chrome installed, scanning for extensions')
            #init results
            results.append(self.initResults(CHROME_EXTENSIONS_NAME, CHROME_EXTENSIONS_DESCRIPTION))
            #scan
            results[len(results)-1]['items'] = self.scanExtensionsChrome()
        #scan Firefox extensions
        if 'Firefox.app' in browser:
            #dbg msg
            utils.logMessage(utils.MODE_INFO, 'firefox installed, scanning for extensions')
            #init results
            results.append(self.initResults(FIREFOX_EXTENSIONS_NAME, FIREFOX_EXTENSIONS_DESCRIPTION))
            #scan
            results[len(results)-1]['items'] = self.scanExtensionsFirefox()
    return results

def scan(self):
    #auth plugins
    authPlugins = []
    #dbg
    utils.logMessage(utils.MODE_INFO, 'running scan')
    #init results dictionary
    results = self.initResults(AUTH_PLUGIN_NAME, AUTH_PLUGIN_DESCRIPTION)
    #get all files in auth plugin directories
    for authPluginDir in AUTH_PLUGIN_DIRECTORIES:
        #dbg
        utils.logMessage(utils.MODE_INFO, 'scanning %s' % authPluginDir)
        #get auth plugins
        authPlugins.extend(glob.glob(authPluginDir + '*'))
    #process
    # ->gets bundle's binary, then create file object and add to results
    for authPlugin in authPlugins:
        #skip any non-bundles
        # ->just do a directory check
        if not os.path.isdir(authPlugin):
            #skip
            continue
        #skip any invalid bundles
        if not utils.getBinaryFromBundle(authPlugin):
            #skip
            continue
        #create and append
        # ->pass bundle, since want to access Info.plist, etc.
        results['items'].append(file.File(authPlugin))
    return results

def scan(self):
    #importers
    importers = []
    #dbg
    utils.logMessage(utils.MODE_INFO, 'running scan')
    #init results dictionary
    results = self.initResults(IMPORTER_NAME, IMPORTER_DESCRIPTION)
    #get all files in importer directories
    for importerDir in IMPORTERS_DIRECTORIES:
        #dbg
        utils.logMessage(utils.MODE_INFO, 'scanning %s' % importerDir)
        #get importers
        importers.extend(glob.glob(importerDir + '*'))
    #process
    # ->gets bundle's binary, then create file object and add to results
    for importerBundle in importers:
        #skip any non-bundles
        # ->just do a directory check
        if not os.path.isdir(importerBundle):
            #skip
            continue
        #skip any invalid bundles
        if not utils.getBinaryFromBundle(importerBundle):
            #skip
            continue
        #create and append
        # ->pass bundle, since want to access Info.plist, etc.
        results['items'].append(file.File(importerBundle))
    return results

def scanLaunchItems(directories):
    #launch items
    launchItems = []
    #expand directories
    # ->ensures '~'s are expanded for all users
    directories = utils.expandPaths(directories)
    #get all files (plists) in launch daemon/agent directories
    for directory in directories:
        #dbg msg
        utils.logMessage(utils.MODE_INFO, 'scanning %s' % directory)
        #get launch daemon/agent plists
        launchItems.extend(glob.glob(directory + '*'))
    #check all plists for DYLD_INSERT_LIBRARIES
    # ->for each found, creates file object
    return scanPlists(launchItems, LAUNCH_ITEM_DYLD_KEY)

def describe(ioc_name, st_cmd, output_path=None, report_file_cache=True):
    filename = os.path.abspath(st_cmd)
    path = os.path.dirname(filename)
    owd = os.getcwd()
    os.chdir(path)
    ref = utils.FileRef(__file__, 0, 0, ioc_name)
    env = {}
    utils.logMessage('IOC startup file: ' + st_cmd, utils.LOGGING_DETAIL__CERTAIN)
    short_name = os.path.split(filename)[-1]
    obj = command_file.CommandFile(None, short_name, ref, **env)
    os.chdir(owd)
    if output_path is None:
        reports.reportCmdFile(obj, ioc_name)
    else:
        os.chdir(output_path)
        reports.writeReports(obj, ioc_name, report_file_cache)
        os.chdir(owd)
    return obj

def initPluginManager():
    #global
    global pluginManagerObj
    #create plugin manager
    pluginManagerObj = PluginManager()
    if not pluginManagerObj:
        #err msg
        utils.logMessage(utils.MODE_ERROR, 'failed to create plugin manager')
        #bail
        return False
    #set plugin path
    pluginManagerObj.setPluginPlaces([utils.getKKDirectory() + PLUGIN_DIR])
    #get all plugins
    pluginManagerObj.collectPlugins()
    return True

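# A minimal usage sketch tying the plugin helpers above together; the
# __main__ guard and exit code are assumptions:
if __name__ == '__main__':
    #init plugin manager
    # ->bail on failure
    if not initPluginManager():
        sys.exit(-1)
    #list all collected plugins
    listPlugins()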