def importStar(self, s, mt):
    from pysonarsq.java.Analyzer import Analyzer
    if mt is None or mt.getFile() is None:
        return
    mod = Analyzer.self.getAstForFile(mt.getFile())
    if mod is None:
        return
    names = ArrayList()
    allType = mt.getTable().lookupType("__all__")
    if allType is not None and allType.isListType():
        lt = allType.asListType()
        for o in lt.values:
            if isinstance(o, str):
                names.append(str(o))
    if len(names):
        for name in names:
            b = mt.getTable().lookupLocal(name)
            if b is not None:
                s.update(name, b)
            else:
                m2 = list()
                m2.append(Name(name))
                mod2 = Analyzer.self.loadModule(m2, s)
                if mod2 is not None:
                    s.insert(name, None, mod2, Binding.Kind.VARIABLE)
    else:
        for e in mt.getTable().entrySet():
            if not e[0].startswith("_"):
                s.update(e[0], e[1])
class TypeStack(object):

    class Pair(object):
        def __init__(self, first, second):
            self.first = first
            self.second = second

    def __init__(self):
        self.stack = ArrayList()

    def push(self, first, second):
        self.stack.append(self.Pair(first, second))

    def pop(self, first, second):
        if len(self.stack):
            self.stack.pop()
        #v = len(self.stack) - 1
        #if v in self.stack:
        #    self.stack.remove(v)

    def contains(self, first, second):
        for p in self.stack:
            if ((p.first is first and p.second is second)
                    or (p.first is second and p.second is first)):
                return True
        return False
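A hypothetical usage sketch of TypeStack follows; it assumes the ArrayList alias used by these modules is in scope and behaves like a plain Python list, and the two object() values stand in for real Type instances.

# Illustration only: how TypeStack guards a recursive type comparison.
ts = TypeStack()
a, b = object(), object()
print(ts.contains(a, b))   # False: nothing pushed yet
ts.push(a, b)
print(ts.contains(b, a))   # True: contains() checks both orderings
ts.pop(a, b)
print(ts.contains(a, b))   # False again after the pop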
def resolve(self, s):
    from pysonarsq.java.Binder import Binder
    from pysonarsq.java.Binding import Binding
    from pysonarsq.java.Analyzer import Analyzer
    classType = ClassType(self.getName().id, s)
    baseTypes = ArrayList()
    for base in self.bases:
        baseType = self.resolveExpr(base, s)
        if baseType.isClassType():
            classType.addSuper(baseType)
        elif baseType.isUnionType():
            for b in baseType.asUnionType().getTypes():
                classType.addSuper(b)
                break
        else:
            Analyzer.self.putProblem(base, str(base) + " is not a class")
        baseTypes.append(baseType)
    # XXX: Not sure if we should add "bases", "name" and "dict" here. They
    # must be added _somewhere_ but I'm just not sure if it should be HERE.
    builtins = Analyzer.self.builtins
    self.addSpecialAttribute(classType.getTable(), "__bases__", TupleType(baseTypes))
    self.addSpecialAttribute(classType.getTable(), "__name__", builtins.BaseStr)
    self.addSpecialAttribute(classType.getTable(), "__dict__",
                             DictType(builtins.BaseStr, Analyzer.self.builtins.unknown))
    self.addSpecialAttribute(classType.getTable(), "__module__", builtins.BaseStr)
    self.addSpecialAttribute(classType.getTable(), "__doc__", builtins.BaseStr)
    # Bind ClassType to the name here, before resolving the body, because the
    # methods need this type as self.
    Binder.bind(s, self.name, classType, Binding.Kind.CLASS)
    self.resolveExpr(self.body, classType.getTable())
    return Analyzer.self.builtins.Cont
def getLoadPath(self):
    loadPath = ArrayList()
    if self.cwd is not None:
        loadPath.append(self.cwd)
    if self.projectDir is not None and os.path.isdir(self.projectDir):
        loadPath.append(self.projectDir)
    loadPath += self.path
    return loadPath
def putRef(self, node, bs):
    if not hasattr(bs, '__len__'):
        bs = [bs]
    if not isinstance(node, Url):
        ref = Ref(node)
        bindings = self.references.get(ref)
        if bindings is None:
            bindings = ArrayList()
            self.references[ref] = bindings
        for b in bs:
            if b not in bindings:
                bindings.append(b)
            b.addRef(ref)
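The following self-contained sketch mirrors the bookkeeping in putRef: a plain dict and list stand in for LinkedHashMap/ArrayList, and strings stand in for the Ref and Binding objects; it only illustrates that a node's binding list grows without duplicates.

# Simplified mirror of putRef's reference map (illustration only).
references = {}

def put_ref(ref, bindings_to_add):
    bindings = references.setdefault(ref, [])
    for b in bindings_to_add:
        if b not in bindings:
            bindings.append(b)

put_ref("node@12", ["binding-a"])
put_ref("node@12", ["binding-a", "binding-b"])
print(references)  # {'node@12': ['binding-a', 'binding-b']}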
def getAbnormal(self, basis, coefficient):
    # If the basis is not set, do nothing
    abnormList = ArrayList()
    if basis == 0:
        return None
    # Fetch the abnormals from the log list
    for log in self._log:
        if (float(log._responseLen) / float(basis) < coefficient
                or float(basis) / float(log._responseLen) < coefficient):
            abnormList.append(log._payload)
    return abnormList
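A small self-contained check of the same ratio test (the basis, coefficient and lengths are invented for illustration): a response is flagged as abnormal when its length is much smaller or much larger than the basis.

# Worked example of the abnormality test used above.
basis, coefficient = 1000, 0.6
for length in (950, 400, 2600):
    abnormal = (float(length) / basis < coefficient) or (float(basis) / length < coefficient)
    print(length, abnormal)  # 950 False, 400 True, 2600 True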
def generate(self, idx, abspath):
    from Analyzer import Analyzer
    from Binding import Binding
    if isinstance(idx, Analyzer):
        mt = idx.loadFile(abspath)
        if mt is None:
            return ArrayList()
        scope, path = mt.getTable(), abspath
    else:
        scope, path = idx, abspath  # alt impl.
    result = ArrayList()
    entries = TreeSet()
    for b in scope.values():
        if not b.isSynthetic() and not b.isBuiltin() and path == b.getFile():
            entries.add(b)
    entries = sorted(entries, lambda a, b: a.compareTo(b))
    for nb in entries:
        kids = None
        if nb.getKind() == Binding.Kind.CLASS:
            realType = nb.getType()
            if realType.isUnionType():
                for t in realType.asUnionType().getTypes():
                    if t.isClassType():
                        realType = t
                        break
            kids = self.generate(realType.getTable(), path)
        kid = Outliner.Branch() if (kids is not None) else Outliner.Leaf()
        kid.setOffset(nb.getStart())
        kid.setQname(nb.getQname())
        kid.setKind(nb.getKind())
        if kids is not None:
            kid.setChildren(kids)
        result.append(kid)
    return result
def segmentQname(self, qname, start, hasLoc):
    """ generated source for method segmentQname """
    result = ArrayList()
    i = 0
    while i < len(qname):
        name = ""
        while qname[i] in (' ', '\t', '\n'):
            i += 1
        nameStart = i
        while (i < len(qname)
               and (qname[i] not in (' ', '\t', '\n') or qname[i] == '*')
               and qname[i] != '.'):
            name += qname[i]
            i += 1
        nameStop = i
        nstart = start + nameStart if hasLoc else -1
        nstop = start + nameStop if hasLoc else -1
        result.append(Name(name, nstart, nstop))
        i += 1
    return result
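Below is a standalone mirror of the scanning loop above (the redundant '*' check and the Name construction are left out) just to show which segments and offsets it produces; the input string and offset are invented for the example.

# Illustration only: segmenting "os.path.join" starting at source offset 100.
qname, start = "os.path.join", 100
segments, i = [], 0
while i < len(qname):
    while qname[i] in (' ', '\t', '\n'):
        i += 1
    nameStart = i
    while i < len(qname) and qname[i] not in (' ', '\t', '\n') and qname[i] != '.':
        i += 1
    segments.append((qname[nameStart:i], start + nameStart, start + i))
    i += 1
print(segments)  # [('os', 100, 102), ('path', 103, 107), ('join', 108, 112)]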
class MatrixDB():

    def __init__(self):
        # Holds all custom data
        # TODO: consider moving these constants to a different class
        self.STATIC_USER_TABLE_COLUMN_COUNT = 3
        self.STATIC_MESSAGE_TABLE_COLUMN_COUNT = 3
        self.lock = Lock()
        self.arrayOfMessages = ArrayList()
        self.arrayOfRoles = ArrayList()
        self.arrayOfUsers = ArrayList()
        self.deletedUserCount = 0
        self.deletedRoleCount = 0
        self.deletedMessageCount = 0

    # Returns the index of the user, whether it's new or not
    def getOrCreateUser(self, name, token=""):
        self.lock.acquire()
        userIndex = -1
        # Check if the user already exists
        for i in self.getActiveUserIndexes():
            if self.arrayOfUsers[i]._name == name:
                userIndex = i
        # Add new user
        if userIndex < 0:
            userIndex = self.arrayOfUsers.size()
            self.arrayOfUsers.append(
                UserEntry(userIndex, userIndex - self.deletedUserCount, name, token))
            # Add all existing roles as unchecked
            for roleIndex in self.getActiveRoleIndexes():
                self.arrayOfUsers[userIndex].addRoleByIndex(roleIndex)
        self.lock.release()
        return userIndex

    # Returns the index of the role, whether it's new or not
    def getOrCreateRole(self, role):
        self.lock.acquire()
        roleIndex = -1
        # Check if the role already exists
        for i in self.getActiveRoleIndexes():
            if self.arrayOfRoles[i]._name == role:
                roleIndex = i
        # Add new role
        if roleIndex < 0:
            roleIndex = self.arrayOfRoles.size()
            self.arrayOfRoles.append(RoleEntry(
                roleIndex,
                roleIndex + self.STATIC_MESSAGE_TABLE_COLUMN_COUNT - self.deletedRoleCount,
                roleIndex + self.STATIC_USER_TABLE_COLUMN_COUNT - self.deletedRoleCount,
                role))
            # Add new role to each existing user as unchecked
            for userIndex in self.getActiveUserIndexes():
                self.arrayOfUsers[userIndex].addRoleByIndex(roleIndex)
            # Add new role to each existing message as unchecked
            for messageIndex in self.getActiveMessageIndexes():
                self.arrayOfMessages[messageIndex].addRoleByIndex(roleIndex)
        self.lock.release()
        return roleIndex

    # Returns the row of the new message
    # Unlike users and roles, duplicate messages are allowed
    def createNewMessage(self, messagebuffer, url):
        self.lock.acquire()
        messageIndex = self.arrayOfMessages.size()
        self.arrayOfMessages.add(
            MessageEntry(messageIndex, messageIndex - self.deletedMessageCount,
                         messagebuffer, url))
        # Add all existing roles as unchecked
        for roleIndex in self.getActiveRoleIndexes():
            self.arrayOfMessages[messageIndex].addRoleByIndex(roleIndex)
        self.lock.release()
        return messageIndex

    def clear(self):
        self.lock.acquire()
        self.arrayOfMessages = ArrayList()
        self.arrayOfRoles = ArrayList()
        self.arrayOfUsers = ArrayList()
        self.deletedUserCount = 0
        self.deletedRoleCount = 0
        self.deletedMessageCount = 0
        self.lock.release()

    def load(self, db, callbacks, helpers):

        def loadRequestResponse(index, callbacks, helpers, host, port, protocol, requestData):
            # TODO: tempRequestResponse is now an array because of a timing issue: if this
            # thread times out, it would still update tempRequestResponse later on.
            # TODO: this still locks the UI until all requests succeed or time out.
            try:
                # Due to the Burp Extension API, an original request must be created for all messages
                self.tempRequestResponse[index] = callbacks.makeHttpRequest(
                    helpers.buildHttpService(host, port, protocol), requestData)
            except:
                traceback.print_exc(file=callbacks.getStderr())

        self.lock.acquire()
        self.arrayOfRoles = db.arrayOfRoles
        self.arrayOfUsers = db.arrayOfUsers
        self.deletedUserCount = db.deletedUserCount
        self.deletedRoleCount = db.deletedRoleCount
        self.deletedMessageCount = db.deletedMessageCount
        self.arrayOfMessages = ArrayList()
        self.tempRequestResponse = []
        index = 0
        for message in db.arrayOfMessages:
            self.tempRequestResponse.append(None)
            t = Thread(target=loadRequestResponse,
                       args=[index, callbacks, helpers, message._host, message._port,
                             message._protocol, message._requestData])
            t.start()
            # TODO: fix timeout here to be non-static
            t.join(2.0)
            if not t.isAlive() and self.tempRequestResponse[index] is not None:
                self.arrayOfMessages.append(MessageEntry(
                    message._index,
                    message._tableRow,
                    callbacks.saveBuffersToTempFiles(self.tempRequestResponse[index]),
                    message._url,
                    message._name,
                    message._roles,
                    message._successRegex,
                    message._deleted))
            index += 1
        self.lock.release()

    def getSaveableObject(self):
        # TODO: might not need locks?
        self.lock.acquire()
        serializedMessages = []
        for message in self.arrayOfMessages:
            serializedMessages.append(MessageEntryData(
                message._index,
                message._tableRow,
                message._requestResponse.getRequest(),
                message._requestResponse.getHttpService().getHost(),
                message._requestResponse.getHttpService().getPort(),
                message._requestResponse.getHttpService().getProtocol(),
                message._url,
                message._name,
                message._roles,
                message._successRegex,
                message._deleted))
        ret = MatrixDBData(serializedMessages, self.arrayOfRoles, self.arrayOfUsers,
                           self.deletedUserCount, self.deletedRoleCount,
                           self.deletedMessageCount)
        self.lock.release()
        return ret

    def getActiveUserIndexes(self):
        return [x._index for x in self.arrayOfUsers if not x.isDeleted()]

    def getActiveRoleIndexes(self):
        return [x._index for x in self.arrayOfRoles if not x.isDeleted()]

    def getActiveMessageIndexes(self):
        return [x._index for x in self.arrayOfMessages if not x.isDeleted()]

    def getMessageByRow(self, row):
        for m in self.arrayOfMessages:
            if not m.isDeleted() and m.getTableRow() == row:
                return m

    def getUserByRow(self, row):
        for u in self.arrayOfUsers:
            if not u.isDeleted() and u.getTableRow() == row:
                return u

    def getRoleByMColumn(self, column):
        for r in self.arrayOfRoles:
            if not r.isDeleted() and r.getMTableColumn() == column:
                return r

    def getRoleByUColumn(self, column):
        for r in self.arrayOfRoles:
            if not r.isDeleted() and r.getUTableColumn() == column:
                return r

    def deleteUser(self, userIndex):
        self.lock.acquire()
        userEntry = self.arrayOfUsers[userIndex]
        if userEntry:
            userEntry._deleted = True
            self.deletedUserCount += 1
            if len(self.arrayOfUsers) > userIndex + 1:
                for i in self.arrayOfUsers[userIndex + 1:]:
                    i._tableRow -= 1
        self.lock.release()

    def deleteRole(self, roleIndex):
        self.lock.acquire()
        roleEntry = self.arrayOfRoles[roleIndex]
        if roleEntry:
            roleEntry._deleted = True
            self.deletedRoleCount += 1
            if len(self.arrayOfRoles) > roleIndex + 1:
                for i in self.arrayOfRoles[roleIndex + 1:]:
                    i.updateMTableColumn(i.getMTableColumn() - 1)
                    i.updateUTableColumn(i.getUTableColumn() - 1)
        self.lock.release()

    def deleteMessage(self, messageIndex):
        self.lock.acquire()
        messageEntry = self.arrayOfMessages[messageIndex]
        if messageEntry:
            messageEntry._deleted = True
            self.deletedMessageCount += 1
            if len(self.arrayOfMessages) > messageIndex + 1:
                for i in self.arrayOfMessages[messageIndex + 1:]:
                    i._tableRow -= 1
        self.lock.release()

    # TODO: if this method is unused, probably remove it
    def getUserEntriesWithinRole(self, roleIndex):
        return [userEntry for userEntry in self.arrayOfUsers if userEntry._roles[roleIndex]]
class Styler(DefaultNodeVisitor):
    """ generated source for class Styler """
    BUILTIN = re.compile("None|True|False|NotImplemented|Ellipsis|__debug__")

    # Matches the start of a triple-quoted string.
    TRISTRING_PREFIX = re.compile("^[ruRU]{0,2}['\"]{3}")

    #linker = Linker()

    # Offsets of doc strings found by the node visitor.
    docOffsets = HashSet()

    def __init__(self, idx, linker):
        """ generated source for method __init__ """
        super(Styler, self).__init__()
        self.analyzer = idx
        self.styles = ArrayList()
        self.linker = linker
        self.path = None
        self.source = None

    # Entry point for decorating a source file.
    #
    # @param path absolute file path
    # @param src  file contents
    def addStyles(self, path, src):
        """ generated source for method addStyles """
        self.path = path
        self.source = src
        m = self.analyzer.getAstForFile(path)
        if m is not None:
            m.visit(self)
        self.styles = sorted(self.styles, lambda a, b: a.compareTo(b))
        return self.styles

    #@overloaded
    def visit(self, n):
        if isinstance(n, Num):
            self.addStyle(n, StyleRun.Type.NUMBER)
            return True
        if isinstance(n, Str):
            s = self.sourceString(n.start, n.end)
            #if self.TRISTRING_PREFIX.match(s).lookingAt():
            if self.TRISTRING_PREFIX.match(s) is not None:
                self.addStyle(n.start, n.end - n.start, StyleRun.Type.DOC_STRING)
                self.docOffsets.add(n.start)  # don't re-highlight as a string
                # highlightDocString(n);
            return True
        if isinstance(n, Name):
            parent = n.getParent()
            if isinstance(parent, FunctionDef):
                fn = parent
                if n == fn.name:
                    self.addStyle(n, StyleRun.Type.FUNCTION)
                elif n == fn.kwarg or n == fn.vararg:
                    self.addStyle(n, StyleRun.Type.PARAMETER)
                return True
            if self.BUILTIN.match(n.id) is not None:
                self.addStyle(n, StyleRun.Type.BUILTIN)
                return True
        return True

    def addStyle(self, *args):
        if len(args) == 4:
            self.addStyle1(*args)
        elif len(args) == 2:
            self.addStyle2(*args)
        elif len(args) == 3:
            self.addStyle3(*args)

    #@overloaded
    def addStyle1(self, e, start, len, type_):
        if e.getFile() is not None:  # if it's an NUrl, for instance
            self.addStyle(start, len, type_)

    #@addStyle.register(object, Node, StyleRun.Type)
    def addStyle2(self, e, type_):
        if e.start is not None and e.end is not None:
            self.addStyle(e, e.start, e.end - e.start, type_)

    #@addStyle.register(object, int, int, StyleRun.Type)
    def addStyle3(self, begin, len, type_):
        self.styles.append(StyleRun(type_, begin, len))

    def sourceString(self, *args):
        if len(args) == 1:
            return self.sourceString0(*args)
        else:
            return self.sourceString1(*args)

    #@overloaded
    def sourceString0(self, e):
        """ generated source for method sourceString """
        return self.sourceString(e.start, e.end)

    #@sourceString.register(object, int, int)
    def sourceString1(self, begin, end):
        """ generated source for method sourceString_0 """
        a = max(begin, 0)
        b = min(end, len(self.source))
        b = max(b, 0)
        try:
            return self.source[a:b]
        except Exception as sx:
            # Silent here; only happens for weird encodings in the file
            return ""
class TupleType(Type):
    """ generated source for class TupleType """

    #@overloaded
    def initsimple(self):
        from pysonarsq.java.Analyzer import Analyzer
        super(TupleType, self).__init__()
        self.eltTypes = ArrayList()
        self.getTable().addSuper(Analyzer.self.builtins.BaseTuple.getTable())
        self.getTable().setPath(Analyzer.self.builtins.BaseTuple.getTable().getPath())

    #@__init__.register(object, A)
    def __init__(self, *types):
        self.initsimple()
        if types != ():
            if len(types) == 1:
                if isinstance(types[0], list):
                    self.eltTypes += types[0]
            elif len(types):
                self.eltTypes += list(types)

    def setElementTypes(self, eltTypes):
        self.eltTypes = eltTypes

    def getElementTypes(self):
        return self.eltTypes

    def add(self, elt):
        self.eltTypes.append(elt)

    def get(self, i):
        return self.eltTypes[i]

    def toListType(self):
        from ListType import ListType
        t = ListType()
        for e in self.eltTypes:
            t.add(e)
        return t

    def __eq__(self, other):
        from pysonarsq.java._ import _
        if self.typeStack.contains(self, other):
            return True
        elif isinstance(other, TupleType):
            types1 = self.getElementTypes()
            types2 = other.getElementTypes()
            if len(types1) == len(types2):
                self.typeStack.push(self, other)
                i = 0
                while i < len(types1):
                    if not types1[i] == types2[i]:
                        self.typeStack.pop(self, other)
                        return False
                    i += 1
                self.typeStack.pop(self, other)
                return True
            else:
                return False
        else:
            return False

    def hashCode(self):
        return hash("TupleType")

    def printType(self, ctr):
        sb = []
        num = ctr.visit(self)
        if num is not None:
            sb.append("#")
            sb.append(num)
        else:
            newNum = ctr.push(self)
            first = True
            if len(self.getElementTypes()) != 1:
                sb.append("(")
            for t in self.getElementTypes():
                if not first:
                    sb.append(", ")
                sb.append(t.printType(ctr))
                first = False
            if ctr.isUsed(self):
                sb.append("=#")
                sb.append(newNum)
                sb.append(":")
            if len(self.getElementTypes()) != 1:
                sb.append(")")
            ctr.pop(self)
        return "".join(map(str, sb))
class BurpDatabaseModels():
    """ The in-memory database implementation of the Burp extension. """

    def __init__(self):
        """
        Defines a number of class variables tracking the number of deleted
        records and holds the references to the arrays of records.

        Args:
            None

        Returns:
            None
        """
        self.STATIC_MESSAGE_TABLE_COLUMN_COUNT = 6
        self.lock = Lock()
        self.arrayOfMessages = ArrayList()
        self.arrayOfCampaigns = ArrayList()
        self.arrayOfSettings = ArrayList()
        self.deletedCampaignCount = 0
        self.deletedRoleCount = 0
        self.deletedMessageCount = 0
        self.deletedSettingCount = 0
        self.selfExtender = None

    def addCampaign(self, name):
        """
        Adds a new record to the campaigns array and returns the index of the campaign.

        Args:
            name (str): The name of the campaign

        Returns:
            int: The index of the inserted campaign
        """
        campaign_index = -1
        try:
            self.lock.acquire()
            campaign_index = self.arrayOfCampaigns.size()
            self.arrayOfCampaigns.append(
                CampaignEntry(campaign_index,
                              campaign_index - self.deletedCampaignCount, name))
        finally:
            self.lock.release()
        return campaign_index

    def updateCampaign(self, campaign_index, id, val):
        """
        Updates a field of the campaign record at the given index.

        Args:
            campaign_index (int): The index of the campaign to update
            id (int): Which field to update (1 for _Membership, 2 for _MembershipB)
            val: The new value of the field

        Returns:
            None
        """
        try:
            self.lock.acquire()
            campaign_entry = self.arrayOfCampaigns[campaign_index]
            if campaign_entry:
                if id == 1:
                    campaign_entry._Membership = val
                if id == 2:
                    campaign_entry._MembershipB = val
        except:
            print 'Error when updating campaigns. Wanted to update campaign with id ', campaign_index
            size = self.arrayOfCampaigns.size()
            print 'Current campaigns:', size
            for i in self.arrayOfCampaigns:
                print 'Campaign with id ', i._index
            print sys.exc_info()
        finally:
            self.lock.release()

    def delete_campaign(self, campaignIndex):
        """
        Deletes (terminates) the campaign at the given index.

        Args:
            campaignIndex (int): The index of the campaign to terminate

        Returns:
            None
        """
        try:
            self.lock.acquire()
            print 'Terminating Campaign'
            campaign_entry = self.arrayOfCampaigns[campaignIndex]
            if campaign_entry:
                campaign_entry._deleted = True
                campaign_entry._Result = "Terminated"
                self.deletedCampaignCount += 1
                if len(self.arrayOfCampaigns) > campaignIndex + 1:
                    for i in self.arrayOfCampaigns[campaignIndex + 1:]:
                        i._tableRow -= 1
            print 'Campaign was Terminated'
        finally:
            self.lock.release()

    def addSetting(self, name, value, domain=None, description=None, path=None):
        """
        Adds a new record to the settings array and returns the index of the setting.

        Args:
            name (str): The name of the setting
            value (str): The value of the setting
            domain (str): The category of the setting (optional)
            description (str): A small description of the setting (optional)
            path (str): The location of the described resource (optional)

        Returns:
            int: The index of the inserted setting
        """
        self.lock.acquire()
        settingIndex = -1
        for i in self.getActiveSettingIndexes(domain):
            if self.arrayOfSettings[i]._name == name:
                settingIndex = i
        if settingIndex < 0:
            settingIndex = self.arrayOfSettings.size()
            self.arrayOfSettings.append(
                SettingEntry(settingIndex,
                             len(self.getActiveSettingIndexes(domain)),
                             name, value, domain, description, path))
        self.lock.release()
        return settingIndex

    def createNewMessage(self, messagebuffer, host, method, path, selectedparameter,
                         totest=False, regex="HTTP/1.1 200 OK",
                         failRegex="(HTTP/1.1 403|block|impact)"):
        """
        Adds a new record to the messages array and returns the index of the message.

        Args:
            messagebuffer (str): The saved buffer of the Burp request-response
            host (str): The targeted host
            method (str): The used HTTP method
            path (str): The HTTP URL path
            selectedparameter (str): The parameter of the HTTP request
            totest (bool): Whether to test the response against the regexes
            regex (str): A regex that indicates success if it matches the response
            failRegex (str): A regex that indicates failure if it matches the response

        Returns:
            int: The index of the inserted message
        """
        self.lock.acquire()
        messageIndex = self.arrayOfMessages.size()
        self.arrayOfMessages.add(
            MessageEntry(messageIndex, messageIndex - self.deletedMessageCount,
                         messagebuffer, host, method, path, selectedparameter,
                         regex, failRegex))
        self.lock.release()
        if totest:
            t = Thread(target=self.selfExtender.runMessagesThread, args=[messageIndex])
            t.start()
        return messageIndex

    def clear(self):
        """
        Clears all arrays and all counters.

        Args:
            None

        Returns:
            None
        """
        self.lock.acquire()
        self.arrayOfMessages = ArrayList()
        self.arrayOfCampaigns = ArrayList()
        self.deletedCampaignCount = 0
        self.deletedRoleCount = 0
        self.deletedMessageCount = 0
        self.lock.release()

    def getActiveCampaignIndexes(self):
        """
        Gets the indexes of all campaigns that are not deleted.

        Returns:
            list: The indexes of the active campaigns
        """
        return [x._index for x in self.arrayOfCampaigns if not x.isDeleted()]

    def getActiveMessageIndexes(self):
        """
        Gets the indexes of all messages that are not deleted.

        Returns:
            list: The indexes of the active messages
        """
        return [x._index for x in self.arrayOfMessages if not x.isDeleted()]

    def getActiveSettingIndexes(self, domain=None):
        """
        Gets the indexes of all settings that are not deleted and belong to the given category.

        Args:
            domain (str): The category of the requested settings (optional)

        Returns:
            list: The indexes of the matching settings
        """
        return [x._index for x in self.arrayOfSettings
                if not x.isDeleted() and (not domain or x._domain == domain)]

    def getMessageByRow(self, row):
        """
        Gets the message shown at the selected table row, as long as it is not deleted.

        Args:
            row (int): The row of the table where the message appears

        Returns:
            MessageEntry: The message at that row
        """
        for m in self.arrayOfMessages:
            if not m.isDeleted() and m.getTableRow() == row:
                return m

    def getSettingByRow(self, row, domain=None):
        """
        Gets the setting shown at the selected table row, as long as it is not deleted.

        Args:
            row (int): The row of the table where the setting appears
            domain (str): The category of the setting (optional)

        Returns:
            SettingEntry: The setting at that row
        """
        for m in [x for x in self.arrayOfSettings if (not domain or x._domain == domain)]:
            if (not m.isDeleted() and m.getTableRow() == row
                    and (not domain or m._domain == domain)):
                return m

    def getCampaignByRow(self, row):
        """
        Gets the campaign shown at the selected table row, as long as it is not deleted.

        Args:
            row (int): The row of the table where the campaign appears

        Returns:
            CampaignEntry: The campaign at that row
        """
        for u in self.arrayOfCampaigns:
            if not u.isDeleted() and u.getTableRow() == row:
                return u

    def delete_message(self, messageIndex):
        """
        Deletes the message at the given index.

        Args:
            messageIndex (int): The index of the message to delete

        Returns:
            None
        """
        self.lock.acquire()
        messageEntry = self.arrayOfMessages[messageIndex]
        if messageEntry:
            messageEntry._deleted = True
            self.deletedMessageCount += 1
            if len(self.arrayOfMessages) > messageIndex + 1:
                for i in self.arrayOfMessages[messageIndex + 1:]:
                    i._tableRow -= 1
        self.lock.release()
class Analyzer(object):
    # global static instance of the analyzer itself
    #self = Analyzer()
    allBindings = ArrayList()
    references = LinkedHashMap()
    semanticErrors = HashMap()
    parseErrors = HashMap()
    cwd = None
    nCalled = 0
    multilineFunType = False
    path = ArrayList()
    uncalled = HashSet()
    callStack = HashSet()
    importStack = HashSet()
    astCache = AstCache()
    cacheDir = str()
    failedToParse = HashSet()
    stats = Stats()
    builtins = None  # Builtins()
    logger = logging.getLogger(__name__)
    loadingProgress = None
    projectDir = str()

    # below doesn't work for some reason....
    """
    def init_vars(self):
        self.allBindings = ArrayList()
        self.references = LinkedHashMap()
        self.semanticErrors = HashMap()
        self.parseErrors = HashMap()
        self.cwd = None
        self.nCalled = 0
        self.multilineFunType = False
        self.path = ArrayList()
        self.uncalled = HashSet()
        self.callStack = HashSet()
        self.importStack = HashSet()
        self.astCache = AstCache()
        self.cacheDir = str()
        self.failedToParse = HashSet()
        self.stats = Stats()
        self.builtins = None  # Builtins()
        self.logger = logging.getLogger(__name__)
        self.loadingProgress = None
        self.projectDir = str()
    """

    # singleton pattern
    _instance = None

    def __new__(cls, *args, **kwargs):
        if not cls._instance:
            cls._instance = super(Analyzer, cls).__new__(cls, *args, **kwargs)
        return cls._instance

    def __init__(self):
        self.moduleTable = Scope(None, Scope.ScopeType.GLOBAL)
        self.loadedFiles = ArrayList()
        self.globaltable = Scope(None, Scope.ScopeType.GLOBAL)
        import time
        millis = int(round(time.time() * 1000))
        self.stats.putInt("startTime", millis)
        self.logger = logging.getLogger(__name__)
        if not hasattr(Analyzer, 'self'):
            setattr(Analyzer, 'self', self)
        self.builtins = Builtins()
        self.builtins.init()
        #self.addPythonPath()
        self.createCacheDir()
        self.getAstCache()

    # main entry to the analyzer
    def analyze(self, path):
        self.projectDir = _.unifyPath(path)
        self.loadFileRecursive(self.projectDir)

    def setCWD(self, cd):
        if cd is not None:
            self.cwd = cd
        #if cd is not None:
        #    self.cwd = _.unifyPath(cd)

    def addPaths(self, p):
        for s in p:
            self.addPath(s)

    def addPath(self, p):
        self.path.add(_.unifyPath(p))

    def setPath(self, path):
        self.path = ArrayList(len(path))
        self.addPaths(path)

    def addPythonPath(self):
        path = System.getenv("PYTHONPATH")
        if path is not None:
            segments = path.split(os.pathsep)
            for p in segments:
                self.addPath(p)

    def getLoadPath(self):
        loadPath = ArrayList()
        if self.cwd is not None:
            loadPath.append(self.cwd)
        if self.projectDir is not None and os.path.isdir(self.projectDir):
            loadPath.append(self.projectDir)
        loadPath += self.path
        return loadPath

    def inStack(self, f):
        return f in self.callStack

    def pushStack(self, f):
        self.callStack.add(f)

    def popStack(self, f):
        self.callStack.remove(f)

    def inImportStack(self, f):
        return f in self.importStack

    def pushImportStack(self, f):
        self.importStack.add(f)

    def popImportStack(self, f):
        self.importStack.remove(f)

    def getAllBindings(self):
        return self.allBindings

    def getCachedModule(self, file_):
        t = self.moduleTable.lookupType(_.moduleQname(file_))
        if t is None:
            return None
        elif t.isUnionType():
            for tt in t.asUnionType().getTypes():
                if tt.isModuleType():
                    return tt
            return None
        elif t.isModuleType():
            return t
        else:
            return None

    def getDiagnosticsForFile(self, file_):
        errs = self.semanticErrors.get(file_)
        if errs is not None:
            return errs
        return ArrayList()

    #@overloaded
    def putRef(self, node, bs):
        if not hasattr(bs, '__len__'):
            bs = [bs]
        if not isinstance(node, Url):
            ref = Ref(node)
            bindings = self.references.get(ref)
            if bindings is None:
                bindings = ArrayList()
                self.references[ref] = bindings
            for b in bs:
                if b not in bindings:
                    bindings.append(b)
                b.addRef(ref)

    def getReferences(self):
        """ generated source for method getReferences """
        return self.references

    def putProblem(self, *args):
        if len(args) == 2:
            return self.putProblem0(*args)
        else:
            return self.putProblem1(*args)

    #@overloaded
    def putProblem0(self, loc, msg):
        """ generated source for method putProblem """
        file_ = loc.getFile()
        if file_ is not None:
            self.addFileErr(file_, loc.start, loc.end, msg)

    # for situations without a Node
    #@putProblem.register(object, str, int, int, str)
    def putProblem1(self, file_, begin, end, msg):
        """ generated source for method putProblem_0 """
        if file_ is not None:
            self.addFileErr(file_, begin, end, msg)

    def addFileErr(self, file_, begin, end, msg):
        """ generated source for method addFileErr """
        d = Diagnostic(file_, Diagnostic.Category.ERROR, begin, end, msg)
        self.getFileErrs(file_, self.semanticErrors).append(d)

    def getParseErrs(self, file_):
        return self.getFileErrs(file_, self.parseErrors)

    def getFileErrs(self, file_, _map):
        msgs = _map.get(file_)
        if msgs is None:
            msgs = ArrayList()
            _map[file_] = msgs
        return msgs

    def loadFile(self, path):
        _.msg("loading: " + path)
        path = _.unifyPath(path)
        if not os.path.isfile(path):
            self.finer("\nfile not found or cannot be read: " + path)
            return None
        module_ = self.getCachedModule(path)
        if module_ is not None:
            self.finer("\nusing cached module " + path + " [succeeded]")
            return module_
        # detect circular import
        if Analyzer.self.inImportStack(path):
            return None
        # set new CWD and save the old one on stack
        oldcwd = self.cwd
        self.setCWD(os.path.join(*path.split(os.sep)[:-1]))
        Analyzer.self.pushImportStack(path)
        mod = self.parseAndResolve(path)
        # restore old CWD
        self.setCWD(oldcwd)
        return mod

    def isInLoadPath(self, dir):
        for s in self.getLoadPath():
            if File(s) == dir:
                return True
        return False

    def parseAndResolve(self, file_):
        self.finer("Analyzing: " + file_)
        self.loadingProgress.tick()
        try:
            ast = self.getAstForFile(file_)
            if ast is None:
                self.failedToParse.add(file_)
                return None
            else:
                self.finer("resolving: " + file_)
                mod = ast.resolve(self.moduleTable)
                assert isinstance(mod, ModuleType)
                self.finer("[success]")
                self.loadedFiles.append(file_)
                return mod
        except MemoryError as e:
            if self.astCache is not None:
                self.astCache.clear()
            import gc
            gc.collect()
            return None

    def createCacheDir(self):
        """ generated source for method createCacheDir """
        self.cacheDir = _.makePathString(_.getSystemTempDir(), "pysonar2", "ast_cache")
        f = self.cacheDir
        _.msg("AST cache is at: " + self.cacheDir)
        if not os.path.exists(f):
            os.makedirs(f)
        if not os.path.exists(f):
            _.die("Failed to create tmp directory: " + self.cacheDir + ". Please check permissions")

    def getAstCache(self):
        """ generated source for method getAstCache """
        if self.astCache is None:
            self.astCache = AstCache.get()
        return self.astCache.INSTANCE

    # Returns the syntax tree for {@code file}.
    def getAstForFile(self, file_):
        return self.getAstCache().getAST(file_)

    def getBuiltinModule(self, qname):
        return self.builtins.get(qname)

    def makeQname(self, names):
        if _.isEmpty(names):
            return ""
        ret = ""
        i = 0
        while i < len(names) - 1:
            ret += names[i].id + "."
            i += 1
        ret += names[len(names) - 1].id
        return ret

    # Find the path that contains modname. Used to find the starting point of locating a qname.
    #
    # @param headName first module name segment
    def locateModule(self, headName):
        loadPath = self.getLoadPath()
        for p in loadPath:
            startDir = os.sep.join([p, headName])
            initFile = _.joinPath(startDir, "__init__.py")
            if os.path.exists(initFile):
                return p
            startFile = startDir + ".py"
            if os.path.exists(startFile):
                return p
        return None

    def loadModule(self, name, scope):
        if _.isEmpty(name):
            return None
        from Binding import Binding
        qname = self.makeQname(name)
        mt = self.getBuiltinModule(qname)
        if mt is not None:
            scope.insert(name[0].id,
                         Url(Builtins.LIBRARY_URL + mt.getTable().getPath() + ".html"),
                         mt, Binding.Kind.SCOPE)
            return mt
        # If there's more than one segment, load the packages first
        prev = None
        startPath = self.locateModule(name[0].id)
        if startPath is None:
            return None
        path = startPath
        for i, n in enumerate(name):
            path = os.sep.join([path, name[i].id])
            initFile = _.joinPath(path, "__init__.py")
            if os.path.isfile(initFile):
                mod = self.loadFile(initFile)
                if mod is None:
                    return None
                if prev is not None:
                    prev.getTable().insert(name[i].id, name[i], mod, Binding.Kind.VARIABLE)
                else:
                    scope.insert(name[i].id, name[i], mod, Binding.Kind.VARIABLE)
                prev = mod
            elif i == len(name) - 1:
                startFile = path + ".py"
                if os.path.isfile(startFile):
                    mod = self.loadFile(startFile)
                    if mod is None:
                        return None
                    if prev is not None:
                        prev.getTable().insert(name[i].id, name[i], mod, Binding.Kind.VARIABLE)
                    else:
                        scope.insert(name[i].id, name[i], mod, Binding.Kind.VARIABLE)
                    prev = mod
                else:
                    return None
        return prev

    # Load all Python source files recursively if the given fullname is a
    # directory; otherwise just load a file. Looks at the file extension to
    # determine whether to load a given file.
    def loadFileRecursive(self, fullname):
        count = self.countFileRecursive(fullname)
        if self.loadingProgress is None:
            self.loadingProgress = FancyProgress(count, 50)
        if os.path.isdir(fullname):
            for root, dirs, files in os.walk(fullname):
                for f in files:
                    self.loadFileRecursive(root + os.sep + f)
                for d in dirs:
                    self.loadFileRecursive(root + os.sep + d)
        else:
            if fullname.endswith(".py"):
                self.loadFile(fullname)

    # count number of .py files
    def countFileRecursive(self, fullname):
        sum = 0
        if os.path.isdir(fullname):
            for root, dirs, files in os.walk(fullname):
                for f in files:
                    sum += self.countFileRecursive(root + os.sep + f)
                for d in dirs:
                    sum += self.countFileRecursive(root + os.sep + d)
        else:
            if fullname.endswith(".py"):
                sum += 1
        return sum

    def finish(self):
        """ generated source for method finish """
        # progress.end();
        _.msg("\nFinished loading files. " + str(self.nCalled) + " functions were called.")
        _.msg("Analyzing uncalled functions")
        self.applyUncalled()
        # mark unused variables
        for b in self.allBindings:
            if (not b.getType().isClassType() and not b.getType().isFuncType()
                    and not b.getType().isModuleType() and _.isEmpty(b.getRefs())):
                Analyzer.self.putProblem(b.getNode(), "Unused variable: " + b.__class__.__name__)
        for ent in self.references.items():
            self.convertCallToNew(ent[0], ent[1])
        _.msg(self.getAnalysisSummary())

    def close(self):
        """ generated source for method close """
        self.astCache.close()

    def convertCallToNew(self, ref, bindings):
        """ generated source for method convertCallToNew """
        if ref.isRef():
            return
        if len(bindings) == 0:
            return
        nb = bindings[0]
        t = nb.getType()
        if t.isUnionType():
            t = t.asUnionType().firstUseful()
            if t is None:
                return
        if not t.isUnknownType() and not t.isFuncType():
            ref.markAsNew()

    def addUncalled(self, cl):
        """ generated source for method addUncalled """
        if not cl.func.called:
            self.uncalled.add(cl)

    def removeUncalled(self, f):
        if f in self.uncalled:
            self.uncalled.remove(f)

    def applyUncalled(self):
        """ generated source for method applyUncalled """
        progress = FancyProgress(len(self.uncalled), 50)
        while not _.isEmpty(self.uncalled):
            uncalledDup = list(self.uncalled)
            for cl in uncalledDup:
                progress.tick()
                Call.apply(cl, None, None, None, None, None)

    def getAnalysisSummary(self):
        sb = []
        sb.append("\n" + _.banner("analysis summary"))
        duration = _.formatTime(_.millis() - self.stats.getInt("startTime"))
        sb.append("\n- total time: " + duration)
        sb.append("\n- modules loaded: " + str(len(self.loadedFiles)))
        sb.append("\n- semantic problems: " + str(len(self.semanticErrors)))
        sb.append("\n- failed to parse: " + str(len(self.failedToParse)))
        # calculate number of defs, refs, xrefs
        nDef = 0
        nXRef = 0
        for b in self.getAllBindings():
            nDef += 1
            nXRef += len(b.getRefs())
        sb.append("\n- number of definitions: " + str(nDef))
        sb.append("\n- number of cross references: " + str(nXRef))
        sb.append("\n- number of references: " + str(len(self.getReferences())))
        resolved = self.stats.getInt("resolved")
        unresolved = self.stats.getInt("unresolved")
        sb.append("\n- resolved names: " + str(resolved))
        sb.append("\n- unresolved names: " + str(unresolved))
        sb.append("\n- name resolve rate: " + _.percent(resolved, resolved + unresolved))
        sb.append("\n" + _.getGCStats())
        return ''.join(sb)

    def getLoadedFiles(self):
        files = ArrayList()
        for file_ in self.loadedFiles:
            if file_.endswith(".py"):
                files.append(file_)
        return files

    def registerBinding(self, b):
        self.allBindings.append(b)

    def log(self, level, msg):
        _.msg(msg)

    def severe(self, msg):
        self.log(Level.SEVERE, msg)

    def warn(self, msg):
        self.log(Level.WARNING, msg)

    def info(self, msg):
        self.log(Level.INFO, msg)

    def fine(self, msg):
        self.log(Level.FINE, msg)

    def finer(self, msg):
        self.log('*a log level*', msg)

    def __str__(self):
        return ("<Analyzer:locs=" + str(len(self.references)) +
                ":probs=" + str(len(self.semanticErrors)) +
                ":files=" + str(len(self.loadedFiles)) + ">")
def analyze(self, dataSource, fileManager, context):
    oruxMapsTrackpointsDbs = AppSQLiteDB.findAppDatabases(
        dataSource, "oruxmapstracks.db", True, self._PACKAGE_NAME)
    for oruxMapsTrackpointsDb in oruxMapsTrackpointsDbs:
        try:
            current_case = Case.getCurrentCaseThrows()
            skCase = Case.getCurrentCase().getSleuthkitCase()
            geoArtifactHelper = GeoArtifactsHelper(
                skCase, self._MODULE_NAME, self._PROGRAM_NAME,
                oruxMapsTrackpointsDb.getDBFile())

            poiQueryString = "SELECT poilat, poilon, poialt, poitime, poiname FROM pois"
            poisResultSet = oruxMapsTrackpointsDb.runQuery(poiQueryString)
            abstractFile = oruxMapsTrackpointsDb.getDBFile()
            if poisResultSet is not None:
                while poisResultSet.next():
                    latitude = poisResultSet.getDouble("poilat")
                    longitude = poisResultSet.getDouble("poilon")
                    time = poisResultSet.getLong("poitime") / 1000  # milliseconds since unix epoch
                    name = poisResultSet.getString("poiname")
                    altitude = poisResultSet.getDouble("poialt")

                    attributes = ArrayList()
                    artifact = abstractFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_GPS_BOOKMARK)
                    attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME,
                                                       self._MODULE_NAME, time))
                    attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_GEO_LATITUDE,
                                                       self._MODULE_NAME, latitude))
                    attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_GEO_LONGITUDE,
                                                       self._MODULE_NAME, longitude))
                    attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_GEO_ALTITUDE,
                                                       self._MODULE_NAME, altitude))
                    attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_NAME,
                                                       self._MODULE_NAME, name))
                    attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME,
                                                       self._MODULE_NAME, self._PROGRAM_NAME))
                    artifact.addAttributes(attributes)
                    try:
                        # index the artifact for keyword search
                        blackboard = Case.getCurrentCase().getSleuthkitCase().getBlackboard()
                        blackboard.postArtifact(artifact, self._MODULE_NAME)
                    except Blackboard.BlackboardException as ex:
                        self._logger.log(Level.SEVERE,
                                         "Unable to index blackboard artifact " + str(artifact.getArtifactID()), ex)
                        self._logger.log(Level.SEVERE, traceback.format_exc())
                        MessageNotifyUtil.Notify.error(
                            "Failed to index trackpoint artifact for keyword search.",
                            artifact.getDisplayName())

            # tracks -> segments -> trackpoints
            #
            # The tracks and segments are put into arrays because once the segment query is run,
            # an error occurs that the trackname column cannot be found in the track query. This
            # is avoided if all the tracks/segments are collected first into arrays that can then
            # be processed all at once.
            trackQueryString = "SELECT _id, trackname, trackciudad FROM tracks"
            trackResultSet = oruxMapsTrackpointsDb.runQuery(trackQueryString)
            if trackResultSet is not None:
                trackResults = ArrayList()
                while trackResultSet.next():
                    tempTrack = ArrayList()
                    trackName = trackResultSet.getString("trackname") + " - " + trackResultSet.getString("trackciudad")
                    trackId = str(trackResultSet.getInt("_id"))
                    tempTrack.append(trackId)
                    tempTrack.append(trackName)
                    trackResults.append(tempTrack)
                for trackResult in trackResults:
                    trackId = trackResult[0]
                    trackName = trackResult[1]
                    segmentQueryString = "SELECT _id, segname FROM segments WHERE segtrack = " + trackId
                    segmentResultSet = oruxMapsTrackpointsDb.runQuery(segmentQueryString)
                    if segmentResultSet is not None:
                        segmentResults = ArrayList()
                        while segmentResultSet.next():
                            segmentName = trackName + " - " + segmentResultSet.getString("segname")
                            segmentId = str(segmentResultSet.getInt("_id"))
                            tempSegment = ArrayList()
                            tempSegment.append(segmentId)
                            tempSegment.append(segmentName)
                            segmentResults.append(tempSegment)
                        for segmentResult in segmentResults:
                            segmentId = segmentResult[0]
                            segmentName = segmentResult[1]
                            trackpointsQueryString = ("SELECT trkptlat, trkptlon, trkptalt, trkpttime "
                                                      "FROM trackpoints WHERE trkptseg = " + segmentId)
                            trackpointsResultSet = oruxMapsTrackpointsDb.runQuery(trackpointsQueryString)
                            if trackpointsResultSet is not None:
                                geoPointList = GeoTrackPoints()
                                while trackpointsResultSet.next():
                                    latitude = trackpointsResultSet.getDouble("trkptlat")
                                    longitude = trackpointsResultSet.getDouble("trkptlon")
                                    altitude = trackpointsResultSet.getDouble("trkptalt")
                                    time = trackpointsResultSet.getLong("trkpttime") / 1000  # milliseconds since unix epoch
                                    geoPointList.addPoint(
                                        TrackPoint(latitude, longitude, altitude, segmentName, 0, 0, 0, time))
                                try:
                                    geoartifact = geoArtifactHelper.addTrack(segmentName, geoPointList, None)
                                except Blackboard.BlackboardException as ex:
                                    self._logger.log(Level.SEVERE,
                                                     "Error using geo artifact helper with blackboard", ex)
                                    self._logger.log(Level.SEVERE, traceback.format_exc())
                                    MessageNotifyUtil.Notify.error("Failed to add track artifact.",
                                                                   "geoArtifactHelper")
                                except TskCoreException as ex:
                                    self._logger.log(Level.SEVERE,
                                                     "Error using geo artifact helper with TskCoreException", ex)
                                    self._logger.log(Level.SEVERE, traceback.format_exc())
                                    MessageNotifyUtil.Notify.error(
                                        "Failed to add track artifact with TskCoreException.",
                                        "geoArtifactHelper")
        except SQLException as ex:
            self._logger.log(Level.WARNING, "Error processing query result for Orux Map trackpoints.", ex)
            self._logger.log(Level.WARNING, traceback.format_exc())
        except TskCoreException as ex:
            self._logger.log(Level.SEVERE, "Failed to add Orux Map trackpoint artifacts.", ex)
            self._logger.log(Level.SEVERE, traceback.format_exc())
        except BlackboardException as ex:
            self._logger.log(Level.WARNING, "Failed to post artifacts.", ex)
            self._logger.log(Level.WARNING, traceback.format_exc())
        except NoCurrentCaseException as ex:
            self._logger.log(Level.WARNING, "No case currently open.", ex)
            self._logger.log(Level.WARNING, traceback.format_exc())
        finally:
            oruxMapsTrackpointsDb.close()