def default(self): """Default command""" host = None port = None # Check for hostname if self.app.pargs.host: host = self.app.pargs.host # Check for port if self.app.pargs.port: port = int(self.app.pargs.port) # Read SQLite data if self.app.pargs.filename: from smalisca.modules.module_sql_models import AppSQLModel # Read SQLite data appSQL = AppSQLModel(self.app.pargs.filename) log.info("Successfully opened SQLite DB") # Create API endpoints flask_app = create_flask_app() # Start web server log.info("Starting web application ...") web_server = WebServer(host, port, flask_app) web_server.create_blueprints(appSQL.get_session()) web_server.run()
def do_dcl(self, params):
    """Draw calls. Type '--help' for more information."""
    try:
        args = self.dcl_parser.parse_args(params.split())
        results = self.get_calls(args)

        # Create new graph
        calls_graph = CallGraph()
        for r in results:
            calls_graph.add_call(r)

        # Finalize graph
        calls_graph.finalize()

        # Write output
        if args.output:
            if results:
                calls_graph.write(args.output_format, args.output,
                                  args.output_prog, args.output_args)
                log.info("Wrote results to %s" % args.output)
            else:
                log.info("No results :(")

    except SystemExit:
        pass
def run(self): """Runs the process""" c = 0 for d in self.dirs: log.info("%s %d/%d Parsing %s ... " % (self.name, c, len(self.dirs), d)) # Parse directory parser = SmaliParser(d, self.suffix) parser.run() # Get and save results res = parser.get_results() self.result_queue.put(res) c += 1
def default(self): """Default command""" if (self.app.pargs.filename) and (self.app.pargs.fileformat): # Create new app app = App(__name__) # Analysis obj analysis = None # Check for config file if self.app.pargs.config_file: config.smalisca_conf.read(self.app.pargs.config_file) else: log.info("Using default conf (%s)" % config.PROJECT_CONF) config.smalisca_conf.read(config.PROJECT_CONF) config.smalisca_conf.parse() # Read SQLite data if self.app.pargs.fileformat == "sqlite": from smalisca.analysis.analysis_sqlite import AnalyzerSQLite from smalisca.modules.module_sql_models import AppSQLModel # Read SQLite data appSQL = AppSQLModel(self.app.pargs.filename) log.info("Successfully opened SQLite DB") # Create analysis framework log.info("Creating analyzer framework ...") analysis = AnalyzerSQLite(appSQL.get_session()) # Where to read commands from? if self.app.pargs.commands_file: commands = open(self.app.pargs.commands_file, "rt") try: log.info("Reading commands from %s" % self.app.pargs.commands_file) cmd_shell = AnalyzerShell(analysis) cmd_shell.use_rawinput = False cmd_shell.stdin = commands cmd_shell.prompt = '' cmd_shell.cmdloop() finally: commands.close() else: # Start new shell log.info("Starting new analysis shell") cmd_shell = AnalyzerShell(analysis) cmd_shell.cmdloop()
def parse_location(self):
    """Parse files in specified location"""
    log.info("Parsing .%s files in %s ... " % (self.suffix, self.location))

    for root, dirs, files in os.walk(self.location):
        for f in files:
            if f.endswith(self.suffix):
                # Build an OS-independent file path
                file_path = os.path.join(root, f)

                # Set current path
                self.current_path = file_path

                # Parse file
                log.debug("Parsing file:\t %s" % f)
                self.parse_file(file_path)

    log.info("Finished parsing!")
def do_dxcl(self, params):
    """Draw cross calls. Type '--help' for more information."""
    try:
        args = self.dxcl_parser.parse_args(params.split())
        calls_args = argparse.Namespace()

        if args.class_name:
            if args.direction == 'to':
                calls_args.to_class = args.class_name
            elif args.direction == 'from':
                calls_args.from_class = args.class_name

        if args.method_name:
            if args.direction == 'to':
                calls_args.to_method = args.method_name
            elif args.direction == 'from':
                calls_args.from_method = args.method_name

        log.info(calls_args)

        # Get calls
        results = self.get_calls(calls_args)

        # Get cross-references
        xresults = self.analysis.xref_call(results, args.direction, args.xref_depth)

        # Create new graph
        calls_graph = CallGraph()
        for r in xresults:
            calls_graph.add_call(r)

        # Finalize graph
        calls_graph.finalize()

        # Write output
        if args.output:
            calls_graph.write(args.output_format, args.output,
                              args.output_prog, args.output_args)
            log.info("Wrote results to %s" % args.output)

    except SystemExit:
        pass
def print_global_search(self, results):
    # Print classes
    print("- Classes ---------------------------------------------------------------------")
    if len(results['classes']) > 0:
        classes = results['classes']
        log.info("Found %d results" % len(classes))
        for c in classes:
            print("%s\n" % c)
    else:
        log.warn("No classes found.\n")

    # Print properties
    print("- Properties ------------------------------------------------------------------")
    if len(results['properties']) > 0:
        properties = results['properties']
        log.info("Found %d results" % len(properties))
        for p in properties:
            print("%s\n" % p)
    else:
        log.warn("No properties found.\n")

    # Print const strings
    print("- Const strings ---------------------------------------------------------------")
    if len(results['consts']) > 0:
        const_strings = results['consts']
        log.info("Found %d results" % len(const_strings))
        for s in const_strings:
            print("%s\n" % s)
    else:
        log.warn("No const strings found.\n")

    # Print methods
    print("- Methods ---------------------------------------------------------------------")
    if len(results['methods']) > 0:
        methods = results['methods']
        log.info("Found %d results" % len(methods))
        for m in methods:
            print("%s\n" % m)
    else:
        log.warn("No methods found.\n")
def xref_call(self, results, xref_type, max_depth=1):
    """Get xref results"""
    def to_xref(results):
        """Get xrefs that _call_ the results"""
        query = self.db.query(SmaliCall)

        # Unique class names
        class_names = list(set([r.from_class for r in results]))

        # Make query
        results = query.filter(SmaliCall.dst_class.in_(class_names)).all()
        return results

    def from_xref(results):
        """Get xrefs which are _called_ by the results"""
        query = self.db.query(SmaliCall)

        # Unique class names
        class_names = list(set([r.dst_class for r in results]))

        # Make query
        results = query.filter(SmaliCall.from_class.in_(class_names)).all()
        return results

    # Avoid if blocks
    func_call = {"to": to_xref, "from": from_xref}

    tmp_res = None

    # Get xrefs <max_depth> times
    for d in range(0, max_depth):
        if not tmp_res:
            tmp_res = func_call[xref_type](results)
        else:
            tmp_res = func_call[xref_type](tmp_res)
        log.info("Run:\t%d\tResults:\t%d" % (d, len(tmp_res)))

    # If no cross results, return old results
    return tmp_res if tmp_res else results
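# --- Illustrative sketch (not part of smalisca) -----------------------------
# xref_call() combines two small techniques: a dict used as a dispatch table
# ("avoid if blocks") and repeated application of the chosen lookup up to
# max_depth, falling back to the original input when nothing is found.
# A self-contained sketch of the same pattern over a plain call list
# (expand_to/expand_from and CALLS are hypothetical stand-ins for the DB
# queries) might look like this:
CALLS = [('A', 'B'), ('B', 'C'), ('C', 'D')]  # (caller, callee) pairs


def expand_to(edges):
    """Edges whose callee is one of the current callers ("who calls us")."""
    callers = {src for src, _ in edges}
    return [(s, d) for s, d in CALLS if d in callers]


def expand_from(edges):
    """Edges made by the current callees ("whom do we call next")."""
    callees = {dst for _, dst in edges}
    return [(s, d) for s, d in CALLS if s in callees]


def xref(edges, direction, max_depth=1):
    dispatch = {'to': expand_to, 'from': expand_from}
    tmp = None
    for depth in range(max_depth):
        tmp = dispatch[direction](tmp if tmp else edges)
        print("Run: %d  Results: %d" % (depth, len(tmp)))
    return tmp if tmp else edges


if __name__ == '__main__':
    # First run finds [('C', 'D')]; the second run finds nothing, so the
    # function falls back to the seed edges, mirroring xref_call() above.
    print(xref([('B', 'C')], 'from', max_depth=2))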
def walk_location(self):
    """Walk through the configured location and collect files and directories

    The results are stored in ``self.dirs`` (directories found at the
    configured walking depth) and ``self.files`` (all files).
    """
    file_list = []
    dirs_list = []
    startinglevel = self.location.count(os.sep)

    # "Walk" through location
    for root, dirs, files in os.walk(self.location):
        depth = root.count(os.sep) - startinglevel

        # Collect dirs
        for d in dirs:
            dirpath = os.path.join(root, d)
            if (os.path.isdir(dirpath)) and (depth == self.depth):
                log.info("Adding %s to list" % dirpath)
                dirs_list.append(dirpath)

        # Collect files
        for filename in files:
            filepath = os.path.join(root, filename)
            if os.path.isfile(filepath):
                file_list.append(filepath)

    # Save results
    self.dirs = dirs_list
    self.files = file_list
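# --- Illustrative sketch (not part of smalisca) -----------------------------
# The depth bookkeeping above works by counting path separators: the depth of
# the current root is its os.sep count minus that of the starting location.
# A self-contained helper using the same trick (list_dirs_at_depth is a
# hypothetical name) can be tried against any local directory tree:
import os


def list_dirs_at_depth(location, wanted_depth):
    """Return directories seen while walking `location` at `wanted_depth`."""
    start = location.count(os.sep)
    found = []
    for root, dirs, _files in os.walk(location):
        depth = root.count(os.sep) - start
        if depth == wanted_depth:
            found.extend(os.path.join(root, d) for d in dirs)
    return found


if __name__ == '__main__':
    # Depth 0 means the root equals the location, so this lists the
    # immediate subdirectories of the current directory.
    print(list_dirs_at_depth('.', 0))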
def add_class(self, class_obj):
    """Add new class object to graph

    Args:
        class_obj (dict): Class dict as obtained directly from the DB

    """
    # First add a package node
    if class_obj.class_package not in self.packages:
        # Create new graph
        subgraph = gv.Digraph(name='cluster_%s' % class_obj.class_package)

        # Add additional attributes
        subgraph.body.append('label = "%s"' % class_obj.class_name)

        # Add new node
        add_nodes(subgraph, [
            ('%s' % class_obj.class_package,
             {'label': class_obj.class_package})
        ])

        package_node = class_obj.class_package
        self.packages[package_node] = subgraph
        package_graph = subgraph

    else:
        package_node = class_obj.class_package
        package_graph = self.packages[package_node]

    class_label = "--- %s\\r\\r" % class_obj.class_name

    # Add properties
    class_label += "Properties:\l\l"
    for p in class_obj.properties:
        class_label += "[P] %s %s\l" % (p.property_type, p.property_name)

    # Add methods to graph
    class_label += "\l\lMethods:\l\l"
    for m in class_obj.methods:
        # method_node = "%s_method_%d" % (class_obj.class_name, m.id)
        class_label += "[M] %s %s()\l" % (m.method_type, m.method_name)

    # Add class node
    class_node = "%s_class_%d" % (class_obj.class_name, class_obj.id)

    # Add class node to package graph
    node_attr = {'label': class_label}

    # Add method node attributes
    for k in self.graphviz_opts['classes']['class_nodes']['nodes'].keys():
        node_attr[k] = self.graphviz_opts['classes']['class_nodes']['nodes'][k]

    add_nodes(package_graph, [(class_node, node_attr)])

    # Add edge
    if (package_node, class_node) not in self.edges:
        add_edges(package_graph, [(package_node, class_node)])
        self.edges.append((package_node, class_node))
    else:
        log.info("match")

    if class_obj.class_package not in self.subgraphs:
        self.subgraphs[class_obj.class_package] = package_graph
def add_class(self, class_obj):
    """Add new class object to graph

    Args:
        class_obj (dict): Class dict as obtained directly from the DB

    """
    # First add a package node
    if class_obj.class_package not in self.packages:
        # Create new graph
        subgraph = gv.Digraph(name='cluster_%s' % class_obj.class_package)

        # Add additional attributes
        subgraph.body.append('label = "%s"' % class_obj.class_name)

        # Add new node
        add_nodes(subgraph, [
            ('%s' % class_obj.class_package,
             {'label': class_obj.class_package})
        ])

        package_node = class_obj.class_package
        self.packages[package_node] = subgraph
        package_graph = subgraph

    else:
        package_node = class_obj.class_package
        package_graph = self.packages[package_node]

    class_label = "--- %s\\r\\r" % class_obj.class_name

    # Add properties
    class_label += "Properties:\l\l"
    for p in class_obj.properties:
        class_label += "[P] %s %s\l" % (p.property_type, p.property_name)

    # Add methods to graph
    class_label += "\l\lMethods:\l\l"
    for m in class_obj.methods:
        # method_node = "%s_method_%d" % (class_obj.class_name, m.id)
        class_label += "[M] %s %s()\l" % (m.method_type, m.method_name)

    # Add class node
    class_node = "%s_class_%d" % (class_obj.class_name, class_obj.id)

    # Add class node to package graph
    node_attr = {'label': class_label}

    # Add method node attributes
    for k in GraphConfig.ClassGraphConfig.class_nodes['nodes'].keys():
        node_attr[k] = GraphConfig.ClassGraphConfig.class_nodes['nodes'][k]

    add_nodes(package_graph, [(class_node, node_attr)])

    # Add edge
    if (package_node, class_node) not in self.edges:
        add_edges(package_graph, [(package_node, class_node)])
        self.edges.append((package_node, class_node))
    else:
        log.info("match")

    if class_obj.class_package not in self.subgraphs:
        self.subgraphs[class_obj.class_package] = package_graph
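# --- Illustrative sketch (not part of smalisca) -----------------------------
# add_class() above groups class nodes inside graphviz "cluster_*" subgraphs,
# one per package, and builds multi-line node labels terminated with the
# graphviz left-justify escape "\l". A small, self-contained sketch of that
# clustering/labelling idea with the graphviz package (the package and class
# names are made up) could look like this:
import graphviz as gv


def build_demo_graph():
    g = gv.Digraph('classes', node_attr={'shape': 'box'})

    # One cluster per package; the "cluster_" name prefix is what makes
    # graphviz draw the subgraph as a framed box.
    pkg = gv.Digraph(name='cluster_com_example_app')
    pkg.attr(label='com.example.app')

    # A class node with a left-justified, multi-line label
    label = ("--- MainActivity\\l\\lProperties:\\l[P] String tag\\l\\l"
             "Methods:\\l[M] void onCreate()\\l")
    pkg.node('MainActivity_class_1', label=label)
    pkg.node('com_example_app', label='com.example.app')
    pkg.edge('com_example_app', 'MainActivity_class_1')

    g.subgraph(pkg)
    return g


if __name__ == '__main__':
    # Print the DOT source; rendering to a file needs the graphviz binaries.
    print(build_demo_graph().source)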
def default(self): """Default command""" if self.app.pargs.location and self.app.pargs.suffix: self.location = self.app.pargs.location self.suffix = self.app.pargs.suffix # How many jobs (workers)? if self.app.pargs.jobs and self.app.pargs.jobs > 0: self.jobs = self.app.pargs.jobs else: self.jobs = multiprocessing.cpu_count() # Walk location to which depth? if self.app.pargs.depth and self.app.pargs.depth > 0: self.depth = self.app.pargs.depth else: self.depth = 1 # Create new concurrent parser instance concurrent_parser = ConcurrentParser( self.location, self.suffix, self.jobs, self.depth) concurrent_parser.walk_location() concurrent_parser.run() # Output results if (self.app.pargs.output) and (self.app.pargs.fileformat): results = concurrent_parser.get_results() app = App(__name__) # Add additional info app.add_location(self.location) app.add_parser("%s - %s" % (config.PROJECT_NAME, config.PROJECT_VERSION)) # Append classes for c in results: app.add_class_obj(c) # Write results to JSON if self.app.pargs.fileformat == 'json': log.info("Exporting results to JSON") app.write_json(self.app.pargs.output) log.info("\tWrote results to %s" % self.app.pargs.output) # Write results to sqlite elif self.app.pargs.fileformat == 'sqlite': appSQL = AppSQLModel(self.app.pargs.output) try: log.info("Exporting results to SQLite") # Add classes log.info("\tExtract classes ...") for c in app.get_classes(): appSQL.add_class(c) # Add properties log.info("\tExtract class properties ...") for p in app.get_properties(): appSQL.add_property(p) # Add const-strings log.info("\tExtract class const-strings ...") for c in app.get_const_strings(): appSQL.add_const_string(c) # Add methods log.info("\tExtract class methods ...") for m in app.get_methods(): appSQL.add_method(m) # Add calls log.info("\tExtract calls ...") for c in app.get_calls(): appSQL.add_call(c) # Commit changes log.info("\tCommit changes to SQLite DB") appSQL.commit() log.info("\tWrote results to %s" % self.app.pargs.output) finally: log.info("Finished scanning")
def default(self): """Default command""" if self.app.pargs.location and self.app.pargs.suffix: self.location = self.app.pargs.location self.suffix = self.app.pargs.suffix # Create new parser parser = SmaliParser(self.location, self.suffix) parser.run() # Output results if (self.app.pargs.output) and (self.app.pargs.fileformat): results = parser.get_results() app = App(__name__) # Add additional info app.add_location(self.location) app.add_parser("%s - %s" % (config.PROJECT_NAME, config.PROJECT_VERSION)) # Append classes for c in results: app.add_class_obj(c) # Write results to JSON if self.app.pargs.fileformat == 'json': log.info("Exporting results to JSON") app.write_json(self.app.pargs.output) log.info("\tWrote results to %s" % self.app.pargs.output) # Write results to sqlite elif self.app.pargs.fileformat == 'sqlite': appSQL = AppSQLModel(self.app.pargs.output) try: log.info("Exporting results to SQLite") # Add classes log.info("\tExtract classes ...") for c in app.get_classes(): appSQL.add_class(c) # Add properties log.info("\tExtract class properties ...") for p in app.get_properties(): appSQL.add_property(p) # Add const-strings log.info("\tExtract class const-strings ...") for c in app.get_const_strings(): appSQL.add_const_string(c) # Add methods log.info("\tExtract class methods ...") for m in app.get_methods(): appSQL.add_method(m) # Add calls log.info("\tExtract calls ...") for c in app.get_calls(): appSQL.add_call(c) # Commit changes log.info("\tCommit changes to SQLite DB") appSQL.commit() log.info("\tWrote results to %s" % self.app.pargs.output) finally: log.info("Finished scanning")