def run(self): """Run file information gathering. @return: information dict. """ self.key = "file" file_info = File(self.file_path).get_all() file_info["name"] = self.cfg.analysis.file_name return file_info
def run(self, results): """Writes report. @param results: Cuckoo results dict. @raise CuckooReportError: if fails to write report. """ if not HAVE_MAKO: raise CuckooReportError( "Failed to generate HTML report: python Mako library is not installed" ) shots_path = os.path.join(self.analysis_path, "shots") if os.path.exists(shots_path): shots = [] counter = 1 for shot_name in os.listdir(shots_path): if not shot_name.endswith(".png"): continue shot_path = os.path.join(shots_path, shot_name) if os.path.getsize(shot_path) == 0: continue shot = {} shot["id"] = os.path.splitext(File(shot_path).get_name())[0] shot["data"] = base64.b64encode(open(shot_path, "rb").read()) #shot["cum"] = "Seppia is gay" shots.append(shot) counter += 1 shots.sort(key=lambda shot: shot["id"]) results["screenshots"] = shots else: results["screenshots"] = [] lookup = TemplateLookup( directories=[os.path.join(CUCKOO_ROOT, "data", "html")], output_encoding='utf-8', encoding_errors='replace') template = lookup.get_template("report.html") try: html = template.render(**results) except Exception as e: raise CuckooReportError("Failed to generate HTML report: %s" % e) try: report = open(os.path.join(self.reports_path, "report.html"), "w") report.write(html) report.close() except (TypeError, IOError) as e: raise CuckooReportError("Failed to generate HTML report: %s" % e) return True
def build_options(self):
    """Get analysis options.
    @return: options dict.
    """
    options = {}
    options["file_path"] = self.task.file_path
    options["package"] = self.task.package
    options["machine"] = self.task.machine
    options["platform"] = self.task.platform
    options["options"] = self.task.options
    options["custom"] = self.task.custom

    # Fall back to the global analysis timeout when the task does not set one.
    if not self.task.timeout:
        options["timeout"] = self.cfg.cuckoo.analysis_timeout
    else:
        options["timeout"] = self.task.timeout

    options["file_name"] = File(self.task.file_path).get_name()
    options["file_type"] = File(self.task.file_path).get_type()
    options["started"] = time.time()

    return options
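# For reference, the options dict produced above has roughly this shape.
# The values below are illustrative placeholders, not taken from a real task:
#
# {
#     "file_path": "/tmp/sample.exe",
#     "package": "exe",
#     "machine": "",
#     "platform": "windows",
#     "options": "",
#     "custom": "",
#     "timeout": 120,
#     "file_name": "sample.exe",
#     "file_type": "PE32 executable (GUI) Intel 80386, for MS Windows",
#     "started": 1342000000.0
# }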
def run(self):
    """Run analysis.
    @return: results dict.
    """
    self.key = "static"
    static = {}

    file_type = File(self.file_path).get_type()
    if file_type and "PE32" in file_type:
        static = PortableExecutable(self.file_path).run()

    return static
def run(self):
    """Run analysis.
    @return: list of dropped files with related information.
    """
    self.key = "dropped"
    dropped_files = []

    # Walk the dropped files directory and collect metadata for each file.
    for dir_name, dir_names, file_names in os.walk(self.dropped_path):
        for file_name in file_names:
            file_path = os.path.join(dir_name, file_name)
            file_info = File(file_path=file_path, strip_name=True).get_all()
            dropped_files.append(file_info)

    return dropped_files
def store_file(self):
    """Store sample file.
    @raise CuckooAnalysisError: if unable to store file.
    """
    md5 = File(self.task.file_path).get_md5()
    self.analysis.stored_file_path = os.path.join(CUCKOO_ROOT,
                                                  "storage",
                                                  "binaries",
                                                  md5)

    if os.path.exists(self.analysis.stored_file_path):
        log.info("File already exists at \"%s\"" % self.analysis.stored_file_path)
    else:
        try:
            shutil.copy(self.task.file_path, self.analysis.stored_file_path)
        except (IOError, shutil.Error) as e:
            raise CuckooAnalysisError("Unable to store file from \"%s\" to \"%s\", analysis aborted: %s"
                                      % (self.task.file_path, self.analysis.stored_file_path, e))

    try:
        new_binary_path = os.path.join(self.analysis.results_folder, "binary")
        # Symlinks are not available on Windows, so fall back to copying
        # the binary until a more efficient solution is found.
        if hasattr(os, "symlink"):
            os.symlink(self.analysis.stored_file_path, new_binary_path)
        else:
            shutil.copy(self.analysis.stored_file_path, new_binary_path)
    except (AttributeError, OSError) as e:
        raise CuckooAnalysisError("Unable to create symlink/copy from \"%s\" to \"%s\": %s"
                                  % (self.analysis.stored_file_path, self.analysis.results_folder, e))

    if self.cfg.cuckoo.delete_original:
        try:
            os.remove(self.task.file_path)
        except OSError as e:
            log.error("Unable to delete original file at path \"%s\": %s"
                      % (self.task.file_path, e))
def run(self):
    """Runs VirusTotal processing.
    @return: full VirusTotal report.
    """
    self.key = "virustotal"
    virustotal = []

    if not os.path.exists(self.file_path):
        raise CuckooProcessingError("File %s not found, skipping" % self.file_path)

    if not VIRUSTOTAL_KEY:
        raise CuckooProcessingError("VirusTotal API key not configured, skipping")

    try:
        md5 = File(self.file_path).get_md5()
    except IOError as e:
        raise CuckooProcessingError("Unable to open \"%s\": %s" % (self.file_path, e))

    data = urllib.urlencode({"resource": md5, "apikey": VIRUSTOTAL_KEY})

    try:
        req = urllib2.Request(VIRUSTOTAL_URL, data)
        response = urllib2.urlopen(req)
        virustotal = json.loads(response.read())
    # HTTPError is a subclass of URLError, so it must be caught first or
    # the more specific handler would never run.
    except urllib2.HTTPError as e:
        raise CuckooProcessingError("Unable to perform HTTP request to VirusTotal (http code=%s)" % e.code)
    except urllib2.URLError as e:
        raise CuckooProcessingError("Unable to establish connection to VirusTotal: %s" % e)

    return virustotal
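# A hedged sketch of how the report returned above might be consumed.
# Field names (response_code, positives, total, scans) follow the public
# VirusTotal API v2 file-report format; "virustotal_module" stands in for an
# instance of the processing class above and is purely hypothetical.
report = virustotal_module.run()
if isinstance(report, dict) and report.get("response_code") == 1:
    print("Detections: %s/%s" % (report.get("positives"), report.get("total")))
    for engine, scan in report.get("scans", {}).items():
        if scan.get("detected"):
            print("%s: %s" % (engine, scan.get("result")))
else:
    print("Sample not found on VirusTotal yet")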
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("path", type=str,
                        help="Path to the file to analyze")
    parser.add_argument("--package", type=str, action="store", default="",
                        help="Specify an analysis package", required=False)
    parser.add_argument("--custom", type=str, action="store", default="",
                        help="Specify any custom value", required=False)
    parser.add_argument("--timeout", type=int, action="store", default=0,
                        help="Specify an analysis timeout", required=False)
    parser.add_argument("--options", type=str, action="store", default="",
                        help="Specify options for the analysis package (e.g. \"name=value,name2=value2\")",
                        required=False)
    parser.add_argument("--priority", type=int, action="store", default=1,
                        help="Specify a priority for the analysis represented by an integer",
                        required=False)
    parser.add_argument("--machine", type=str, action="store", default="",
                        help="Specify the identifier of a machine you want to use",
                        required=False)
    parser.add_argument("--platform", type=str, action="store", default="",
                        help="Specify the operating system platform you want to use (windows/darwin/linux)",
                        required=False)

    try:
        args = parser.parse_args()
    except IOError as e:
        parser.error(e)
        return False

    if not os.path.exists(args.path):
        print("ERROR: the specified file does not exist at path \"%s\"" % args.path)
        return False

    db = Database()
    task_id = db.add(file_path=args.path,
                     md5=File(args.path).get_md5(),
                     package=args.package,
                     timeout=args.timeout,
                     options=args.options,
                     priority=args.priority,
                     machine=args.machine,
                     platform=args.platform,
                     custom=args.custom)

    print("SUCCESS: Task added with id %d" % task_id)
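# Example invocations of this submission script. The file name "submit.py"
# is an assumption; adapt it to the actual script path in your checkout:
#
#   $ python submit.py /tmp/sample.exe
#   $ python submit.py /tmp/sample.exe --package exe --timeout 300 \
#         --options "name=value,name2=value2" --priority 2 --platform windows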
def run(self, results): """Writes report. @param results: Cuckoo results dict. @raise CuckooReportError: if fails to connect or write to MongoDB. """ self._connect() # Set an unique index on stored files, to avoid duplicates. if not self._db.fs.files.ensure_index("md5", unique=True): self._db.fs.files.create_index("md5", unique=True, name="md5_unique") # Add pcap file, check for dups and in case add only reference. pcap_file = os.path.join(self.analysis_path, "dump.pcap") if os.path.exists(pcap_file) and os.path.getsize(pcap_file) != 0: pcap = File(pcap_file) try: pcap_id = self._fs.put(pcap.get_data(), filename=pcap.get_name()) except FileExists: pcap_id = self._db.fs.files.find({"md5": pcap.get_md5()})[0][u"_id"] # Preventive key check. if "network" in results: results["network"]["pcap_id"] = pcap_id else: results["network"] = {"pcap_id": pcap_id} # Add dropped files, check for dups and in case add only reference. if "dropped" in results: for dropped in results["dropped"]: if "name" in dropped: drop_file = os.path.join(self.analysis_path, "files", dropped["name"]) if os.path.exists( drop_file) and os.path.getsize(drop_file) != 0: try: drop = open(drop_file, 'r') except IOError as e: raise CuckooReportError( "Failed to read file %s: %s" % (drop_file, e)) try: drop_id = self._fs.put(drop, filename=dropped["name"]) except FileExists: drop_id = self._db.fs.files.find( {"md5": dropped["md5"]})[0][u"_id"] dropped["dropped_id"] = drop_id # Add screenshots. results["shots"] = [] shots_path = os.path.join(self.analysis_path, "shots") if os.path.exists(shots_path): shots = [f for f in os.listdir(shots_path) if f.endswith(".jpg")] for shot_file in shots: shot_path = os.path.join(self.analysis_path, "shots", shot_file) try: shot = File(shot_path) except IOError as e: raise CuckooReportError( "Failed to read screenshot %s: %s" % (shot_path, e)) try: shot_id = self._fs.put(shot.get_data(), filename=shot.get_name()) except FileExists: shot_id = self._db.fs.files.find({"md5": shot.get_md5() })[0][u"_id"] results["shots"].append(shot_id) # Save all remaining results. self._db.analysis.save(results)
def run(self, results): """Writes report. @param results: Cuckoo results dict. @raise CuckooReportError: if fails to connect or write to MongoDB. """ self._connect() # Set an unique index on stored files, to avoid duplicates. if not self._db.fs.files.ensure_index("md5", unique=True): self._db.fs.files.create_index("md5", unique=True, name="md5_unique") # Add pcap file, check for dups and in case add only reference. pcap_file = os.path.join(self.analysis_path, "dump.pcap") if os.path.exists(pcap_file) and os.path.getsize(pcap_file) != 0: pcap = File(pcap_file) try: pcap_id = self._fs.put(pcap.get_data(), filename=pcap.get_name()) except FileExists: pcap_id = self._db.fs.files.find({"md5": pcap.get_md5()})[0][u"_id"] # Preventive key check. if "network" in results: results["network"]["pcap_id"] = pcap_id else: results["network"] = {"pcap_id": pcap_id} # Add dropped files, check for dups and in case add only reference. if "dropped" in results: for dropped in results["dropped"]: if "name" in dropped: drop_file = os.path.join(self.analysis_path, "files", dropped["name"]) if os.path.exists(drop_file) and os.path.getsize(drop_file) != 0: try: drop = open(drop_file, 'r') except IOError as e: raise CuckooReportError("Failed to read file %s: %s" % (drop_file, e)) try: drop_id = self._fs.put(drop, filename=dropped["name"]) except FileExists: drop_id = self._db.fs.files.find({"md5": dropped["md5"]})[0][u"_id"] dropped["dropped_id"] = drop_id # Add screenshots. results["shots"] = [] shots_path = os.path.join(self.analysis_path, "shots") if os.path.exists(shots_path): shots = [f for f in os.listdir(shots_path) if f.endswith(".jpg")] for shot_file in shots: shot_path = os.path.join(self.analysis_path, "shots", shot_file) try: shot = File(shot_path) except IOError as e: raise CuckooReportError("Failed to read screenshot %s: %s" % (shot_path, e)) try: shot_id = self._fs.put(shot.get_data(), filename=shot.get_name()) except FileExists: shot_id = self._db.fs.files.find({"md5": shot.get_md5()})[0][u"_id"] results["shots"].append(shot_id) # Save all remaining results. self._db.analysis.save(results)
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("path", type=str,
                        help="Path to the file to analyze")
    parser.add_argument("--package", type=str, action="store", default="",
                        help="Specify an analysis package", required=False)
    parser.add_argument("--custom", type=str, action="store", default="",
                        help="Specify any custom value", required=False)
    parser.add_argument("--timeout", type=int, action="store", default=0,
                        help="Specify an analysis timeout", required=False)
    parser.add_argument("--options", type=str, action="store", default="",
                        help="Specify options for the analysis package (e.g. \"name=value,name2=value2\")",
                        required=False)
    parser.add_argument("--priority", type=int, action="store", default=1,
                        help="Specify a priority for the analysis represented by an integer",
                        required=False)
    parser.add_argument("--machine", type=str, action="store", default="",
                        help="Specify a comma-separated list of identifiers of the machines you want to use",
                        required=False)
    parser.add_argument("--platform", type=str, action="store", default="",
                        help="Specify the operating system platform you want to use (windows/darwin/linux)",
                        required=False)

    try:
        args = parser.parse_args()
    except IOError as e:
        parser.error(e)
        return False

    if not os.path.exists(args.path):
        print("ERROR: the specified file does not exist at path \"%s\"" % args.path)
        return False

    db = Database()

    # Add the executable to the database.
    exe_id = db.add_exe(file_path=args.path, md5=File(args.path).get_md5())
    print("SUCCESS: Created executable id: %d" % exe_id)

    # Create the analysis.
    anal_id = db.add_analysis("New analysis", exe_id)
    print("SUCCESS: Created new analysis with id: %d" % anal_id)

    # Add a task for every requested machine.
    for machine in args.machine.split(","):
        task_id = db.add(file_path=args.path,
                         anal_id=anal_id,
                         md5=File(args.path).get_md5(),
                         package=args.package,
                         timeout=args.timeout,
                         options=args.options,
                         priority=args.priority,
                         machine=machine,
                         platform=args.platform,
                         custom=args.custom)
        print("SUCCESS: Task added with id %d" % task_id)

        # Sleep needed for multiple VM startup with VMware.
        sleep(5)

    print("SUCCESS: All tasks added to the analysis")