class Reporter(object):
    '''
    Does stdout reporting and writes the per-image SCAP report files plus
    an HTML summary page under <reportdir>/openscap_reports.
    '''
    def __init__(self):
        # Record type used by scan workers to hand results back to us.
        self.output = collections.namedtuple('Summary', 'iid, cid, os, sevs,'
                                             'log, msg',)
        self.list_of_outputs = []
        self.appc = ApplicationConfiguration()
        self.report_dir = os.path.join(self.appc.reportdir,
                                       "openscap_reports")
        if not os.path.exists(self.report_dir):
            os.mkdir(self.report_dir)
        # Accumulated HTML table rows for summary.html
        self.content = ""

    def add_content(self, content):
        '''Append an HTML fragment to the summary page body.'''
        self.content = self.content + content

    def report_summary(self):
        '''
        This function is the primary function to output results
        to stdout when running the image-scanner.  It also builds the
        HTML summary rows, fills in the API JSON structure when running
        in API mode, and writes the .scap logs and summary.html to disk.
        '''
        self.appc._print("Summary:")
        if self.appc.api:
            # Base URL under which the report files are served.
            baseurl = urlparse.urljoin(self.appc.url_root,
                                       os.path.basename(self.report_dir))
        for image in self.list_of_outputs:
            short_cid_list = []
            dtype = self._get_dtype(image.iid)
            self.appc._print("{0}{1}: {2}".format(" " * 5, dtype, image.iid))
            if self.appc.api:
                image_json = {image.iid: {}}
                image_json[image.iid]['http_url'] = \
                    baseurl + "/{0}.html".format(image.iid)
                image_json[image.iid]['xml_url'] = \
                    baseurl + "/{0}.xml".format(image.iid)
            if image.msg is None:
                # Successful scan: link to the per-image HTML report.
                html_p = "<tr><td><b>{0}</b>:<td colspan=2><a href='{1}.html'>{2}</a></td></tr>".format(dtype, image.iid, image.iid)
                self.add_content(html_p)
                for cid in image.cid:
                    short_cid_list.append(cid[:12])
                if self.appc.api:
                    image_json[image.iid]['cids'] = short_cid_list
                self.appc._print("{0}OS: {1}".format(" " * 5,
                                                     image.os.rstrip()))
                html_p = "<tr><td>{0}</td><td><b>OS:</b></td><td>{1}</td></tr>".format(" " * 5, image.os.rstrip())
                self.add_content(html_p)
                # FIX: was `dtype is not "Container"` -- identity comparison
                # on a string is unreliable; use inequality.
                if dtype != "Container":
                    self.appc._print("{0}Containers affected "
                                     "({1}): {2}".format(
                                         " " * 5, len(short_cid_list),
                                         ', '.join(short_cid_list)))
                    html_p = "<tr><td>{0}</td><td><b>Containers affected:</b></td><td>" \
                             "({1}): {2}</td></tr>".format(
                                 " " * 5, len(short_cid_list),
                                 ', '.join(short_cid_list))
                    self.add_content(html_p)
                self.appc._print("{0}Results: Critical({1}) Important({2}) Moderate({3})"
                                 " Low({4})".format(" " * 5,
                                                    image.sevs['Critical'],
                                                    image.sevs['Important'],
                                                    image.sevs['Moderate'],
                                                    image.sevs['Low']))
                html_p = "<tr><td>{0}</td><td><b>Results:</b></td><td>Critical({1}) Important({2}) Moderate({3})" \
                         " Low({4})</td></tr>".format(" " * 5,
                                                      image.sevs['Critical'],
                                                      image.sevs['Important'],
                                                      image.sevs['Moderate'],
                                                      image.sevs['Low'])
                self.add_content(html_p)
                if self.appc.api:
                    image_json[image.iid]['critical'] = image.sevs['Critical']
                    image_json[image.iid]['important'] = \
                        image.sevs['Important']
                    image_json[image.iid]['moderate'] = image.sevs['Moderate']
                    image_json[image.iid]['low'] = image.sevs['Low']
                self.appc._print("")
            else:
                # Scan produced a message instead of results
                # (e.g. not a RHEL image).
                html_p = "<tr><td><b>{0}</b>:<td colspan=2>{1}</td></tr>".format(dtype, image.iid)
                self.add_content(html_p)
                self.appc._print("{0}Results: {1}".format(" " * 5, image.msg))
                html_p = "<tr><td>{0}</td><td><b>Results</b>:</td><td>{1}</td></tr>".format(" " * 5, image.msg)
                self.add_content(html_p)
                if self.appc.api:
                    image_json[image.iid]['msg'] = image.msg
                self.appc._print("")
            # Blank spacer row between images.
            html_p = "<tr><td colspan=3> </td></tr>"
            self.add_content(html_p)
            if self.appc.api:
                self.appc.return_json.append(image_json)
        report_files = []
        for image in self.list_of_outputs:
            if image.msg is None:
                short_image = image.iid[:12] + ".scap"
                # FIX: original did `out.close` (no call), which never
                # closed the file; use a context manager instead.
                with open(os.path.join(self.report_dir, short_image),
                          'wb') as out:
                    out.write(image.log)
                report_files.append(short_image)
        self.appc._print("Writing summary and reports to "
                         "{0}".format(self.report_dir))
        # NOTE: a no-op loop (`for report in report_files: os.path.join(...)`)
        # was removed here -- it computed paths and discarded them.
        sum_html = """
<html><body>
<h2>Summary</h2>
<table>
%(content)s
</table>
</body>
</html>
""" % {'content': self.content}
        with open(os.path.join(self.report_dir, "summary.html"),
                  "wb") as sum_out:
            sum_out.write(sum_html)

    def _get_dtype(self, iid):
        '''
        Returns whether the given id is an image or container.

        Matches ``iid`` as a prefix of the full docker Id; returns
        "Image", "Container", or None when it matches neither.
        '''
        # Images
        for image in self.appc.images:
            if image['Id'].startswith(iid):
                return "Image"
        # Containers
        for con in self.appc.cons:
            if con['Id'].startswith(iid):
                return "Container"
        return None
class Worker(object): min_procs = 2 max_procs = 4 def __init__(self, args): self.args = args if not args.startweb and not args.stopweb: self.procs = self.set_procs(args.number) self.ac = ApplicationConfiguration(parserargs=args) self.cs = ContainerSearch() self.output = Reporter() self.cve_file = os.path.join(self.ac.workdir, "com.redhat.rhsa-all.xml") self.cve_file_bz = os.path.join(self.ac.workdir, "com.redhat.rhsa-all.xml.bz2") def set_procs(self, number): numThreads = psutil.NUM_CPUS if number is None else number if numThreads < self.min_procs: if self.args.number is not None: print "The image-scanner requires --number to be a minimum " \ "of {0}. Setting --number to {1}".format(self.min_procs, self.min_procs) return self.min_procs elif numThreads <= self.max_procs: return numThreads else: if self.args.number is not None: print "Due to docker issues, we limit the max number "\ "of threads to {0}. Setting --number to "\ "{1}".format(self.max_procs, self.max_procs) return self.max_procs def _get_cids_for_image(self, cs, image): cids = [] if image in cs.fcons: for container in cs.fcons[image]: cids.append(container['uuid']) return cids def get_cve_data(self): # FIXME # Wrap this in an exception hdr = {'User-agent': 'Mozilla/5.0'} url = ("http://www.redhat.com/security/data/oval/" "com.redhat.rhsa-all.xml.bz2") self.ac._print("Obtaining CVE file data from {0}".format(url)) bar = urllib2.Request(url, "", hdr) resp = urllib2.urlopen(bar) fh = open(self.cve_file_bz, "w") fh.write(resp.read()) fh.close() def extract_cve_data(self): # Extract the XML bz bzfile = bz2.BZ2File(self.cve_file_bz) cve_data = bzfile.read() open(self.cve_file, 'wb').write(cve_data) def return_active_threadnames(self, threads): thread_names = [] for thread in threads: thread_name = thread._Thread__name if thread_name is not "MainThread": thread_names.append(thread_name) return thread_names def onlyactive(self): ''' This function sorts of out only the active containers''' con_list = [] # Rid 
ourselves of 0 size containers for container in self.cs.active_containers: con_list.append(container['Id']) if len(con_list) == 0: print "There are no active containers on this system" sys.exit(1) else: self._do_work(con_list) def allimages(self): if len(self.cs.imagelist) == 0: print "There are no images on this system" sys.exit(1) self._do_work(self.cs.imagelist) def list_of_images(self, image_list): self._do_work(image_list) def allcontainers(self): if len(self.cs.cons) == 0: print "There are no containers on this system" sys.exit(1) else: con_list = [] for con in self.cs.cons: con_list.append(con['Id']) self._do_work(con_list) def _do_work(self, image_list): cp = CVEParse(self.ac.workdir) if (not os.path.exists(cp.xmlf)) or \ (self.ac.nocache) or \ ((time.time() - os.path.getmtime(cp.xmlf)) / (60 ** 2) > 12): # If we find a tarball of the dist break outs and # it is less than 12 hours old, use it to speed things # up self.get_cve_data() self.extract_cve_data() self.ac._print("Splitting master XML file into distribution " "specific XML files") # Run dist breakout to make dist specific XML # files t = timeit.Timer(cp.parse_for_platform).timeit(number=1) logging.debug("Parsed distribution breakup in " "{0} seconds".format(t)) self.ac._print("\nBegin processing\n") threads = [] for image in image_list: cids = self._get_cids_for_image(self.cs, image) t = threading.Thread(target=self.search_containers, name=image, args=(image, cids, self.output,)) threads.append(t) logging.info("Number of containers to scan: {0}".format(len(threads))) total_images = len(threads) if isinstance(threading.current_thread(), threading._MainThread): signal.signal(signal.SIGINT, self.signal_handler) self.threads_complete = 0 self.ac._print("") while len(threads) > 0: if len(threading.enumerate()) < self.procs: new_thread = threads.pop() new_thread.start() self._progress(float(self.threads_complete), float(total_images)) if self.ac.api: exit_thread_count = 2 else: exit_thread_count = 1 while 
len(threading.enumerate()) > exit_thread_count: self._progress(float(self.threads_complete), float(total_images)) time.sleep(1) pass self._progress(float(self.threads_complete), float(total_images)) self.ac._print("\n" * 2) self.output.report_summary() def _progress(self, complete, total): if not self.ac.api: sys.stdout.write("\r[{0:20s}] {1}% {2}/{3}" .format('#' * int(complete / total * 20), int(complete / total * 100), int(complete), int(total))) sys.stdout.flush() def signal_handler(self, signal, frame): print "\n\nExiting..." sys.exit(0) def search_containers(self, image, cids, output): f = Scan(image, cids, output) try: if f.get_release(): t = timeit.Timer(f.scan).timeit(number=1) logging.debug("Scanned chroot for image {0}" " completed in {1} seconds" .format(image, t)) timeit.Timer(f.report_results).timeit(number=1) else: # This is not a RHEL image or container f._report_not_rhel(image) except subprocess.CalledProcessError: pass start = time.time() f.DM.cleanup(f.dm_results) logging.debug("Removing temporary chroot for image {0} completed in" " {1} seconds".format(image, time.time() - start)) self.threads_complete += 1 def _check_input(self, image_list): ''' Takes a list of image ids, image-names, container ids, or container-names and returns a list of images ids and container ids ''' dm = DockerMount(dockerclient=self.ac.conn) work_list = [] # verify try: for image in image_list: iid, dtype = dm.get_iid(image) work_list.append(iid) except DockerMountError: print "Unable to associate {0} with any image " \ "or container".format(image) sys.exit(1) return work_list def start_web(self, workdir): report_dir = os.path.join(workdir, "openscap_reports") cmd = ['uwsgi', '--plugin', 'python,http', '--http-socket', ':9090', '--check-static', report_dir, '--static-index', 'summary.html', '--wsgi-file', 'serv.py', '--daemonize', os.path.join(workdir, "uwsgi.log")] p = threading.Thread(target=subprocess.call, args=(cmd,)) p.daemon = True p.start() def stop_web(self): 
import requests stop = requests.Session() try: stop.get('http://localhost:9090/serv') except requests.exceptions.ConnectionError: pass def start_application(self): start_time = time.time() logging.basicConfig(filename=self.args.logfile, format='%(asctime)s %(levelname)-8s %(message)s', datefmt='%m-%d %H:%M', level=logging.DEBUG) if self.args.startweb: self.start_web(self.args.workdir) if self.args.stopweb: self.stop_web() if self.args.onlyactive: self.onlyactive() if self.args.allcontainers: self.allcontainers() if self.args.allimages: self.allimages() if self.args.images: # Check to make sure we have valid input image_list = self._check_input(self.args.images) self.list_of_images(image_list) end_time = time.time() duration = (end_time - start_time) if duration < 60: unit = "seconds" else: unit = "minutes" duration = duration / 60 logging.info("Completed entire scan in {0} {1}".format(duration, unit)) if self.ac.api: return self.ac.return_json
class Reporter(object):
    '''
    Does stdout reporting and writes per-image SCAP reports under
    <reportdir>/reports; in API mode it also fills the JSON result map.

    NOTE(review): this is a second definition of ``Reporter`` in the same
    module and shadows the earlier one at import time -- confirm which
    version is intended to survive.
    '''
    def __init__(self):
        # Record type used by scan workers to hand results back to us.
        self.output = collections.namedtuple('Summary', 'iid, cid, os, sevs,'
                                             'log, msg',)
        self.list_of_outputs = []
        self.appc = ApplicationConfiguration()
        self.report_dir = os.path.join(self.appc.reportdir, "reports")
        self.appc.docker_state = os.path.join(self.report_dir,
                                              "docker_state.json")
        if not os.path.exists(self.report_dir):
            os.mkdir(self.report_dir)
        self.content = ""

    def report_summary(self):
        '''
        This function is the primary function to output results
        to stdout when running the image-scanner.  In API mode it also
        populates appc.return_json and the report URLs, then writes the
        .scap log files to the report directory.
        '''
        self.appc._print("Summary:")
        if self.appc.api:
            # Base URL under which the report files are served.
            baseurl = urlparse.urljoin(self.appc.url_root,
                                       os.path.basename(self.report_dir))
            self.appc.json_url = baseurl + '/docker_state.json'
        for image in self.list_of_outputs:
            short_cid_list = []
            dtype = self._get_dtype(image.iid)
            self.appc._print("{0}{1}: {2}".format(" " * 5, dtype, image.iid))
            if self.appc.api:
                image_json = {image.iid: {}}
                image_json[image.iid]['http_url'] = \
                    baseurl + "/{0}.html".format(image.iid)
                image_json[image.iid]['xml_url'] = \
                    baseurl + "/{0}.xml".format(image.iid)
                image_json[image.iid]['xml_path'] = os.path.join(
                    self.report_dir, image.iid + ".xml")
            if image.msg is None:
                for cid in image.cid:
                    short_cid_list.append(cid[:12])
                if self.appc.api:
                    image_json[image.iid]['cids'] = short_cid_list
                self.appc._print("{0}OS: {1}"
                                 .format(" " * 5, image.os.rstrip()))
                # FIX: was `dtype is not "Container"` -- identity comparison
                # on a string is unreliable; use inequality.
                if dtype != "Container":
                    self.appc._print("{0}Containers affected "
                                     "({1}): {2}"
                                     .format(" " * 5, len(short_cid_list),
                                             ', '.join(short_cid_list)))
                self.appc._print("{0}Results: Critical({1}) Important({2}) "
                                 "Moderate({3}) Low({4})"
                                 .format(" " * 5, image.sevs['Critical'],
                                         image.sevs['Important'],
                                         image.sevs['Moderate'],
                                         image.sevs['Low']))
                if self.appc.api:
                    image_json[image.iid]['critical'] = image.sevs['Critical']
                    image_json[image.iid]['important'] = \
                        image.sevs['Important']
                    image_json[image.iid]['moderate'] = image.sevs['Moderate']
                    image_json[image.iid]['low'] = image.sevs['Low']
                    image_json[image.iid]['os'] = self.appc.os_release
                self.appc._print("")
            else:
                # Scan produced a message instead of results
                # (e.g. not a RHEL image).
                self.appc._print("{0}Results: {1}".format(" " * 5, image.msg))
                if self.appc.api:
                    image_json[image.iid]['msg'] = image.msg
                self.appc._print("")
            if self.appc.api:
                self.appc.return_json[image.iid] = image_json[image.iid]
        report_files = []
        for image in self.list_of_outputs:
            if image.msg is None:
                short_image = image.iid[:12] + ".scap"
                # FIX: original did `out.close` (no call), which never
                # closed the file; use a context manager instead.
                with open(os.path.join(self.report_dir, short_image),
                          'wb') as out:
                    out.write(image.log)
                report_files.append(short_image)
        self.appc._print("Writing summary and reports to {0}"
                         .format(self.report_dir))
        # NOTE: a no-op loop (`for report in report_files: os.path.join(...)`)
        # was removed here -- it computed paths and discarded them.

    def _get_dtype(self, iid):
        '''
        Returns whether the given id is an image or container.

        Matches ``iid`` as a prefix of the full docker Id; returns
        "Image", "Container", or None when it matches neither.
        '''
        # Images
        for image in self.appc.allimages:
            if image['Id'].startswith(iid):
                return "Image"
        # Containers
        for con in self.appc.cons:
            if con['Id'].startswith(iid):
                return "Container"
        return None