class Worker(object):
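    '''
    Drives the image-scanner work flow: resolves which images and
    containers to scan, keeps the Red Hat OVAL CVE data fresh, and
    runs the scans in a bounded pool of worker threads.
    '''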

    min_procs = 2
    max_procs = 4

    def __init__(self, args):
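        '''
        Record the parsed arguments and set up the configuration,
        container-search, and reporting helpers plus the CVE file paths.
        '''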
        self.args = args
        if not args.startweb and not args.stopweb:
            self.procs = self.set_procs(args.number)
        self.ac = ApplicationConfiguration(parserargs=args)
        self.cs = ContainerSearch()
        self.output = Reporter()
        self.cve_file = os.path.join(self.ac.workdir,
                                     "com.redhat.rhsa-all.xml")
        self.cve_file_bz = os.path.join(self.ac.workdir,
                                        "com.redhat.rhsa-all.xml.bz2")

    def set_procs(self, number):
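        '''
        Clamp the requested worker count to [min_procs, max_procs],
        defaulting to the CPU count when --number was not given.
        '''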

        num_threads = psutil.NUM_CPUS if number is None else number

        if num_threads < self.min_procs:
            if self.args.number is not None:
                print "The image-scanner requires --number to be a minimum " \
                      "of {0}. Setting --number to {0}".format(self.min_procs)
            return self.min_procs
        elif num_threads <= self.max_procs:
            return num_threads
        else:
            if self.args.number is not None:
                print "Due to docker issues, we limit the max number "\
                      "of threads to {0}. Setting --number to "\
                      "{0}".format(self.max_procs)
            return self.max_procs

    def _get_cids_for_image(self, cs, image):
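        '''Return the IDs of all containers created from the given image.'''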
        cids = []
        if image in cs.fcons:
            for container in cs.fcons[image]:
                cids.append(container['uuid'])
        return cids

    def get_cve_data(self):
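        '''Download the bzipped Red Hat OVAL CVE data into the workdir.'''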

        # FIXME
        # Wrap this in an exception

        hdr = {'User-agent': 'Mozilla/5.0'}
        url = ("http://www.redhat.com/security/data/oval/"
               "com.redhat.rhsa-all.xml.bz2")

        self.ac._print("Obtaining CVE file data from {0}".format(url))

        # Use data=None so this is a plain GET with the custom header;
        # passing "" would turn the request into a POST.
        req = urllib2.Request(url, None, hdr)
        resp = urllib2.urlopen(req)
        with open(self.cve_file_bz, "wb") as fh:
            fh.write(resp.read())

    def extract_cve_data(self):
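        '''Decompress the downloaded CVE bz2 file into plain XML.'''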
        # Extract the XML bz
        bzfile = bz2.BZ2File(self.cve_file_bz)
        cve_data = bzfile.read()
        bzfile.close()
        with open(self.cve_file, 'wb') as fh:
            fh.write(cve_data)

    def return_active_threadnames(self, threads):
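        '''Return the names of all running threads except MainThread.'''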
        thread_names = []
        for thread in threads:
            thread_name = thread.name
            if thread_name != "MainThread":
                thread_names.append(thread_name)

        return thread_names

    def onlyactive(self):
        '''This function sorts out only the active containers.'''
        con_list = []
        # Rid ourselves of 0 size containers
        for container in self.cs.active_containers:
            con_list.append(container['Id'])
        if len(con_list) == 0:
            print "There are no active containers on this system"
            sys.exit(1)
        else:
            self._do_work(con_list)

    def allimages(self):
        if len(self.cs.imagelist) == 0:
            print "There are no images on this system"
            sys.exit(1)
        self._do_work(self.cs.imagelist)

    def list_of_images(self, image_list):
        self._do_work(image_list)

    def allcontainers(self):
        if len(self.cs.cons) == 0:
            print "There are no containers on this system"
            sys.exit(1)
        else:
            con_list = []
            for con in self.cs.cons:
                con_list.append(con['Id'])
            self._do_work(con_list)

    def _do_work(self, image_list):
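        '''
        Refresh the CVE data when the cached copy is missing or stale
        (or caching is disabled), then scan every item in image_list in
        worker threads, bounded by self.procs, while showing progress.
        '''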
        cp = CVEParse(self.ac.workdir)
        if (not os.path.exists(cp.xmlf)) or \
                (self.ac.nocache) or \
                ((time.time() - os.path.getmtime(cp.xmlf)) / (60 ** 2) > 12):
            # Fetch a fresh copy of the CVE data and split it out per
            # distribution; this only happens when the cached XML is
            # missing, caching is disabled, or the cache is more than
            # 12 hours old.

            self.get_cve_data()
            self.extract_cve_data()

            self.ac._print("Splitting master XML file into distribution "
                           "specific XML files")

            # Run dist breakout to make dist specific XML
            # files
            t = timeit.Timer(cp.parse_for_platform).timeit(number=1)
            logging.debug("Parsed distribution breakup in "
                          "{0} seconds".format(t))
        self.ac._print("\nBegin processing\n")
        threads = []

        for image in image_list:
            cids = self._get_cids_for_image(self.cs, image)
            t = threading.Thread(target=self.search_containers, name=image,
                                 args=(image, cids, self.output,))
            threads.append(t)

        logging.info("Number of containers to scan: {0}".format(len(threads)))
        total_images = len(threads)
        if isinstance(threading.current_thread(), threading._MainThread):
            signal.signal(signal.SIGINT, self.signal_handler)
        self.threads_complete = 0
        self.ac._print("")
        while len(threads) > 0:
            if len(threading.enumerate()) < self.procs:
                new_thread = threads.pop()
                new_thread.start()
                self._progress(float(self.threads_complete),
                               float(total_images))
            else:
                # Don't spin the CPU while waiting for a free worker slot
                time.sleep(0.1)
        if self.ac.api:
            exit_thread_count = 2
        else:
            exit_thread_count = 1
        while len(threading.enumerate()) > exit_thread_count:
            self._progress(float(self.threads_complete), float(total_images))
            time.sleep(1)
        self._progress(float(self.threads_complete), float(total_images))
        self.ac._print("\n" * 2)
        self.output.report_summary()

    def _progress(self, complete, total):
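        '''Draw a one-line text progress bar unless running via the API.'''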
        if not self.ac.api:
            sys.stdout.write("\r[{0:20s}] {1}%    {2}/{3}"
                             .format('#' * int(complete / total * 20),
                                     int(complete / total * 100),
                                     int(complete), int(total)))
            sys.stdout.flush()

    def signal_handler(self, signum, frame):
        print "\n\nExiting..."
        sys.exit(0)

    def search_containers(self, image, cids, output):
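        '''
        Thread target: scan a single image or container, report the
        results, clean up the temporary chroot, and bump the counter of
        completed scans.
        '''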
        f = Scan(image, cids, output)
        try:
            if f.get_release():

                t = timeit.Timer(f.scan).timeit(number=1)
                logging.debug("Scanned chroot for image {0}"
                              " completed in {1} seconds"
                              .format(image, t))

                timeit.Timer(f.report_results).timeit(number=1)
            else:
                # This is not a RHEL image or container
                f._report_not_rhel(image)
        except subprocess.CalledProcessError:
            pass

        start = time.time()
        f.DM.cleanup(f.dm_results)
        logging.debug("Removing temporary chroot for image {0} completed in"
                      " {1} seconds".format(image, time.time() - start))
        self.threads_complete += 1

    def _check_input(self, image_list):
        '''
        Takes a list of image IDs, image names, container IDs, or
        container names and returns a list of image IDs and
        container IDs.
        '''
        dm = DockerMount(dockerclient=self.ac.conn)
        work_list = []
        # verify
        try:
            for image in image_list:
                iid, dtype = dm.get_iid(image)
                work_list.append(iid)
        except DockerMountError:
            print "Unable to associate {0} with any image " \
                  "or container".format(image)
            sys.exit(1)
        return work_list

    def start_web(self, workdir):
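        '''
        Launch uwsgi in a daemon thread to serve the openscap reports
        (via serv.py) on port 9090, logging to uwsgi.log in workdir.
        '''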
        report_dir = os.path.join(workdir, "openscap_reports")
        cmd = ['uwsgi', '--plugin', 'python,http', '--http-socket', ':9090',
               '--check-static', report_dir, '--static-index', 'summary.html',
               '--wsgi-file', 'serv.py', '--daemonize',
               os.path.join(workdir, "uwsgi.log")]
        p = threading.Thread(target=subprocess.call, args=(cmd,))
        p.daemon = True
        p.start()

    def stop_web(self):
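        '''
        Contact the report web server's /serv endpoint so it can stop;
        connection errors are expected and ignored.
        '''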
        import requests
        stop = requests.Session()
        try:
            stop.get('http://localhost:9090/serv')
        except requests.exceptions.ConnectionError:
            pass

    def start_application(self):
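        '''
        Main entry point: configure logging, dispatch to the requested
        action(s) based on the parsed arguments, and log the total scan
        time.
        '''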
        start_time = time.time()
        logging.basicConfig(filename=self.args.logfile,
                            format='%(asctime)s %(levelname)-8s %(message)s',
                            datefmt='%m-%d %H:%M', level=logging.DEBUG)
        if self.args.startweb:
            self.start_web(self.args.workdir)
        if self.args.stopweb:
            self.stop_web()
        if self.args.onlyactive:
            self.onlyactive()
        if self.args.allcontainers:
            self.allcontainers()
        if self.args.allimages:
            self.allimages()
        if self.args.images:
            # Check to make sure we have valid input
            image_list = self._check_input(self.args.images)
            self.list_of_images(image_list)

        end_time = time.time()
        duration = (end_time - start_time)

        if duration < 60:
            unit = "seconds"
        else:
            unit = "minutes"
            duration = duration / 60

        logging.info("Completed entire scan in {0} {1}".format(duration, unit))
        if self.ac.api:
            return self.ac.return_json