Example 1
    def run(self, edit):
        self.api_key = get_api_key()
        if not self.api_key:
            return

        regions = self.view.sel()
        if not regions or regions[0].empty():
            status("Error: No content selected")
            return

        self.snippet = {
            'title': None,
            'tags': None,
            'language': None,
            'source': self.view.substr(regions[0])
        }

        self.threads = {}
        self.server = ServerProxy("http://snipplr.com/xml-rpc.php")

        t = Worker(self.get_languages)
        t.start()
        self.threads['get_languages'] = t

        self.title_prompt()
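
The Sublime Text snippets here treat Worker as a thread that runs a callable and keeps its result so a handle_thread helper can poll it. A minimal sketch of that contract, assuming the real class also records exceptions:

import threading

class Worker(threading.Thread):
    # Hypothetical reconstruction of the Worker these plugins import;
    # it runs `target` on a background thread and stores the result.
    def __init__(self, target):
        super(Worker, self).__init__()
        self.target = target
        self.result = None

    def run(self):
        self.result = self.target()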
Example 2
 def result_selection_cb(self, index):
     if index >= 0:
         selection = self.search_results[index]
         t = Worker(lambda: self.download(selection['id']))
         t.start()
         self.threads['download'] = t
         handle_thread(t, 'Downloading snippet (%s)' % (selection['title'],),
                       self.download_cb)
Example 3
 def viewRangeChanged(self):
     if not self.plotting:
         self.plotting = True
         worker = Worker(self.updateOHLC)
         QtCore.QThreadPool.globalInstance().start(worker)
         self.sigXRangeChanged.emit()
         self.sigResized.emit()
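
This and the later Qt snippets build Worker(fn, *args) and submit it to a QThreadPool, which implies a QRunnable wrapper roughly like the sketch below (assuming PyQt5; the real class may also signal completion back to the GUI thread):

from PyQt5 import QtCore

class Worker(QtCore.QRunnable):
    # Sketch: call fn(*args) on a pool thread.
    def __init__(self, fn, *args):
        super(Worker, self).__init__()
        self.fn = fn
        self.args = args

    def run(self):
        self.fn(*self.args)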
Example 4
    def __init__(self):
        super(Window, self).__init__()
        uic.loadUi('material.ui', self)

        self.db = 'patient_try'
        self.identityTable = 'patient'
        self.visitTable = 'visits'
        self.dbu = DB_manager.DatabaseUtility(self.db)
        self.qu = Queue()
        self.worker = Worker(qu=self.qu, parent=self)
        self.entry_app = entry.Entry(self, self.dbu, self.identityTable,
                                     self.visitTable, self.qu)
        self.katar_app = katar.Katar(self, self.dbu, self.identityTable,
                                     self.visitTable, self.qu)
        self.addPushButton.clicked.connect(self.katar_app.update_queue)

        self.worker.start()
Example 5
class M4AtxHw:

    def __init__(self):
        self._temp = 0
        self._voltage_in = 0
        self._voltage_ignition = 0
        self._33v = 0
        self._5v = 0
        self._12v = 0
        self._voltages = {'VIN': self._voltage_in, 'IGN': self._voltage_ignition,
                          '33V': self._33v, '5V': self._5v, '12V': self._12v}

        try:
            self._m4 = M4Device()
        except M4DeviceError:
            raise M4AtxHwError("Unable to create M4 device")

        self._worker = Worker("m4 xtv", self._read_data)

        print("working")

    def Start(self):
        print("Starting M4 ATX ...")
        self._worker.start()
        print("M4 ATX started")

    def Stop(self):
        print("Stopping M4 ATX ...")
        self._worker.stop()
        print("M4 ATX stopped")

    def _read_data(self):
        result = self._m4.get_diag()
        if result:
            # Keep the dict shape promised by __init__ instead of
            # overwriting it with a raw list slice.
            self._voltages = dict(zip(('VIN', 'IGN', '33V', '5V', '12V'),
                                      result[:5]))
            self._temp = result[5]
        time.sleep(1)

    def GetVoltages(self):
        return self._voltages

    def GetTemp(self):
        return {'c': self._temp}
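
M4AtxHw expects Worker(name, func) to invoke func in a loop between Start() and Stop(). A sketch of such a stoppable polling thread (an assumption; the real m4atx driver may join with a timeout or log errors):

import threading

class Worker(object):
    # Sketch: named daemon thread that repeatedly calls loop_func
    # until stop() is requested.
    def __init__(self, name, loop_func):
        self._loop_func = loop_func
        self._stop_event = threading.Event()
        self._thread = threading.Thread(target=self._run, name=name)
        self._thread.daemon = True

    def _run(self):
        while not self._stop_event.is_set():
            self._loop_func()

    def start(self):
        self._thread.start()

    def stop(self):
        self._stop_event.set()
        self._thread.join()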
Example 6
    def make_worker(self):
        worker = Worker()

        # use cubicle coordinates
        counter = 0
        while True:
            counter += 1
            worker.center_x, worker.center_y = worker.get_coords(
                self.level_number)
            plist = arcade.SpriteList()
            plist.append(self.player)
            player_hits = arcade.check_for_collision_with_list(worker, plist)
            worker_hits = arcade.check_for_collision_with_list(
                worker, self.worker_list)

            if len(worker_hits) == len(player_hits) == 0:
                break
            if counter == len(Worker.coords):
                return

        self.worker_list.append(worker)
        self.all_sprites.append(worker)
Example 7
    def read_images(self):
        """ read in image samples """

        worker = Worker(50)

        count = 0
        for line in self.fio_r:
            line = line.strip()
            try:
                item = json.loads(line)
            except ValueError:
                continue
            worker.add_task(self.tag_image, item)
            count += 1

        logging.debug('%d images have been read' % count)

        worker.join()
Example 8
    def run_page_crawler(self):
        ''' listen to crawler priority queue and crawl pages '''

        worker = Worker(self.args.thread)
        while True:
            # get one item from the queue
            # initialize a generic crawler instance
            page = self.queue.de_queue()
            if page:
                self.stats.crawl_in_progress += 1
                page.time_dequeue = time.time()
                worker.add_task(self.call_crawl_page, page)

            if self.stats.crawl_in_progress == self.max_num_pages:
                break

        worker.join()
        self.shutdown = True
        self.log_queue.put(self.end_page_log_item)
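
run_page_crawler and read_images use Worker(n) as a fixed-size thread pool fed through add_task and drained by join. A minimal sketch of that interface, assuming the real pool also catches per-task exceptions:

import threading
from queue import Queue  # Queue.Queue on Python 2

class Worker(object):
    # Sketch: n daemon threads consuming (func, args) tuples.
    def __init__(self, num_threads):
        self.tasks = Queue()
        for _ in range(num_threads):
            t = threading.Thread(target=self._loop)
            t.daemon = True
            t.start()

    def _loop(self):
        while True:
            func, args = self.tasks.get()
            try:
                func(*args)
            finally:
                self.tasks.task_done()

    def add_task(self, func, *args):
        self.tasks.put((func, args))

    def join(self):
        self.tasks.join()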
Example 9
from flask import Flask, request, jsonify
app = Flask(__name__)

from utils import Worker
worker = Worker()


@app.route('/')
def hello_world():
    return 'Hello, World!'


@app.route('/response/', methods=['GET', 'POST'])
def get_response():
    if request.method == 'GET':
        return "We are live! Accepting POST requests for connection."

    if request.method == 'POST':
        # getting inputs
        name = request.form.get('name')  # keep None so the check below can fire

        # response init
        response = {'status': None, 'message': None, 'age': None}

        # input validation
        if any(v is None for v in [name]):
            # client problem
            response['status'] = False
            response['message'] = "Send all parameters"
            response['age'] = None
            return jsonify(response), 400

        # Hypothetical completion: the snippet is truncated here, and the
        # module-level Worker presumably produces the answer.
        response['status'] = True
        response['message'] = "OK"
        response['age'] = worker.get_age(name)  # assumed Worker method
        return jsonify(response)
Example 10
def publish(routing_key: str, payload: dict):
    worker = Worker(conn, exchange)
    worker.publish(routing_key, payload)
Example 11
 def setInterval(self, interval):
     worker = Worker(self.candlestick.setInterval, interval)
     QtCore.QThreadPool.globalInstance().start(worker)
Example 12
def _generate_all_charts(spec, input_data):
    """Generate all charts specified in the specification file.

    :param spec: Specification.
    :param input_data: Full data set.
    :type spec: Specification
    :type input_data: InputData
    """

    def _generate_chart(_, data_q, graph):
        """Generates the chart.
        """

        logs = list()

        logging.info("  Generating the chart '{0}' ...".
                     format(graph.get("title", "")))
        logs.append(("INFO", "  Generating the chart '{0}' ...".
                     format(graph.get("title", ""))))

        job_name = graph["data"].keys()[0]

        csv_tbl = list()
        res = list()

        # Transform the data
        logs.append(("INFO", "    Creating the data set for the {0} '{1}'.".
                     format(graph.get("type", ""), graph.get("title", ""))))
        data = input_data.filter_data(graph, continue_on_error=True)
        if data is None:
            logging.error("No data.")
            return

        chart_data = dict()
        for job, job_data in data.iteritems():
            if job != job_name:
                continue
            for index, bld in job_data.items():
                for test_name, test in bld.items():
                    if chart_data.get(test_name, None) is None:
                        chart_data[test_name] = OrderedDict()
                    try:
                        chart_data[test_name][int(index)] = \
                            test["result"]["throughput"]
                    except (KeyError, TypeError):
                        pass

        # Add items to the csv table:
        for tst_name, tst_data in chart_data.items():
            tst_lst = list()
            for bld in builds_dict[job_name]:
                itm = tst_data.get(int(bld), '')
                tst_lst.append(str(itm))
            csv_tbl.append("{0},".format(tst_name) + ",".join(tst_lst) + '\n')
        # Generate traces:
        traces = list()
        win_size = 14
        index = 0
        for test_name, test_data in chart_data.items():
            if not test_data:
                logs.append(("WARNING", "No data for the test '{0}'".
                             format(test_name)))
                continue
            test_name = test_name.split('.')[-1]
            trace, rslt = _generate_trending_traces(
                test_data,
                job_name=job_name,
                build_info=build_info,
                name='-'.join(test_name.split('-')[3:-1]),
                color=COLORS[index])
            traces.extend(trace)
            res.append(rslt)
            index += 1

        if traces:
            # Generate the chart:
            graph["layout"]["xaxis"]["title"] = \
                graph["layout"]["xaxis"]["title"].format(job=job_name)
            name_file = "{0}-{1}{2}".format(spec.cpta["output-file"],
                                            graph["output-file-name"],
                                            spec.cpta["output-file-type"])

            logs.append(("INFO", "    Writing the file '{0}' ...".
                         format(name_file)))
            plpl = plgo.Figure(data=traces, layout=graph["layout"])
            try:
                ploff.plot(plpl, show_link=False, auto_open=False,
                           filename=name_file)
            except plerr.PlotlyEmptyDataError:
                logs.append(("WARNING", "No data for the plot. Skipped."))

        data_out = {
            "job_name": job_name,
            "csv_table": csv_tbl,
            "results": res,
            "logs": logs
        }
        data_q.put(data_out)

    builds_dict = dict()
    for job in spec.input["builds"].keys():
        if builds_dict.get(job, None) is None:
            builds_dict[job] = list()
        for build in spec.input["builds"][job]:
            status = build["status"]
            if status != "failed" and status != "not found":
                builds_dict[job].append(str(build["build"]))

    # Create "build ID": "date" dict:
    build_info = dict()
    for job_name, job_data in builds_dict.items():
        if build_info.get(job_name, None) is None:
            build_info[job_name] = OrderedDict()
        for build in job_data:
            build_info[job_name][build] = (
                input_data.metadata(job_name, build).get("generated", ""),
                input_data.metadata(job_name, build).get("version", "")
            )

    work_queue = multiprocessing.JoinableQueue()
    manager = multiprocessing.Manager()
    data_queue = manager.Queue()
    cpus = multiprocessing.cpu_count()

    workers = list()
    for cpu in range(cpus):
        worker = Worker(work_queue,
                        data_queue,
                        _generate_chart)
        worker.daemon = True
        worker.start()
        workers.append(worker)
        os.system("taskset -p -c {0} {1} > /dev/null 2>&1".
                  format(cpu, worker.pid))

    for chart in spec.cpta["plots"]:
        work_queue.put((chart, ))
    work_queue.join()

    anomaly_classifications = list()

    # Create the header:
    csv_tables = dict()
    for job_name in builds_dict.keys():
        if csv_tables.get(job_name, None) is None:
            csv_tables[job_name] = list()
        header = "Build Number:," + ",".join(builds_dict[job_name]) + '\n'
        csv_tables[job_name].append(header)
        build_dates = [x[0] for x in build_info[job_name].values()]
        header = "Build Date:," + ",".join(build_dates) + '\n'
        csv_tables[job_name].append(header)
        versions = [x[1] for x in build_info[job_name].values()]
        header = "Version:," + ",".join(versions) + '\n'
        csv_tables[job_name].append(header)

    while not data_queue.empty():
        result = data_queue.get()

        anomaly_classifications.extend(result["results"])
        csv_tables[result["job_name"]].extend(result["csv_table"])

        for item in result["logs"]:
            if item[0] == "INFO":
                logging.info(item[1])
            elif item[0] == "ERROR":
                logging.error(item[1])
            elif item[0] == "DEBUG":
                logging.debug(item[1])
            elif item[0] == "CRITICAL":
                logging.critical(item[1])
            elif item[0] == "WARNING":
                logging.warning(item[1])

    del data_queue

    # Terminate all workers
    for worker in workers:
        worker.terminate()
        worker.join()

    # Write the tables:
    for job_name, csv_table in csv_tables.items():
        file_name = spec.cpta["output-file"] + "-" + job_name + "-trending"
        with open("{0}.csv".format(file_name), 'w') as file_handler:
            file_handler.writelines(csv_table)

        txt_table = None
        with open("{0}.csv".format(file_name), 'rb') as csv_file:
            csv_content = csv.reader(csv_file, delimiter=',', quotechar='"')
            line_nr = 0
            for row in csv_content:
                if txt_table is None:
                    txt_table = prettytable.PrettyTable(row)
                else:
                    if line_nr > 1:
                        for idx, item in enumerate(row):
                            try:
                                row[idx] = str(round(float(item) / 1000000, 2))
                            except ValueError:
                                pass
                    try:
                        txt_table.add_row(row)
                    except Exception as err:
                        logging.warning("Error occurred while generating TXT "
                                        "table:\n{0}".format(err))
                line_nr += 1
            txt_table.align["Build Number:"] = "l"
        with open("{0}.txt".format(file_name), "w") as txt_file:
            txt_file.write(str(txt_table))

    # Evaluate result:
    if anomaly_classifications:
        result = "PASS"
        for classification in anomaly_classifications:
            if classification == "regression" or classification == "outlier":
                result = "FAIL"
                break
    else:
        result = "FAIL"

    logging.info("Partial results: {0}".format(anomaly_classifications))
    logging.info("Result: {0}".format(result))

    return result
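
The chart generator (and the download code in a later example) hands Worker(work_queue, data_queue, func) to multiprocessing, so Worker is presumably a Process that pulls argument tuples off the joinable queue and applies func. A sketch of that contract (hypothetical; the real implementation may use poison pills instead of being terminated):

import multiprocessing
import os

class Worker(multiprocessing.Process):
    # Sketch: apply work_func(os.getpid(), data_queue, *args) to each
    # queued argument tuple until the process is terminated.
    def __init__(self, work_queue, data_queue, work_func):
        super(Worker, self).__init__()
        self._work_queue = work_queue
        self._data_queue = data_queue
        self._work_func = work_func

    def run(self):
        while True:
            args = self._work_queue.get()
            try:
                self._work_func(os.getpid(), self._data_queue, *args)
            finally:
                self._work_queue.task_done()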
Example 13
from utils import read_file_line_by_line, logger, Worker
import time
import ast

IP_ADDRESSES = [('127.0.0.1', 2020)]
gen = read_file_line_by_line('mew.txt', 'cp1251')
#gen = read_file_line_by_line('aaaa.txt')

workers = [Worker(address=addr, name='worker_' + str(i)) for i, addr in enumerate(IP_ADDRESSES)]

solution_list = []
result = []
i = 0
t1 = time.time()
while True:
    solution = next(gen)
    if solution == 'stop':
        print('end read', i)
        break
    if solution.find('установил:') == -1 or solution.find('постановил:') == -1:
        continue
    solution_parse = solution.split('установил:')[1]
    solution_parse = solution_parse.split('постановил:')[0]
    solution_list.append(solution_parse)
    i += 1
    if i % 10 == 0:
        message = str(solution_list)
        print(i)
        solution_list = []
        while message:
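            # NOTE: as excerpted, nothing ever clears `message`, so this
            # loop spins forever; the full script presumably hands the
            # batch to one of the workers above and resets it.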
            time.sleep(0.5)
Example 14
 def setIndex(self, index):
     worker = Worker(self.candlestick.setIndex, index)
     QtCore.QThreadPool.globalInstance().start(worker)
     self.volumeProfile.removeAll()
Example 15
 def search(self, keywords):
     t = Worker(lambda: self._search(keywords))
     t.start()
     self.threads['search'] = t
     handle_thread(t, 'Searching Snipplr for: ' + keywords, self.search_cb)
Example 16
def _generate_all_charts(spec, input_data):
    """Generate all charts specified in the specification file.

    :param spec: Specification.
    :param input_data: Full data set.
    :type spec: Specification
    :type input_data: InputData
    """
    def _generate_chart(_, data_q, graph):
        """Generates the chart.
        """

        logs = list()

        logging.info("  Generating the chart '{0}' ...".format(
            graph.get("title", "")))
        logs.append(
            ("INFO",
             "  Generating the chart '{0}' ...".format(graph.get("title",
                                                                 ""))))

        job_name = graph["data"].keys()[0]

        csv_tbl = list()
        res = list()

        # Transform the data
        logs.append(
            ("INFO", "    Creating the data set for the {0} '{1}'.".format(
                graph.get("type", ""), graph.get("title", ""))))
        data = input_data.filter_data(graph, continue_on_error=True)
        if data is None:
            logging.error("No data.")
            return

        chart_data = dict()
        chart_tags = dict()
        for job, job_data in data.iteritems():
            if job != job_name:
                continue
            for index, bld in job_data.items():
                for test_name, test in bld.items():
                    if chart_data.get(test_name, None) is None:
                        chart_data[test_name] = OrderedDict()
                    try:
                        chart_data[test_name][int(index)] = \
                            test["result"]["receive-rate"]
                        chart_tags[test_name] = test.get("tags", None)
                    except (KeyError, TypeError):
                        pass

        # Add items to the csv table:
        for tst_name, tst_data in chart_data.items():
            tst_lst = list()
            for bld in builds_dict[job_name]:
                itm = tst_data.get(int(bld), '')
                if not isinstance(itm, str):
                    itm = itm.avg
                tst_lst.append(str(itm))
            csv_tbl.append("{0},".format(tst_name) + ",".join(tst_lst) + '\n')

        # Generate traces:
        traces = list()
        index = 0
        groups = graph.get("groups", None)
        visibility = list()

        if groups:
            for group in groups:
                visible = list()
                for tag in group:
                    for test_name, test_data in chart_data.items():
                        if not test_data:
                            logs.append(
                                ("WARNING",
                                 "No data for the test '{0}'".format(test_name)
                                 ))
                            continue
                        if tag in chart_tags[test_name]:
                            message = "index: {index}, test: {test}".format(
                                index=index, test=test_name)
                            test_name = test_name.split('.')[-1]
                            try:
                                trace, rslt = _generate_trending_traces(
                                    test_data,
                                    job_name=job_name,
                                    build_info=build_info,
                                    name='-'.join(test_name.split('-')[2:-1]),
                                    color=COLORS[index])
                            except IndexError:
                                message = "Out of colors: {}".format(message)
                                logs.append(("ERROR", message))
                                logging.error(message)
                                index += 1
                                continue
                            traces.extend(trace)
                            visible.extend([True for _ in range(len(trace))])
                            res.append(rslt)
                            index += 1
                            break
                visibility.append(visible)
        else:
            for test_name, test_data in chart_data.items():
                if not test_data:
                    logs.append(
                        ("WARNING",
                         "No data for the test '{0}'".format(test_name)))
                    continue
                message = "index: {index}, test: {test}".format(index=index,
                                                                test=test_name)
                test_name = test_name.split('.')[-1]
                try:
                    trace, rslt = _generate_trending_traces(
                        test_data,
                        job_name=job_name,
                        build_info=build_info,
                        name='-'.join(test_name.split('-')[2:-1]),
                        color=COLORS[index])
                except IndexError:
                    message = "Out of colors: {}".format(message)
                    logs.append(("ERROR", message))
                    logging.error(message)
                    index += 1
                    continue
                traces.extend(trace)
                res.append(rslt)
                index += 1

        if traces:
            # Generate the chart:
            try:
                layout = deepcopy(graph["layout"])
            except KeyError as err:
                logging.error("Finished with error: No layout defined")
                logging.error(repr(err))
                return
            if groups:
                show = list()
                for i in range(len(visibility)):
                    visible = list()
                    for r in range(len(visibility)):
                        for _ in range(len(visibility[r])):
                            visible.append(i == r)
                    show.append(visible)

                buttons = list()
                buttons.append(
                    dict(label="All",
                         method="update",
                         args=[
                             {
                                 "visible":
                                 [True for _ in range(len(show[0]))]
                             },
                         ]))
                for i in range(len(groups)):
                    try:
                        label = graph["group-names"][i]
                    except (IndexError, KeyError):
                        label = "Group {num}".format(num=i + 1)
                    buttons.append(
                        dict(label=label,
                             method="update",
                             args=[
                                 {
                                     "visible": show[i]
                                 },
                             ]))

                layout['updatemenus'] = list([
                    dict(active=0,
                         type="dropdown",
                         direction="down",
                         xanchor="left",
                         yanchor="bottom",
                         x=-0.12,
                         y=1.0,
                         buttons=buttons)
                ])

            name_file = "{0}-{1}{2}".format(spec.cpta["output-file"],
                                            graph["output-file-name"],
                                            spec.cpta["output-file-type"])

            logs.append(
                ("INFO", "    Writing the file '{0}' ...".format(name_file)))
            plpl = plgo.Figure(data=traces, layout=layout)
            try:
                ploff.plot(plpl,
                           show_link=False,
                           auto_open=False,
                           filename=name_file)
            except plerr.PlotlyEmptyDataError:
                logs.append(("WARNING", "No data for the plot. Skipped."))

        data_out = {
            "job_name": job_name,
            "csv_table": csv_tbl,
            "results": res,
            "logs": logs
        }
        data_q.put(data_out)

    builds_dict = dict()
    for job in spec.input["builds"].keys():
        if builds_dict.get(job, None) is None:
            builds_dict[job] = list()
        for build in spec.input["builds"][job]:
            status = build["status"]
            if status != "failed" and status != "not found" and \
                status != "removed":
                builds_dict[job].append(str(build["build"]))

    # Create "build ID": "date" dict:
    build_info = dict()
    tb_tbl = spec.environment.get("testbeds", None)
    for job_name, job_data in builds_dict.items():
        if build_info.get(job_name, None) is None:
            build_info[job_name] = OrderedDict()
        for build in job_data:
            testbed = ""
            tb_ip = input_data.metadata(job_name, build).get("testbed", "")
            if tb_ip and tb_tbl:
                testbed = tb_tbl.get(tb_ip, "")
            build_info[job_name][build] = (input_data.metadata(
                job_name,
                build).get("generated",
                           ""), input_data.metadata(job_name,
                                                    build).get("version",
                                                               ""), testbed)

    work_queue = multiprocessing.JoinableQueue()
    manager = multiprocessing.Manager()
    data_queue = manager.Queue()
    cpus = multiprocessing.cpu_count()

    workers = list()
    for cpu in range(cpus):
        worker = Worker(work_queue, data_queue, _generate_chart)
        worker.daemon = True
        worker.start()
        workers.append(worker)
        os.system("taskset -p -c {0} {1} > /dev/null 2>&1".format(
            cpu, worker.pid))

    for chart in spec.cpta["plots"]:
        work_queue.put((chart, ))
    work_queue.join()

    anomaly_classifications = list()

    # Create the header:
    csv_tables = dict()
    for job_name in builds_dict.keys():
        if csv_tables.get(job_name, None) is None:
            csv_tables[job_name] = list()
        header = "Build Number:," + ",".join(builds_dict[job_name]) + '\n'
        csv_tables[job_name].append(header)
        build_dates = [x[0] for x in build_info[job_name].values()]
        header = "Build Date:," + ",".join(build_dates) + '\n'
        csv_tables[job_name].append(header)
        versions = [x[1] for x in build_info[job_name].values()]
        header = "Version:," + ",".join(versions) + '\n'
        csv_tables[job_name].append(header)

    while not data_queue.empty():
        result = data_queue.get()

        anomaly_classifications.extend(result["results"])
        csv_tables[result["job_name"]].extend(result["csv_table"])

        for item in result["logs"]:
            if item[0] == "INFO":
                logging.info(item[1])
            elif item[0] == "ERROR":
                logging.error(item[1])
            elif item[0] == "DEBUG":
                logging.debug(item[1])
            elif item[0] == "CRITICAL":
                logging.critical(item[1])
            elif item[0] == "WARNING":
                logging.warning(item[1])

    del data_queue

    # Terminate all workers
    for worker in workers:
        worker.terminate()
        worker.join()

    # Write the tables:
    for job_name, csv_table in csv_tables.items():
        file_name = spec.cpta["output-file"] + "-" + job_name + "-trending"
        with open("{0}.csv".format(file_name), 'w') as file_handler:
            file_handler.writelines(csv_table)

        txt_table = None
        with open("{0}.csv".format(file_name), 'rb') as csv_file:
            csv_content = csv.reader(csv_file, delimiter=',', quotechar='"')
            line_nr = 0
            for row in csv_content:
                if txt_table is None:
                    txt_table = prettytable.PrettyTable(row)
                else:
                    if line_nr > 1:
                        for idx, item in enumerate(row):
                            try:
                                row[idx] = str(round(float(item) / 1000000, 2))
                            except ValueError:
                                pass
                    try:
                        txt_table.add_row(row)
                    except Exception as err:
                        logging.warning("Error occurred while generating TXT "
                                        "table:\n{0}".format(err))
                line_nr += 1
            txt_table.align["Build Number:"] = "l"
        with open("{0}.txt".format(file_name), "w") as txt_file:
            txt_file.write(str(txt_table))

    # Evaluate result:
    if anomaly_classifications:
        result = "PASS"
        for classification in anomaly_classifications:
            if classification == "regression" or classification == "outlier":
                result = "FAIL"
                break
    else:
        result = "FAIL"

    logging.info("Partial results: {0}".format(anomaly_classifications))
    logging.info("Result: {0}".format(result))

    return result
Example 17
 def refresh(self):
     if not self.plotting:
         self.plotting = True
         # self.updateOHLC(True)
         worker = Worker(self.updateOHLC, True)
         QtCore.QThreadPool.globalInstance().start(worker)
Example 18
def subscribe(binding_key: str, sub_request: SubscriptionRequest):
    worker = Worker(conn, exchange)
    sub = worker.subscribe(binding_key)
    for key, on_handler in sub_request._on.items():
        sub.on(key, on_handler)
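
publish and subscribe treat Worker(conn, exchange) as a thin wrapper around a shared messaging connection. A hypothetical caller, inferred only from the _on mapping iterated above (every name below is an illustrative assumption, not the module's confirmed API):

def on_created(payload: dict):
    print("created:", payload)

request = SubscriptionRequest()      # assumed to collect key -> handler pairs in _on
request._on["created"] = on_created  # or whatever registration API the class offers
subscribe("orders.*", request)
publish("orders.created", {"id": 42})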
Example 19
    def download_and_parse_data(self, repeat=1):
        """Download the input data files, parse input data from input files and
        store in pandas' Series.

        :param repeat: Repeat the download specified number of times if not
            successful.
        :type repeat: int
        """

        logging.info("Downloading and parsing input files ...")

        work_queue = multiprocessing.JoinableQueue()
        manager = multiprocessing.Manager()
        data_queue = manager.Queue()
        cpus = multiprocessing.cpu_count()

        workers = list()
        for cpu in range(cpus):
            worker = Worker(work_queue, data_queue,
                            self._download_and_parse_build)
            worker.daemon = True
            worker.start()
            workers.append(worker)
            os.system("taskset -p -c {0} {1} > /dev/null 2>&1".format(
                cpu, worker.pid))

        for job, builds in self._cfg.builds.items():
            for build in builds:
                work_queue.put((job, build, repeat))

        work_queue.join()

        logging.info("Done.")

        while not data_queue.empty():
            result = data_queue.get()

            job = result["job"]
            build_nr = result["build"]["build"]

            if result["data"]:
                data = result["data"]
                build_data = pd.Series({
                    "metadata":
                    pd.Series(data["metadata"].values(),
                              index=data["metadata"].keys()),
                    "suites":
                    pd.Series(data["suites"].values(),
                              index=data["suites"].keys()),
                    "tests":
                    pd.Series(data["tests"].values(),
                              index=data["tests"].keys())
                })

                if self._input_data.get(job, None) is None:
                    self._input_data[job] = pd.Series()
                self._input_data[job][str(build_nr)] = build_data

                self._cfg.set_input_file_name(job, build_nr,
                                              result["build"]["file-name"])

            self._cfg.set_input_state(job, build_nr, result["state"])

            for item in result["logs"]:
                if item[0] == "INFO":
                    logging.info(item[1])
                elif item[0] == "ERROR":
                    logging.error(item[1])
                elif item[0] == "DEBUG":
                    logging.debug(item[1])
                elif item[0] == "CRITICAL":
                    logging.critical(item[1])
                elif item[0] == "WARNING":
                    logging.warning(item[1])

        del data_queue

        # Terminate all workers
        for worker in workers:
            worker.terminate()
            worker.join()

        logging.info("Done.")
Example 20
 def get_mouse_position(self, variable_name_to_set: str):
     worker = Worker(self._get_mouse_position, 2, variable_name_to_set)
     self.thread_pool.start(worker)