Example #1
def taskrunner():
    conn = sqlite3.connect(db_path, check_same_thread=False)
    c = conn.cursor()

    # Attach a default in-memory scratch space to every connection
    # so that temporary DBs can be written concurrently
    c.execute("ATTACH DATABASE ':memory:' AS scratch_default;")
    ma = MetaFetcher(local_data_source, self.canned_data_programid)
    if dbg_print:
        print("params: " + str(params))
    d = ma.getAllSectionNodeIds(c, *params)
    resp = '{ "type": "fetcher", "seqid": %d, "msg": %s }' % (
        int(seqid), str(d))
    self.add_to_command_queue(resp)

    conn.close()
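The fragment above is one of the task runner closures from Example #2 below; it captures `db_path`, `local_data_source`, `params`, `seqid`, and `self` from the enclosing handler and is passed to `run_async_request`, whose implementation is not part of this listing. A minimal thread-based stand-in (a sketch, not the project's actual helper) could look like this; the commented-out `threading.Thread(target=taskrunner).start()` line in Example #2 suggests the real helper does something similar:

import threading

def run_async_request(self, taskrunner):
    # Hypothetical stand-in: run the task on a daemon worker thread so the
    # message loop is not blocked while the database query executes.
    threading.Thread(target=taskrunner, daemon=True).start()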
Example #2
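Example #2 shows the full handler from which the fragment above was taken. It is a method of a larger server class whose helpers (`run_async_request`, `add_to_command_queue`, `on_click_cb`, `message_handler_roofline`) are not shown, and it assumes roughly the following module-level imports; `MetaFetcher`'s home module is an assumption inferred from the dynamic import in the `Analysis` branch below:

import json
import sqlite3

from gi.repository import GObject
# Assumed location, based on the dynamic import of
# diode.db_scripts.sql_to_json in the Analysis branch below:
from diode.db_scripts.sql_to_json import MetaFetcher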
    def message_handler(self, msg):

        db_path = "perfdata.db"
        if self.data_source != "fresh":
            db_path = self.data_source

        local_data_source = self.data_source

        dbg_print = False
        m = json.loads(msg)
        if m["msg_type"] == "info":
            pass
        elif m["msg_type"] == "roofline":
            resp = self.message_handler_roofline(m)
            if resp is not None:
                self.add_to_command_queue(json.dumps(resp))

        elif m["msg_type"] == "click":

            def mainthread_click_cb():
                self.on_click_cb(m["clicked_elements"])
                return False

            GObject.idle_add(mainthread_click_cb)
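            # idle_add marshals the callback onto the GTK main loop thread;
            # returning False removes the idle source, so it fires only once.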

        elif m["msg_type"] == "heartbeat":
            pass  # Ignored, this unblocks the recv() call
        elif m["msg_type"] == "fetcher":
            if dbg_print:
                print("Lazy fetch request received")

            seqid = m["seqid"]
            method = m["msg"]["method"]
            params = m["msg"]["params"]

            synchronous_execution = method != "Analysis"
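            # Everything except "Analysis" gets a connection opened up front
            # on this thread; the async task runners below still open their
            # own connections, since SQLite connections should not be shared
            # across threads.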

            if synchronous_execution:
                conn = sqlite3.connect(db_path, check_same_thread=False)
                c = conn.cursor()

                # Attach a default in-memory scratch space to every connection
                # so that temporary DBs can be written concurrently
                c.execute("ATTACH DATABASE ':memory:' AS scratch_default;")

            if method == "getSuperSectionCount":

                ma = MetaFetcher(local_data_source, self.canned_data_programid)
                d = ma.getSuperSectionCount(c, *params)
                resp = '{ "type": "fetcher", "seqid": %d, "msg": %s }' % (
                    int(seqid), str(d))
                self.add_to_command_queue(resp)

            elif method == "getAllSectionStateIds":

                def taskrunner():
                    conn = sqlite3.connect(db_path, check_same_thread=False)
                    c = conn.cursor()

                    # Attach a default in-memory scratch space to every connection
                    # so that temporary DBs can be written concurrently
                    c.execute("ATTACH DATABASE ':memory:' AS scratch_default;")
                    ma = MetaFetcher(local_data_source,
                                     self.canned_data_programid)
                    if dbg_print:
                        print("params: " + str(params))
                    d = ma.getAllSectionStateIds(c, *params)
                    resp = '{ "type": "fetcher", "seqid": %d, "msg": %s }' % (
                        int(seqid), str(d))
                    self.add_to_command_queue(resp)

                    conn.close()

                # Run asynchronously
                self.run_async_request(taskrunner)

            elif method == "getAllSectionNodeIds":

                def taskrunner():
                    conn = sqlite3.connect(db_path, check_same_thread=False)
                    c = conn.cursor()

                    # Attach a default in-memory scratch space to every connection
                    # so that temporary DBs can be written concurrently
                    c.execute("ATTACH DATABASE ':memory:' AS scratch_default;")
                    ma = MetaFetcher(local_data_source,
                                     self.canned_data_programid)
                    if dbg_print:
                        print("params: " + str(params))
                    d = ma.getAllSectionNodeIds(c, *params)
                    resp = '{ "type": "fetcher", "seqid": %d, "msg": %s }' % (
                        int(seqid), str(d))
                    self.add_to_command_queue(resp)

                    conn.close()

                # Run asynchronously
                #threading.Thread(target=taskrunner).start()
                self.run_async_request(taskrunner)
            elif method == "Analysis":
                # Any analysis, name given in params[0]
                cname = params[0]
                params = params[1:]

                transself = self

                def taskrunner():
                    if dbg_print:
                        print("Running analysis " + cname)
                        print("Params: " + str(params))

                    conn = sqlite3.connect(db_path, check_same_thread=False)
                    c = conn.cursor()

                    # Attach a default in-memory scratch space to every connection
                    # so that temporary DBs can be written concurrently
                    c.execute("ATTACH DATABASE ':memory:' AS scratch_default;")

                    if local_data_source == "fresh":

                        def my_import(name):
                            components = name.split('.')
                            mod = __import__(components[0])
                            for comp in components[1:]:
                                mod = getattr(mod, comp)
                            return mod

                        _module = my_import("diode.db_scripts.sql_to_json")
                        _cl = getattr(_module, cname)
                        _instance = _cl()

                        d = _instance.query_values(c, *params)
                        if d is None:
                            # Special case of undefined: emit JSON null
                            respval = "null"
                        else:
                            respval = json.dumps(d)

                    else:  # Canned data
                        # This is easier as in: The results can be read from a database directly

                        argparams = [*params]
                        query_ss = True
                        if cname == "CriticalPathAnalysis":
                            # This analysis uses split IDs (section,
                            # supersection) instead of the unified ID
                            tmp = [*params]

                            if tmp[0] is None:
                                # 16-bit sentinel for a missing section ID
                                tmp[0] = 0x0FFFF
                            # Recreate the unified ID from the pair and remove
                            # the supersection part from the query
                            argparams = [
                                (int(tmp[1]) << 16) | (int(tmp[0]) & 0xFFFF)
                            ]
                            query_ss = False

                        if argparams[0] == -1 or argparams[0] == "-1":
                            # Map -1 to the 32-bit sentinel value
                            argparams[0] = 0x0FFFFFFFF

                        c.execute(
                            """
SELECT
    json
FROM
    `AnalysisResults`
WHERE
    {progid_q}
    AnalysisName = ?
    AND forUnifiedID = ?
    {ss_q}
--  AND forSection = ?
;""".format(
                                ss_q=("AND forSuperSection = ?"
                                      if query_ss else ""),
                                progid_q=("forProgramID = %d AND" %
                                          self.canned_data_programid
                                          if self.canned_data_programid
                                          is not None else "")),
                            (cname, *argparams))

                        result = c.fetchall()

                        if len(result) == 0:
                            # This can actually happen (and is validly caught at client-side)
                            respval = "null"
                        else:
                            # Unpack
                            respval, = result[0]

                    resp = '{ "type": "fetcher", "seqid": %d, "msg": %s }' % (
                        int(seqid), respval)

                    conn.close()

                    if dbg_print:
                        print("Analysis result:" + str(resp))
                    transself.add_to_command_queue(resp)

                # Run asynchronously
                self.run_async_request(taskrunner)
            else:
                # Arbitrary execution string: dispatch a MetaFetcher method
                # by name

                def taskrunner():
                    conn = sqlite3.connect(db_path, check_same_thread=False)
                    c = conn.cursor()

                    # Attach a default in-memory scratch space to every connection
                    # so that temporary DBs can be written concurrently
                    c.execute("ATTACH DATABASE ':memory:' AS scratch_default;")

                    ma = MetaFetcher(local_data_source,
                                     self.canned_data_programid)
                    if dbg_print:
                        print("method: " + str(method))
                        print("params: " + str(params))
                    d = getattr(MetaFetcher, method)(ma, c, *params)

                    conn.close()
                    # Crude Python-repr-to-JSON conversion (single to double
                    # quotes); assumes no quote characters inside the payload
                    d = str(d).replace("'", '"')
                    resp = '{ "type": "fetcher", "seqid": %d, "msg": %s }' % (
                        int(seqid), d)
                    self.add_to_command_queue(resp)

                # Run asynchronously
                self.run_async_request(taskrunner)

            if synchronous_execution:
                conn.close()

        else:
            print("Unknown/Unhandled message from renderer: " +
                  str(m["msg_type"]))
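The `CriticalPathAnalysis` branch in Example #2 packs a (section, supersection) pair into the unified 32-bit ID used by the `AnalysisResults` table: the supersection occupies the high 16 bits and the section the low 16 bits, with 0x0FFFF standing in for a missing section. A minimal sketch of that arithmetic (the function name is illustrative, not from the source):

def pack_unified_id(section_id, supersection_id):
    # High 16 bits: supersection; low 16 bits: section.
    if section_id is None:
        section_id = 0x0FFFF  # 16-bit sentinel for a missing section ID
    return (int(supersection_id) << 16) | (int(section_id) & 0xFFFF)

# Example: section 3 in supersection 5 maps to 0x50003.
assert pack_unified_id(3, 5) == 0x50003
assert pack_unified_id(None, 5) == 0x5FFFF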