Code example #1 (score: 0)
File: postgresql.py — Project: leibatt/ssb-tools
    def execute_request(self, request, result_queue, options):
        """Run ``request.sql_statement`` on a pooled PostgreSQL connection.

        Records start/end timestamps on the request, stores the fetched rows
        in ``request.result`` (an empty dict if the query was cancelled, e.g.
        by a statement timeout), and puts the request on ``result_queue``.
        ``options`` is accepted for driver-interface compatibility but unused.
        """
        sql_statement = request.sql_statement

        # Get a connection from the pool - block if none is available.
        connection = self.pool.get()
        cursor = connection.cursor()

        request.start_time = util.get_current_ms_time()
        try:
            cursor.execute(sql_statement)
            data = cursor.fetchall()
            request.end_time = util.get_current_ms_time()
            request.result = list(data)
        except psycopg2.extensions.QueryCanceledError:
            # Query was cancelled (e.g. server-side statement timeout);
            # report an empty result but still time-stamp and deliver it.
            request.result = {}
            request.end_time = util.get_current_ms_time()
        finally:
            # Always close the cursor and return the connection to the pool.
            # The original leaked the connection on the cancellation path,
            # which permanently shrank the pool after each timeout.
            cursor.close()
            self.pool.put(connection)

        result_queue.put(request)
Code example #2 (score: 0)
File: verdictdb.py — Project: leibatt/ssb-tools
    def execute_request(self, request, result_queue, options):
        """Run ``request.sql_statement`` through VerdictDB on a shared connection.

        Rewrites the SQL via ``self.verdictdbedit``, executes it, stores the
        rows as a list of record dicts in ``request.result`` (empty list on
        ``ValueError``), records the rewritten SQL in
        ``request.verdictdb_query``, and puts the request on ``result_queue``.
        ``options`` is accepted for driver-interface compatibility but unused.
        """
        sql_statement = request.sql_statement

        # A single shared connection is used instead of a pool
        # (VerdictDB sessions are expensive to create).
        connection = self.conn

        # Pre-bind so the except path cannot hit a NameError when
        # verdictdbedit itself raises before the assignment succeeds.
        editedSqlStatement = None

        request.start_time = util.get_current_ms_time()
        try:
            editedSqlStatement = self.verdictdbedit(sql_statement)
            data = connection.sql(editedSqlStatement)
        except ValueError as e:
            print(e, flush=True)
            request.result = []
            request.end_time = util.get_current_ms_time()
            request.verdictdb_query = editedSqlStatement
            result_queue.put(request)
            return
        request.end_time = util.get_current_ms_time()

        # Serialize the result DataFrame to a list of {column: value} dicts.
        results = json.loads(data.to_json(orient="records"))
        request.result = results
        request.verdictdb_query = editedSqlStatement
        result_queue.put(request)
Code example #3 (score: 0)
    def execute_request(self, request, result_queue, options):
        """Run ``request.sql_statement`` on the shared SQLite connection.

        Applies ``self.sqlitefix`` to the SQL, executes it, records start/end
        timestamps, stores the fetched rows in ``request.result``, and puts
        the request on ``result_queue``. ``options`` is accepted for
        driver-interface compatibility but unused.
        """
        sql_statement = request.sql_statement
        # A single shared connection; SQLite has no connection pool here
        # (the stale "put connection back in the queue" comment was wrong).
        connection = self.conn
        cursor = connection.cursor()
        request.start_time = util.get_current_ms_time()
        cursor.execute(self.sqlitefix(sql_statement))
        data = cursor.fetchall()
        request.end_time = util.get_current_ms_time()

        cursor.close()

        request.result = list(data)
        result_queue.put(request)
Code example #4 (score: 0)
    def end_run(self):
        """Finalize a benchmark run: notify the driver and persist results.

        Records the benchmark end time, calls the driver's optional
        ``workflow_end`` hook (drivers without one are silently skipped),
        and writes the accumulated query results plus workflow timing to
        a timestamped JSON file under ``results/``.
        """
        self.benchmark_end_time = util.get_current_ms_time()
        logger.info("done processing queries")
        try:
            logger.info("calling 'workflow_end' on driver")
            self.driver.workflow_end()
        except AttributeError:
            # workflow_end is an optional driver hook.
            pass

        # Ensure the output directory exists; the original crashed with
        # FileNotFoundError on a fresh checkout without a results/ folder.
        os.makedirs("results", exist_ok=True)
        # Name the file with the end timestamp already recorded above,
        # so the filename matches the "workflow-end-time" field inside it.
        path = os.path.join(
            "results", "results_" + str(self.benchmark_end_time) + ".json")
        logger.info("saving results to %s" % path)
        with open(path, "w") as fp:
            res = OrderedDict({
                "args": self.query_results["args"],
                "results": list(self.query_results["results"])
            })
            res["workflow-start-time"] = self.workflow_start_time
            res["workflow-end-time"] = self.benchmark_end_time
            json.dump(res, fp, indent=4)
Code example #5 (score: 0)
    def run(self):
        """Execute the full benchmark workflow.

        Loads and randomizes the workflow queries, starts a background
        thread that drains ``SSB.result_queue``, submits every query to the
        driver, waits for all results, then finalizes via ``end_run``.
        """
        with open(self.get_workflow_path()) as f:
            self.workflow = json.load(f)

        # deque: results are appended by the poller thread; list() later.
        self.query_results = OrderedDict({
            "args": vars(self.options),
            "results": deque()
        })
        self.benchmark_start_time = util.get_current_ms_time()

        try:
            logger.info("calling 'workflow_start' on driver")
            self.driver.workflow_start()
        except AttributeError:
            # workflow_start is an optional driver hook.
            pass

        self.workflow_queries = randomizeQueries(self.workflow["queries"])
        total_queries = len(self.workflow_queries)
        # Module-level counter shared with the poller thread below; only the
        # poller writes it, the main thread only reads it via thread.join().
        global total_processed
        total_processed = 0

        def poll_results(slf, queue):
            # Drain the shared result queue until every submitted query has
            # been delivered; runs in a background thread.
            global total_processed
            while total_processed < total_queries:
                logger.info("polling for results")
                try:
                    # Timeout so the loop rechecks the exit condition even
                    # when no result is currently available.
                    process_result = queue.get(timeout=1)
                except Empty:
                    logger.info("result queue empty... trying again")
                    continue
                if process_result is None:
                    continue
                self.deliver_request(process_result)
                total_processed = total_processed + 1
            logger.info("stopped polling results")

        thread = Thread(target=poll_results, args=(self, SSB.result_queue))
        thread.start()

        for query_id, query in enumerate(self.workflow_queries):
            request = SqlRequest(query_id, query)
            self.driver.process_request(request, SSB.result_queue,
                                        self.options)
            time.sleep(
                0.002
            )  # so the request threads do not overwhelm some of the drivers (particularly verdictdb)

        # Block until the poller has seen every result, then finalize.
        thread.join()
        self.end_run()
Code example #6 (score: 0)
  def run(self):
    """Load the workflow, then generate and selectivity-check its queries.

    Unlike the benchmark runner, this variant performs no query execution:
    it only materializes the final query set and validates selectivities
    before finalizing via ``end_run``.
    """
    # Load the workflow definition straight into the instance attribute.
    with open(self.get_workflow_path()) as workflow_file:
      self.workflow = json.load(workflow_file)

    self.benchmark_start_time = util.get_current_ms_time()

    try:
      logger.info("calling 'workflow_start' on driver")
      self.driver.workflow_start()
    except AttributeError:
      # workflow_start is an optional driver hook.
      pass

    # Materialize the final query set, then verify selectivities.
    generateFinalQueries(self.workflow, self.driver, logger)
    checkQuerySelectivities(self.workflow, self.driver, logger)

    self.end_run()