def check_candidate(cand, candidate_obj_val, perf_ratio, compare_to_optimal,
                    rs_search, opt_p, opt_obj_val):
    assert isinstance(cand, Package)
    is_feasible_candidate = cand.is_valid()
    print "Candidate:"
    if logging.getLogger().getEffectiveLevel() == logging.DEBUG:
        debug(
            pretty_table_str_named(cand.iter_tuples(),
                                   headers=["id"] + rs_search.all_attrs))
    if compare_to_optimal:
        print "Actual optimal objective value is: {}".format(opt_obj_val)
        print "Candiate objective value is: {}".format(candidate_obj_val)
        print "Deviation from optimal objective value: {:.3f}%".format(
            100 *
            abs((opt_obj_val - candidate_obj_val) /
                opt_obj_val) if opt_obj_val != 0 else (opt_obj_val -
                                                       candidate_obj_val))
        print "Performance Ratio: {}".format(perf_ratio)
        if cand > opt_p:
            print "========= ASSERTION WARNING! ==========="
            print "Actual optimal package is not better than or qual to candidate package!"
            print "opt_obj_val={}; candidate_obj_val={}".format(
                opt_obj_val, candidate_obj_val)

    try:
        assert is_feasible_candidate
    except AssertionError as assertion_e:
        assert isinstance(opt_p, Package)
        assert isinstance(cand, Package)
        print "========= ASSERTION ERROR! ==========="
        print "Query was:\n{}".format(rs_search.query)
        print "Solution returned by full backtrack is not feasible!"
        print "Candidate global vals ({}):\n{}".format([
            "{}({})".format(aggr, attr)
            for aggr, attr in rs_search.query.coalesced_gcs.iterkeys()
        ], cand.get_coalesced_global_scores())
        print "Optimal global vals ({}):\n{}".format([
            "{}({})".format(aggr, attr)
            for aggr, attr in rs_search.query.coalesced_gcs.iterkeys()
        ], opt_p.get_coalesced_global_scores())
        print "Optimal combo:", opt_p.combination
        print "Optimal table:\n", pretty_table_str_named(opt_p.iter_tuples(),
                                                         headers=["id"] +
                                                         rs_search.clust_attrs)
        print "Candidate combo:", cand.combination
        print "Candidate table:\n", pretty_table_str_named(
            cand.iter_tuples(), headers=["id"] + rs_search.clust_attrs)

        print opt_p.search
        print cand.search

        raise assertion_e
示例#2
0
def check_and_return_formula_pair_with_inputs(f1, f2, ptr_size1, ptr_size2):
    """Pair each formula with its extracted inputs.

    When the two formulas disagree on input arity, retry once with
    null-pointer checks stripped from both. Returns
    ``((f1, inputs1), (f2, inputs2))`` on success, or ``(None, None)`` when
    the formulas cannot be matched.
    """
    ins_a, ins_b = check_if_inputs_match(f1, f2, True)
    if len(ins_a) == len(ins_b):
        return (f1, ins_a), (f2, ins_b)

    # Arity mismatch: strip null checks and try matching once more.
    f1 = ignore_null_check(f1, ptr_size1)
    f2 = ignore_null_check(f2, ptr_size2)
    if f1 is None or f2 is None:
        return None, None
    log.debug(f'Ignore null check {str(f1)} {str(f2)}')
    ins_a, ins_b = check_if_inputs_match(f1, f2, True)
    if len(ins_a) != len(ins_b):
        return None, None
    return (f1, ins_a), (f2, ins_b)
示例#3
0
 def send_sms(self, num, msg):
     """Send *msg* to phone number *num* as one or more SMS parts over SMPP.

     The GSM encoder splits the message into PDU-sized parts (e.g. two-part
     UCS2 with a UDH header); each part is submitted as a separate PDU, and
     a notification is forwarded to the bot afterwards.
     """
     log.info("[Send SMS] Number '%s', message '%s'" % (num, msg))
     # Two parts, UCS2, SMS with UDH
     parts, encoding_flag, msg_type_flag = gsm.make_parts(msg)
     for part in parts:
         pdu = self.client.send_message(
             source_addr_ton=consts.SMPP_TON_INTL,
             dest_addr_ton=consts.SMPP_TON_INTL,
             source_addr=SENDER_PHONE,
             destination_addr=num,
             short_message=part,
             data_coding=encoding_flag,
             esm_class=msg_type_flag,
             registered_delivery=True,
         )
         log.debug("[Send SMS] PDU Sequence # %d" % pdu.sequence)
     # BUG FIX: the format string has two placeholders, but `% num, msg`
     # parsed as two separate arguments, so only `num` fed the format and
     # the call raised "not enough arguments for format string".
     bot.send("Надсилаю СМС до %s\n%s" % (num, msg))
示例#4
0
    def get_cves(self, limit=6):
        """Fetch up to *limit* entries from the flaw listing and return the
        valid CVE objects parsed from it (empty list on HTTP failure)."""
        query = {
            'length': limit,
            'start': 0,
        }

        response = requests.get(
            self.url_list,
            headers=self.headers(),
            params=query,
            timeout=self.timeout,
        )

        if response.status_code != 200:
            log.warn('获取 [%s] 威胁情报失败: [HTTP Error %i]' % (self.NAME_CH(), response.status_code))
            return []

        # Pull flaw identifiers out of the listing page markup, then
        # resolve each one to a CVE object and keep only the valid ones.
        collected = []
        for flaw_id in re.findall(r'\thref="/flaw/show/([^"]+)"', response.text):
            cve = self.to_cve(flaw_id)
            if cve.is_vaild():
                collected.append(cve)
                log.debug(cve)
        return collected
示例#5
0
def check_if_inputs_match(f1, f2, equal_var):
    """Return the input lists of expressions *f1* and *f2*.

    When *equal_var* is true and the raw input counts disagree, the inputs
    are re-collected with extract wrappers removed and compared again; a
    remaining mismatch is only logged, never raised.
    """
    input1 = list(get_expression_input(f1))
    input2 = list(get_expression_input(f2))
    if equal_var and len(input1) != len(input2):
        # Expressions with differing input arity are tentatively treated as
        # different (not proven); retry once after stripping extracts.
        input1 = list(remove_extract(input1))
        input2 = list(remove_extract(input2))
        if len(input1) != len(input2):
            log.debug(f'diff # inputs {len(input1)} and {len(input2)}')
            log.debug(input1)
            log.debug(input2)
    return input1, input2
示例#6
0
    def sql_update(self, sql, *data, **kwdata):
        """Execute a data-modifying statement and return its affected row count.

        Query parameters are passed either positionally via *data* or by
        keyword via **kwdata** -- never both.  The key ``___n_retrials`` in
        *kwdata* is reserved for internal retry bookkeeping: on
        OperationalError the connection is re-opened and the call retries
        itself recursively, giving up after 10 attempts.

        Side effects: writes begin/end records to ``self.logfile`` and the
        executed query text to ``self.sqlfile`` when those are configured.

        Raises:
            TimeLimitElapsed: when the server cancels the query.
            OperationalError: after more than 10 retries.
            Exception: any other execution error is printed and re-raised.
        """
        self.last_update_id += 1

        # Unnamed (client-side) cursor, re-created on every call.
        self._psql_unnamed_cur = self.connection.get_cursor(named=False)

        n_retrials = kwdata.get("___n_retrials", 0)
        if n_retrials > 10:
            raise OperationalError

        # Positional and keyword data are mutually exclusive
        # (___n_retrials is internal and does not count as data).
        assert not (len(data) > 0 and len(set(kwdata) - {"___n_retrials"}) > 0), \
         "Pass either keyword-based data or comma-separated data."

        if len(data) > 0:
            use_data = data
        elif len(kwdata) > 0:
            use_data = kwdata
        else:
            use_data = []

        time_start = time.time()
        n_updated_records = None
        status = None

        if self.logfile is not None:
            self.logfile.write(
                ">>> {} {} {} START UPDATE\n{}\ndata={}\nkwdata={}\n\n".format(
                    self.cursor_id, self.last_update_id, time_start, sql, data,
                    kwdata))

        try:
            self._psql_unnamed_cur.execute(sql, use_data)

        except (KeyboardInterrupt, SystemExit) as e:
            # Cancel the in-flight statement before terminating the process.
            self.connection._psql_conn.cancel()  # FIXME: Make it general
            sys.exit(-1)

        except QueryCanceledError:
            # Server-side cancellation (e.g. statement timeout) is surfaced
            # to the caller as TimeLimitElapsed.
            debug("QueryCanceledError")
            status = "QueryCanceledError"
            raise TimeLimitElapsed

        except OperationalError as e:
            # Connection-level failure: reconnect, rebuild cursors, and retry
            # recursively with the attempt counter bumped.
            # NOTE(review): the recursive call's return value is discarded, so
            # a successful retry returns None to the original caller - confirm.
            self.connection.reconnect()
            self.create_cursors()
            kwdata["___n_retrials"] = n_retrials + 1
            self.sql_update(sql, *data, **kwdata)

        except Exception as e:
            # Unexpected error: dump the executed query and traceback, record
            # a status for the log, then re-raise.
            print
            print '=' * 120
            print "Exception occured while executing query:\n{}".format(
                self._psql_unnamed_cur.query)
            print '=' * 120
            traceback.print_exc(file=sys.stdout)
            if hasattr(e, "pgerror"):
                print e.pgerror
                status = "pgcode:{}".format(e.pgcode)
            else:
                status = "exception"

            if self.logfile:
                for x in self._update_queries:
                    print x

            raise e

        else:
            # Success path: report the number of rows the cursor touched.
            status = "OK"
            if self.verbose:
                print self._psql_unnamed_cur.query
                print self._psql_unnamed_cur.statusmessage
            n_updated_records = self._psql_unnamed_cur.rowcount
            return n_updated_records

        finally:
            # Always record timing/status, even on failure or retry.
            time_end = time.time()

            self.last_update_query = self._psql_unnamed_cur.query

            if self.logfile is not None:
                self.logfile.write(
                    ">>> {} {} {} END UPDATE\n{} - {} record(s) updated\n\n".
                    format(self.cursor_id, self.last_update_id, time_end,
                           status, n_updated_records))
                self.logfile.flush()

            if self.sqlfile is not None:
                self.sqlfile.write("-- {} update\n".format(
                    self.last_update_id))
                self.sqlfile.write(self._psql_unnamed_cur.query + ";")
                self.sqlfile.write("\n\n")
示例#7
0
    def sql_query(self, sql, *data, **kwdata):
        """Execute a SELECT-style statement and yield its records one by one.

        NOTE: This function returns a generator, so nothing is executed until
        it is iterated.  Do not use it for updates whose results you never
        consume -- they would silently not run.

        Query parameters are passed either positionally via *data* or by
        keyword via **kwdata**, never both.  ``___n_retrials`` in *kwdata* is
        reserved for retry bookkeeping on OperationalError (max 10 attempts).

        Side effects: writes begin/end records to ``self.logfile`` and the
        query text to ``self.sqlfile`` when configured.

        Raises:
            TimeLimitElapsed: when the server cancels the query.
            OperationalError: after more than 10 retries.
            pgres.Error / Exception: printed and re-raised.
        """
        self.last_select_id += 1

        n_retrials = kwdata.get("___n_retrials", 0)
        if n_retrials > 10:
            raise OperationalError

        # Positional and keyword data are mutually exclusive.
        assert not (len(data) > 0 and len(set(kwdata) - {"___n_retrials"}) > 0), \
         "Pass either keyword-based data or comma-separated data."

        time_start = time.time()
        n_records_retrieved = 0
        status = None
        # toclose: whether _execute_query handed us a cursor we must close.
        toclose = False

        if self.logfile is not None:
            self.logfile.write(
                ">>> {} {} {} START SELECT\n{}\ndata={}\nkwdata={}\n\n".format(
                    self.cursor_id, self.last_select_id, time_start, sql, data,
                    kwdata))

        try:
            if len(data) > 0:
                cur, toclose = self._execute_query(sql, data)
            elif len(kwdata) > 0:
                cur, toclose = self._execute_query(sql, kwdata)
            else:
                cur, toclose = self._execute_query(sql, None)
            n_records_reported = cur.rowcount
            # Yield records, marking completion when the reported row count
            # is reached (generators abandoned early never set status).
            for record in cur:
                n_records_retrieved += 1
                if n_records_retrieved == n_records_reported:
                    status = "Finished"
                yield record

        except QueryCanceledError:
            # Server-side cancellation is surfaced as TimeLimitElapsed.
            debug("QueryCanceledError")
            status = "QueryCanceledError"
            raise TimeLimitElapsed

        except OperationalError as e:
            # Connection-level failure: reconnect and retry.
            # NOTE(review): sql_query is a generator, so this recursive call
            # only creates a generator and never iterates it -- the retry
            # does not actually re-run the query for the caller. Confirm.
            self.connection.reconnect()
            self.create_cursors()
            kwdata["___n_retrials"] = n_retrials + 1
            self.sql_query(sql, *data, **kwdata)

        except pgres.Error as e:
            # Database error: dump diagnostics, record the pgcode, re-raise.
            print '=' * 60
            print "Exception occured while executing query:\n{}\n".format(sql)
            traceback.print_exc(file=sys.stdout)
            print e.diag.message_primary
            print e.pgerror
            print "pgcode: ", e.pgcode
            status = "pgcode: {}".format(e.pgcode)
            raise e

        except Exception as e:
            print '=' * 60
            print "Exception occured while executing query:\n{}\n".format(sql)
            traceback.print_exc(file=sys.stdout)
            print e
            raise e

        else:
            status = "Finished"
            if self.verbose:
                print cur.query
                print cur.statusmessage
            self.last_select_query = cur.query
            if toclose and not cur.closed:
                cur.close()

        finally:
            # Always record timing/status, even when the generator is closed
            # early or an exception escaped.
            time_end = time.time()

            if self.logfile is not None:
                log("Writing to log file '{}'...".format(self.logfile.name))
                self.logfile.write(
                    ">>> {} {} {} END SELECT\n{} - {} record(s) retrieved\n\n".
                    format(self.cursor_id, self.last_select_id, time_end,
                           status, n_records_retrieved))
                self.logfile.flush()

            if self.sqlfile is not None:
                # NOTE(review): `cur` is unbound here if _execute_query itself
                # raised -- this write would then raise NameError. Confirm.
                log("Writing query to SQL file '{}'...".format(
                    self.sqlfile.name))
                self.sqlfile.write("-- {} select\n".format(
                    self.last_select_id))
                self.sqlfile.write(cur.query + ";")
                self.sqlfile.write("\n\n")
                self.sqlfile.flush()
    def _cplex_augment(self,
                       cid,
                       cid_orig_space_tuples,
                       cid_basis_sol,
                       empty_cids_representatives,
                       count_basis,
                       sums_basis,
                       try_solve_infeasible=False):
        """Build and solve the CPLEX augmenting problem for cluster *cid*.

        The problem covers the cluster's original-space tuples plus one
        representative per still-empty cluster.  Returns:

        * ``(True, sol_values)`` when the MIP is solved to (near) optimality;
          ``sol_values`` holds one value per variable, ordered as
          ``cid_orig_space_tuples`` followed by ``empty_cids_representatives``.
        * ``(False, None)`` when the MIP is infeasible and no recovery is
          attempted.
        * ``("REDO", None)`` when *try_solve_infeasible* is set and the
          clustering was projected/abstracted so the caller should retry.

        Raises TimeLimitElapsed on a CPLEX time-limit status, and a generic
        Exception for any unsupported solution status.
        """
        # cid may be None only for the initial, representatives-only problem.
        assert cid is not None or len(cid_orig_space_tuples) == 0

        print "Creating augmenting problem on cluster {} ({} orig tuples, {} reprs) with CPLEX...".format(
            cid, len(cid_orig_space_tuples), len(empty_cids_representatives))

        c = self._create_cplex_augmenting_problem(
            "augmenting-problem__spaceid_{}_cid_{}".format(
                self.search.this_spaceid, cid), cid_orig_space_tuples,
            cid_basis_sol, empty_cids_representatives, count_basis, sums_basis)

        print "created."
        sys.stdout.flush()

        # True unless some trials are infeasible
        is_cid_feasible = True

        # Try to solve CPLEX problem until problem is feasible
        while True:
            # TRY TO SOLVE AUGMENTING PROBLEM
            print "Solving..."
            sys.stdout.flush()
            self._cplex_solve(c)
            print "solved."
            sys.stdout.flush()

            # =========================================== #
            # AUGMENTING PROBLEM FAILURE                  #
            # =========================================== #
            # If problem is infeasible, return a minimal set of infeasible constraints
            if c.solution.get_status(
            ) == cplex.Cplex.solution.status.MIP_infeasible:
                debug("Problem is INFEASIBLE.")

                is_cid_feasible = False

                #################################################
                # Try to augment this cluster (e.g. by modifying the sketch_refine)
                #################################################
                if try_solve_infeasible:
                    minimial_infeasible_constrs = \
                        Cplex_Interface.get_minimial_infeasible_constraints(c, self.search.query)

                    # Identify problematic constraints and their corresponding attributes
                    # ("*" entries are wildcard constraints with no attribute).
                    problematic_attrs = sorted([
                        m[2][0][1] for m in minimial_infeasible_constrs
                        if m[2][0][1] != "*"
                    ])

                    ##########################################################
                    # Project the sketch_refine onto fewer (problematic) dimensions
                    ##########################################################
                    if problematic_attrs != self.search.clust_attrs:
                        self.search.project_clustering(problematic_attrs)

                    ##########################################################
                    # Abstract the sketch_refine
                    ##########################################################
                    else:
                        self.search.abstract_clustering()

                    # Keep re-trying this augmenting problem (try to go down the paql_eval tree, don't give up!)
                    return "REDO", None

                # Dead branch: disabled FeasOpt experiment, kept for reference.
                elif False:
                    print "FeasOpt..."
                    c.parameters.feasopt.mode.set(1)
                    c.feasopt(c.feasopt.linear_constraints())
                    print "done"
                    n_vars = len(cid_orig_space_tuples) + len(
                        empty_cids_representatives)
                    sol_values = c.solution.get_values(list(xrange(n_vars)))

                    return is_cid_feasible, sol_values

                else:
                    return is_cid_feasible, None

            # =========================================== #
            # AUGMENTING PROBLEM SUCCESS                  #
            # =========================================== #
            # If problem is feasible, return the solution values
            elif c.solution.get_status() == cplex.Cplex.solution.status.MIP_optimal or \
                            c.solution.get_status() == cplex.Cplex.solution.status.optimal_tolerance:
                n_vars = len(cid_orig_space_tuples) + len(
                    empty_cids_representatives)
                sol_values = c.solution.get_values(list(xrange(n_vars)))

                print "FEASIBLE: RETURNING SOL VALUES"

                return is_cid_feasible, sol_values

            elif c.solution.get_status() == cplex.Cplex.solution.status.MIP_time_limit_infeasible or \
                            c.solution.get_status() == cplex.Cplex.solution.status.MIP_time_limit_feasible:

                raise TimeLimitElapsed

            else:
                raise Exception(
                    "CPLEX solution status not supported: '{}'".format(
                        c.solution.get_status()))
    def __call__(self, partial_p, start_empty, infeasible_cids):
        """Greedy backtracking search: augment *partial_p* cluster by cluster.

        Each not-yet-completed cluster is tried in turn (largest first); a
        successful augmentation recurses on the remaining clusters, and a
        failure backtracks to the parent so infeasible clusters can be
        prioritized.  Returns the completed PartialPackage on success or
        None when every ordering from this node fails.

        *infeasible_cids* is mutated in place and shared across the whole
        recursion to carry prioritization information between siblings.
        """
        self.search.current_run_info.strategy_run_info.n_recursive_calls += 1

        # If start_empty is false, try to solve the representative tuples alone first (indicated by None)
        initial_cid = [] if start_empty else [None]

        # Always include clusters whose produced solution is infeasible (some constraints were removed)
        # or simply all clusters that are not completed yet (never solved or solved only in reduced space).
        missing_cids = [
            cid for cid in self.search.cids
            if cid not in partial_p.completed_cids
            and cid not in partial_p.infeasible_cids
        ]
        # Largest clusters first (descending tuple count).
        missing_cids.sort(key=lambda cid: -self.search.n_tuples_per_cid[cid])

        debug(">> PARTIAL SOL: {}".format(partial_p))
        debug("REMAINING CIDS: {}".format(missing_cids))

        # [Backtracking Base Case] No need to augment anything, this is already a solution
        if not initial_cid and not missing_cids:
            return partial_p

        failed_cids = set()

        # Make it a queue
        remaining_cids = deque()
        remaining_cids.extend(initial_cid)
        remaining_cids.extend(missing_cids)
        # Sentinel marking the end of the original ordering.
        remaining_cids.append(GreedyBacktracking.delimiter)

        while len(remaining_cids) > 0:
            self.search.check_timelimit()

            cid = remaining_cids.popleft()

            if cid == GreedyBacktracking.delimiter: continue

            if cid in partial_p.completed_cids:
                continue

            # If you already tried to solve this cluster before and it failed, so exit the paql_eval (fail)
            if cid in failed_cids:
                break

            print ">>> CID: {}".format(cid)
            debug(">> Trying augmenting cluster {} based on {}".format(
                cid, partial_p.completed_cids))

            # ================================================
            # AUGMENTING PARTIAL SOLUTION WITH CLUSTER "cid"
            # ================================================
            # Try to augment partial solution by solving for cluster cid
            try:
                rtime = -time.time()
                augmented_partial_p = self.augment(partial_p, cid)
                rtime += time.time()
                print ">> AUGMENTING TIME: {}".format(rtime)

            except TimeLimitElapsed as e:
                self.search.current_run_info.strategy_run_info.run_end()
                raise e

            # "REDO" signals the clustering was adjusted: retry the same cid.
            while augmented_partial_p == "REDO":
                augmented_partial_p = self.augment(partial_p, cid)

            assert isinstance(augmented_partial_p, PartialPackage)

            # ================================================
            # CLUSTER "cid" FAILURE
            # ================================================
            if cid in augmented_partial_p.infeasible_cids:
                self.search.current_run_info.strategy_run_info.n_infeasible_augmenting_problems += 1

                print ">>>> COULD NOT SOLVE CLUSTER {}".format(cid)

                # Add this cluster id to the infeasible clusters
                if cid not in infeasible_cids:
                    infeasible_cids.append(cid)

                # If this is *not* the root, backtrack to parent node in paql_eval tree to prioritize infeasible
                # cluster
                # cid. If this *is* the root, then go to the next cluster to solve. At the end you may still fail if
                #  all
                # clusters fail.
                if len(partial_p.completed_cids) > 0:
                    self.search.current_run_info.strategy_run_info.n_backtracks += 1
                    return None

            # ================================================
            # CLUSTER "cid" SUCCESS
            # ================================================
            else:
                assert not augmented_partial_p.is_infeasible, augmented_partial_p
                assert cid is None or cid in augmented_partial_p.completed_cids,\
                 "{}\n{}".format(cid, augmented_partial_p)

                if len(self.search.current_run_info.strategy_run_info.
                       augmenting_problems_info) == 0:
                    # This is the initial package (first feasible partial package)
                    self.search.current_run_info.strategy_run_info.initial_partial_package_generated(
                    )

                self.search.current_run_info.strategy_run_info.augmenting_problems_info.append(
                    {
                        "cid": cid,
                    })

                # Cid was successful, solve recursively on the remaining clusters
                debug(">> OK - CAN AUGMENT")
                print ">>>> OK, SOLVED CID {}".format(cid)

                # RECURSION
                new_partial_sol = self(augmented_partial_p, True,
                                       infeasible_cids)

                # If entire subtree is solved successfully, return solution to parent node
                if new_partial_sol is not None:
                    return new_partial_sol

                # Otherwise, the subtree of the currently selected cluster "cid" failed.
                # You need to try with the next remaining clusters, but before you need to prioritize the failing
                # clusters
                else:
                    assert cid not in remaining_cids, (cid, remaining_cids)

                    # Prioritize this cid
                    print ">>>> PRIORITIZING CIDS: {}".format(infeasible_cids)
                    for inf_cid in infeasible_cids:
                        if inf_cid in remaining_cids:
                            remaining_cids.remove(inf_cid)
                            # FIXME: TRYING THIS EDIT! BEFORE, THE FOLLOWING LINE WAS INDENTED 1 STEP LEFT
                            remaining_cids.appendleft(inf_cid)

                    print ">>>> COULD AUGMENT {} BUT DIDN'T SUCCEED".format(
                        cid)

                    debug(
                        ">> NO! - COULD AUGMENT BUT DIDN'T SUCCEED [{} based on {}]"
                        .format(cid, augmented_partial_p.completed_cids))

            failed_cids.add(cid)

        self.search.current_run_info.strategy_run_info.n_backtracks += 1

        return None
    def augment_partial_solution_with_solution(self, cid_feasible, partial_p,
                                               cid, cid_solution,
                                               cid_orig_space_tuples,
                                               empty_cids,
                                               empty_cids_representatives):
        """Fold a solver assignment into a copy of *partial_p* and return it.

        *cid_solution* holds one value per variable: first one value for each
        tuple in *cid_orig_space_tuples* (original-space solution for cluster
        *cid*), then one per representative in *empty_cids_representatives*
        (reduced-space solutions for the still-empty clusters).

        Raises NotImplementedError when *cid_feasible* is false (query
        relaxation is not implemented).
        """
        print "AUGMENTING PARTIAL PACKAGE WITH SOLVER SOLUTION"
        assert cid_feasible
        assert cid is not None or len(cid_orig_space_tuples) == 0

        augmented_partial_p = copy(partial_p)
        assert isinstance(augmented_partial_p, PartialPackage)

        # Add original space solutions for actual tuples
        if cid_feasible:
            print "AUGMENTING PARTIAL PACKAGE"
            # k walks cid_solution: first across orig-space tuples, then
            # across representatives.
            k = 0
            for t in cid_orig_space_tuples:
                assert isinstance(t, Tuple)
                augmented_partial_p.add_original_space_solution_for_cluster(
                    cid, t, cid_solution[k])
                k += 1

            # Fix solutions to other clusters only if this cluster was feasible
            print "FIXING SOLUTIONS TO OTHER CLUSTERS"
            # Group reduced space solutions by cid
            reduced_space_sols = {cid2: [] for cid2 in empty_cids}
            for r in empty_cids_representatives:
                assert isinstance(r, Repr)
                reduced_space_sols[r.cid] += [(r, cid_solution[k])]
                k += 1

            # Every solver value must have been consumed.
            assert k == len(cid_solution)

            # Add reduced space solutions for representative tuples
            print "ADDING REDUCED SOLUTION FOR REPRESENTATIVES"
            for cid2, sol in reduced_space_sols.iteritems():
                for r, v in sol:
                    assert isinstance(r, Repr)
                    augmented_partial_p.add_reduced_space_solution_for_cluster(
                        cid2, r, v)

                # [ PRUNING ] SPOT PREMATURE SOLUTION FOR A CLUSTER
                # If all reduced sols for a specific cluster are zero, then the cluster is completed
                # (no further actions are needed because the problem would just set the variables to zero.. SURE?)
                if cid_feasible and sum(v for r, v in sol) == 0:
                    # NOTE(review): `r` here is the leaked loop variable from
                    # the inner for-loop above; fails if `sol` is empty.
                    assert isinstance(r, Repr)
                    debug(
                        "Premature solution in original space for cluster {}".
                        format(cid2))
                    # FIXME: Is the following correct?
                    for i in xrange(self.search.n_tuples_per_cid[cid2]):
                        # FIXME: I WAS WORKING TO FIX THIS.
                        augmented_partial_p.add_original_space_solution_for_cluster(
                            cid2, None, 0)
                        debug(
                            "Added original-space solution for cluster {}, tuple set to 0"
                            .format(cid2))

        else:
            raise NotImplementedError("Relaxing query not implemented yet.")

        print "RETURNING AUGMENTED PACKAGE"

        return augmented_partial_p
def augment(self, partial_p, cid):
        """
        Takes a partial solution consisting of:
        1) Clusters that have been solved in the original space (solved_original_cids)
        2) Clusters that have been solved in the reduced space only (solved_reduced_cids)
        3) Clusters that have never been solved in either space (empty_cids).

        Cluster "cid" will be now solved in original space if it was solved in reduced space.
        Cluster cid will be solved in reduced space (along with other clusters) if it was never solved before.
        Cluster cid should not have been solved in original space already, otherwise it is an error.

        Each remaining cluster (except cid) will be solved in reduced space if it was not solved in there already,
        otherwise it will be left untouched and its current solution will be used as a basis solution for the new
        problem constraints. Notice that a basis solution for a specific cluster can be either in the reduced space
        or in the original space, depending on whether that cluster was solved only in the reduced space or in the
        original space.

        Returns one of:
        - the augmented partial package produced by augment_partial_solution_with_solution;
        - the sentinel string "REDO" when the CPLEX call asks for a retry;
        - a copy of partial_p with cid marked infeasible, when the CPLEX problem
          was infeasible and was not relaxed.
        """
        debug("Augment cid: {}".format(cid))

        # Fast path: a singleton cluster has exactly one tuple, so no
        # reduced-space pass is needed -- take that tuple with multiplicity 1
        # directly in the original space.
        if cid is not None and self.search.n_tuples_per_cid[cid] == 1:
            cid_orig_space_tuples = self.get_original_space_tuples_from_cluster(
                cid)
            return self.augment_partial_solution_with_solution(
                True,
                partial_p,
                cid, [1],
                cid_orig_space_tuples,
                empty_cids=[],
                empty_cids_representatives=[])

        # Clusters that have been solved in the reduced space
        solved_reduced_cids = partial_p.get_solved_reduced_cids()

        # Clusters that have been solved in the original space
        solved_original_cids = partial_p.get_solved_original_cids()

        # TODO: CHECK THIS NEW ADDITION
        if cid in solved_original_cids:
            # NOTE(review): this branch is intentionally disabled -- the raise
            # below makes everything from the assert to the discard unreachable.
            raise Exception("Disabled for now")
            # If cid was already solved before, then it must be an infeasible cluster (query was relaxed)
            assert cid in partial_p.infeasible_cids, (
                cid, partial_p.infeasible_cids)
            # In this case, we are trying to re-solve it from scratch
            partial_p.clear_reduced_space_solution_for_cluster(cid)
            partial_p.clear_original_space_solution_for_cluster(cid)
            solved_original_cids.remove(cid)
            solved_reduced_cids.discard(cid)

        # The order is: you first solve in reduced space and then in original space

        debug("R: {}".format(solved_reduced_cids))
        debug("O: {}".format(solved_original_cids))
        assert cid not in solved_original_cids

        # Original-space tuples of the cluster being promoted; passed to CPLEX
        # and later handed to augment_partial_solution_with_solution.
        cid_orig_space_tuples = self.get_original_space_tuples_from_cluster(
            cid)

        ################################################################################################################
        # BASIS SOLUTION OF SOLVING CLUSTER cid
        ################################################################################################################
        # This is a solution only in the reduced space. It will be used to generate cardinality bounds only.
        cid_basis_sol = []
        if cid not in partial_p.infeasible_cids:
            # Keep only representatives with positive multiplicity s; each
            # entry is a (cid, representative, multiplicity) triple.
            for r, s in partial_p.get_cluster_sol(cid):
                assert isinstance(r, Repr)
                # A representative with a non-None id would mean cid was
                # already solved in the original space, which is an error here.
                assert not hasattr(r, "id") or r.id is None,\
                 "Cluster {} was already solved in original space".format(cid)
                if s > 0:
                    cid_basis_sol.append((cid, r, s))
        debug("cid basis sol: {}".format(cid_basis_sol))

        ################################################################################################################
        # EMPTY CLUSTERS: Clusters never solved in any space
        ################################################################################################################
        # Every cluster with empty solution will be solved in the reduced space now,
        # except cid which will be solved directly in the original space
        all_cids = self.search.n_tuples_per_cid
        empty_cids = set(c for c in all_cids
                         if c != cid and c not in solved_reduced_cids
                         and c not in solved_original_cids)
        empty_cids_representatives = self.get_reduced_space_representatives_from_clusters(
            empty_cids)
        debug("empty: {}".format(empty_cids))

        # Sanity check: CPLEX must receive at least one decision variable,
        # either from cid's own tuples or from the empty clusters' reprs.
        if len(cid_orig_space_tuples) + len(empty_cids_representatives) == 0:
            print "empty cids:", empty_cids
            print partial_p
            print cid
            print cid_orig_space_tuples
            print empty_cids_representatives

            raise Exception("Should not happen.")

        ################################################################################################################
        # BASIS SOLUTION
        ################################################################################################################
        # Every remaining cluster that has been solved in either the reduced or original space will be used as basis
        # solution, i.e., their aggregates will be used as constants to modify each constraint bound
        basis_cids = [c for c in all_cids if c != cid and c not in empty_cids]

        # Pre-compute all aggregates of the basis solution (sums among all cids except current solving cid)
        count_basis = 0
        sums_basis = {attr: 0 for attr in self.search.query_attrs}
        for c in basis_cids:
            count_basis += partial_p.get_count(c)
            for attr in self.search.query_attrs:
                sums_basis[attr] += partial_p.get_sum(c, attr)

        # Augment using CPLEX
        cplex_interface = CplexInterface(self.search,
                                         self.search.store_lp_problems_dir)
        cid_feasible, cid_results = cplex_interface._cplex_augment(
            cid, cid_orig_space_tuples, cid_basis_sol,
            empty_cids_representatives, count_basis, sums_basis)

        # Propagate the retry sentinel unchanged to the caller.
        if cid_feasible == "REDO":
            return "REDO"

        # If CPLEX problem was feasible, great! Return the new augmented partial solution
        if cid_feasible:
            augmented_partial_p = self.augment_partial_solution_with_solution(
                cid_feasible, partial_p, cid, cid_results,
                cid_orig_space_tuples, empty_cids, empty_cids_representatives)

            # Release the (possibly large) tuple/representative lists in place.
            del cid_orig_space_tuples[:]
            del empty_cids_representatives[:]

            return augmented_partial_p

        else:
            if cid_results is None:
                # CPLEX problem was infeasible and has not been relaxed.
                # Return a copy of the input with cid flagged infeasible.
                augmented_partial_p = copy(partial_p)
                augmented_partial_p.set_infeasible(cid,
                                                   infeasible_constraints=None)
                return augmented_partial_p

            else:
                # CPLEX problem was infeasible but it has been relaxed and solved.
                # The solution obtained is infeasible for the original problem.
                augmented_partial_p = self.augment_partial_solution_with_solution(
                    cid_feasible, partial_p, cid, cid_results,
                    cid_orig_space_tuples, empty_cids,
                    empty_cids_representatives)

                del cid_orig_space_tuples[:]
                del empty_cids_representatives[:]

                return augmented_partial_p
# Example #12
def ast_prove_f1_equi_f2(f1, f2, ptr_size1, ptr_size2, cmp_limit=720, equal_var=True):
    """Try to prove that f1 and f2 are equivalent formulas.

    Builds the inequality formula (f1 != f2) and, for every permutation of
    f2's input variables paired against f1's inputs, asks Z3 whether the
    inequality is still satisfiable. Returns True as soon as one pairing
    makes it UNSAT (i.e. the formulas are provably equal under that variable
    matching), False if no pairing works or the inputs cannot be matched.

    :param cmp_limit: maximum number of permutations to attempt before
        raising TooManyVariables4Comparison.
    :param equal_var: unused here; kept for interface compatibility.
    """
    log.debug('ast_prove_f1_equi_f2')
    pair1, pair2 = check_and_return_formula_pair_with_inputs(f1, f2, ptr_size1, ptr_size2)
    if pair1 is None or pair2 is None:
        log.debug('inputs variables do not match')
        return False
    f1, input1 = pair1
    f2, input2 = pair2

    ne_f1_f2 = ne_formulas(f1, f2)
    if ne_f1_f2 is None:
        log.debug("fail2 to create not_equal formula")
        log.debug(f1)
        log.debug(f2)
        return False
    log.debug(f"To prove {str(ne_f1_f2)} UNSAT")

    solver = claripy.Solver(backend=_MyBackendZ3())
    solver.add(ne_f1_f2)
    for attempt, in2 in enumerate(permutations(input2), start=1):
        if attempt > cmp_limit:
            raise TooManyVariables4Comparison(f1, f2, cmp_limit)
        try:
            pairing = get_var_constraints(input1, in2)
            if pairing is None:
                continue
            # UNSAT under this variable pairing proves equality.
            if not solver.satisfiable(extra_constraints=(claripy.And(*pairing),)):
                return True
        except Exception as e:
            log.warning('Meet Z3 solver error %s' % str(e))
    return False
# Example #13
def prove_equal(merged_f1, merged_f2, ptr_size1, ptr_size2, c1=None, c2=None, cmp_limit=120, equal_var=True):
    """
    Prove f1 == f2 with the Z3 SMT solver.

    Each merged formula is a (normalized, original) pair. The proof shows
    that (f1 != f2) is UNSAT under some pairwise matching of the two input
    variable sets; extra constraints c1/c2 are conjoined when supplied.

    :param merged_f1: (normalized formula, original formula) for f1
    :param merged_f2: (normalized formula, original formula) for f2
    :param c1: extra-constraints of f1, or None
    :param c2: extra-constraints of f2, or None
    :param cmp_limit: maximum number of variable matchings to try
    :param equal_var: unused here; kept for interface compatibility
    :return: True if equivalence was proven, False otherwise
    :raises TooManyVariables4Comparison: when the matching count exceeds cmp_limit
    """
    log.debug('prove_equal')
    f1, orig_f1 = merged_f1
    f2, orig_f2 = merged_f2

    if c1 is not None and c2 is not None and len(c1) > 0 and len(c2) > 0:
        # Our normalization may make the constraints become UNSAT; when that
        # happens (or the check itself blows up), fall back to the original
        # formulas and drop the constraints entirely.
        checker = claripy.Solver(backend=_MyBackendZ3())
        try:
            both_sat = checker.satisfiable(c1) and checker.satisfiable(c2)
        except Exception:
            both_sat = False
        if not both_sat:
            f1, f2 = orig_f1, orig_f2
            c1 = None
            c2 = None

    pair1, pair2 = check_and_return_formula_pair_with_inputs(f1, f2, ptr_size1, ptr_size2)
    if pair1 is None or pair2 is None:
        log.debug('inputs variables do not match')
        return False
    f1, input1 = pair1
    f2, input2 = pair2

    ne_f1_f2 = ne_formulas(f1, f2)
    if ne_f1_f2 is None:
        log.debug("fail to create not_equal formula")
        log.debug(f1)
        log.debug(f2)
        return False
    log.debug(f"To prove {str(ne_f1_f2)} UNSAT")

    min_num_var = min(len(input1), len(input2))
    if min_num_var == 0:
        # One side is a constant: no variable-matching constraints are needed,
        # just check the inequality formula directly.
        solver = claripy.Solver(backend=_MyBackendZ3())
        solver.add(ne_f1_f2)
        return not solver.satisfiable()

    if factorial(min_num_var) > cmp_limit:
        raise TooManyVariables4Comparison(f1, f2, cmp_limit)

    matched_in2 = input2[:min_num_var]
    for attempt, in1 in enumerate(permutations(input1, min_num_var), start=1):
        if attempt > cmp_limit:
            raise TooManyVariables4Comparison(f1, f2, cmp_limit)
        try:
            constraints = get_var_constraints(in1, matched_in2)
            if constraints is None:
                continue

            # If we have extra constraints, they must be satisfiable on their
            # own before they can be used to discharge the inequality.
            if c1 is not None:
                constraints.extend(c1)
            if c2 is not None:
                constraints.extend(c2)
            if c1 is not None or c2 is not None:
                pre_solver = claripy.Solver(backend=_MyBackendZ3())
                pre_solver.add(constraints)
                if not pre_solver.satisfiable():
                    continue

            solver = claripy.Solver(backend=_MyBackendZ3())
            solver.add(ne_f1_f2)
            # UNSAT under this matching proves f1 == f2.
            if not solver.satisfiable(extra_constraints=constraints):
                return True
        except Exception as e:
            log.warning('Meet Z3 solver error %s' % str(e))
    return False