Example #1
    def __init__(self, pages, edges: List[Edge], vertices: List[int], constraints):
        """
        Initializes the model with the given parameters and performs basic validation.

        :param pages: the pages (dicts with at least an 'id' key)
        :param edges: the edges of the graph
        :param vertices: the vertex ids
        :param constraints: the additional constraints to encode
        """

        self.result = {}
        self.pages = pages
        self.edges = edges
        self.vertices = vertices
        self.constraints = constraints
        self.clauses = []

        # Check for duplicate ids.
        if len(get_duplicates(vertices)) > 0:
            abort(
                400,
                "Vertex ids have to be unique. The id(s) {} occurred multiple times"
                .format(get_duplicates(vertices)))

        if len(get_duplicates([e.id for e in edges])) > 0:
            abort(
                400,
                "Edge ids have to be unique. The id(s) {} occurred multiple times"
                .format(get_duplicates([e.id for e in edges])))

        if len(get_duplicates([p['id'] for p in pages])) > 0:
            abort(
                400,
                "Page ids have to be unique. The id(s) {} occurred multiple times"
                .format(get_duplicates([p['id'] for p in pages])))

        n = len(vertices)
        self._node_idxs = list(range(n))
        self._node_idx_to_id = {i: n_id for i, n_id in enumerate(vertices)}
        self._node_id_to_idx = {n_id: i for i, n_id in enumerate(vertices)}

        page_number = len(pages)
        self._page_idxs = list(range(page_number))
        self._page_idx_to_id = {i: p['id'] for i, p in enumerate(pages)}
        self._page_id_to_idx = {p['id']: i for i, p in enumerate(pages)}

        m = len(edges)
        self._edge_idxs = list(range(m))
        self._edge_idx_to_id = {i: e.id for i, e in enumerate(edges)}
        self._edge_id_to_idx = {e.id: i for i, e in enumerate(edges)}

        # Variables are numbered from one upwards; zero is excluded because of its delimiter meaning in the DIMACS format.
        self.max_var = 0

        # self._precedes[i,j] <=> vertex i precedes vertex j
        self._precedes = self._create_variables(n * n).reshape((n, n))

        # self._edge_to_page[p, e] <=> edge e is assigned to page p
        self._edge_to_page = self._create_variables(page_number * m).reshape(
            (page_number, m))
Example #2
            def post(self):
                """
                Create a new embedding
                """
                entity = request.get_json()

                # Looks weird, but it is the only reliable way to find out if a string value is a true boolean ;-)
                # see https://stackoverflow.com/questions/715417/converting-from-a-string-to-boolean-in-python
                handle_async = request.args.get('async', "",
                                                type=str).lower() in yes_list
                try:
                    entity['created'] = datetime.datetime.now(
                        datetime.timezone.utc).isoformat()

                    b64_graph_str = entity.get('graph')
                    try:
                        graph_str = base64.b64decode(b64_graph_str)
                        node_ids, edges = get_nodes_and_edges_from_graph(
                            graph_str)
                        # node_ids: List[str]
                        # edges: List[Edge]

                    except Exception as e:
                        app.logger.exception(e)
                        raise BadRequest(
                            "The graph string has to be a base64 encoded graphml string! "
                            "The exact error was: " + str(e))

                    len_nodes = len(node_ids)  # Number of nodes
                    len_edges = len(edges)  # Number of edges

                    if len_edges > 1900 or len_nodes > 600:
                        raise BadRequest(
                            "For fairness reasons this API will only handle graphs with less than 300 vertices and 900 "
                            "edges. Your graph has {} vertices and {} edges which exceed the limit."
                            "".format(len_nodes, len_edges))

                    # Check if self loops are present! We do not support self loops
                    for e in edges:
                        if e.source == e.target:
                            raise BadRequest(
                                "The Implementation only supports graphs where "
                                "every edge has two distinct start and end nodes"
                            )

                    # ignore double edges
                    # # validate for no double edges
                    # all_edge_endpoints = [{e.source, e.target} for e in edges]
                    # duplicate_edges = get_duplicates(all_edge_endpoints)
                    # if len(duplicate_edges) > 0:
                    #     abort(400,
                    #           "Multiedges are not allowed. "
                    #           "The following edges were recognized as duplicate {}".format(duplicate_edges))

                    # validate for unique edge ids
                    duplicate_edge_ids = get_duplicates([e.id for e in edges])
                    if len(duplicate_edge_ids) > 0:
                        abort(
                            400, "Edge ids have to be unique"
                            "The following ids were recognized as duplicate {}"
                            .format(duplicate_edge_ids))

                    # validate page id uniqueness
                    page_ids = [p['id'] for p in entity.get('pages')]
                    duplicate_page_ids = get_duplicates(page_ids)
                    if len(duplicate_page_ids) > 0:
                        abort(
                            400, "Duplicated page ids are not allowed. "
                            "The following id were recognized as duplicate {}".
                            format(duplicate_page_ids))

                    entity['status'] = 'IN_PROGRESS'
                    entity = data_store.insert_new_element(
                        entity)  # entity id is returned here

                    # validate graph not empty
                    if len(page_ids) == 0 or len_edges == 0 or len_nodes == 0:
                        abort(
                            400,
                            "Please submit a graph with at least one node, edge and page"
                        )

                    if handle_async:
                        # abort(501, "Async handling is not enabled.")
                        future_result: ProcessFuture = pool.schedule(
                            SolverInterface.solve,
                            (node_ids, edges, entity.get('pages'),
                             entity.get('constraints'), entity['id']))
                        future_result.add_done_callback(
                            processing_finished_callback)

                        future_result.done()
                        # remove old futures
                        remove_old_jobs()
                        jobs.append(QueueItem(entity.get('id'), future_result))

                    else:
                        try:
                            entity = handle_solver_result(
                                SolverInterface.solve(
                                    node_ids, edges, entity.get('pages'),
                                    entity.get('constraints'), entity['id']))
                        except Exception as e1:
                            error_callback(e1)
                            entity = data_store.get_by_id(entity['id'])

                    return jsonify(entity)
                except HTTPException as e:
                    raise e
                except Exception as e:
                    raise InternalServerError(
                        "The error {} \noccured from this body \n{}".format(
                            str(e), request.get_data(as_text=True))) from e
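
The async flag in Example #2 is parsed against a `yes_list` that the snippet does not define. It is presumably a small collection of strings treated as "true"; a minimal sketch, with assumed values:

# Hypothetical truthy-string whitelist; the project's actual values may differ.
yes_list = {"yes", "y", "true", "t", "1", "on"}

# Usage as in the handler above:
# handle_async = request.args.get('async', "", type=str).lower() in yes_list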
Example #3
    def _add_additional_page_constraint(self, edge_to_page: ndarray,
                                        edges: ndarray, constraint: str,
                                        p: int):
        """
        This method generates the clauses to encode additional page constraints like DISPERSIBLE or TREE.

        :param edge_to_page: all edge to page variables
        :param edges: all edges
        :param constraint: the constraint for this page
        :param p: the index of the current page
        :return: the generated clauses
        """

        clauses = []
        if constraint == 'NONE' or constraint is None:
            pass
        elif constraint == 'DISPERSIBLE':
            for i in range(edges.shape[0]):
                e1 = edges[i]
                e1_idx = e1[0]
                e1n1 = e1[1]
                e1n2 = e1[2]
                e1_page_var = edge_to_page[p, e1_idx]
                for j in range(i):
                    e2 = edges[j]
                    if e1[0] == e2[0]:
                        continue
                    e2_idx = e2[0]
                    e2_page_var = edge_to_page[p, e2_idx]
                    e2n1 = e2[1]
                    e2n2 = e2[2]

                    duplicates = get_duplicates([e1n1, e1n2, e2n1, e2n2])
                    len_duplicates = len(duplicates)
                    # Edges sharing exactly one endpoint must not both lie on this
                    # dispersible page; edges sharing both endpoints (multi edges) are ignored.
                    if len_duplicates == 1:
                        clauses.append([-e1_page_var, -e2_page_var])
        elif constraint == 'FOREST':
            node_len = len(self.vertices)
            parents = self._create_variables(node_len**2).reshape(
                (node_len, node_len))
            ancestors = self._create_variables(node_len**2).reshape(
                (node_len, node_len))
            self._add_forrest_constraints(ancestors, edge_to_page, clauses,
                                          edges, p, parents)

        elif constraint == 'TREE':
            node_len = len(self.vertices)
            parents = self._create_variables(node_len**2).reshape(
                (node_len, node_len))
            ancestors = self._create_variables(node_len**2).reshape(
                (node_len, node_len))
            is_root = self._create_variables(node_len).reshape((node_len, ))
            self._add_forrest_constraints(ancestors, edge_to_page, clauses,
                                          edges, p, parents)

            for i in range(parents.shape[0]):
                parents_of_i: List[int] = list(parents[:, i])
                parents_of_i.remove(parents[i, i])
                # if there is a parent to i, it is not root
                for parent in parents_of_i:
                    clauses.append([-parent, -is_root[i]])

                # no parents and at least one child implies is_root
                parents_of_i.append(is_root[i])
                for child in list(parents[i, :]):
                    tmp = parents_of_i.copy()
                    tmp.append(-child)
                    clauses.append(tmp)

            # at most one root
            for i in range(is_root.shape[0]):
                for j in range(i):
                    clauses.append([-is_root[i], -is_root[j]])

        else:
            abort(
                501, "The page constraint {} is not implemented yet".format(
                    constraint))
        return clauses
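
Several of these examples rely on a `get_duplicates` helper that is not shown. Its call sites only require that it returns the values occurring more than once in a list, so a minimal sketch (assuming hashable items) could be:

from collections import Counter
from typing import List


def get_duplicates(items: List) -> List:
    # Hypothetical sketch: return every value that appears more than once.
    counts = Counter(items)
    return [value for value, count in counts.items() if count > 1]

For instance, `get_duplicates([1, 2, 2, 3, 3, 3])` would return `[2, 3]`, which matches how the validation messages above report duplicated ids.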
Example #4
def static_encode_queue_page(precedes: ndarray, edge_to_page: ndarray,
                             edges: ndarray, p: int) -> List[List[int]]:
    """
    Encodes the page type queue

    :param precedes: precedes[i, j] <=> vertex i precedes vertex j
    :param edge_to_page: edge_to_page[p, e] <=> edge e is assigned to page p
    :param edges: all edges
    :param p: the index of the current page
    """
    clauses = []
    for e in range(edges.shape[0]):
        e1 = edges[e][0]
        e1v1 = edges[e][1]
        e1v2 = edges[e][2]
        for f in range(e):
            e2 = edges[f][0]
            if e1 == e2:
                continue
            e2v1 = edges[f][1]
            e2v2 = edges[f][2]

            duplicates = get_duplicates([e1v1, e1v2, e2v1, e2v2])

            if len(duplicates) > 1:
                # ignore double edges
                continue
                # abort(400,
                #       "Got more than one shared nodes. Multi edges are not allowed. "
                #       "The duplicated nodes where {}".format(duplicates))
            # if the edges share one vertex
            elif len(duplicates) == 1:
                # adjacent edges do not need handling
                continue
            else:

                # forbid enclosing patterns
                forbidden_patterns = np.array([
                    # e1 encloses e2
                    [edge_to_page[p, e1], edge_to_page[p, e2]] +
                    static_encode_partial_order(precedes, e1v1, e2v1, e2v2,
                                                e1v2),
                    [edge_to_page[p, e1], edge_to_page[p, e2]] +
                    static_encode_partial_order(precedes, e1v1, e2v2, e2v1,
                                                e1v2),
                    [edge_to_page[p, e1], edge_to_page[p, e2]] +
                    static_encode_partial_order(precedes, e1v2, e2v1, e2v2,
                                                e1v1),
                    [edge_to_page[p, e1], edge_to_page[p, e2]] +
                    static_encode_partial_order(precedes, e1v2, e2v2, e2v1,
                                                e1v1),

                    # e2 encloses e1
                    [edge_to_page[p, e1], edge_to_page[p, e2]] +
                    static_encode_partial_order(precedes, e2v1, e1v1, e1v2,
                                                e2v2),
                    [edge_to_page[p, e1], edge_to_page[p, e2]] +
                    static_encode_partial_order(precedes, e2v1, e1v2, e1v1,
                                                e2v2),
                    [edge_to_page[p, e1], edge_to_page[p, e2]] +
                    static_encode_partial_order(precedes, e2v2, e1v1, e1v2,
                                                e2v1),
                    [edge_to_page[p, e1], edge_to_page[p, e2]] +
                    static_encode_partial_order(precedes, e2v2, e1v2, e1v1,
                                                e2v1),
                ])
                clauses.extend((forbidden_patterns * -1).tolist())
    return clauses
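
Both page encodings call `static_encode_partial_order`, which is not part of the snippet. Since its return value is concatenated with the two page literals and the whole row is negated, it presumably returns the `precedes` variables whose conjunction states the chain a ≺ b ≺ c ≺ d. A minimal sketch under that assumption:

from typing import List
from numpy import ndarray


def static_encode_partial_order(precedes: ndarray, a: int, b: int, c: int,
                                d: int) -> List[int]:
    # Hypothetical sketch: variables saying "a precedes b, b precedes c,
    # c precedes d". The callers negate the whole pattern, so each clause
    # reads "not (both edges on page p AND this vertex order)".
    return [precedes[a, b], precedes[b, c], precedes[c, d]]

Multiplying `forbidden_patterns` by -1 then yields one CNF clause per forbidden configuration: the enclosing patterns for queue pages above, and the alternating patterns for stack pages in Example #5.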
Example #5
def static_encode_stack_page(precedes: ndarray, edge_to_page: ndarray,
                             edges: ndarray, p: int) -> List[List[int]]:
    """
    Encodes a stack page

    :param precedes: precedes[i, j] <=> vertex i precedes vertex j
    :param edge_to_page: edge_to_page[p, e] <=> edge e is assigned to page p
    :param edges: all edges
    :param p: the index of the current page
            """
    clauses = []
    for e in range(edges.shape[0]):
        e1 = edges[e][0]
        e1v1 = edges[e][1]
        e1v2 = edges[e][2]
        for f in range(e):
            e2 = edges[f][0]
            if e1 == e2:
                continue
            e2v1 = edges[f][1]
            e2v2 = edges[f][2]

            duplicates = get_duplicates([e1v1, e1v2, e2v1, e2v2])

            if len(duplicates) > 1:
                # ignore double edges
                continue
                # abort(400,
                #       "Got more than one shared nodes. Multi edges are not allowed. "
                #       "The duplicated nodes where {}".format(duplicates))
            # if the edges share one vertex
            elif len(duplicates) == 1:
                # adjacent edges do not need handling
                continue
            else:
                # forbid alternating patterns
                forbidden_patterns = np.array([
                    [edge_to_page[p, e1], edge_to_page[p, e2]] +
                    static_encode_partial_order(precedes, e1v1, e2v1, e1v2,
                                                e2v2),
                    [edge_to_page[p, e1], edge_to_page[p, e2]] +
                    static_encode_partial_order(precedes, e1v1, e2v2, e1v2,
                                                e2v1),
                    [edge_to_page[p, e1], edge_to_page[p, e2]] +
                    static_encode_partial_order(precedes, e1v2, e2v1, e1v1,
                                                e2v2),
                    [edge_to_page[p, e1], edge_to_page[p, e2]] +
                    static_encode_partial_order(precedes, e1v2, e2v2, e1v1,
                                                e2v1),
                    [edge_to_page[p, e1], edge_to_page[p, e2]] +
                    static_encode_partial_order(precedes, e2v1, e1v1, e2v2,
                                                e1v2),
                    [edge_to_page[p, e1], edge_to_page[p, e2]] +
                    static_encode_partial_order(precedes, e2v1, e1v2, e2v2,
                                                e1v1),
                    [edge_to_page[p, e1], edge_to_page[p, e2]] +
                    static_encode_partial_order(precedes, e2v2, e1v1, e2v1,
                                                e1v2),
                    [edge_to_page[p, e1], edge_to_page[p, e2]] +
                    static_encode_partial_order(precedes, e2v2, e1v2, e2v1,
                                                e1v1),
                ])
                clauses.extend((forbidden_patterns * -1).tolist())

    # If the graph is not too big, add additional constraints for K4 subgraphs
    if precedes.shape[0] < 300:
        n = precedes.shape[0]
        m = edges.shape[0]
        matrix = [[0 for u in range(n)] for v in range(n)]
        edge_idx = [[0 for u in range(n)] for v in range(n)]
        # Create the adjacency matrix and remember each edge's index
        for e in range(m):
            s = edges[e][1]
            t = edges[e][2]
            matrix[s][t] = 1
            matrix[t][s] = 1
            edge_idx[s][t] = e
            edge_idx[t][s] = e

        for u in range(n):
            for v in range(u, n):
                for w in range(v, n):
                    for z in range(w, n):
                        if (matrix[u][v] == 1 and matrix[u][w] == 1
                                and matrix[u][z] == 1 and matrix[v][w] == 1
                                and matrix[v][z] == 1 and matrix[w][z] == 1):
                            clauses.append(
                                static_encode_not_all_in_page(
                                    edge_to_page,
                                    np.array([
                                        edge_idx[u][v], edge_idx[u][w], edge_idx[u][z],
                                        edge_idx[v][w], edge_idx[v][z], edge_idx[w][z]
                                    ]), p))
    return clauses
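
The K4 handling at the end of Example #5 uses `static_encode_not_all_in_page`, which is also not shown. Its name and the fact that its result is appended as a single clause suggest it forbids all of the given edges from sharing page p; a minimal sketch under that assumption:

from typing import List
from numpy import ndarray


def static_encode_not_all_in_page(edge_to_page: ndarray, edge_idxs: ndarray,
                                  p: int) -> List[int]:
    # Hypothetical sketch: one clause stating that at least one of the given
    # edges is NOT assigned to page p, so e.g. the six edges of a K4 can
    # never all end up on the same stack page.
    return [-int(edge_to_page[p, e]) for e in edge_idxs]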