# Example #1 (score: 0)
def load_graph(request, pagename, urladd, load_origin=True):
    """Load a Graph of the links to and from pagename.

    Builds a Graph from request.graphdata: in-links always come from
    local pages, while out-links may point at attachments, interwiki
    pages or plain URLs, which get label/URL/tooltip fixups below.

    Arguments:
        request: current request object — used for the ACL check,
            graphdata access and i18n via request.getText.
        pagename: name of the page whose link neighbourhood is loaded.
        urladd: extra URL arguments passed through to _add_node.
        load_origin: when True, pagename itself is added as a full
            'page' node; otherwise only a bare node is created.

    Returns the populated Graph, or None when the user may not read
    the page or the page has no graph data.
    """
    if not request.user.may.read(pagename):
        return None

    def add_adata_link(adata, edge, linktype):
        # Add the edge if it does not already exist; either way record
        # linktype in the edge's set of link types.
        # (Parameter renamed from 'type' to avoid shadowing the builtin.)
        e = adata.edges.get(*edge)
        if not e:
            e = adata.edges.add(*edge)
            e.linktype = set([linktype])
        else:
            e.linktype.add(linktype)
        return adata

    # Category and Template pages are filtered out of the graph below.
    cat_re = category_regex(request)
    temp_re = template_regex(request)

    page = request.graphdata.getpage(pagename)
    if not page:
        return None

    # Make graph, initialise head node
    adata = Graph()
    if load_origin:
        adata = _add_node(request, pagename, adata, urladd, 'page')
    else:
        adata.nodes.add(pagename)

    # Add links to page
    links = request.graphdata.get_in(pagename)
    for linktype in links:
        for src in links[linktype]:
            # Filter Category, Template pages
            if cat_re.search(src) or temp_re.search(src):
                continue
            # Add page and its metadata
            # Currently pages can have links in them only
            # from local pages, thus nodetype == page
            adata = _add_node(request, src, adata, urladd, 'page')
            adata = add_adata_link(adata, (src, pagename), linktype)

    # Add links from page
    links = request.graphdata.get_out(pagename)
    for linktype in links:
        for dst in links[linktype]:
            # Filter Category, Template pages
            if cat_re.search(dst) or temp_re.search(dst):
                continue

            # Fix links to everything but pages
            label = ''
            gwikiurl = ''
            tooltip = ''
            nodetype = node_type(request, dst)

            if nodetype == 'attachment':
                # get the absolute name ie both page and filename
                gwikiurl = absolute_attach_name(pagename, dst)
                att_parts = gwikiurl.split(':')[1].split('/')
                att_page = '/'.join(att_parts[:-1])
                att_file = att_parts[-1]

                if pagename == att_page:
                    label = "Attachment: %s" % (att_file)
                else:
                    label = "Attachment: %s/%s" % (att_page, att_file)

                _, exists = attachment_file(request,
                                            att_page, att_file)

                # For existing attachments, have link to view it
                if exists:
                    gwikiurl = "./%s?action=AttachFile&do=get&target=%s" % \
                        (att_page, att_file)
                    tooltip = request.getText('View attachment')
                # For non-existing, have a link to upload it
                else:
                    gwikiurl = "./%s?action=AttachFile&target=%s" % \
                        (att_page, att_file)
                    tooltip = request.getText('Add attachment')

            elif nodetype == 'interwiki':
                # Get effective URL:s to interwiki URL:s
                iwname = dst.split(':')
                iw_list = wikiutil.load_wikimap(request)

                gwikiurl = iw_list[iwname[0]] + iwname[1]
                tooltip = iwname[1] + ' ' + \
                    request.getText('page on') + \
                    ' ' + iwname[0] + ' ' + \
                    request.getText('wiki')

            elif nodetype == 'url':
                # URL:s have the url already, keep it
                gwikiurl = dst
                tooltip = dst

            elif nodetype == 'none':
                # Was not a valid link after all, eg. an
                # interwiki-style link, but the wiki name was not in
                # intermap.
                continue

            # Add page and its metadata
            adata = _add_node(request, dst, adata, urladd, nodetype)
            adata = add_adata_link(adata, (pagename, dst), linktype)

            # Add labels, gwikiurls and tooltips. Fetch the node only
            # inside the guard that needs it, so 'node' can never be
            # referenced while unbound.
            if label or gwikiurl or tooltip:
                node = adata.nodes.get(dst)
                if label:
                    node.gwikilabel = label
                if gwikiurl:
                    node.gwikiURL = gwikiurl
                if tooltip:
                    node.gwikitooltip = tooltip

    return adata
    def _changed_meta(self, request, pagename, old_outs, new_data):
        add_out = dict()
        del_out = dict()

        add_in = dict()
        del_in = dict()

        for page in new_data:
            add_in.setdefault(page, list())
            del_in.setdefault(page, list())

        # Code for making out which edges have changed.
        # We only want to save changes, not all the data,
        # as edges have a larger time footprint while saving.

        add_out.setdefault(pagename, list())
        del_out.setdefault(pagename, list())

        old_keys = set(old_outs.keys())
        new_keys = set(new_data.get(pagename, {}).get(u'out', {}).keys())
        changed_keys = old_keys.intersection(new_keys)

        # Changed edges == keys whose values have experienced changes
        for key in changed_keys:
            new_edges = len(new_data[pagename][u'out'][key])
            old_edges = len(old_outs[key])

            for i in range(max(new_edges, old_edges)):

                # old data had more links, delete old
                if new_edges <= i:
                    val = old_outs[key][i]

                    del_out[pagename].append((key, val))

                    # Only local pages will have edges and metadata
                    if node_type(request, val) == 'page':
                        del_in.setdefault(val, list()).append((key, pagename))

                # new data has more links, add new
                elif old_edges <= i:
                    val = new_data[pagename][u'out'][key][i]

                    add_out[pagename].append((key, val))

                    # Only save in-links to local pages, not eg. url or interwiki
                    if node_type(request, val) == 'page':
                        add_in.setdefault(val, list()).append((key, pagename))

                # check if the link i has changed
                else:
                    val = old_outs[key][i]
                    new_val = new_data[pagename][u'out'][key][i]

                    if val == new_val:
                        continue

                    # link changed, replace old link with new
                    # add and del out-links
                    add_out[pagename].append((key, new_val))

                    del_out[pagename].append((key, val))

                    # Only save in-links to local pages, not eg. url or interwiki
                    if node_type(request, new_val) == 'page':
                        add_in.setdefault(new_val, list()).append((key, pagename))
                    # Only save in-links to local pages, not eg. url or interwiki
                    if node_type(request, val) == 'page':
                        del_in.setdefault(val, list()).append((key, pagename))

        # Added edges of a new linktype
        for key in new_keys.difference(old_keys):
            for i, val in enumerate(new_data[pagename][u'out'][key]):

                add_out[pagename].append((key, val))

                # Only save in-links to local pages, not eg. url or interwiki
                if node_type(request, val) == 'page':
                    add_in.setdefault(val, list()).append((key, pagename))

        # Deleted edges
        for key in old_keys.difference(new_keys):
            for val in old_outs[key]:

                del_out[pagename].append((key, val))

                # Only local pages will have edges and metadata
                if node_type(request, val) == 'page':
                    del_in.setdefault(val, list()).append((key, pagename))

        # Adding and removing in-links are the most expensive operation in a
        # shelve, so we'll try to minimise them. Eg. if page TestPage is
        #  a:: ["b"]\n a:: ["a"] 
        # and it is resaved as
        #  a:: ["a"]\n a:: ["b"]
        # the ordering of out-links in TestPage changes, but we do not have
        # to touch the in-links in pages a and b. This is possible because
        # in-links do not have any sensible order.
        for page in new_data:
            #print repr(page), add_in[page], del_in[page]

            changes = set(add_in[page] + del_in[page])

            #print changes

            for key, val in changes:
                #print 'change', repr(key), repr(val)

                add_count = add_in[page].count((key, val))
                del_count = del_in[page].count((key, val))

                if not add_count or not del_count:
                    #print "No changes"
                    #print
                    continue

                change_count = add_count - del_count

                # If in-links added and deleted as many times, 
                # there are effectively no changes to be saved
                if change_count == 0:
                    for x in range(add_count):
                        add_in[page].remove((key, val))
                        del_in[page].remove((key, val))
                        #print "No changes"

                elif change_count < 0:
                    for x in range(abs(change_count)):
                        del_in[page].remove((key, val))
                        #print "No need to delete %s from %s" % (val, page)

                else:
                    for x in range(abs(change_count)):
                        #print "No need to add %s to %s" % (val, page)
                        add_in[page].remove((key, val))

                #print

        #print

        return add_out, del_out, add_in, del_in
# Example #3 (score: 0)
    def _changed_meta(self, request, pagename, old_outs, new_data):
        """Diff the out-links of pagename against its previous state.

        Compares old_outs (linktype -> list of link targets, the
        previously saved out-links) with the fresh out-links found in
        new_data[pagename][u'out'], and returns only the deltas:
        (add_out, del_out, add_in, del_in), each a dict mapping a page
        name to a list of (linktype, pagename) pairs.  Only changes are
        returned because saving edges dominates the save time.
        """
        add_out = dict()
        del_out = dict()

        add_in = dict()
        del_in = dict()

        # Every page mentioned in new_data gets an (initially empty)
        # in-link delta entry.
        for page in new_data:
            add_in.setdefault(page, list())
            del_in.setdefault(page, list())

        # Code for making out which edges have changed.
        # We only want to save changes, not all the data,
        # as edges have a larger time footprint while saving.

        # Out-link deltas only ever concern the saved page itself.
        add_out.setdefault(pagename, list())
        del_out.setdefault(pagename, list())

        old_keys = set(old_outs.keys())
        new_keys = set(new_data.get(pagename, {}).get(u'out', {}).keys())
        changed_keys = old_keys.intersection(new_keys)

        # Changed edges == keys whose values have experienced changes.
        # The old and new target lists are compared position by position.
        for key in changed_keys:
            new_edges = len(new_data[pagename][u'out'][key])
            old_edges = len(old_outs[key])

            for i in range(max(new_edges, old_edges)):

                # old data had more links, delete old
                if new_edges <= i:
                    val = old_outs[key][i]

                    del_out[pagename].append((key, val))

                    # Only local pages will have edges and metadata
                    if node_type(request, val) == 'page':
                        del_in.setdefault(val, list()).append((key, pagename))

                # new data has more links, add new
                elif old_edges <= i:
                    val = new_data[pagename][u'out'][key][i]

                    add_out[pagename].append((key, val))

                    # Only save in-links to local pages, not eg. url or interwiki
                    if node_type(request, val) == 'page':
                        add_in.setdefault(val, list()).append((key, pagename))

                # check if the link i has changed
                else:
                    val = old_outs[key][i]
                    new_val = new_data[pagename][u'out'][key][i]

                    if val == new_val:
                        continue

                    # link changed, replace old link with new
                    # add and del out-links
                    add_out[pagename].append((key, new_val))

                    del_out[pagename].append((key, val))

                    # Only save in-links to local pages, not eg. url or interwiki
                    if node_type(request, new_val) == 'page':
                        add_in.setdefault(new_val, list()).append(
                            (key, pagename))
                    # Only save in-links to local pages, not eg. url or interwiki
                    if node_type(request, val) == 'page':
                        del_in.setdefault(val, list()).append((key, pagename))

        # Added edges of a new linktype: every target is an addition.
        for key in new_keys.difference(old_keys):
            for i, val in enumerate(new_data[pagename][u'out'][key]):

                add_out[pagename].append((key, val))

                # Only save in-links to local pages, not eg. url or interwiki
                if node_type(request, val) == 'page':
                    add_in.setdefault(val, list()).append((key, pagename))

        # Deleted edges: every target of a vanished linktype is a deletion.
        for key in old_keys.difference(new_keys):
            for val in old_outs[key]:

                del_out[pagename].append((key, val))

                # Only local pages will have edges and metadata
                if node_type(request, val) == 'page':
                    del_in.setdefault(val, list()).append((key, pagename))

        # Adding and removing in-links are the most expensive operation in a
        # shelve, so we'll try to minimise them. Eg. if page TestPage is
        #  a:: ["b"]\n a:: ["a"]
        # and it is resaved as
        #  a:: ["a"]\n a:: ["b"]
        # the ordering of out-links in TestPage changes, but we do not have
        # to touch the in-links in pages a and b. This is possible because
        # in-links do not have any sensible order.
        for page in new_data:
            #print repr(page), add_in[page], del_in[page]

            # Each (linktype, page) pair is considered once, even if it
            # occurs multiple times in either delta list.
            changes = set(add_in[page] + del_in[page])

            #print changes

            for key, val in changes:
                #print 'change', repr(key), repr(val)

                add_count = add_in[page].count((key, val))
                del_count = del_in[page].count((key, val))

                # A pair present on only one side cannot cancel out.
                if not add_count or not del_count:
                    #print "No changes"
                    #print
                    continue

                change_count = add_count - del_count

                # If in-links added and deleted as many times,
                # there are effectively no changes to be saved
                if change_count == 0:
                    for x in range(add_count):
                        add_in[page].remove((key, val))
                        del_in[page].remove((key, val))
                        #print "No changes"

                # Net deletions: keep only the surplus delete entries.
                elif change_count < 0:
                    for x in range(abs(change_count)):
                        del_in[page].remove((key, val))
                        #print "No need to delete %s from %s" % (val, page)

                # Net additions: keep only the surplus add entries.
                else:
                    for x in range(abs(change_count)):
                        #print "No need to add %s to %s" % (val, page)
                        add_in[page].remove((key, val))

                #print

        #print

        return add_out, del_out, add_in, del_in