Example #1
def build_pages_fileinfos(pages):
    '''
    Creates fileinfo entries for the template mappings associated with
    an iterable list of Page objects.
    '''

    n = 0
    for page in pages:
        n += 1
        template_mappings = page.template_mappings

        if template_mappings.count() == 0:
            raise TemplateMapping.DoesNotExist(
                'No template mappings found for this page.')

        tags = template_tags(page_id=page.id)

        for t in template_mappings:
            path_string = generate_date_mapping(page.publication_date.date(),
                                                tags, t.path_string)
            if path_string == '':
                continue
            master_path_string = path_string + "." + page.blog.base_extension
            add_page_fileinfo(page, t, master_path_string,
                              page.blog.url + "/" + master_path_string,
                              page.blog.path + '/' + master_path_string,
                              str(page.publication_date))

    return n
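
A minimal usage sketch for this version, assuming a `blog` object whose `pages.published` query exists as it does in Example #5; every other name is defined above:

# Hypothetical caller: build fileinfos for every published page of a blog.
pages = blog.pages.published          # assumed query, borrowed from Example #5
try:
    built = build_pages_fileinfos(pages)
except TemplateMapping.DoesNotExist:
    # a single page without template mappings aborts the whole batch
    built = 0
print('{} pages processed'.format(built))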
Example #2
def build_archives_fileinfos(pages):
    '''
    Takes an iterable of Page objects and produces fileinfos
    for the date-based archive entries for each one.
    '''

    counter = 0

    mapping_list = {}

    for page in pages:

        tags = template_tags(page_id=page.id)

        if page.archive_mappings.count() == 0:
            raise TemplateMapping.DoesNotExist(
                'No template mappings found for the archives for this page.')

        for m in page.archive_mappings:

            path_string = generate_date_mapping(page.publication_date, tags,
                                                m.path_string)
            if path_string == '':
                continue
            if path_string in mapping_list:
                continue

            # tag_context = generate_archive_context_from_page(m.archive_xref, page.blog, page)
            mapping_list[path_string] = ((
                None,
                m,
                path_string,
                page.blog.url + "/" + path_string,
                page.blog.path + '/' + path_string,
            ), (page))

    for n in mapping_list:
        counter += 1
        new_fileinfo = add_page_fileinfo(*mapping_list[n][0])
        archive_context = []
        m = mapping_list[n][0][1]

        for r in m.archive_xref:
            archive_context.append(archive_functions[r]["format"](
                archive_functions[r]["mapping"](mapping_list[n][1])))

        for t, r in zip(archive_context, m.archive_xref):
            new_fileinfo_context = FileInfoContext.get_or_create(
                fileinfo=new_fileinfo, object=r, ref=t)

        new_fileinfo.mapping_sort = archive_context
        new_fileinfo.save()

    # @return mapping_list
    return counter
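
The later unpacking of `mapping_list[n][0]` and `mapping_list[n][1]` is easier to follow with the shape of one entry spelled out; the sketch below is purely descriptive of the code above:

# mapping_list[path_string] == (fileinfo_args, page)
# where fileinfo_args feeds add_page_fileinfo(*fileinfo_args):
#   (None,                                # takes the slot that holds the Page in build_pages_fileinfos
#    m,                                   # the archive TemplateMapping
#    path_string,                         # path generated from the date mapping
#    page.blog.url + "/" + path_string,   # public URL of the archive file
#    page.blog.path + '/' + path_string)  # filesystem path of the archive file
# and page seeds the archive_functions lookups, each of which exposes a
# "mapping" callable (pull context from the page) and a "format" callable
# (render that context for the fileinfo's sort key).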
Example #3
def build_pages_fileinfos(pages, template_mappings=None):
    '''
    Creates fileinfo entries for the template mappings associated with
    an iterable list of Page objects.
    :param pages:
        List of page objects to build fileinfos for.
    :param template_mappings:
        Optional iterable of template mappings to use in place of each
        page's own mappings.
    '''

    fileinfos = []

    for n, page in enumerate(pages):

        if template_mappings is None:
            mappings = page.template_mappings
        else:
            mappings = template_mappings

        if mappings.count() == 0:
            raise TemplateMapping.DoesNotExist('No template mappings found for this page.')

        tags = template_tags(page=page)

        for t in mappings:

            # path_string = replace_mapping_tags(t.path_string)
            path_string = generate_date_mapping(
                page.publication_date_tz.date(), tags,
                replace_mapping_tags(t.path_string))

            # for tag archives, we need to return a list from the date mapping
            # in the event that we have a tag present that's an iterable like the tag list
            # e.g., for /%t/%Y, for a given page that has five tags
            # we return five values, one for each tag, along with the year

            if path_string == '' or path_string is None:
                continue

            # master_path_string = path_string

            fileinfos.append(
                add_page_fileinfo(page, t, path_string,
                    page.blog.url + "/" + path_string,
                    page.blog.path + '/' + path_string,
                    str(page.publication_date_tz))
                )

    return fileinfos
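
A minimal usage sketch for the revised signature, assuming `some_template.mappings` yields TemplateMapping rows the same way `template.mappings` does in Example #5:

# Hypothetical: rebuild fileinfos for a batch of pages against one template's
# mappings instead of each page's own mappings.
fileinfos = build_pages_fileinfos(pages, template_mappings=some_template.mappings)
print('{} fileinfos created'.format(len(fileinfos)))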
Example #4
def build_archives_fileinfos(pages):
    '''
    Takes an iterable of Page objects and produces fileinfos
    for the date-based archive entries for each one.
    :param pages:
        List of pages to produce date-based archive fileinfos for.
    '''

    counter = 0
    mapping_list = {}

    try:

        for page in pages:
            tags = template_tags(page=page)
            if page.archive_mappings.count() == 0:
                raise TemplateMapping.DoesNotExist('No template mappings found for the archives for this page.')
            s = []
            for m in page.archive_mappings:
                q = replace_mapping_tags(m.path_string)
                s.append(q)
                paths_list = eval_paths(m.path_string, tags.__dict__)

                if type(paths_list) in (list,):
                    paths = []
                    for n in paths_list:
                        if n is None:
                            continue
                        p = page.proxy(n[0])
                        # FIXME: eliminate the need for page proxies passed manually
                        # at this stage of the process we should generate those
                        # page context in one column, whatever it is, and path strings in another
                        paths.append((p, n[1]))

                else:
                    paths = (
                        (page, paths_list)
                        ,)

                for page, path in paths:
                    path_string = generate_date_mapping(page.publication_date_tz,
                        tags, path, do_eval=False)

                    if path_string == '' or path_string is None:
                        continue
                    if path_string in mapping_list:
                        continue

                    mapping_list[path_string] = (
                        (None, m, path_string,
                        page.blog.url + "/" + path_string,
                        page.blog.path + '/' + path_string,)
                        ,
                        (page),
                        )
        # raise Exception(s)
        for counter, n in enumerate(mapping_list):
            # TODO: we should bail if there is already a fileinfo for this page?
            new_fileinfo = add_page_fileinfo(*mapping_list[n][0])
            FileInfoContext.delete().where(FileInfoContext.fileinfo == new_fileinfo).execute()
            archive_context = []
            m = mapping_list[n][0][1]

            for r in m.archive_xref:
                archive_context.append(
                    archive_functions[r]["format"](
                        archive_functions[r]["mapping"](mapping_list[n][1])
                        )
                    )

            for t, r in zip(archive_context, m.archive_xref):
                # get_or_create already persists the row and (under peewee)
                # returns a (row, created) tuple, so chaining .save() onto it
                # would raise; Examples #2 and #5 call it without .save()
                new_fileinfo_context = FileInfoContext.get_or_create(
                    fileinfo=new_fileinfo,
                    object=r,
                    ref=t)

            new_fileinfo.mapping_sort = '/'.join(archive_context)
            new_fileinfo.save()

        # len() rather than counter + 1, so an empty mapping_list reports 0
        return len(mapping_list)

    except Exception:
        return 0
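
Because the whole body sits inside a broad `except Exception`, a return value of 0 can mean either "no mapping produced a path" or "something failed silently"; a caller that cares about the difference has to check its input first. A hedged sketch of such a call, reusing names from the other examples:

# Hypothetical caller: only rebuild archives when there are published pages.
pages = blog.pages.published          # assumed query, as in Example #5
if pages.count() > 0:
    built = build_archives_fileinfos(pages)
    if built == 0:
        # pages was non-empty, so 0 means no mapping matched
        # or an exception was swallowed by the broad except
        pass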
Example #5
def build_archives_fileinfos_by_mappings(template, pages=None, early_exit=False):

    # build list of mappings if not supplied
    # if the list has no dirty mappings, exit

    # also check to make sure we're not using a do-not-publish template

    # TODO: Maybe the smart thing to do is to check the
    # underlying archive type for the template first,
    # THEN create the mappings, so we don't have to do the awkward
    # stuff that we do with tag archives

    # counter = 0
    mapping_list = {}

    if pages is None:
        pages = template.blog.pages.published

    for page in pages:
        tags = template_tags(page=page)
        if page.archive_mappings.count() == 0:
            raise TemplateMapping.DoesNotExist('No template mappings found for the archives for this page.')
        for mapping in template.mappings:
            paths_list = eval_paths(mapping.path_string, tags.__dict__)

            if type(paths_list) in (list,):
                paths = []
                for n in paths_list:
                    if n is None:
                        continue
                    p = page.proxy(n[0])
                    paths.append((p, n[1]))
            else:
                paths = (
                    (page, paths_list)
                    ,)

            for page, path in paths:
                path_string = generate_date_mapping(page.publication_date_tz,
                    tags, path, do_eval=False)

                if path_string == '' or path_string is None:
                    continue
                if path_string in mapping_list:
                    continue

                mapping_list[path_string] = (
                    (None, mapping, path_string,
                    page.blog.url + "/" + path_string,
                    page.blog.path + '/' + path_string,)
                    ,
                    (page),
                    )

        if early_exit and len(mapping_list) > 0:
            # return mapping_list
            break

    fileinfo_list = []

    for n in mapping_list:
        # TODO: we should bail if there is already a fileinfo for this page?
        new_fileinfo = add_page_fileinfo(*mapping_list[n][0])
        FileInfoContext.delete().where(FileInfoContext.fileinfo == new_fileinfo).execute()
        archive_context = []
        m = mapping_list[n][0][1]

        for r in m.archive_xref:
            archive_context.append(
                archive_functions[r]["format"](
                    archive_functions[r]["mapping"](mapping_list[n][1])
                    )
                )

        for t, r in zip(archive_context, m.archive_xref):
            new_fileinfo_context = FileInfoContext.get_or_create(
                fileinfo=new_fileinfo,
                object=r,
                ref=t
                )

        new_fileinfo.mapping_sort = '/'.join(archive_context)
        new_fileinfo.save()
        fileinfo_list.append(new_fileinfo)

    return fileinfo_list
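
A usage sketch for the by-mappings variant; `early_exit=True` makes it stop scanning pages after the first one that yields a mapping, which is enough to answer "does this template have anything to rebuild?". The follow-up full pass is a hypothetical pattern, not part of the source:

# Hypothetical: probe whether a template has any archive mappings to rebuild.
# early_exit=True stops scanning pages after the first match.
hits = build_archives_fileinfos_by_mappings(template, early_exit=True)
if hits:
    # hypothetical follow-up: do the full pass over all published pages
    fileinfos = build_archives_fileinfos_by_mappings(template)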