Example No. 1
0
    def generate_context(self):
        """Scan ``self.path`` for article source files and populate context.

        Reads every file returned by ``get_files`` (excluding the 'pages'
        directory), parses each into an ``Article``, and routes it into
        ``self.tags`` / ``self.drafts`` / ``all_articles`` according to its
        status.

        NOTE(review): this is Python 2 code (``except Exception, e``,
        ``unicode``, ``urlparse``). ``all_articles`` is built up here but not
        consumed in the visible lines — the method presumably continues past
        this chunk; confirm against the full file.
        """

        # return the list of files to use
        files = self.get_files(self.path, exclude=['pages',])
        all_articles = []
        for f in files:

            # Best-effort parse: a file that fails to read/parse is logged
            # and skipped rather than aborting the whole generation run.
            try:
                content, metadata, source = read_file(f, settings=self.settings)
            except Exception, e:
                warning(u'Could not process %s\n%s' % (f, str(e)))
                continue

            # if no category is set, use the name of the path as a category
            if 'category' not in metadata.keys():

                # Files directly under self.path fall back to the configured
                # default; files in a subdirectory use that directory's name.
                if os.path.dirname(f) == self.path:
                    category = self.settings['DEFAULT_CATEGORY']
                else:
                    # Directory names come from the filesystem as bytes in
                    # Python 2, hence the explicit UTF-8 decode.
                    category = os.path.basename(os.path.dirname(f)).decode('utf-8')

                # An empty DEFAULT_CATEGORY leaves the article uncategorized.
                if category != '':
                    metadata['category'] = unicode(category)

            # Optionally fall back to the filesystem timestamp when the source
            # carries no explicit date.  NOTE(review): st_ctime is inode
            # change time on Unix (creation time only on Windows) — confirm
            # this is the intended semantics.
            if 'date' not in metadata.keys()\
                and self.settings['FALLBACK_ON_FS_DATE']:
                    metadata['date'] = datetime.fromtimestamp(os.stat(f).st_ctime)

            article = Article(content, metadata, settings=self.settings,
                              filename=f, source=source)
            # Skip articles that fail validation (helper logs the reason).
            if not is_valid_content(article, f):
                continue

            # Build an optional URL prefix from ARTICLE_PERMALINK_STRUCTURE.
            add_to_url = u''
            if 'ARTICLE_PERMALINK_STRUCTURE' in self.settings:
                article_permalink_structure = self.settings['ARTICLE_PERMALINK_STRUCTURE']
                # Escape '%(' so the strftime pass below does not consume the
                # metadata placeholders; they are substituted afterwards.
                article_permalink_structure = article_permalink_structure.lstrip('/').replace('%(', "%%(")

                # try to substitute any python datetime directive
                add_to_url = article.date.strftime(article_permalink_structure)
                # try to substitute any article metadata in rest file
                add_to_url = add_to_url % article.__dict__
                # Slugify each path segment individually so '/' separators
                # survive, then rejoin into a relative path.
                add_to_url = [slugify(i) for i in add_to_url.split('/')]
                add_to_url = os.path.join(*add_to_url)

            # Prefix the article's URLs with the permalink path.
            # NOTE(review): urljoin treats the second argument as relative to
            # the first's *directory*; with a prefix lacking a trailing slash
            # this may drop the last segment — verify intended behavior.
            article.url = urlparse.urljoin(add_to_url, article.url)
            article.save_as = urlparse.urljoin(add_to_url, article.save_as)
            article.source_url = urlparse.urljoin(add_to_url, article.source_url)

            # Route by publication status: published articles are indexed by
            # tag and collected; drafts go to a separate list.
            if article.status == "published":
                if hasattr(article, 'tags'):
                    for tag in article.tags:
                        self.tags[tag].append(article)
                all_articles.append(article)
            elif article.status == "draft":
                self.drafts.append(article)