Example #1
# Imports assumed from the project layout.
import os

from django.core.serializers import serialize

from stardate.backends import StardateBackend
from stardate.models import Post
from stardate.parsers import FileParser


class LocalFileBackend(StardateBackend):
    def __init__(self):
        self.name = u'localfile'
        self.parser = FileParser()
        self.social_auth = None

    def get_name(self):
        return self.name

    def set_social_auth(self, *args, **kwargs):
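        # The local-file backend needs no social auth credentials; no-op.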
        return

    def serialize_posts(self, posts):
        """
        Returns dictionary of individual Post
        """
        posts_as_dicts = []
        serialized = serialize(
            'python',
            posts,
            fields=('title', 'slug', 'publish', 'stardate', 'body')
        )
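        # serialize('python', ...) returns a list of dicts with 'pk',
        # 'model' and 'fields' keys; keep only the fields.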
        for post in serialized:
            posts_as_dicts.append(post['fields'])
        return posts_as_dicts

    def _get_post_path(self, folder, post):
        """
        Dynamically guess post file path from slug / blog folder
        """
        filename = '{0}.md'.format(post['slug'])
        return os.path.join(folder, filename)

    def _posts_from_file(self, file_path):
        """
        Return list of post dictionaries from file
        """
        if os.path.exists(file_path):
            with open(file_path, 'r') as f:
                content = f.read()
            posts = self.parser.unpack(content)
        else:
            posts = []
        return posts

    def _posts_from_dir(self, folder):
        """
        Get post dictionaries from the files in a directory
        """
        remote_posts = []
        for filename in os.listdir(folder):
            # os.listdir returns bare names; join with the folder so
            # the open works regardless of the current directory.
            file_path = os.path.join(folder, filename)
            with open(file_path, 'r') as f:
                remote_post = f.read()
            remote_posts.append(self.parser.parse(remote_post))

        return remote_posts

    def push(self, posts):
        """
        Render posts to files

        posts: List of Post object instances
        """
        # Grab the file or folder path associated
        # with a blog
        blog_path = posts[0].blog.backend_file

        # Separate blog path into directory and filename
        blog_dir, blog_file = os.path.split(blog_path)

        # Pushing works differently depending on whether
        # we are using a single file or a directory of files.
        if blog_file:
            responses = [self._push_blog_file(blog_path, posts)]

        else:
            responses = self._push_post_files(blog_dir, posts)

        return responses

    def _push_blog_file(self, file_path, posts):
        """
        Update posts in a single blog file
        """
        remote_posts = self._posts_from_file(file_path)

        # Use serialized version of posts to find
        # and update
        local_posts = self.serialize_posts(posts)

        # Update remote_posts with local versions
        ## FIXME: n^2 crawl, use stardate as keys
        ## in dicts instead of lists?
        for local_post in local_posts:
            exists = False
            for remote_post in remote_posts:
                # Match on stardate when the remote post has one;
                # use .get() since older files may lack the key.
                if remote_post.get('stardate'):
                    if local_post['stardate'] == remote_post['stardate']:
                        exists = True
                        remote_post.update(local_post)
                # A post may exist on the backend without a stardate
                # while the local copy, assigned one during the last
                # pull, already has it. Fall back to matching on the
                # 'title' field.
                else:
                    if local_post['title'] == remote_post['title']:
                        exists = True
                        remote_post.update(local_post)
            # Add new remote post if it does not exist yet
            if not exists:
                remote_posts.append(local_post)

        # Turn post list back into string
        content = self.parser.pack(remote_posts)
        with open(file_path, 'w') as f:
            f.write(content)
        return

    def _push_post_files(self, folder, posts):
        """
        Update posts in multiple files
        """
        local_posts = self.serialize_posts(posts)

        for local_post in local_posts:
            # Generate the post file path dynamically
            post_path = self._get_post_path(folder, local_post)

            # Get the existing remote post as a post dict
            if os.path.exists(post_path):
                with open(post_path, 'r') as f:
                    remote_post = f.read()
                    remote_post = self.parser.parse(remote_post)
            else:
                remote_post = {}

            # Update the contents of the remote post
            remote_post.update(local_post)
            content = self.parser.render(remote_post)
            with open(post_path, 'w') as f:
                f.write(content)
        return

    def _update_from_dict(self, blog, post_dict, post=None):
        """
        Create or update a Post from a dictionary
        """
        # If a post is not provided, try to fetch it
        if not post:
            if 'stardate' in post_dict:
                post = Post.objects.filter(
                    blog=blog,
                    stardate=post_dict['stardate']
                ).first()
            if not post:
                post = Post(blog=blog)

        # Update from dict values
        for att, value in post_dict.items():
            setattr(post, att, value)
        post.save(push=False)
        return post

    def pull(self, blog):
        """
        Update local posts from remote source

        blog: Blog instance
        """
        blog_path = blog.backend_file
        blog_dir, blog_file = os.path.split(blog_path)

        # Extract remote posts from single file
        if blog_file:
            remote_posts = self._posts_from_file(blog_path)
        # Extract posts from multiple files
        else:
            remote_posts = self._posts_from_dir(blog_dir)

        updated_list = []
        for remote_post in remote_posts:
            updated = self._update_from_dict(blog, remote_post)
            updated_list.append(updated)

        return updated_list
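
The backend stores a blog either as one multi-post file or as one file per post, chosen by whether the blog's backend_file path ends in a filename or a directory. A minimal round-trip sketch, assuming a Blog instance already wired to this backend (the blog variable and its posts are hypothetical):

# Minimal sketch, assuming `blog.backend_file` points at a single
# posts file and `blog.posts` is the related Post queryset.
backend = LocalFileBackend()

# pull(): parse the backing file and create/update local Post rows.
updated = backend.pull(blog)

# push(): serialize the local posts and write them back to the file.
backend.push(list(blog.posts.all()))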
Example #2
# Imports assumed from the project layout; TIMESTAMP is a module-level
# timestamp string defined elsewhere in the test suite.
import datetime
import os
import tempfile

from django.contrib.auth.models import User
from django.test import TestCase
from django.utils import timezone

from dateutil import tz
from mock import patch

from stardate.models import Blog, Post
from stardate.parsers import FileParser


class FileParserTestCase(TestCase):
    def setUp(self):
        self.parser = FileParser()
        self.test_string = "publish: {0}\ntimezone: US/Eastern\ntitle: Tingling of the spine\n\n\nExtraordinary claims require extraordinary evidence!".format(TIMESTAMP)

    def tearDown(self):
        Blog.objects.all().delete()
        User.objects.all().delete()

    def test_pack(self):
        fd, file_path = tempfile.mkstemp(suffix='.txt')
        os.close(fd)  # close the descriptor mkstemp leaves open
        user = User.objects.create(username='******')
        blog = Blog.objects.create(
            backend_file=file_path,
            backend_class='stardate.backends.local_file.LocalFileBackend',
            name='test blog',
            user=user,
        )

        Post.objects.create(
            blog=blog,
            title='My first post',
            publish=datetime.datetime(2015, 1, 1, 6, 0),
            body='This is the first post.'
        )

        Post.objects.create(
            blog=blog,
            title='My second post',
            publish=datetime.datetime(2015, 1, 2, 6, 0),
            body='This is the second post.'
        )

        post_list = [post.serialized() for post in blog.posts.all()]

        packed = self.parser.pack(post_list)

        self.assertIsInstance(post_list, list)
        self.assertEqual(len(post_list), 2)
        try:
            self.assertIsInstance(packed, basestring)
        except NameError:
            self.assertIsInstance(packed, str)

        self.assertTrue(u'title: {0}'.format(post_list[0]['title']) in packed)
        self.assertTrue(u'title: {0}'.format(post_list[1]['title']) in packed)
        self.assertTrue(u'stardate: {0}'.format(post_list[0]['stardate']) in packed)
        self.assertTrue(u'stardate: {0}'.format(post_list[1]['stardate']) in packed)
        self.assertTrue(u'\n\n\n{0}'.format(post_list[0]['body']) in packed)
        self.assertTrue(u'\n\n\n{0}'.format(post_list[1]['body']) in packed)
        self.assertTrue(u'publish: {0}'.format(post_list[0]['publish']) in packed)
        self.assertTrue(u'publish: {0}'.format(post_list[1]['publish']) in packed)

    def test_parse_publish(self):
        timestamp = '01-01-2015 06:00AM+0000'
        expected = datetime.datetime(2015, 1, 1, 6, 0, tzinfo=timezone.utc)

        self.assertEqual(self.parser.parse_publish(timestamp), expected)
        self.assertEqual(self.parser.parse_publish(expected), expected)

        self.assertEqual(
            self.parser.parse_publish('2016-01-01 00:00:00 -0500'),
            datetime.datetime(2016, 1, 1, tzinfo=tz.gettz('US/Eastern'))
        )

        self.assertEqual(
            self.parser.parse_publish('2016-01-01'),
            datetime.datetime(2016, 1, 1, tzinfo=timezone.utc)
        )

        self.assertEqual(
            self.parser.parse_publish('2016-01-01 00:00:00'),
            datetime.datetime(2016, 1, 1, tzinfo=timezone.utc)
        )

        self.assertEqual(
            self.parser.parse_publish('2016-01-01 12AM', 'US/Eastern'),
            datetime.datetime(2016, 1, 1, tzinfo=tz.gettz('US/Eastern'))
        )

        self.assertEqual(
            self.parser.parse_publish(datetime.date(2016, 1, 1)),
            datetime.datetime(2016, 1, 1, tzinfo=timezone.utc)
        )

        self.assertEqual(
            self.parser.parse_publish('2016-01-01 12AM', 'EST'),
            datetime.datetime(2016, 1, 1, tzinfo=tz.gettz('US/Eastern'))
        )

        self.assertEqual(
            self.parser.parse_publish('2016-07-01 12AM', 'US/Eastern'),
            datetime.datetime(2016, 7, 1, tzinfo=tz.gettz('US/Eastern'))
        )

        self.assertEqual(
            self.parser.parse_publish(datetime.datetime(2016, 1, 1, 0, 0, tzinfo=timezone.utc), 'US/Eastern'),
            datetime.datetime(2016, 1, 1, tzinfo=tz.gettz('US/Eastern'))
        )

    def test_parse(self):
        parsed = self.parser.parse(self.test_string)

        self.assertEqual(parsed['title'], 'Tingling of the spine')
        expected = datetime.datetime(2012, 1, 2, tzinfo=tz.gettz('US/Eastern'))
        self.assertEqual(parsed['publish'], expected)
        self.assertEqual(parsed['body'], 'Extraordinary claims require extraordinary evidence!')
        self.assertEqual(parsed['timezone'], 'US/Eastern')

        # Check that extra_field is parsed
        string = u"title: My title\nextra_field: Something arbitrary\n\n\nThe body.\n"
        parsed = self.parser.parse(string)
        self.assertTrue('title' in parsed.keys())
        self.assertTrue('extra_field' in parsed.keys())

    def test_render(self):
        fd, file_path = tempfile.mkstemp(suffix='.txt')
        os.close(fd)  # close the descriptor mkstemp leaves open
        user = User.objects.create(username='******')
        blog = Blog.objects.create(
            backend_file=file_path,
            backend_class='stardate.backends.local_file.LocalFileBackend',
            name='test blog',
            user=user,
        )
        post = Post.objects.create(
            blog=blog,
            title='Test title',
            publish=datetime.datetime(2013, 6, 1),
            timezone='US/Eastern',
            body='The body.',
        )

        packed = self.parser.pack([post.serialized()])
        rendered = self.parser.render(post.serialized())

        self.assertTrue('publish: 2013-06-01 12:00 AM -0400' in rendered)

        parsed = self.parser.parse(rendered)

        self.assertEqual(parsed.get('title'), post.title)
        self.assertEqual(parsed.get('timezone'), post.timezone)
        self.assertEqual(parsed.get('body'), post.body.raw)

        self.assertEqual(rendered, packed)

    def test_unpack(self):
        content = self.test_string
        post_list = self.parser.unpack(content)

        # The file has one post to unpack
        self.assertEqual(len(post_list), 1)

        post = post_list[0]

        self.assertEqual(post.get('title'), 'Tingling of the spine')

        self.assertEqual(
            post.get('publish'),
            datetime.datetime(2012, 1, 2, tzinfo=tz.gettz('US/Eastern'))
        )

        self.assertEqual(post.get('body'), 'Extraordinary claims require extraordinary evidence!')

    @patch('stardate.parsers.logger')
    def test_bad_string(self, mock_logging):
        content = 'bad string\n\r'
        posts = self.parser.unpack(content)

        self.assertEqual(posts, [])
        mock_logging.warn.assert_called_once_with(
            'Not enough information found to parse string.')
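
For reference, the on-disk format these tests exercise is a block of key: value front matter separated from the post body by two blank lines. A single-post file that FileParser.unpack would accept might look like the following (the publish value is illustrative, standing in for the elided TIMESTAMP constant):

publish: 01-02-2012 12:00AM
timezone: US/Eastern
title: Tingling of the spine


Extraordinary claims require extraordinary evidence!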