Example 1
class FileParserTestCase(TestCase):
    def setUp(self):
        self.parser = FileParser()
        self.test_string = "publish: 2012-01-02 12:00 AM\ntitle: Tingling of the spine\n\n\nExtraordinary claims require extraordinary evidence!"

    def tearDown(self):
        Blog.objects.all().delete()
        User.objects.all().delete()
        UserSocialAuth.objects.all().delete()

    def test_pack(self):
        blog = create_blog()
        blog.backend.client_class = MockDropboxClient
        create_post(title="First post", blog=blog)
        create_post(title="Second post", blog=blog)

        post_list = blog.get_serialized_posts()
        # packed = self.parser.pack(post_list)

        self.assertIsInstance(post_list, list)
        self.assertEqual(len(post_list), 2)
        # self.assertIsInstance(packed, basestring)

    def test_parse(self):
        parsed = self.parser.parse(self.test_string)

        self.assertEqual(parsed['title'], 'Tingling of the spine')
        self.assertEqual(parsed['publish'], datetime.datetime(2012, 1, 2, 8, 0, tzinfo=timezone.utc))
        self.assertEqual(parsed['body'], 'Extraordinary claims require extraordinary evidence!')

    def test_unpack(self):
        content = self.test_string
        post_list = self.parser.unpack(content)

        # The file has one post to unpack
        self.assertEqual(len(post_list), 1)
        self.assertEqual(post_list[0].get('title'), 'Tingling of the spine')
        self.assertEqual(post_list[0].get('publish'), datetime.datetime(2012, 1, 2, 8, 0, tzinfo=timezone.utc))
        self.assertEqual(post_list[0].get('body'), 'Extraordinary claims require extraordinary evidence!')
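The test_string in setUp doubles as documentation of the flat file format the parser works with: "key: value" header lines, three consecutive newlines, then the raw post body. A minimal sketch of driving the parser by hand, assuming FileParser is importable from stardate.parsers (the module patched in Example 5):

from stardate.parsers import FileParser  # module path taken from the @patch target in Example 5

parser = FileParser()
raw = ("publish: 2012-01-02 12:00 AM\n"
       "title: Tingling of the spine"
       "\n\n\n"
       "Extraordinary claims require extraordinary evidence!")
post = parser.parse(raw)
# parse() returns a dict of the header fields plus a 'body' key;
# 'publish' comes back as a timezone-aware datetime.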
Example 2
class LocalFileBackend(StardateBackend):
    def __init__(self, *args, **kwargs):
        super(LocalFileBackend, self).__init__(*args, **kwargs)
        self.name = u'localfile'
        self.parser = FileParser()

    def write_file(self, file_path, content):
        with open(file_path, 'w') as f:
            f.write(content)

    def get_file(self, path):
        if os.path.exists(path):
            with open(path, 'r') as f:
                content = f.read()
        else:
            content = None
        return content

    def get_post(self, path):
        if os.path.exists(path):
            content = self.get_file(path)
            post = self.parser.parse(content)
        else:
            post = {}
        return post

    def _list_path(self, path):
        paths = []
        for filename in os.listdir(path):
            paths.append(os.path.join(path, filename))
        return paths

    @property
    def last_sync(self):
        # File modification time, returned as an aware UTC datetime
        modified = time.ctime(os.path.getmtime(self.blog.backend_file))
        return make_aware(parse(modified), utc)
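A quick usage sketch for the helpers above, assuming the class lives at stardate.backends.local_file.LocalFileBackend (the dotted path used as backend_class in Example 5) and that the base class requires no arguments:

import tempfile

from stardate.backends.local_file import LocalFileBackend  # path from Example 5's backend_class

backend = LocalFileBackend()
path = tempfile.mkstemp(suffix='.md')[1]  # throwaway file for the demo

backend.write_file(path, 'title: Hello\n\n\nFirst body.')
print(backend.get_file(path))        # the file contents as a string
print(backend.get_file('/missing'))  # None: get_file swallows missing paths
print(backend.get_post(path))        # parsed dict, or {} when the path is absent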
Example 3
class DropboxBackend(StardateBackend):
    def __init__(self, *args, **kwargs):
        super(DropboxBackend, self).__init__(*args, **kwargs)
        self.client = self.get_dropbox_client()
        self.name = u'dropbox'
        self.parser = FileParser()

    def get_file(self, path):
        return self.client.get_file(path).read()

    def write_file(self, file_path, content):
        return self.client.put_file(file_path, content, overwrite=True)

    def get_social_auth(self):
        return self.blog.user.social_auth.get(provider='dropbox')

    def get_post(self, path):
        try:
            content = self.get_file(path)
            post = self.parser.parse(content)
        except Exception:
            post = {}
        return post

    def get_access_token(self):
        extra_data = self.get_social_auth().extra_data
        try:
            if isinstance(extra_data, unicode):
                extra_data = json.loads(extra_data)
        except NameError:
            pass
        return extra_data.get('access_token')

    def get_cursor(self):
        extra_data = self.get_social_auth().extra_data
        try:
            if isinstance(extra_data, unicode):
                extra_data = json.loads(extra_data)
        except NameError:
            pass
        return extra_data.get('cursor')

    def get_dropbox_client(self):
        sess = session.DropboxSession(APP_KEY, APP_SECRET, ACCESS_TYPE)
        token = self.get_access_token()
        sess.set_token(token['oauth_token'], token['oauth_token_secret'])
        return client.DropboxClient(sess)

    def delta(self):
        delta = self.client.delta(cursor=self.get_cursor())
        self.save_cursor(delta.get('cursor'))
        return delta

    def _list_path(self, path='/', hash=None):
        """
        List the contents of a path on the backend. The content hash from
        the previous call is cached and sent with the metadata request, so
        the cached path list can be reused when nothing under the path has
        changed (Dropbox answers with a 304 in that case).

        """
        paths = cache.get('paths', [])
        meta_hash = cache.get('hash', None)

        try:
            meta = self.client.metadata(path, hash=meta_hash)
            cache.delete('paths')
            cache.set('hash', meta['hash'])
        except rest.ErrorResponse as err:
            if err.status == 304:
                return paths
            raise

        for content in meta.get('contents', []):
            paths.append(content['path'])
        cache.set('paths', paths)
        return paths

    def get_source_list(self):
        paths = cache.get('paths') or self._list_path()
        source_list = ((0, u'---'),)

        # Instead of using the index, could use slugify
        try:
            for index, path in enumerate(paths):
                source_list += ((index + 1, path),)
        except (AttributeError, TypeError):
            pass
        return source_list

    def save_cursor(self, cursor):
        social_auth = self.get_social_auth()
        extra_data = social_auth.extra_data
        try:
            if isinstance(extra_data, unicode):
                extra_data = json.loads(extra_data)
        except NameError:
            pass
        extra_data['cursor'] = cursor
        social_auth.extra_data = extra_data
        social_auth.save()

    @property
    def last_sync(self):
        return parse(self.client.metadata(self.blog.backend_file)['modified'])
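The cursor plumbing above gives the backend incremental sync: delta() asks Dropbox only for entries changed since the cursor stored on the user's UserSocialAuth record, and save_cursor() persists the fresh cursor for the next run. A hedged sketch of a consumer of that loop; the entry layout follows the Dropbox v1 delta API, where each entry is a (path, metadata) pair and metadata is None for deletions:

# Hypothetical sync step built on the methods above;
# assumes `backend` is a DropboxBackend wired to a blog and its social auth.
delta = backend.delta()                 # changes since the saved cursor
for path, metadata in delta['entries']:
    if metadata is None:
        continue                        # the path was deleted remotely
    post = backend.get_post(path)       # parse the changed file into a dict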
Example 4
class LocalFileBackend(StardateBackend):
    def __init__(self):
        self.name = u'localfile'
        self.parser = FileParser()
        self.social_auth = None

    def get_name(self):
        return self.name

    def set_social_auth(self, *args, **kwargs):
        return

    def serialize_posts(self, posts):
        """
        Return a list of field dictionaries, one per Post
        """
        posts_as_dicts = []
        serialized = serialize(
            'python',
            posts,
            fields=('title', 'slug', 'publish', 'stardate', 'body')
        )
        for post in serialized:
            posts_as_dicts.append(post['fields'])
        return posts_as_dicts

    def _get_post_path(self, folder, post):
        """
        Dynamically guess post file path from slug / blog folder
        """
        filename = post['slug']
        filename = '{0}.md'.format(filename)
        path = os.path.join(folder, filename)
        return path

    def _posts_from_file(self, file_path):
        """
        Return list of post dictionaries from file
        """
        if os.path.exists(file_path):
            with open(file_path, 'r') as f:
                content = f.read()
            posts = self.parser.unpack(content)
        else:
            posts = []
        return posts

    def _posts_from_dir(self, folder):
        """
        Get post dicts from files in a directory
        """
        files = os.listdir(folder)
        remote_posts = []
        for filename in files:
            # os.listdir returns bare names; join them with the folder path
            with open(os.path.join(folder, filename), 'r') as f:
                remote_post = f.read()
            remote_post = self.parser.parse(remote_post)
            remote_posts.append(remote_post)

        return remote_posts

    def push(self, posts):
        """
        Render posts to files

        posts: List of Post object instances
        """
        # Grab the file or folder path associated
        # with a blog
        blog_path = posts[0].blog.backend_file

        # Separate blog path into directory and filename
        blog_dir, blog_file = os.path.split(blog_path)

        # Pushing works differently depending on whether
        # we are using a single file or a directory of files
        if blog_file:
            responses = [self._push_blog_file(blog_path, posts)]

        else:
            responses = self._push_post_files(blog_dir, posts)

        return responses

    def _push_blog_file(self, file_path, posts):
        """
        Update posts in a single blog file
        """
        remote_posts = self._posts_from_file(file_path)

        # Use serialized version of posts to find
        # and update
        local_posts = self.serialize_posts(posts)

        # Update remote_posts with local versions
        # FIXME: n^2 crawl; use stardate as keys
        # in dicts instead of lists?
        for local_post in local_posts:
            exists = False
            for remote_post in remote_posts:
                if remote_post.get('stardate'):
                    if local_post['stardate'] == remote_post['stardate']:
                        exists = True
                        remote_post.update(local_post)
                # Post may exist on backend with uuid, but
                # also exist in local from last pull where
                # uuid was assigned. Use 'title' field as
                # backup
                else:
                    if local_post['title'] == remote_post['title']:
                        exists = True
                        remote_post.update(local_post)
            # Add new remote post if it does not exist yet
            if not exists:
                remote_posts.append(local_post)

        # Turn post list back into string
        content = self.parser.pack(remote_posts)
        with open(file_path, 'w') as f:
            f.write(content)
        return

    def _push_post_files(self, folder, posts):
        """
        Update posts in multiple files
        """
        local_posts = self.serialize_posts(posts)

        for local_post in local_posts:
            # Generate the post file path dynamically
            post_path = self._get_post_path(folder, local_post)

            # Get the existing remote post as a post dict
            if os.path.exists(post_path):
                with open(post_path, 'r') as f:
                    remote_post = f.read()
                    remote_post = self.parser.parse(remote_post)
            else:
                remote_post = {}

            # Update the contents of the remote post
            remote_post.update(local_post)
            content = self.parser.render(remote_post)
            with open(post_path, 'w') as f:
                f.write(content)
        return

    def _update_from_dict(self, blog, post_dict, post=None):
        """
        Create or update a Post from a dictionary
        """
        # If a post is not provided, try to fetch it
        if not post:
            if 'stardate' in post_dict:
                post = Post.objects.filter(
                    blog=blog,
                    stardate=post_dict['stardate']
                )
                if post:
                    post = post[0]
            if not post:
                post = Post(blog=blog)

        # Update from dict values
        for att, value in post_dict.items():
            setattr(post, att, value)
        post.save(push=False)
        return post

    def pull(self, blog):
        """
        Update local posts from remote source

        blog: Blog instance
        """
        blog_path = blog.backend_file
        blog_dir, blog_file = os.path.split(blog_path)

        # Extract remote posts from single file
        if blog_file:
            remote_posts = self._posts_from_file(blog_path)
        # Extract posts from multiple files
        else:
            remote_posts = self._posts_from_dir(blog_dir)

        updated_list = []
        for remote_post in remote_posts:
            updated = self._update_from_dict(blog, remote_post)
            updated_list.append(updated)

        return updated_list
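Taken together, push() renders Post objects out to the blog's backend file (or to one file per post when backend_file points at a directory) and pull() reads them back into Post rows. A minimal round trip in the style of Example 5's setup; the blog creation is elided, so treat the wiring as illustrative:

import tempfile

backend = LocalFileBackend()
file_path = tempfile.mkstemp(suffix='.md')[1]
# ... create `blog` with backend_file=file_path and a couple of posts,
# as in Example 5's test_pack ...
backend.push(list(blog.posts.all()))  # write the posts into the blog file
updated = backend.pull(blog)          # read the file back into Post objects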
Example 5
class FileParserTestCase(TestCase):
    def setUp(self):
        self.parser = FileParser()
        self.test_string = "publish: {0}\ntimezone: US/Eastern\ntitle: Tingling of the spine\n\n\nExtraordinary claims require extraordinary evidence!".format(TIMESTAMP)

    def tearDown(self):
        Blog.objects.all().delete()
        User.objects.all().delete()

    def test_pack(self):
        file_path = tempfile.mkstemp(suffix='.txt')[1]
        user = User.objects.create(username='******')
        blog = Blog.objects.create(
            backend_file=file_path,
            backend_class='stardate.backends.local_file.LocalFileBackend',
            name='test blog',
            user=user,
        )

        Post.objects.create(
            blog=blog,
            title='My first post',
            publish=datetime.datetime(2015, 1, 1, 6, 0),
            body='This is the first post.'
        )

        Post.objects.create(
            blog=blog,
            title='My second post',
            publish=datetime.datetime(2015, 1, 2, 6, 0),
            body='This is the second post.'
        )

        post_list = [post.serialized() for post in blog.posts.all()]

        packed = self.parser.pack(post_list)

        self.assertIsInstance(post_list, list)
        self.assertEqual(len(post_list), 2)
        try:
            self.assertIsInstance(packed, basestring)
        except NameError:
            self.assertIsInstance(packed, str)

        self.assertTrue(u'title: {0}'.format(post_list[0]['title']) in packed)
        self.assertTrue(u'title: {0}'.format(post_list[1]['title']) in packed)
        self.assertTrue(u'stardate: {0}'.format(post_list[0]['stardate']) in packed)
        self.assertTrue(u'stardate: {0}'.format(post_list[1]['stardate']) in packed)
        self.assertTrue(u'\n\n\n{0}'.format(post_list[0]['body']) in packed)
        self.assertTrue(u'\n\n\n{0}'.format(post_list[1]['body']) in packed)
        self.assertTrue(u'publish: {0}'.format(post_list[0]['publish']) in packed)
        self.assertTrue(u'publish: {0}'.format(post_list[1]['publish']) in packed)

    def test_parse_publish(self):
        timestamp = '01-01-2015 06:00AM+0000'
        expected = datetime.datetime(2015, 1, 1, 6, 0, tzinfo=timezone.utc)

        self.assertEqual(self.parser.parse_publish(timestamp), expected)
        self.assertEqual(self.parser.parse_publish(expected), expected)

        self.assertEqual(
            self.parser.parse_publish('2016-01-01 00:00:00 -0500'),
            datetime.datetime(2016, 1, 1, tzinfo=tz.gettz('US/Eastern'))
        )

        self.assertEqual(
            self.parser.parse_publish('2016-01-01'),
            datetime.datetime(2016, 1, 1, tzinfo=timezone.utc)
        )

        self.assertEqual(
            self.parser.parse_publish('2016-01-01 00:00:00'),
            datetime.datetime(2016, 1, 1, tzinfo=timezone.utc)
        )

        self.assertEqual(
            self.parser.parse_publish('2016-01-01 12AM', 'US/Eastern'),
            datetime.datetime(2016, 1, 1, tzinfo=tz.gettz('US/Eastern'))
        )

        self.assertEqual(
            self.parser.parse_publish(datetime.date(2016, 1, 1)),
            datetime.datetime(2016, 1, 1, tzinfo=timezone.utc)
        )

        self.assertEqual(
            self.parser.parse_publish('2016-01-01 12AM', 'EST'),
            datetime.datetime(2016, 1, 1, tzinfo=tz.gettz('US/Eastern'))
        )

        self.assertEqual(
            self.parser.parse_publish('2016-07-01 12AM', 'US/Eastern'),
            datetime.datetime(2016, 7, 1, tzinfo=tz.gettz('US/Eastern'))
        )

        self.assertEqual(
            self.parser.parse_publish(datetime.datetime(2016, 1, 1, 0, 0, tzinfo=timezone.utc), 'US/Eastern'),
            datetime.datetime(2016, 1, 1, tzinfo=tz.gettz('US/Eastern'))
        )

    def test_parse(self):
        parsed = self.parser.parse(self.test_string)

        self.assertEqual(parsed['title'], 'Tingling of the spine')
        expected = datetime.datetime(2012, 1, 2, tzinfo=tz.gettz('US/Eastern'))
        self.assertEqual(parsed['publish'], expected)
        self.assertEqual(parsed['body'], 'Extraordinary claims require extraordinary evidence!')
        self.assertEqual(parsed['timezone'], 'US/Eastern')

        # Check that extra_field is parsed
        string = u"title: My title\nextra_field: Something arbitrary\n\n\nThe body.\n"
        parsed = self.parser.parse(string)
        self.assertTrue('title' in parsed.keys())
        self.assertTrue('extra_field' in parsed.keys())

    def test_render(self):
        file_path = tempfile.mkstemp(suffix='.txt')[1]
        user = User.objects.create(username='******')
        blog = Blog.objects.create(
            backend_file=file_path,
            backend_class='stardate.backends.local_file.LocalFileBackend',
            name='test blog',
            user=user,
        )
        post = Post.objects.create(
            blog=blog,
            title='Test title',
            publish=datetime.datetime(2013, 6, 1),
            timezone='US/Eastern',
            body='The body.',
        )

        packed = self.parser.pack([post.serialized()])
        rendered = self.parser.render(post.serialized())

        self.assertTrue('publish: 2013-06-01 12:00 AM -0400' in rendered)

        parsed = self.parser.parse(rendered)

        self.assertEqual(parsed.get('title'), post.title)
        self.assertEqual(parsed.get('timezone'), post.timezone)
        self.assertEqual(parsed.get('body'), post.body.raw)

        self.assertEqual(rendered, packed)

    def test_unpack(self):
        content = self.test_string
        post_list = self.parser.unpack(content)

        # The file has one post to unpack
        self.assertEqual(len(post_list), 1)

        post = post_list[0]

        self.assertEqual(post.get('title'), 'Tingling of the spine')

        self.assertEqual(
            post.get('publish'),
            datetime.datetime(2012, 1, 2, tzinfo=tz.gettz('US/Eastern'))
        )

        self.assertEqual(post.get('body'), 'Extraordinary claims require extraordinary evidence!')

    @patch('stardate.parsers.logger')
    def test_bad_string(self, mock_logging):
        content = 'bad string\n\r'
        posts = self.parser.unpack(content)

        self.assertEqual(posts, [])
        mock_logging.warn.assert_called_once_with(
            'Not enough information found to parse string.')