Example #1
    def get_last_media(self, num_images=5):
        # If there is a hit, get from cache
        if SocialNetworkItemCache.hit("instagram", num_images):
            return SocialNetworkItemCache.get("instagram",
                                              num_images).response_dict

        # Otherwise fetch data from instagram servers
        url = InstagramReader.URL.format(self.access_token)
        try:
            response = urlopen(url)
            data = json.load(response)
        except (HTTPError, HTTPException, ValueError) as e:
            if SocialNetworkItemCache.hit("instagram", num_images):
                return SocialNetworkItemCache.get("instagram",
                                                  num_images).response_dict
            return []

        # Guard against a missing 'meta' block so the lookup cannot
        # raise AttributeError on a malformed response
        meta = data.get('meta') or {}
        code = meta.get('code')

        instagram_images = []

        if code == 200:
            fetched_images = data.get('data', None)
            if fetched_images:
                instagram_images = fetched_images[:num_images]

        # Update the cache accordingly
        SocialNetworkItemCache.create("instagram", num_images,
                                      instagram_images)

        return instagram_images
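All of these readers share the same SocialNetworkItemCache interface (hit, get, create, and a response_dict attribute on cached entries), but the class itself is not part of this listing. A minimal in-memory sketch of the assumed API follows; the key scheme and the 15-minute TTL are illustrative assumptions, not values taken from the original project:

import time


class SocialNetworkItemCache(object):
    """Minimal in-memory sketch of the cache API the readers assume.

    Entries are keyed by network name, item count and any extra keyword
    arguments (e.g. rss_url). The TTL is an assumption for illustration.
    """

    _store = {}
    TTL = 15 * 60  # seconds; assumed, not from the original source

    def __init__(self, response):
        # Callers read the cached payload through .response_dict
        self.response_dict = response

    @classmethod
    def _key(cls, network, num_items, kwargs):
        return (network, num_items, tuple(sorted(kwargs.items())))

    @classmethod
    def hit(cls, network, num_items=None, **kwargs):
        entry = cls._store.get(cls._key(network, num_items, kwargs))
        return entry is not None and time.time() - entry[0] < cls.TTL

    @classmethod
    def get(cls, network, num_items=None, **kwargs):
        return cls._store[cls._key(network, num_items, kwargs)][1]

    @classmethod
    def create(cls, network, num_items=None, response=None, **kwargs):
        cls._store[cls._key(network, num_items, kwargs)] = (time.time(),
                                                            cls(response))

This signature accepts every call shape used below, from the positional create("instagram", num_images, instagram_images) to the keyword-heavy create("rss", num_items, response=..., rss_url=...).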
Example #2
	def get_last_tweets(self, num_tweets=5):

		# If there is a hit, get from cache
		if SocialNetworkItemCache.hit("twitter", num_tweets):
			return SocialNetworkItemCache.get("twitter", num_tweets).response_dict

		# Otherwise, get from Twitter
		try:
			tweets = self.api.GetUserTimeline(screen_name=self.username)[:num_tweets]
		except (HTTPError, HTTPException, ValueError, requests.exceptions.RequestException) as e:
			if SocialNetworkItemCache.hit("twitter", num_tweets):
				return SocialNetworkItemCache.get("twitter", num_tweets).response_dict
			return []

		tweet_list = []
		for tweet in tweets:
			tweet.created_at = dateutil.parser.parse(tweet.created_at)
			tweet_as_dict = {
				"id": tweet.id,
				"text": tweet.text,
				"created_at": tweet.created_at.isoformat()
			}
			tweet_list.append(tweet_as_dict)

		SocialNetworkItemCache.create("twitter", num_tweets, tweet_list)
		return tweet_list
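The Twitter example assumes a ready-made self.api client and a self.username attribute. With the python-twitter package, whose twitter.Api constructor and GetUserTimeline(screen_name=...) method match the calls above, a hypothetical constructor for the reader could look like this (the wiring is a sketch, not the original class):

import twitter  # the python-twitter package


class TwitterReader(object):
    def __init__(self, consumer_key, consumer_secret,
                 access_token_key, access_token_secret, username):
        # Hypothetical wiring; the original constructor is not shown here
        self.username = username
        self.api = twitter.Api(consumer_key=consumer_key,
                               consumer_secret=consumer_secret,
                               access_token_key=access_token_key,
                               access_token_secret=access_token_secret)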
Example #3
    def get_last_posts(self, num_posts=5):

        # If there is a hit, get from cache
        if SocialNetworkItemCache.hit("facebook", num_posts):
            return SocialNetworkItemCache.get("facebook",
                                              num_posts).response_dict

        parameters = {
            'access_token': self.access_token,
            'fields': ('type,created_time,link,permalink_url,message,'
                       'message_tags,name,picture,full_picture,source'),
            'limit': num_posts
        }

        try:
            response = requests.get(
                FacebookReader.GET_POSTS_URL.format(self.profile),
                params=parameters)
            # Default to an empty list so a payload without 'data'
            # cannot make the loop below iterate over None
            posts = response.json().get('data', [])
        except (HTTPError, HTTPException, ValueError,
                requests.exceptions.RequestException) as e:
            # If there is a hit, get from cache
            if SocialNetworkItemCache.hit("facebook", num_posts):
                return SocialNetworkItemCache.get("facebook",
                                                  num_posts).response_dict
            return []

        for post in posts:
            post['created_at'] = parser.parse(
                post.get('created_time')).isoformat()

        SocialNetworkItemCache.create("facebook", num_posts, posts)
        return posts
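FacebookReader.GET_POSTS_URL is referenced but never defined in this excerpt. Given the fields requested above, it plausibly points at the Graph API posts edge; the constant below, including the unversioned path, is a guess rather than the project's real value:

class FacebookReader(object):
    # Assumed Graph API endpoint; the real constant (and any version
    # prefix such as /v2.8/) is not shown in the original source.
    GET_POSTS_URL = "https://graph.facebook.com/{}/posts"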
Example #4
    def get_last_activity(self, num_pins=5):

        if SocialNetworkItemCache.hit("pinterest", num_pins):
            return SocialNetworkItemCache.get("pinterest",
                                              num_pins).response_dict

        try:
            response = urlopen(
                PinterestReader.LAST_PINS_URL.format(self.access_token,
                                                     num_pins))
            response_data = json.load(response)
        except (HTTPError, HTTPException, ValueError) as e:
            if SocialNetworkItemCache.hit("pinterest", num_pins):
                return SocialNetworkItemCache.get("pinterest",
                                                  num_pins).response_dict
            return {"user": None, "last_pins": []}

        last_pins = response_data.get('data', [])
        for pin in last_pins:
            creator_id = pin["creator"]["id"]
            pin["creator"] = self._get_user_data(creator_id)

        pinterest_data = {"user": self._get_my_data(), "last_pins": last_pins}
        SocialNetworkItemCache.create("pinterest", num_pins, pinterest_data)
        return pinterest_data
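The Pinterest example leans on two helpers, _get_user_data and _get_my_data, that are not part of this listing. A hypothetical _get_user_data, assuming a USER_URL class constant in the style of LAST_PINS_URL and the same urlopen/json pattern as above, might be:

    def _get_user_data(self, user_id):
        # Hypothetical helper: USER_URL is an assumed class constant,
        # not taken from the original source
        try:
            response = urlopen(
                PinterestReader.USER_URL.format(user_id, self.access_token))
            return json.load(response).get('data')
        except (HTTPError, HTTPException, ValueError):
            return None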
Example #5
    def get_last_media(self, num_images=5):

        # If there is a hit, get from cache
        if SocialNetworkItemCache.hit("fivehundred", num_images):
            return SocialNetworkItemCache.get("fivehundred",
                                              num_images).response_dict

        # Otherwise fetch data from 500px servers. In the original, these
        # two lines were indented inside the cache-hit branch after the
        # return, which made them unreachable and left url undefined.
        url = FiveHundredReader.URL.format(self.access_token, self.profile,
                                           num_images)
        try:
            response = urlopen(url)
            data = json.load(response)
        except (HTTPError, HTTPException, ValueError) as e:
            if SocialNetworkItemCache.hit("fivehundred", num_images):
                return SocialNetworkItemCache.get("fivehundred",
                                                  num_images).response_dict
            return []

        media = data.get('photos', None)
        print(media)
        return media
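Unlike its siblings, this reader never truncates the result to num_images and never writes the fresh payload back, so its cache can only be refreshed elsewhere. A symmetric ending, following the pattern of the other readers, would be:

        media = (data.get('photos') or [])[:num_images]
        SocialNetworkItemCache.create("fivehundred", num_images, media)
        return media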
Example #6
    def get_last_items(self, num_items=5):

        # If there is a hit, get from cache
        if SocialNetworkItemCache.hit("rss",
                                      num_items=num_items,
                                      rss_url=self.rss_url):
            return SocialNetworkItemCache.get(
                "rss", num_items=num_items, rss_url=self.rss_url).response_dict

        # If this URL is not cached, or the cached copy has expired,
        # fetch a fresh version of the RSS feed and update the cache
        url = self.rss_url

        try:
            data = urlopen(url)
            soup = BeautifulSoup(data, 'xml')
        except (HTTPError, HTTPException, ValueError) as e:
            if SocialNetworkItemCache.hit("rss",
                                          num_items=num_items,
                                          rss_url=self.rss_url):
                return SocialNetworkItemCache.get(
                    "rss", num_items=num_items,
                    rss_url=self.rss_url).response_dict
            return {'info': None, 'rss_items': [], 'url': self.url}

        # Feed-level info: title, description and categories
        title = soup.find('title')
        description = soup.find('description')
        categories_xml = soup.find_all('category')
        categories = []
        for category in categories_xml:
            categories.append(category.string)

        info = {
            'title': title.string,
            'description': description.string,
            'categories': categories
        }

        # Use BeautifulSoup to process the items
        items = soup.find_all('item', limit=num_items)
        rss_items = []
        for item in items:
            item_content = {}
            for content in item.contents:
                if content.name:
                    field_name = content.name.lower()
                    field_content = content.string
                    # If the field is a pubdate, convert it to date
                    if field_name == 'pubdate':
                        field_content = dateutil.parser.parse(
                            field_content).isoformat()
                    item_content[field_name] = field_content

            rss_items.append(item_content)

        rss_last_items = {
            'info': info,
            'rss_items': rss_items,
            'url': self.url
        }
        SocialNetworkItemCache.create("rss",
                                      num_items,
                                      response=rss_last_items,
                                      rss_url=self.rss_url)

        return rss_last_items
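All six readers expose the same cache-first call shape, so callers never care whether a response came from the network or from the cache. A hypothetical usage sketch (the constructor argument below is an assumption, since no constructor appears in this listing):

reader = RssReader(rss_url="https://example.com/feed.xml")  # hypothetical ctor
feed = reader.get_last_items(num_items=5)
for item in feed['rss_items']:
    print(item.get('title'), item.get('pubdate'))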