def get(self, request, format=None):
    """Fetch the feed named by ?channel=, convert each entry into a link
    dict, persist the links through LinkSerializer, and return the
    serialized list.

    NOTE(review): the raw `channel` GET parameter is passed directly to the
    feed parser as a URL — confirm upstream validation.
    """
    def load(durl, greet):
        """Parse the feed at `durl` with feedparser and return a list of
        link dicts ready for LinkSerializer.

        `greet` is accepted for interface parity with the other loaders in
        this file but is unused by this variant.
        """
        def remove_tags(text):
            # Strip HTML tags and newlines, escape double quotes, normalise
            # curly apostrophes, then drop remaining non-ASCII characters.
            text = TAG_RE.sub('', text)
            text = re.sub("\n", "", text)
            text = re.sub("\"", "\\\"", text)
            text = re.sub(u"(\u2018|\u2019)", "'", text)
            return "".join(filter(lambda x: ord(x) < 128, text))

        def entry_to_link_dict(entry):
            # Map one feedparser entry onto the dict shape LinkSerializer
            # expects.
            s = MLStripper()
            s.feed(entry.description)
            return {
                "title": remove_tags(entry.title),
                "url": entry.link,
                "linksource": urlparse(entry.link).netloc,
                "votes": "1",
                "description": remove_tags(s.get_data()),
            }

        try:
            # Hoisted out of the loop: the submitter is loop-invariant.
            user_id = User.objects.filter(username='******')[0].id
        except IndexError:
            # BUGFIX: previously returned None, which broke the
            # LinkSerializer(...) calls below; return an empty list instead.
            return []

        content = []
        for entry in parse(durl).entries:
            link = entry_to_link_dict(entry)
            link["submitter"] = user_id
            content.append(link)
        return content

    channel = request.GET['channel']
    content = load(channel, channel)
    serializer2 = LinkSerializer(content, many=True)
    serializer = LinkSerializer(data=content)
    logging.debug("value of 27 my var is %s", str(serializer))
    if serializer.is_valid():
        serializer.save()
    return Response(serializer2.data)
class UserSerializer(ModelActionSerializer):
    """Serializes User accounts, with per-action field subsets for the
    login and update flows."""

    urls = LinkSerializer(many=True, read_only=True)
    email = serializers.EmailField()
    password = serializers.CharField(write_only=True)
    membership = serializers.ReadOnlyField()

    class Meta:
        model = User
        fields = (
            'id',
            'email',
            'username',
            'password',
            'membership',
            'urls',
        )
        # Restrict which fields each serializer action exposes.
        action_fields = {
            'login': {'fields': ('email', 'password')},
            'update': {'fields': ('email', 'username')},
        }

    def create(self, validated_data):
        """Create the user, then hash and store the password when one was
        supplied."""
        user = super(UserSerializer, self).create(validated_data)
        try:
            raw_password = validated_data['password']
        except KeyError:
            return user
        user.set_password(raw_password)
        user.save()
        return user
def test_get_links(self):
    """The link-list endpoint must return exactly the serialized links
    belonging to the test profile."""
    response = self.client.get(reverse('link-list'))
    expected = LinkSerializer(
        Link.objects.filter(profile=self.profile), many=True
    ).data
    self.assertEqual(response.data, expected)
def post(self, request, *args, **kwargs):
    """Save a link submitted in the request body for the current user.

    Returns 201 with the link profile on success, otherwise 400 with the
    serializer's errors.
    """
    serializer = LinkSerializer(data=request.DATA)
    # BUGFIX: request.DATA['url'] raised KeyError (HTTP 500) when 'url'
    # was absent; .get() lets save_link fail gracefully into the 400 path.
    linkprofile = Link.objects.save_link(
        serializer, request.DATA.get('url'), request.user)
    if linkprofile:
        return Response(
            LinkProfileSerializer(linkprofile).data,
            status=status.HTTP_201_CREATED)
    return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def get(self, request):
    """Save the URL given in ?url= for the current user, flash a status
    message, and redirect to the home page."""
    url = request.GET.get('url', None)
    serializer = LinkSerializer(data=request.GET)
    linkprofile = Link.objects.save_link(serializer, url, request.user)
    if linkprofile:
        messages.info(request, 'Url saved')
    else:
        messages.error(request, 'You need to add an url')
    return redirect('home')
class ProfileSerializer(serializers.HyperlinkedModelSerializer):
    """Hyperlinked serializer for Profile, embedding the profile's links."""

    links = LinkSerializer(many=True)

    class Meta:
        model = Profile
        # Field order here fixes the key order of the serialized output.
        fields = (
            'url',
            'id',
            'first_name',
            'last_name',
            'display_name',
            'links',
        )
def get(self, request, format=None):
    """Return the top 20 links ordered by rank score then votes, caching
    the serialized payload for 30 minutes."""
    MEMCACHE_GREETINGS = 'APILINKS'
    timeout = 1800  # cache TTL in seconds (30 minutes)
    cached = cache.get(MEMCACHE_GREETINGS)
    if cached is not None:
        # Cache hit: the stored value is already serialized data.
        return Response(cached)
    content = Link.objects.all().order_by('-rank_score', '-votes')[:20]
    logging.debug("value of user my var is %s", str(content))
    # BUGFIX: the queryset was serialized without many=True, which DRF
    # rejects for iterables; pass many=True so the list serializes.
    serializer = LinkSerializer(content, many=True)
    cache.add(MEMCACHE_GREETINGS, serializer.data, timeout)
    return Response(serializer.data)
def to_representation(self, value):
    """Serialize a tagged object with the serializer matching its
    concrete model type."""
    dispatch = (
        (Link, LinkSerializer),
        (Image, ImageSerializer),
    )
    for model_cls, serializer_cls in dispatch:
        if isinstance(value, model_cls):
            return serializer_cls(value).data
    raise Exception('Unexpected type of tagged object')
def get(self, request, format=None):
    """Fetch a known health/news RSS feed selected by ?channel=, parse its
    items into link dicts, persist them via LinkSerializer, and return the
    serialized list. Unknown channels fall back to the WebMD feed.
    """
    def load(durl, greet):
        """Download the RSS XML at `durl` (cached under key `greet` for 30
        minutes), parse its channel items, and return a list of link dicts.
        """
        def remove_tags(text):
            # Strip HTML tags and newlines, escape double quotes, then drop
            # any remaining non-ASCII characters.
            text = TAG_RE.sub('', text)
            text = re.sub("\n", "", text)
            text = re.sub("\"", "\\\"", text)
            return "".join(filter(lambda x: ord(x) < 128, text))

        data = cache.get(greet)
        if data is None:
            # Cache miss: fetch the raw feed and store it.
            feed = urllib2.urlopen(durl)
            try:
                data = feed.read()
            finally:
                # BUGFIX: close the handle even if read() raises.
                feed.close()
            cache.add(greet, data, 1800)

        try:
            # Hoisted out of the item loop: the submitter id is
            # loop-invariant (was an N+1 query plus a per-item debug query).
            user_id = User.objects.filter(username='******')[0].id
        except IndexError:
            # Consistent with the feedparser-based loader: no submitter
            # account means no links (previously crashed with IndexError).
            return []

        content = []
        doc = ET.fromstring(data)
        for node in doc.findall('channel/item'):
            url = node.find('./link').text
            content.append({
                'title': remove_tags(node.find('./title').text),
                'description': remove_tags(node.find('./description').text),
                'url': url,
                'submitter': user_id,
                'linksource': urlparse(url).netloc,
                'votes': randrange(20),
            })
        return content

    # Channel keyword -> (feed URL, memcache key). Replaces a 9-branch
    # if/elif chain; note the original cached BBC under lowercase 'bbc'.
    feeds = {
        'BBC': ('http://feeds.bbci.co.uk/news/health/rss.xml', 'bbc'),
        'MEDLINEPLUS': ('http://www.nlm.nih.gov/medlineplus/feeds/news_en.xml', 'MEDLINEPLUS'),
        'PUNCH': ('http://www.latestnigeriannews.com/feed/punch/rss.xml', 'PUNCH'),
        'VANGUARD': ('http://www.vanguardngr.com/feed/', 'VANGUARD'),
        'SUNNEWS': ('http://sunnewsonline.com/new/?feed=rss2', 'SUNNEWS'),
        'GUARDIAN': ('http://www.latestnigeriannews.com/feed/guardian/rss.xml', 'GUARDIAN'),
        'THISDAY': ('http://www.latestnigeriannews.com/feed/thisday/rss.xml', 'THISDAY'),
        'DAILYTIMES': ('http://www.dailytimes.com.ng/rss/articles/all', 'DAILYTIMES'),
    }
    channel = request.GET['channel']
    url, cache_key = feeds.get(
        channel,
        ('http://rssfeeds.webmd.com/rss/rss.aspx?RSSSource=RSS_PUBLIC', 'WEBMD'))
    content = load(url, cache_key)

    serializer2 = LinkSerializer(content, many=True)
    serializer = LinkSerializer(data=content)
    logging.debug("value of 27 my var is %s", str(serializer))
    if serializer.is_valid():
        serializer.save()
    # BUGFIX: the original's trailing `return` was severed from its
    # Response(...) expression; rejoined here.
    return Response(serializer2.data)
class FolderGetSerializer(serializers.Serializer):
    """Composite payload for a folder view: the folder itself plus its
    child links, child folders, and sibling folders."""

    folder = FolderSerializer()
    children_links = LinkSerializer(many=True)
    children_folders = FolderSerializer(many=True)
    sibling_folders = FolderSerializer(many=True)
def get(self, request, format=None):
    """Fetch the feed named by ?channel=, convert each entry into a link
    dict, persist the links through LinkSerializer, and return the
    serialized list.

    NOTE(review): the raw `channel` GET parameter is passed directly to the
    feed parser as a URL — confirm upstream validation.
    """
    def load(durl, greet):
        """Parse the feed at `durl` with feedparser and return a list of
        link dicts ready for LinkSerializer.

        `greet` is accepted for interface parity with the other loaders in
        this file but is unused by this variant.
        """
        def remove_tags(text):
            # Strip HTML tags and newlines, escape double quotes, normalise
            # curly apostrophes, then drop remaining non-ASCII characters.
            text = TAG_RE.sub('', text)
            text = re.sub("\n", "", text)
            text = re.sub("\"", "\\\"", text)
            text = re.sub(u"(\u2018|\u2019)", "'", text)
            return "".join(filter(lambda x: ord(x) < 128, text))

        def entry_to_link_dict(entry):
            # Map one feedparser entry onto the dict shape LinkSerializer
            # expects.
            s = MLStripper()
            s.feed(entry.description)
            return {
                "title": remove_tags(entry.title),
                "url": entry.link,
                "linksource": urlparse(entry.link).netloc,
                "votes": "1",
                "description": remove_tags(s.get_data()),
            }

        try:
            # Hoisted out of the loop: the submitter is loop-invariant.
            user_id = User.objects.filter(username='******')[0].id
        except IndexError:
            # BUGFIX: previously returned None, which broke the
            # LinkSerializer(...) calls below; return an empty list instead.
            return []

        content = []
        for entry in parse(durl).entries:
            link = entry_to_link_dict(entry)
            link["submitter"] = user_id
            content.append(link)
        return content

    channel = request.GET['channel']
    content = load(channel, channel)
    serializer2 = LinkSerializer(content, many=True)
    serializer = LinkSerializer(data=content)
    logging.debug("value of 27 my var is %s", str(serializer))
    if serializer.is_valid():
        serializer.save()
    return Response(serializer2.data)
def get(self, request, format=None):
    """Fetch a known health/news RSS feed selected by ?channel=, parse its
    items into link dicts, persist them via LinkSerializer, and return the
    serialized list. Unknown channels fall back to the WebMD feed.
    """
    def load(durl, greet):
        """Download the RSS XML at `durl` (cached under key `greet` for 30
        minutes), parse its channel items, and return a list of link dicts.
        """
        def remove_tags(text):
            # Strip HTML tags and newlines, escape double quotes, then drop
            # any remaining non-ASCII characters.
            text = TAG_RE.sub('', text)
            text = re.sub("\n", "", text)
            text = re.sub("\"", "\\\"", text)
            return "".join(filter(lambda x: ord(x) < 128, text))

        data = cache.get(greet)
        if data is None:
            # Cache miss: fetch the raw feed and store it.
            feed = urllib2.urlopen(durl)
            try:
                data = feed.read()
            finally:
                # BUGFIX: close the handle even if read() raises.
                feed.close()
            cache.add(greet, data, 1800)

        try:
            # Hoisted out of the item loop: the submitter id is
            # loop-invariant (was an N+1 query plus a per-item debug query).
            user_id = User.objects.filter(username='******')[0].id
        except IndexError:
            # Consistent with the feedparser-based loader: no submitter
            # account means no links (previously crashed with IndexError).
            return []

        content = []
        doc = ET.fromstring(data)
        for node in doc.findall('channel/item'):
            url = node.find('./link').text
            content.append({
                'title': remove_tags(node.find('./title').text),
                'description': remove_tags(node.find('./description').text),
                'url': url,
                'submitter': user_id,
                'linksource': urlparse(url).netloc,
                'votes': randrange(20),
            })
        return content

    # Channel keyword -> (feed URL, memcache key). Replaces a 9-branch
    # if/elif chain; note the original cached BBC under lowercase 'bbc'.
    feeds = {
        'BBC': ('http://feeds.bbci.co.uk/news/health/rss.xml', 'bbc'),
        'MEDLINEPLUS': ('http://www.nlm.nih.gov/medlineplus/feeds/news_en.xml', 'MEDLINEPLUS'),
        'PUNCH': ('http://www.latestnigeriannews.com/feed/punch/rss.xml', 'PUNCH'),
        'VANGUARD': ('http://www.vanguardngr.com/feed/', 'VANGUARD'),
        'SUNNEWS': ('http://sunnewsonline.com/new/?feed=rss2', 'SUNNEWS'),
        'GUARDIAN': ('http://www.latestnigeriannews.com/feed/guardian/rss.xml', 'GUARDIAN'),
        'THISDAY': ('http://www.latestnigeriannews.com/feed/thisday/rss.xml', 'THISDAY'),
        'DAILYTIMES': ('http://www.dailytimes.com.ng/rss/articles/all', 'DAILYTIMES'),
    }
    channel = request.GET['channel']
    url, cache_key = feeds.get(
        channel,
        ('http://rssfeeds.webmd.com/rss/rss.aspx?RSSSource=RSS_PUBLIC', 'WEBMD'))
    content = load(url, cache_key)

    serializer2 = LinkSerializer(content, many=True)
    serializer = LinkSerializer(data=content)
    logging.debug("value of 27 my var is %s", str(serializer))
    if serializer.is_valid():
        serializer.save()
    # BUGFIX: the original's trailing `return` was severed from its
    # Response(...) expression; rejoined here.
    return Response(serializer2.data)