def bulk_delete(self, request):
    """Remove a batch of contacts from a media list.

    parameters: contacts: Array, listid: Int

    Returns a BulkResponse of the contacts that were removed; the
    updated media list rides along in `included`.
    """
    removed = []
    included = []
    if 'contacts' in request.data and 'listid' in request.data:
        media_list = MediaList.objects.get(pk=request.data['listid'])
        for contact_id in request.data['contacts']:
            try:
                contact = Contact.objects.get(pk=contact_id)
            except Contact.DoesNotExist:
                # Unknown ids are skipped silently (best-effort bulk op).
                continue
            # Detach the contact and remember it for the response payload.
            media_list.contacts.remove(contact)
            removed.append(contact)
        media_list.save()
        included.append(MediaListSerializer(media_list).data)
    serializer = ContactSerializer(removed, many=True)
    return BulkResponse(serializer.data, included,
                        len(serializer.data), len(serializer.data))
def feeds(self, request, pk=None):
    """Return every Feed attached to the contact identified by pk."""
    contact = self.get_contact_by_pk(request, pk)
    queryset = Feed.objects.filter(contact=contact)
    serializer = FeedSerializer(queryset, many=True)
    count = len(serializer.data)
    return BulkResponse(serializer.data, {}, count, count)
def bulk_update(self, request, pk=None, partial=None, *args, **kwargs):
    """Update many contacts in a single request.

    parameters: array of contact objects; each entry must carry an 'id'
    (entries without one are skipped).

    Returns a BulkResponse with the serialized updated contacts.
    """
    # BUG FIX: `partial` used to be popped from kwargs *inside* the loop,
    # so only the first contact honoured a requested partial update and
    # every later contact silently fell back to a full update. Resolve
    # the flag exactly once, before the loop.
    partial = kwargs.pop('partial', partial) or False
    serializers = []
    included = []
    for contact in request.data:
        if 'id' not in contact:
            continue
        instance = self.get_contact_by_pk(request, contact['id'])
        serializer = self.get_serializer(instance,
                                         data=contact,
                                         partial=partial)
        serializer.is_valid(raise_exception=True)
        self.perform_update(serializer)
        if getattr(instance, '_prefetched_objects_cache', None):
            # If 'prefetch_related' has been applied to a queryset, we need
            # to forcibly invalidate the prefetch cache on the instance.
            instance._prefetched_objects_cache = {}
        serializers.append(serializer.data['data'])
    return BulkResponse(serializers, included,
                        len(serializers), len(serializers))
def list(self, request):
    """List the filtered queryset, paginated when pagination applies."""
    queryset = self.filter_queryset(self.get_queryset())
    page = self.paginate_queryset(queryset)
    if page is None:
        serializer = self.get_serializer(queryset, many=True)
        count = len(serializer.data)
        return BulkResponse(serializer.data, {}, count, count)
    serializer = self.get_serializer(page, many=True)
    return self.get_paginated_response(serializer.data)
def public(self, request):
    """List every media list flagged as public, paginated when possible."""
    media_lists = MediaList.objects.filter(public_list=True)
    page = self.paginate_queryset(media_lists)
    if page is None:
        serializer = self.get_serializer(media_lists, many=True)
        count = len(serializer.data)
        return BulkResponse(serializer.data, {}, count, count)
    serializer = self.get_serializer(page, many=True)
    return self.get_paginated_response(serializer.data)
def clients(self, request):
    """Return the distinct client names used by the team's active lists.

    Excludes deleted/archived lists and empty/null client names.
    """
    # Retrieve all distinct clients
    user_profile = UserProfile.objects.get(user=request.user)
    # BUG FIX: the queryset already ended in
    # .values_list('client_name', flat=True).distinct(); a second
    # .values_list('client_name', flat=True) call on it was redundant.
    # Materialize to a list so len() and serialization are cheap/stable.
    clients = list(
        MediaList.objects.filter(
            team=user_profile.team,
            is_deleted=False,
            archived=False).exclude(
                Q(client_name__isnull=True) | Q(client_name='')).values_list(
                    'client_name', flat=True).distinct())
    # Create a false response structure
    data = {'clients': clients}
    return BulkResponse(data, {}, len(clients), len(clients))
def list(self, request):
    """List the current user's feeds, newest first, with pagination."""
    base = Feed.objects.filter(
        created_by=self.request.user).order_by('-created')
    queryset = self.filter_queryset(base)
    page = self.paginate_queryset(queryset)
    if page is None:
        serializer = self.get_serializer(queryset, many=True)
        count = len(serializer.data)
        return BulkResponse(serializer.data, {}, count, count)
    serializer = self.get_serializer(page, many=True)
    return self.get_paginated_response(serializer.data)
def emails(self, request, pk=None):
    """List the user's emails sent to the given media list, newest first."""
    media_list = self.get_media_list_by_pk(request, pk)
    queryset = Email.objects.filter(
        created_by=request.user,
        list_in=media_list).order_by('-created')
    page = self.paginate_queryset(queryset)
    if page is None:
        serializer = EmailSerializer(queryset, many=True)
        count = len(serializer.data)
        return BulkResponse(serializer.data, {}, count, count)
    serializer = EmailSerializer(page, many=True)
    return self.get_paginated_response(serializer.data)
def list(self, request):
    """List the filtered queryset; unauthenticated callers get a 401."""
    if not (request.user and request.user.is_authenticated()):
        raise NotAuthenticated()
    queryset = self.filter_queryset(self.get_queryset())
    page = self.paginate_queryset(queryset)
    if page is not None:
        serializer = self.get_serializer(page, many=True)
        return self.get_paginated_response(serializer.data)
    serializer = self.get_serializer(queryset, many=True)
    count = len(serializer.data)
    return BulkResponse(serializer.data, {}, count, count)
def headlines(self, request, pk=None):
    """Return recent RSS headlines for a contact, fetched from Elasticsearch.

    Builds a bool/should match query over the FeedURLs of the contact's
    feeds, sorted by publish date descending; page size/offset come from
    get_pagination(request). Returns an empty result when the contact
    has no feeds.
    """
    contact = self.get_contact_by_pk(request, pk)
    feeds = Feed.objects.filter(contact=contact)
    # Response to user
    headlines = []
    total_headlines = 0
    if len(feeds) > 0:
        limit, offset = get_pagination(request)
        query = {
            'size': limit,
            'from': offset,
            'query': {
                'bool': {
                    'should': []
                }
            },
            'sort': [{
                'data.PublishDate': {
                    'order': 'desc',
                    'mode': 'avg'
                }
            }]
        }
        # One `match` clause per non-empty feed URL.
        for feed in feeds:
            if feed.feed_url != '':
                query['query']['bool']['should'].append(
                    {'match': {
                        'data.FeedURL': feed.feed_url
                    }})
        es_headlines = es.search(index='headlines',
                                 doc_type='headline',
                                 body=query)
        # Defensive extraction: only trust hits/total keys that exist.
        if 'hits' in es_headlines and 'total' in es_headlines['hits']:
            total_headlines = es_headlines['hits']['total']
        if 'hits' in es_headlines and 'hits' in es_headlines['hits']:
            for es_headline in es_headlines['hits']['hits']:
                if ('_source' in es_headline
                        and 'data' in es_headline['_source']):
                    es_headline = format_es_response(es_headline)
                    headlines.append(es_headline['_source']['data'])
    return BulkResponse(headlines, {}, len(headlines), total_headlines)
def lists(self, request, pk=None):
    """List the team's non-archived media lists containing this contact."""
    contact = self.get_contact_by_pk(request, pk)
    user_profile = UserProfile.objects.get(user=request.user)
    queryset = MediaList.objects.filter(
        contacts=contact,
        team=user_profile.team,
        archived=False).order_by('-created')
    page = self.paginate_queryset(queryset)
    if page is None:
        serializer = MediaListSerializer(queryset, many=True)
        count = len(serializer.data)
        return BulkResponse(serializer.data, {}, count, count)
    serializer = MediaListSerializer(page, many=True)
    return self.get_paginated_response(serializer.data)
def archived(self, request):
    """List this user's emails, newest first; 401 when unauthenticated.

    NOTE(review): despite the action name, this filters archived=False
    (i.e. NON-archived, non-cancelled emails) — confirm with the client
    whether the intent was archived=True.
    """
    if self.request.user and self.request.user.is_authenticated():
        emails = Email.objects.filter(created_by=request.user,
                                      archived=False,
                                      cancel=False).order_by('-created')
        page = self.paginate_queryset(emails)
        if page is not None:
            serializer = self.get_serializer(page, many=True)
            return self.get_paginated_response(serializer.data)
        serializer = self.get_serializer(emails, many=True)
        return BulkResponse(serializer.data, {}, len(serializer.data),
                            len(serializer.data))
    raise NotAuthenticated()
def emails(self, request, pk=None):
    """List the team's delivered emails addressed to this contact."""
    contact = self.get_contact_by_pk(request, pk)
    user_profile = UserProfile.objects.get(user=request.user)
    queryset = Email.objects.filter(
        team=user_profile.team,
        to=contact.email,
        is_sent=True,
        delivered=True).order_by('-created')
    page = self.paginate_queryset(queryset)
    if page is None:
        serializer = EmailSerializer(queryset, many=True)
        count = len(serializer.data)
        return BulkResponse(serializer.data, {}, count, count)
    serializer = EmailSerializer(page, many=True)
    return self.get_paginated_response(serializer.data)
def bulk_send(self, request):
    """Send every not-yet-sent email named in request.data['emailids'].

    Unknown ids are ignored; raises ParseError when 'emailids' is absent.
    """
    if 'emailids' not in request.data:
        raise ParseError()
    pending = []
    for email_id in request.data['emailids']:
        try:
            email = self.get_email_by_pk(request, email_id)
            if not email.is_sent:
                pending.append(email)
        except Email.DoesNotExist:
            continue
    self._send_emails(pending)
    serializer = EmailSerializer(pending, many=True)
    count = len(serializer.data)
    return BulkResponse(serializer.data, {}, count, count)
def team(self, request):
    """List active media lists owned by teammates (not the current user)."""
    user_profile = UserProfile.objects.get(user=request.user)
    if user_profile.team:
        media_lists = MediaList.objects.filter(
            team=user_profile.team,
            is_deleted=False,
            archived=False).filter(~Q(created_by=self.request.user))
    else:
        # No team: respond with an empty queryset rather than erroring.
        media_lists = MediaList.objects.none()
    page = self.paginate_queryset(media_lists)
    if page is None:
        serializer = self.get_serializer(media_lists, many=True)
        count = len(serializer.data)
        return BulkResponse(serializer.data, {}, count, count)
    serializer = self.get_serializer(page, many=True)
    return self.get_paginated_response(serializer.data)
def team(self, request):
    """List teammates' non-archived emails; 401 when unauthenticated."""
    if not (self.request.user and self.request.user.is_authenticated()):
        raise NotAuthenticated()
    user_profile = UserProfile.objects.get(user=request.user)
    emails = Email.objects.filter(
        team=user_profile.team,
        archived=False).filter(~Q(created_by=self.request.user))
    page = self.paginate_queryset(emails)
    if page is None:
        serializer = self.get_serializer(emails, many=True)
        count = len(serializer.data)
        return BulkResponse(serializer.data, {}, count, count)
    serializer = self.get_serializer(page, many=True)
    return self.get_paginated_response(serializer.data)
def tweets(self, request, pk=None):
    """Return recent tweets for the contacts on a media list (via ES).

    Queries the 'tweets' index with one `term` clause per contact
    twitter username, newest first, fixed page of 20.

    NOTE(review): 'minimum_should_match': '100%' requires ALL should
    clauses to match; with more than one username that likely returns
    nothing — confirm intended semantics.
    """
    media_list = self.get_media_list_by_pk(request, pk)
    total_tweets = 0
    tweets = []
    if media_list.contacts.count() > 0:
        # NOTE(review): a related manager is always truthy, so the
        # `media_list.contacts and` guard is redundant.
        twitter_usernames = (media_list.contacts and
                             media_list.contacts.values_list(
                                 'twitter', flat=True))
        if len(twitter_usernames) > 0:
            query = {
                'size': 20,
                'from': 0,
                'query': {
                    'bool': {
                        'should': [],
                        'minimum_should_match': '100%'
                    }
                },
                'sort': [{
                    'data.CreatedAt': {
                        'order': 'desc',
                        'mode': 'avg'
                    }
                }],
            }
            for username in twitter_usernames:
                query['query']['bool']['should'].append(
                    {'term': {
                        'data.Username': username
                    }})
            es_tweets = es.search(index='tweets',
                                  doc_type='tweet',
                                  body=query)
            # Defensive extraction of hit count and hit documents.
            if 'hits' in es_tweets and 'total' in es_tweets['hits']:
                total_tweets = es_tweets['hits']['total']
            if 'hits' in es_tweets and 'hits' in es_tweets['hits']:
                for es_tweet in es_tweets['hits']['hits']:
                    if ('_source' in es_tweet
                            and 'data' in es_tweet['_source']):
                        tweets.append(es_tweet['_source']['data'])
    return BulkResponse(tweets, {}, len(tweets), total_tweets)
def headlines(self, request, pk=None):
    """Return recent RSS headlines for all contacts on a media list.

    Collects the FeedURLs of every valid, running feed belonging to the
    list's contacts, then runs one bool/should match query against the
    'headlines' ES index (fixed page of 20, newest first).
    """
    media_list = self.get_media_list_by_pk(request, pk)
    contacts = media_list.contacts.all()
    # Response to user
    headlines = []
    total_headlines = 0
    should = []
    for contact in contacts:
        feeds = Feed.objects.filter(contact=contact,
                                    valid_feed=True,
                                    running=True)
        for feed in feeds:
            if feed.feed_url != '':
                should.append({'match': {'data.FeedURL': feed.feed_url}})
    if len(should) > 0:
        query = {
            'size': 20,
            'from': 0,
            'query': {
                'bool': {
                    'should': should
                }
            },
            'sort': [{
                'data.PublishDate': {
                    'order': 'desc',
                    'mode': 'avg'
                }
            }]
        }
        es_headlines = es.search(index='headlines',
                                 doc_type='headline',
                                 body=query)
        # Defensive extraction: only trust hits/total keys that exist.
        if 'hits' in es_headlines and 'total' in es_headlines['hits']:
            total_headlines = es_headlines['hits']['total']
        if 'hits' in es_headlines and 'hits' in es_headlines['hits']:
            for es_headline in es_headlines['hits']['hits']:
                if ('_source' in es_headline
                        and 'data' in es_headline['_source']):
                    headlines.append(es_headline['_source']['data'])
    return BulkResponse(headlines, {}, len(headlines), total_headlines)
def instagram_timeseries(self, request, pk=None): contact = self.get_contact_by_pk(request, pk) # Response to user instagram_timeseries = [] total_instagram_timeseries = 0 if contact.instagram != '': query = { 'query': { 'bool': { 'must': [{ 'term': { 'data.Username': contact.instagram } }] } }, 'sort': [{ 'data.CreatedAt': { 'order': 'desc', 'mode': 'avg' } }] } es_instagram_timeseries = es.search(index='timeseries', doc_type='instagram', body=query) if ('hits' in es_instagram_timeseries and 'total' in es_instagram_timeseries['hits']): total_instagram_timeseries = es_instagram_timeseries['hits'][ 'total'] if ('hits' in es_instagram_timeseries and 'hits' in es_instagram_timeseries['hits']): for ts in es_instagram_timeseries['hits']['hits']: if ('_source' in ts and 'data' in ts['_source']): # ts = format_es_response(ts) instagram_timeseries.append(ts['_source']['data']) return BulkResponse(instagram_timeseries, {}, len(instagram_timeseries), total_instagram_timeseries)
def cancel_scheduled(self, request):
    """Cancel all of the user's future-scheduled emails; 401 otherwise.

    Marks each matching email cancel=True and returns the cancelled set.
    NOTE(review): the filter requires is_sent=True and delivered=False
    for a "scheduled" email — presumably is_sent marks "queued", not
    "delivered"; confirm against the Email model's semantics.
    """
    if self.request.user and self.request.user.is_authenticated():
        now = datetime.datetime.today()
        emails = Email.objects.filter(
            created_by=request.user,
            cancel=False,
            is_sent=True,
            delivered=False,
            send_at__gte=now).order_by('-created')
        # We just have to cancel each emails
        for email in emails:
            email.cancel = True
            email.save()
        serializer = EmailSerializer(emails, many=True)
        return BulkResponse(serializer.data, {}, len(serializer.data),
                            len(serializer.data))
    raise NotAuthenticated()
def scheduled(self, request):
    """List the user's future-scheduled, uncancelled emails, newest first."""
    if not (self.request.user and self.request.user.is_authenticated()):
        raise NotAuthenticated()
    now = datetime.datetime.today()
    emails = Email.objects.filter(
        created_by=request.user,
        cancel=False,
        is_sent=True,
        send_at__gte=now).order_by('-created')
    page = self.paginate_queryset(emails)
    if page is None:
        serializer = self.get_serializer(emails, many=True)
        count = len(serializer.data)
        return BulkResponse(serializer.data, {}, count, count)
    serializer = self.get_serializer(page, many=True)
    return self.get_paginated_response(serializer.data)
def tweets(self, request, pk=None): contact = self.get_contact_by_pk(request, pk) # Response to user tweets = [] total_tweets = 0 if contact.twitter != '': limit, offset = get_pagination(request) query = { 'size': limit, 'from': offset, 'query': { 'bool': { 'should': [{ 'term': { 'data.Username': contact.twitter } }], 'minimum_should_match': '100%' } }, 'sort': [{ 'data.CreatedAt': { 'order': 'desc', 'mode': 'avg' } }], } es_tweets = es.search(index='tweets', doc_type='tweet', body=query) if 'hits' in es_tweets and 'total' in es_tweets['hits']: total_tweets = es_tweets['hits']['total'] if 'hits' in es_tweets and 'hits' in es_tweets['hits']: for es_tweet in es_tweets['hits']['hits']: if ('_source' in es_tweet and 'data' in es_tweet['_source']): es_tweet = format_es_response(es_tweet) es_tweet['type'] = 'tweets' tweets.append(es_tweet['_source']['data']) return BulkResponse(tweets, {}, len(tweets), total_tweets)
def instagram_timeseries(self, request, pk=None):
    """Bulk-fetch Instagram timeseries docs for several contacts.

    parameters: ids: Array of contact pks, days: Int (lookback window;
    0 falls back to 7 days).

    ES document ids follow the '<username>-<YYYY-MM-DD>' convention, so
    we enumerate one id per username per day and use mget. Raises
    ParseError when 'ids' or 'days' is missing.
    """
    if 'ids' in request.data and 'days' in request.data:
        timeseries = []
        instagram_usernames = []
        # Resolve contact pks to instagram handles, skipping blanks.
        # NOTE(review): Contact.objects.get will raise DoesNotExist on a
        # bad id here (unlike sibling bulk endpoints, which skip them).
        for contact_id in request.data['ids']:
            contact = Contact.objects.get(pk=contact_id)
            if contact.instagram != '':
                instagram_usernames.append(contact.instagram)
        if len(instagram_usernames) > 0:
            # Set how many dates we want to look behind
            default_date = 7
            if request.data['days'] != 0:
                default_date = request.data['days']
            # Add all instagram ids to ES ids
            elastic_ids = []
            for instagram in instagram_usernames:
                if instagram != '':
                    # Python 2 xrange: one id per day of the window.
                    for i in xrange(0, default_date):
                        consider_date = (datetime.date.today() -
                                         datetime.timedelta(days=i))
                        single_date = consider_date.strftime('%Y-%m-%d')
                        elastic_ids.append(instagram + '-' + single_date)
            if len(elastic_ids) > 0:
                query = {'ids': elastic_ids}
                es_timeseries = es.mget(index='timeseries',
                                        doc_type='instagram',
                                        body=query)
                if 'docs' in es_timeseries:
                    for ts in es_timeseries['docs']:
                        # mget reports per-id misses via 'found'.
                        if ts['found']:
                            if '_source' in ts and 'data' in ts['_source']:
                                timeseries.append(ts['_source']['data'])
        return BulkResponse(timeseries, {}, len(timeseries),
                            len(timeseries))
    raise ParseError()
def contacts(self, request, pk=None):
    """Paginated, orderable, searchable listing of a media list's contacts.

    Applies DRF OrderingFilter then SearchFilter (exact-match fields set
    on the fly below), newest first. Paginated responses also include
    the contacts' current and past employer publications in `included`.
    """
    media_list = self.get_media_list_by_pk(request, pk)
    # NOTE(review): a related manager is always truthy, so the `and` is
    # redundant here.
    queryset = media_list.contacts and media_list.contacts.get_queryset()
    queryset = OrderingFilter().filter_queryset(request, queryset, self)
    # We have to create a search filter
    # (SearchFilter reads search_fields off the view instance; '=' means
    # exact match.)
    self.search_fields = (
        '=first_name',
        '=last_name',
        '=email',
        '=employers__name',
        '=custom_fields__value',
    )
    queryset = SearchFilter().filter_queryset(request, queryset, self)
    queryset = queryset.order_by('-created')
    page = self.paginate_queryset(queryset)
    if page is not None:
        included = []
        # Initialize included
        publications = []
        # Gather each contact's current and past employers for sideloading.
        for contact in page:
            publication = []
            if contact.employers.count() > 0:
                publication += list(contact.employers.all())
            if contact.past_employers.count() > 0:
                publication += list(contact.past_employers.all())
            if len(publication) > 0:
                publications += publication
        if len(publications) > 0:
            pub_serializer = PublicationSerializer(publications, many=True)
            included = pub_serializer.data
        serializer = ContactSerializer(page, many=True)
        return self.get_paginated_response(serializer.data, included)
    serializer = ContactSerializer(queryset, many=True)
    return BulkResponse(serializer.data, {}, len(serializer.data),
                        len(serializer.data))
def move(self, request):
    """Move a batch of contacts from one media list to another.

    parameters: contacts: Array, fromList: Int, toList: Int

    Returns a BulkResponse of the moved contacts; both updated lists
    ride along in `included`. Unknown lists yield an empty response.
    """
    moved = []
    included = []
    required = ('contacts', 'fromList', 'toList')
    if all(key in request.data for key in required):
        try:
            from_list = MediaList.objects.get(pk=request.data['fromList'])
            to_list = MediaList.objects.get(pk=request.data['toList'])
            for contact_id in request.data['contacts']:
                try:
                    contact = Contact.objects.get(pk=contact_id)
                except Contact.DoesNotExist:
                    # Unknown contact ids are skipped (best-effort).
                    continue
                from_list.contacts.remove(contact)
                to_list.contacts.add(contact)
                # Keep the moved contact for the response payload.
                moved.append(contact)
            from_list.save()
            included.append(MediaListSerializer(from_list).data)
            to_list.save()
            included.append(MediaListSerializer(to_list).data)
        except MediaList.DoesNotExist:
            # Source or target list missing: fall through to empty reply.
            pass
    serializer = ContactSerializer(moved, many=True)
    return BulkResponse(serializer.data, included,
                        len(serializer.data), len(serializer.data))
def instagrams(self, request, pk=None):
    """Return recent Instagram posts for one contact, paginated from ES.

    Queries the 'instagrams' index by the contact's instagram username,
    newest first; limit/offset come from get_pagination(request).
    """
    contact = self.get_contact_by_pk(request, pk)
    instagram_posts = []
    if contact.instagram != '':
        limit, offset = get_pagination(request)
        query = {
            'size': limit,
            'from': offset,
            'query': {
                'bool': {
                    'should': [{
                        'term': {
                            'data.Username': contact.instagram
                        }
                    }],
                    'minimum_should_match': '100%'
                }
            },
            'sort': [{
                'data.CreatedAt': {
                    'order': 'desc',
                    'mode': 'avg'
                }
            }]
        }
        es_instagrams = es.search(index='instagrams',
                                  doc_type='instagram',
                                  body=query)
        # Defensive extraction (note: no total-count read here, so the
        # BulkResponse total equals the page length).
        if 'hits' in es_instagrams and 'hits' in es_instagrams['hits']:
            for es_instagram in es_instagrams['hits']['hits']:
                if ('_source' in es_instagram
                        and 'data' in es_instagram['_source']):
                    es_instagram = format_es_response(es_instagram)
                    # Tag each hit for the client-side feed renderer.
                    es_instagram['type'] = 'instagrams'
                    instagram_posts.append(es_instagram['_source']['data'])
    return BulkResponse(instagram_posts, {}, len(instagram_posts),
                        len(instagram_posts))
def feed(self, request, pk=None):
    """Combined activity feed (tweets, instagrams, headlines) for a contact.

    Builds one bool/should ES query over the contact's twitter handle,
    instagram handle, and the FeedURLs of its valid running feeds, sorted
    newest first; limit/offset come from get_pagination(request).
    """
    contact = self.get_contact_by_pk(request, pk)
    # Response to user
    feed = []
    total_feed = 0
    # ES attributes
    should = []
    twitter_username = contact.twitter
    instagram_username = contact.instagram
    feeds = Feed.objects.filter(contact=contact,
                                valid_feed=True,
                                running=True)
    if twitter_username != '':
        should.append({'term': {'data.Username': twitter_username}})
    if instagram_username != '':
        should.append(
            {'term': {
                'data.InstagramUsername': instagram_username
            }})
    # BUG FIX: the loop variable used to be named `feed`, clobbering the
    # `feed` response list above — `feed.append(...)` below then crashed
    # with AttributeError whenever the contact had any RSS feeds.
    for contact_feed in feeds:
        should.append({'match': {'data.FeedURL': contact_feed.feed_url}})
    if len(should) > 0:
        limit, offset = get_pagination(request)
        query = {
            'size': limit,
            'from': offset,
            'query': {
                'bool': {
                    'should': should
                }
            },
            'sort': [{
                'data.CreatedAt': {
                    'order': 'desc',
                    'mode': 'avg'
                }
            }]
        }
        es_feeds = es.search(index='feeds', doc_type='feed', body=query)
        # Defensive extraction of hit count and hit documents.
        if 'hits' in es_feeds and 'total' in es_feeds['hits']:
            total_feed = es_feeds['hits']['total']
        if 'hits' in es_feeds and 'hits' in es_feeds['hits']:
            for es_feed in es_feeds['hits']['hits']:
                if ('_source' in es_feed and 'data' in es_feed['_source']):
                    es_feed = format_es_response(es_feed)
                    # we want the format to be 'tweets', 'instagrams',
                    # and 'headlines'
                    es_feed['_source']['data']['type'] = es_feed[
                        '_source']['data']['type'].lower() + 's'
                    feed.append(es_feed['_source']['data'])
    return BulkResponse(feed, {}, len(feed), total_feed)
def feed(self, request, pk=None):
    """Combined activity feed for every contact on a media list.

    Builds one bool/should ES query over all contacts' twitter handles,
    instagram handles, and valid running FeedURLs; fixed page of 20,
    newest first.
    """
    media_list = self.get_media_list_by_pk(request, pk)
    # Response to user
    feed = []
    total_feed = 0
    # ES attributes
    should = []
    if media_list.contacts.count() > 0:
        twitter_usernames = (media_list.contacts and
                             media_list.contacts.values_list(
                                 'twitter', flat=True))
        instagram_usernames = (media_list.contacts and
                               media_list.contacts.values_list(
                                   'instagram', flat=True))
        feed_urls = []
        contacts = media_list.contacts.all()
        for contact in contacts:
            contact_feeds = Feed.objects.filter(contact=contact,
                                                valid_feed=True,
                                                running=True)
            # BUG FIX: the inner loop variable used to be named `feed`,
            # clobbering the `feed` response list above — `feed.append(...)`
            # below then crashed with AttributeError whenever any contact
            # on the list had RSS feeds.
            for contact_feed in contact_feeds:
                if contact_feed.feed_url != '':
                    feed_urls.append(contact_feed.feed_url)
        for twitter in twitter_usernames:
            if twitter != '':
                should.append({'term': {'data.Username': twitter}})
        for instagram in instagram_usernames:
            if instagram != '':
                should.append(
                    {'term': {
                        'data.InstagramUsername': instagram
                    }})
        for feed_url in feed_urls:
            if feed_url != '':
                should.append({'term': {'data.FeedURL': feed_url}})
        if len(should) > 0:
            query = {
                'size': 20,
                'from': 0,
                'query': {
                    'bool': {
                        'should': should
                    }
                },
                'sort': [{
                    'data.CreatedAt': {
                        'order': 'desc',
                        'mode': 'avg'
                    }
                }]
            }
            es_feeds = es.search(index='feeds', doc_type='feed', body=query)
            # Defensive extraction of hit count and hit documents.
            if 'hits' in es_feeds and 'total' in es_feeds['hits']:
                total_feed = es_feeds['hits']['total']
            if 'hits' in es_feeds and 'hits' in es_feeds['hits']:
                for es_feed in es_feeds['hits']['hits']:
                    if ('_source' in es_feed
                            and 'data' in es_feed['_source']):
                        feed.append(es_feed['_source']['data'])
    return BulkResponse(feed, {}, len(feed), total_feed)