def test_invalid_dotjobs_url(self):
    """Unusable URLs should validate to (None, None)."""
    bad_urls = (
        'http://google.com',  # page exists but exposes no feed
        '',                   # nothing provided
        'http://',            # malformed url
    )
    for bad_url in bad_urls:
        title, feed_url = validate_dotjobs_url(bad_url, self.user)
        self.assertIsNone(title)
        self.assertIsNone(feed_url)
def test_validate_dotjobs_url_with_special_chars(self):
    """Spaces and non-ASCII query text are encoded in the derived feed."""
    cases = [
        ('http://www.my.jobs/jobs/?q=query with spaces',
         'http://www.my.jobs/jobs/feed/rss?q=query+with+spaces'),
        ('http://www.my.jobs/jobs/?q=яы',
         'http://www.my.jobs/jobs/feed/rss?q=%D1%8F%D1%8B'),
    ]
    for input_url, expected_feed in cases:
        label, feed = validate_dotjobs_url(input_url, self.user)
        self.assertEqual(feed, expected_feed)
        self.assertIsNotNone(label)
def test_valid_dotjobs_url(self):
    """Valid .jobs URLs (with or without a scheme) yield a title and feed."""
    title, feed = validate_dotjobs_url(self.valid_url, self.user)
    self.assertIsNotNone(title)
    self.assertIsNotNone(feed)

    # A schemeless URL should still validate.
    schemeless = 'www.my.jobs/jobs?location=chicago&q=nurse'
    title, feed = validate_dotjobs_url(schemeless, self.user)
    self.assertIsNotNone(title)
    self.assertIsNotNone(feed)
    expected = urlparse(
        'http://www.my.jobs/jobs/feed/rss?q=nurse&location=chicago')
    actual = urlparse(feed.replace('amp;', ''))
    self.assertEqual(actual.path, expected.path)
    # Compare parsed query dicts so parameter order is irrelevant.
    self.assertEqual(parse_qs(actual.query), parse_qs(expected.query))

    # A filter-style URL with a trailing slash should also validate.
    filter_url = 'www.my.jobs/jobs/'
    title, feed = validate_dotjobs_url(filter_url, self.user)
    self.assertIsNotNone(title)
    self.assertIsNotNone(feed)
def clean_url(self):
    """Validate that the submitted URL resolves to a .jobs RSS feed.

    :return: the (unmodified) submitted URL from cleaned_data
    :raises ValidationError: when no feed can be derived from the URL, or
        when a new saved search would duplicate one the user already has.
    """
    rss_url = validate_dotjobs_url(self.cleaned_data['url'], self.user)[1]
    if not rss_url:
        raise ValidationError(_('This URL is not valid.'))

    # Uniqueness is only enforced when creating a new instance; an edit
    # may legitimately keep its own URL.  .exists() avoids fetching rows
    # just to test the queryset's truthiness.
    if not self.instance.pk and SavedSearch.objects.filter(
            user=self.user, url=self.cleaned_data['url']).exists():
        raise ValidationError(_('URL must be unique.'))
    return self.cleaned_data['url']
def test_invalid_dotjobs_url(self):
    """validate_dotjobs_url yields no title and no feed for bad input."""
    invalid = [
        'http://google.com',  # url does not contain a feed
        '',                   # url not provided
        'http://',            # invalid url provided
    ]
    for candidate in invalid:
        title, feed = validate_dotjobs_url(candidate, self.user)
        self.assertIsNone(title)
        self.assertIsNone(feed)
def clean_url(self):
    """Ensure the submitted URL maps to a feed and is not a duplicate.

    :return: the validated URL from cleaned_data
    :raises ValidationError: when no feed exists for the URL, or when a
        brand-new saved search would duplicate an existing one.
    """
    url = self.cleaned_data['url']
    rss_url = validate_dotjobs_url(url, self.user)[1]
    if not rss_url:
        raise ValidationError(_('This URL is not valid.'))

    # The duplicate check applies only when creating (no pk yet); edits
    # keep their URL.  .exists() is cheaper than evaluating the queryset.
    if not self.instance.pk and SavedSearch.objects.filter(
            user=self.user, url=url).exists():
        raise ValidationError(_('URL must be unique.'))
    return url
def test_validate_dotjobs_url_with_special_chars(self):
    """Queries with spaces or non-ASCII chars produce encoded feed urls."""
    expectations = {
        'http://www.my.jobs/jobs/?q=query with spaces':
            'http://www.my.jobs/jobs/feed/rss?q=query+with+spaces',
        'http://www.my.jobs/jobs/?q=яы':
            'http://www.my.jobs/jobs/feed/rss?q=%D1%8F%D1%8B',
    }
    for source_url, expected_feed in expectations.items():
        label, feed = validate_dotjobs_url(source_url, self.user)
        self.assertEqual(feed, expected_feed)
        self.assertIsNotNone(label)
def add_or_activate_saved_search(user, url):
    """
    Add a new saved search for a user or, if an identical one already
    exists, make sure it is active.

    :param user: User receiving the search
    :param url: URL for the search
    :return: the new or reactivated SavedSearch
    :raises ValueError: if no url was given or it is not a valid .JOBS url
    """
    if not url:
        raise ValueError("No URL provided")

    url = urllib.unquote(url)
    label, feed = validate_dotjobs_url(url, user)
    if not (label and feed):
        raise ValueError("Invalid .JOBS URL Provided")

    # Create notes field recording the creation date/time.
    # NOTE(review): '%l' (space-padded 12-hour clock) is a glibc strftime
    # extension and is not portable to all platforms — confirm deployment
    # targets before relying on it.
    now = datetime.datetime.now().strftime('%A, %B %d, %Y %l:%M %p')
    notes = 'Saved on ' + now
    if '//' not in url:
        # Add a scheme so urlparse can extract the netloc.
        url = 'http://' + url
    netloc = urlparse.urlparse(url).netloc
    notes += ' from ' + netloc

    search_args = {'url': url,
                   'label': label,
                   'feed': feed,
                   'user': user,
                   'email': user.email,
                   'frequency': 'D',
                   'day_of_week': None,
                   'day_of_month': None,
                   'notes': notes}

    try:
        # If an identical search already exists, reactivate it instead of
        # creating a duplicate.
        saved_search = SavedSearch.objects.get(
            user=search_args['user'],
            email__iexact=search_args['email'],
            url=search_args['url'])
        saved_search.is_active = True
        saved_search.save()
    except SavedSearch.DoesNotExist:
        # No search for that email/user yet; create and announce it.
        saved_search = SavedSearch(**search_args)
        saved_search.save()
        saved_search.initial_email()
    return saved_search
def clean(self):
    """Attach the rss feed derived from 'url' to cleaned_data.

    Records a form error on 'url' when no feed can be found; stores the
    resolved feed (possibly None) either way.
    """
    data = self.cleaned_data
    feed = validate_dotjobs_url(data.get('url'), self.user)[1]
    if feed:
        self._errors.pop('feed', None)
    else:
        self._errors.setdefault('url', []).append(
            "That URL does not contain feed information")
    data['feed'] = feed
    return data
def validate_url(request):
    """AJAX endpoint: validate a .jobs URL and return feed info as JSON.

    NOTE(review): a non-AJAX request falls off the end and returns None,
    which Django treats as an invalid response — confirm callers are
    AJAX-only.
    """
    if request.is_ajax():
        feed_title, rss_url = validate_dotjobs_url(request.POST['url'],
                                                   request.user)
        if rss_url:
            # Echo the RSS url back so the client can show validation
            # succeeded; the label field is auto-populated from feed_title.
            data = dict(rss_url=rss_url,
                        feed_title=feed_title,
                        url_status='valid')
        else:
            data = dict(url_status='not valid')
        return HttpResponse(json.dumps(data))
def add_or_activate_saved_search(user, url):
    """
    Add a new saved search to a user or, when one already exists for the
    same user/email/url, just make sure it is active.

    :param user: User receiving the search
    :param url: URL for the search
    :return: the new or activated SavedSearch
    :raises ValueError: for a missing or invalid .JOBS url
    """
    if not url:
        raise ValueError("No URL provided")

    url = urllib.unquote(url)
    label, feed = validate_dotjobs_url(url, user)
    if not (label and feed):
        raise ValueError("Invalid .JOBS URL Provided")

    # Note the creation date/time in the notes field.
    # NOTE(review): '%l' is a glibc-only strftime directive (space-padded
    # hour); not portable — verify target platforms.
    now = datetime.datetime.now().strftime('%A, %B %d, %Y %l:%M %p')
    notes = 'Saved on ' + now
    if '//' not in url:
        # Prefix a scheme so urlparse can find the host.
        url = 'http://' + url
    notes += ' from ' + urlparse.urlparse(url).netloc

    search_args = {'url': url,
                   'label': label,
                   'feed': feed,
                   'user': user,
                   'email': user.email,
                   'frequency': 'D',
                   'day_of_week': None,
                   'day_of_month': None,
                   'notes': notes}

    try:
        # Reactivate an existing identical search rather than duplicate it.
        saved_search = SavedSearch.objects.get(
            user=search_args['user'],
            email__iexact=search_args['email'],
            url=search_args['url'])
        saved_search.is_active = True
        saved_search.save()
    except SavedSearch.DoesNotExist:
        # First search of this kind for the user; create and send the
        # initial notification email.
        saved_search = SavedSearch(**search_args)
        saved_search.save()
        saved_search.initial_email()
    return saved_search
def validate_url(request):
    """Validate a .jobs URL over AJAX, replying with JSON status.

    NOTE(review): when the request is not AJAX this view returns None,
    which Django rejects — confirm only AJAX clients hit this endpoint.
    """
    if not request.is_ajax():
        return

    feed_title, rss_url = validate_dotjobs_url(request.POST['url'],
                                               request.user)
    # On success the RSS url is returned so the client can display it and
    # auto-populate the label field from feed_title.
    if rss_url:
        payload = {'rss_url': rss_url,
                   'feed_title': feed_title,
                   'url_status': 'valid'}
    else:
        payload = {'url_status': 'not valid'}
    return HttpResponse(json.dumps(payload))
def clean(self):
    """Validate email and url, creating the owning user if necessary.

    Side effects visible in this method:
    - may create a new User (without a welcome email) and attach any
      matching Contact rows to it;
    - records ``self.created`` so callers can tell a user was created;
    - stores the resolved feed in cleaned_data and records a 'url' error
      when no feed is found;
    - toggles ``self.instance.unsubscriber`` when 'is_active' changed.
    """
    cleaned_data = self.cleaned_data
    url = cleaned_data.get('url')
    # Fall back to the instance's stored email when the form omits one.
    user_email = cleaned_data.get('email') or self.instance.email
    if not user_email:
        raise ValidationError(_("This field is required."))
    # we have an email, so remove email error
    self._errors.pop('email', None)

    # Get or create the user since they might not exist yet
    created = False
    user = User.objects.get_email_owner(email=user_email)
    if user is None:
        # Don't send an email here, as this is not a typical user creation.
        user, created = User.objects.create_user(email=user_email,
                                                 send_email=False,
                                                 in_reserve=True)
        self.instance.user = user
        # Attach any pre-existing contacts with this email to the new user.
        Contact.objects.filter(email=user_email).update(user=user)
    else:
        self.instance.user = user
    # Expose whether a user was created to whoever processes this form.
    setattr(self, 'created', created)

    feed = validate_dotjobs_url(url, user)[1]
    if feed:
        cleaned_data['feed'] = feed
        self._errors.pop('feed', None)
    else:
        error_msg = "That URL does not contain feed information"
        self._errors.setdefault('url', []).append(error_msg)
        self.cleaned_data['feed'] = feed

    if 'is_active' in self.changed_data:
        if self.instance.is_active:
            # Saved search is being deactivated; set unsubscriber
            self.instance.unsubscriber = self.request.user.email
        else:
            # Saved search is being activated; unset unsubscriber
            self.instance.unsubscriber = ''
    return cleaned_data
def disable_or_fix(self):
    """
    Disable this saved search, or repair its stored feed url, depending
    on whether an rss feed can still be found at the search url.  Sends
    a "search has been disabled" email when the url is not fixable.
    """
    try:
        _, feed = validate_dotjobs_url(self.url, self.user)
    except ValueError:
        feed = None

    if feed is None:
        # The search url no longer exposes an rss link: deactivate and
        # notify the owner.
        self.is_active = False
        self.save()
        self.send_disable_email()
        return

    if self.feed == '':
        # The url passed validation in the past even though the page
        # could not be retrieved then; backfill the feed url now.
        self.feed = feed
        self.save()
def create_response(self, request, data, response_class=HttpResponse,
                    **response_kwargs):
    """
    Intercept the default create_response().

    Checks for an existing saved search matching the user and url.  When
    none exists, creates one with daily email frequency and the creation
    date/time in the notes.  Builds JSON "data" describing the success,
    failure, or error of saved search creation and hands it to the
    default create_response().
    """
    def error(message):
        # Shared shortcut for the repetitive JSON error responses.
        return super(SavedSearchResource, self).create_response(
            request, {'error': message},
            response_class=HttpResponse, **response_kwargs)

    # Confirm email was provided, and that the user exists.
    email = request.GET.get('email', '')
    if not email:
        return error('No email provided')
    user = User.objects.get_email_owner(email=email)
    if not user:
        return error('No user with email %s exists' % email)

    # Confirm that url was provided, and that it's a valid .jobs search.
    url = request.GET.get('url', '')
    if not url:
        return error('No .JOBS feed provided')
    label, feed = validate_dotjobs_url(url, user)
    if not (label and feed):
        return error('This is not a valid .JOBS feed')

    # Note the creation date/time in the notes field.
    # NOTE(review): '%l' is a glibc strftime extension — not portable.
    now = datetime.datetime.now().strftime('%A, %B %d, %Y %l:%M %p')
    notes = 'Saved on ' + now
    if '//' not in url:
        # Add a scheme so urlparse can extract the host.
        url = 'http://' + url
    notes += ' from ' + urlparse(url).netloc

    search_args = {'url': url, 'label': label, 'feed': feed, 'user': user,
                   'email': email, 'frequency': 'D', 'day_of_week': None,
                   'day_of_month': None, 'notes': notes}

    # .exists() replaces the previous get()/DoesNotExist dance: it avoids
    # fetching the row and cannot raise MultipleObjectsReturned when
    # duplicate searches are already present.
    new_search = not SavedSearch.objects.filter(
        user=user, email__iexact=email, url=url).exists()
    if new_search:
        search = SavedSearch(**search_args)
        search.save()
        search.initial_email()

    data = {'email': email, 'frequency': 'D', 'new_search': new_search}
    return super(SavedSearchResource, self).create_response(
        request, data, response_class=HttpResponse, **response_kwargs)
def create_response(self, request, data, response_class=HttpResponse,
                    **response_kwargs):
    """
    Intercept the default create_response().

    Looks for an existing saved search matching the requesting user and
    url; when none exists, creates one with daily email frequency and a
    creation timestamp in the notes.  Replies with JSON "data" describing
    success, failure, or error via the default create_response().
    """
    _respond = super(SavedSearchResource, self).create_response

    # Confirm email was provided, and that the user exists.
    email = request.GET.get('email', '')
    if not email:
        return _respond(request, {'error': 'No email provided'},
                        response_class=HttpResponse, **response_kwargs)
    user = User.objects.get_email_owner(email=email)
    if not user:
        return _respond(request,
                        {'error': 'No user with email %s exists' % email},
                        response_class=HttpResponse, **response_kwargs)

    # Confirm that url was provided, and that it's a valid .jobs search.
    url = request.GET.get('url', '')
    if not url:
        return _respond(request, {'error': 'No .JOBS feed provided'},
                        response_class=HttpResponse, **response_kwargs)
    label, feed = validate_dotjobs_url(url, user)
    if not (label and feed):
        return _respond(request,
                        {'error': 'This is not a valid .JOBS feed'},
                        response_class=HttpResponse, **response_kwargs)

    # Note the creation date/time.
    # NOTE(review): '%l' is a glibc strftime extension — not portable.
    now = datetime.datetime.now().strftime('%A, %B %d, %Y %l:%M %p')
    notes = 'Saved on ' + now
    if '//' not in url:
        # Add a scheme so urlparse can extract the host.
        url = 'http://' + url
    notes += ' from ' + urlparse(url).netloc

    search_args = {'url': url, 'label': label, 'feed': feed, 'user': user,
                   'email': email, 'frequency': 'D', 'day_of_week': None,
                   'day_of_month': None, 'notes': notes}

    # .exists() replaces the get()/DoesNotExist existence test; unlike
    # .get() it cannot raise MultipleObjectsReturned if duplicates exist.
    new_search = not SavedSearch.objects.filter(
        user=user, email__iexact=email, url=url).exists()
    if new_search:
        search = SavedSearch(**search_args)
        search.save()
        search.initial_email()

    data = {'email': email, 'frequency': 'D', 'new_search': new_search}
    return _respond(request, data, response_class=HttpResponse,
                    **response_kwargs)