def validation_save_and_message(object, is_valid, notes):
    """Persist a validation result on the model row and notify its owner.

    Scrubs filesystem paths out of the validation notes, trims each line,
    writes the result with a queryset ``update()`` (deliberately bypassing
    the ``post_save`` signal), then queues a success or warning message
    for the owner.
    """
    # Mask any path information before it reaches the user.
    scrubbed = notes.replace(object.owner.path, '/***')
    scrubbed = scrubbed.replace(settings.MEDIA_ROOT, '/***')

    # Normalise whitespace line by line.
    scrubbed = '\n'.join(line.strip() for line in scrubbed.splitlines())

    # Queryset update on purpose: it skips the post_save signal.
    row = object.__class__.objects.filter(id=object.id)
    row.update(validated=is_valid, validation_notes=scrubbed)

    prefix = '{} {}: '.format(object._meta.verbose_name.title(), object)
    if is_valid:
        messages.success(object.owner, prefix + 'validation complete!')
    else:
        link = "<a href='{}'>validation failed (view errors)</a>"\
            .format(object.get_absolute_url())
        messages.warning(object.owner, prefix + link)
def download(self):
    """Stream ``self.url`` into ``self.data.path`` and record the outcome.

    On success: stores end time, status, MD5 and file size, and notifies
    owners of related datasets. On any failure: records the error status
    and message, and notifies owners. Always saves the model afterwards
    and triggers validation on the related datasets.
    """
    related_ds = list(self.related_datasets())
    fn = self.data.path
    self.reset()
    try:
        r = requests.get(self.url, stream=True)
        try:
            # BUG FIX: without this, a 4xx/5xx response body (e.g. an HTML
            # error page) was written to disk and marked FINISHED_SUCCESS.
            r.raise_for_status()
            with open(fn, 'wb') as f:
                for chunk in r.iter_content(chunk_size=self.CHUNK):
                    if chunk:  # filter out keep-alive new chunks
                        f.write(chunk)
        finally:
            # Release the connection even if streaming fails midway.
            r.close()
        self.end_time = now()
        self.status_code = self.FINISHED_SUCCESS
        self.md5 = self.get_md5()
        self.filesize = os.path.getsize(fn)
        for ds in related_ds:
            msg = 'Download complete (will validate next): {}'.format(self.url)
            messages.success(ds.owner, msg)
    except Exception as e:
        # Broad on purpose: any failure is recorded on the model instead
        # of propagating out of the task.
        self.start_time = None
        self.status_code = self.FINISHED_ERROR
        self.status = str(e)
        for ds in related_ds:
            msg = 'Download failed: {}'.format(self.url)
            messages.warning(ds.owner, msg)
    self.save()
    for ds in related_ds:
        ds.validate_and_save()
def import_urls(user, fresh_urls, mark_read): group = uuid() size = len(fresh_urls) for url in fresh_urls: async(subscribe_to_imported_url, user, url, mark_read, group=group) start = time.time() while True: # print("Time", time.time() - start, "count", count_group(group)) if (time.time() - start) > IMPORT_WAIT: # print("TIME!") break if count_group(group) == size: # print("COUNT!") break time.sleep(1) import_results = Counter(result_group(group)) pretty_results = ', '.join("{}: {}".format(*x) for x in import_results.items()) num_added = import_results['added'] num_existed = import_results['existed'] num_errors = import_results['error'] if num_added: async_messages.success(user, "Import complete - you subscribed to {sub} feed{s}.".format(sub=num_added, s=pluralize(num_added))) else: async_messages.info(user, "Import complete - no new subscriptions were added.") if num_existed: async_messages.info(user, "You were already subscribed to {sub_exists} imported feed{s}.".format(sub_exists=num_existed, s=pluralize(num_existed))) if num_errors: async_messages.error(user, "There was an error subscribing to {errors} imported feed{s}.".format(errors=num_errors, s=pluralize(num_errors))) logger.info('User %(user)s OPML import complete - %(results)s', {'user': user, 'results': pretty_results}) delete_group(group, tasks=True) return pretty_results
def sync_with_wk(user, full_sync=False):
    '''
    Takes a user. Checks the vocab list from WK for all levels. If anything new has been unlocked on the WK side, it also unlocks it here on Kaniwani and creates a new review for the user.

    :param user: The user to check for new unlocks
    :param full_sync: when True, sync every unlocked level instead of only recent ones
    :return: tuple ``(profile_sync_succeeded, new_review_count, new_synonym_count)``
        when the API key is valid; otherwise ``None`` (implicit return).
    '''
    # We split this into two seperate API calls as we do not necessarily know the current level until
    # For the love of god don't delete this next line
    profile_sync_succeeded = sync_user_profile_with_wk(user)
    if user.profile.api_valid:
        if not full_sync:
            new_review_count, new_synonym_count = sync_recent_unlocked_vocab_with_wk(
                user)
        else:
            new_review_count, new_synonym_count = sync_unlocked_vocab_with_wk(
                user)

        # Async messaging system.
        if new_review_count or new_synonym_count:
            logger.info("Sending message to front-end for user {}".format(
                user.username))
            messages.success(
                user,
                "Your Wanikani Profile has been synced. You have {} new reviews, and {} new synonyms"
                .format(new_review_count, new_synonym_count))
        return profile_sync_succeeded, new_review_count, new_synonym_count
    else:
        # FIX: logger.warn() is a deprecated alias; use logger.warning().
        logger.warning(
            "Not attempting to sync, since API key is invalid, or user has indicated they do not want to be followed "
        )
def run(self, queue):
    """
    Get the user and its available repositories from github,
    and save the counts in the job.

    Returns a tuple of counts:
    (repos, orgs, watched, starred, teams, upgraded, downgraded),
    or None if no github connection is available yet (job is delayed).
    """
    super(FetchAvailableRepositoriesJob, self).run(queue)

    user = self.object

    # force gh if not set
    if not self.gh_args.hgetall():
        gh = user.get_connection()
        if gh and 'access_token' in gh._connection_args:
            self.gh = gh

    # check availability
    gh = self.gh
    if not gh:
        return  # it's delayed !

    nb_repos, nb_orgs, nb_watched, nb_starred, nb_teams = user.fetch_all()

    if self.inform_user.hget() == '1':
        if nb_repos + nb_teams:
            message = u'The list of repositories you can subscribe to (ones you own, collaborate to, or in your organizations) was just updated'
        else:
            # FIX: grammar of the user-facing message ("There is no new
            # repositories" -> "There are no new repositories").
            message = u'There are no new repositories you own, collaborate to, or in your organizations'
        messages.success(user, message)

    upgraded, downgraded = user.check_subscriptions()

    return nb_repos, nb_orgs, nb_watched, nb_starred, nb_teams, len(upgraded), len(downgraded)
def test_success(self):
    """Sending a success message stores it with the SUCCESS level."""
    greeting = "Hello"
    messages.success(self.user, greeting)
    self.assertMessageOk(constants.SUCCESS)
def run(self, queue):
    """
    Fetch the repository and once done, convert waiting subscriptions
    into real ones, and save the count of converted subscriptions in
    the job.

    Raises if the requesting user's rights on the repository cannot be
    fetched or are insufficient. Returns the number of converted
    subscriptions.
    """
    super(FirstFetch, self).run(queue)

    gh = self.gh
    if not gh:
        return  # it's delayed !

    # the identifier of this job is not the repository's id, but its full name
    repository_name = self.identifier.hget()

    # mark waiting subscriptions as in fetching status
    WaitingSubscription.objects.filter(repository_name=repository_name)\
        .update(state=WAITING_SUBSCRIPTION_STATES.FETCHING)

    # get the user who asked to add this repo, and check its rights
    user = GithubUser.objects.get(username=self.gh_args.hget('username'))
    rights = user.can_use_repository(repository_name)

    # TODO: in these two case, we must not retry the job without getting
    # an other user with fetch rights
    if rights is None:
        raise Exception('An error occured while fetching rights for the user')
    elif rights is False:
        raise Exception('The user has not rights to fetch this repository')

    # try to get a GithubUser which is the owner of the repository
    user_part, repo_name_part = repository_name.split('/')
    if user_part == user.username:
        owner = user
    else:
        try:
            owner = GithubUser.objects.get(username=user_part)
        except GithubUser.DoesNotExist:
            # no user, we will create it during the fetch
            owner = GithubUser(username=user_part)

    # Check if the repository already exists in the DB
    # (only possible when the owner is already persisted, i.e. has an id)
    repository = None
    if owner.id:
        try:
            repository = owner.owned_repositories.get(name=repo_name_part)
        except Repository.DoesNotExist:
            pass

    if not repository:
        # create a temporary repository to fetch if none exists
        repository = Repository(name=repo_name_part, owner=owner)

    # fetch the repository if never fetched
    if not repository.first_fetch_done:
        repository.fetch_all(gh=self.gh, force_fetch=True, two_steps=True)

    # and convert waiting subscriptions to real ones
    count = 0
    for subscription in WaitingSubscription.objects.filter(repository_name=repository_name):
        try:
            rights = subscription.user.can_use_repository(repository)
        except Exception:
            # best-effort: a failure for one subscriber must not block the others
            continue
        if rights:
            count += 1
            subscription.convert(rights)
            message = u'Your subscription to <strong>%s</strong> is now ready' % repository.full_name
            messages.success(subscription.user, message)
        else:
            subscription.state = WAITING_SUBSCRIPTION_STATES.FAILED
            subscription.save(update_fields=('state', ))

    # save count in the job
    self.converted_subscriptions.hset(count)

    # add check-hook/events jobs
    # TODO: should not be in core but for now...
    from hooks.tasks import CheckRepositoryHook, CheckRepositoryEvents
    CheckRepositoryEvents.add_job(repository.id)
    CheckRepositoryHook.add_job(repository.id, delayed_for=30)

    # return the number of converted subscriptions
    return count
fail_silently=fail_silently) def info(request, message, extra_tags='', fail_silently=False, async=False): """Adds a message with the ``INFO`` level.""" if ASYNC and async: messages.info(_get_user(request), message) else: add_message(request, constants.INFO, message, extra_tags=extra_tags, fail_silently=fail_silently) def success(request, message, extra_tags='', fail_silently=False, async=False): """Adds a message with the ``SUCCESS`` level.""" if ASYNC and async: messages.success(_get_user(request), message) else: add_message(request, constants.SUCCESS, message, extra_tags=extra_tags, fail_silently=fail_silently) def warning(request, message, extra_tags='', fail_silently=False, async=False): """Adds a message with the ``WARNING`` level.""" if ASYNC and async: messages.debug(_get_user(request), message) else: add_message(request, constants.WARNING, message, extra_tags=extra_tags, fail_silently=fail_silently) def error(request, message, extra_tags='', fail_silently=False, async=False):
def info(request, message, extra_tags='', fail_silently=False, async=False): """Adds a message with the ``INFO`` level.""" if ASYNC and async: messages.info(_get_user(request), message) else: add_message(request, constants.INFO, message, extra_tags=extra_tags, fail_silently=fail_silently) def success(request, message, extra_tags='', fail_silently=False, async=False): """Adds a message with the ``SUCCESS`` level.""" if ASYNC and async: messages.success(_get_user(request), message) else: add_message(request, constants.SUCCESS, message, extra_tags=extra_tags, fail_silently=fail_silently) def warning(request, message, extra_tags='', fail_silently=False, async=False): """Adds a message with the ``WARNING`` level.""" if ASYNC and async: messages.debug(_get_user(request), message) else: add_message(request, constants.WARNING,