def import_mal(self, mal_username: str, mangaki_username: str):
    r = redis.StrictRedis(connection_pool=redis_pool)

    def update_details(count, current_index, current_title):
        # Publish import progress so that a consumer can poll it from Redis.
        payload = {
            'count': count,
            'currentWork': {
                'index': current_index,
                'title': current_title
            }
        }
        r.set('tasks:{task_id}:details'.format(task_id=self.request.id),
              json.dumps(payload))

    user = User.objects.get(username=mangaki_username)
    # Allow only one MAL import per user at a time.
    if user.background_tasks.filter(tag=MAL_IMPORT_TAG).exists():
        logger.debug('[{}] MAL import already in progress. Ignoring.'.format(user))
        return

    bg_task = UserBackgroundTask(owner=user, task_id=self.request.id, tag=MAL_IMPORT_TAG)
    bg_task.save()
    logger.info('[{}] MAL import task created: {}.'.format(user, bg_task.task_id))

    try:
        mal.import_mal(mal_username, mangaki_username, update_callback=update_details)
    except IntegrityError:
        logger.exception('MAL import failed due to integrity error')
    finally:
        # Always clean up the background-task record and the progress key.
        bg_task.delete()
        r.delete('tasks:{}:details'.format(self.request.id))
        logger.info('[{}] MAL import task recycled and deleted.'.format(user))
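# --- Illustrative helper (not part of the original task above) ---
# A minimal sketch, assuming the task writes its progress payload under the
# Redis key 'tasks:<task_id>:details' exactly as shown above. The helper name
# `get_import_progress` is hypothetical; it only demonstrates how a consumer
# could read the payload back.
import json

import redis


def get_import_progress(task_id, connection_pool):
    """Return the progress dict written by update_details, or {} if absent."""
    r = redis.StrictRedis(connection_pool=connection_pool)
    raw = r.get('tasks:{task_id}:details'.format(task_id=task_id))
    return json.loads(raw) if raw else {}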
def import_from_mal(request, mal_username):
    if request.method == 'POST' and client.is_available:
        nb_added, fails = import_mal(mal_username, request.user.username)
        payload = {'added': nb_added, 'failures': fails}
        return HttpResponse(json.dumps(payload), content_type='application/json')
    elif not client.is_available:
        raise Http404()
    else:
        return HttpResponse()
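# --- Illustrative URL wiring (assumption, not shown in this excerpt) ---
# A minimal sketch of how the JSON variant of the view above could be routed.
# Django's path() routing is assumed; the URL pattern and the route name
# 'import-from-mal' are hypothetical. In a real urls.py, the view would be
# imported from its module rather than defined in the same file.
from django.urls import path

urlpatterns = [
    path('mal/import/<str:mal_username>', import_from_mal, name='import-from-mal'),
]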
def import_from_mal(request, mal_username):
    if request.method == 'POST':
        nb_added, fails = import_mal(mal_username, request.user.username)
        return HttpResponse('%d added; %d fails: %s'
                            % (nb_added, len(fails), '\n'.join(fails)))
    return HttpResponse()
def test_mal_duplication(self, client_mock, rand):
    from mangaki.utils.mal import import_mal

    # Prepare the list of animes.
    steins_gate_entry = MALEntry(self.steins_gate_xml, MALWorks.animes)
    darling_entry = MALEntry(self.darling_in_the_franxx_xml, MALWorks.animes)
    steins_gate_movie_entry = MALEntry(self.steins_gate_movie_xml, MALWorks.animes)
    steins_gate_zero_entry = MALEntry(self.steins_gate_zero_xml, MALWorks.animes)

    mal_user_works = [
        MALUserWork(steins_gate_entry.title, steins_gate_entry.synonyms,
                    'mal_something', str(steins_gate_entry.mal_id), 10, 2),
        MALUserWork(darling_entry.title, darling_entry.synonyms,
                    'zero_two', str(steins_gate_entry.mal_id), 10, 1),
        MALUserWork(steins_gate_movie_entry.title, steins_gate_movie_entry.synonyms,
                    'non_canon', str(steins_gate_movie_entry.mal_id), 5, 2),
        MALUserWork(steins_gate_zero_entry.title, steins_gate_zero_entry.synonyms,
                    'brain_science_institute', str(steins_gate_zero_entry.mal_id), 10, 1)
    ]

    search_results = {
        steins_gate_entry.title: [
            steins_gate_movie_entry,
            steins_gate_entry,
            steins_gate_zero_entry
        ],
        darling_entry.title: [darling_entry],
        steins_gate_zero_entry.title: [steins_gate_zero_entry, steins_gate_movie_entry],
        steins_gate_movie_entry.title: [steins_gate_movie_entry]
    }

    # Here, we shuffle lists using Hypothesis' controlled Random instance.
    rand.shuffle(search_results[steins_gate_entry.title])
    rand.shuffle(search_results[steins_gate_zero_entry.title])

    client_mock.list_works_from_a_user.return_value = (
        item for item in mal_user_works)
    client_mock.search_works.side_effect = lambda _, query: search_results.get(
        query, [])

    import_mal(self.user.username, self.user.username)
    n_works = Work.objects.count()
    expected = len(mal_user_works)
    # Assumption: all of the user's works were imported.
    self.assertEqual(n_works, expected)

    # Kill the WorkTitle objects. Remove the evidence.
    WorkTitle.objects.all().delete()

    for _ in range(3):
        # Reset the mocks: the generator above is exhausted after each run.
        client_mock.list_works_from_a_user.return_value = (
            item for item in mal_user_works)
        client_mock.search_works.side_effect = lambda _, query: search_results.get(
            query, [])

        import_mal(self.user.username, self.user.username)
        # Assumption: no duplicates are created on re-import.
        self.assertEqual(n_works, Work.objects.count())
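# --- Illustrative decorator stack (assumption) ---
# A minimal sketch of one way the signature above (self, client_mock, rand)
# could be produced: `client_mock` injected by unittest.mock.patch and `rand`
# drawn from Hypothesis' randoms() strategy. The patch target string and the
# use of hypothesis.extra.django.TestCase are assumptions and may differ from
# the real test module.
from unittest.mock import patch

from hypothesis import given
from hypothesis import strategies as st
from hypothesis.extra.django import TestCase


class MALDuplicationTestSketch(TestCase):
    @patch('mangaki.utils.mal.client')
    @given(rand=st.randoms())
    def test_mal_duplication(self, client_mock, rand):
        ...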