def test_open_prereq_revisions(deleted):
    """_open_prereq_revisions() must order prerequisites correctly for
    both the delete and the add/move directions, and only the add/move
    path filters out revisions that already have an issue."""
    if deleted:
        # Sort prereqs from last to first- delete from end of series back.
        sort = '-revision_sort_code'
    else:
        # Add or move from first to last, so that each "after" is in place.
        sort = 'revision_sort_code'
    with mock.patch('%s._same_series_revisions' % IREV) as ssrevs_mock:
        op_revs = mock.MagicMock()
        # Wire both possible queryset chains to the same sentinel: the
        # add/move path has an extra .filter(issue=None) step, the delete
        # path goes straight from filter() to order_by().
        ssrevs_mock.return_value.exclude.return_value \
                   .filter.return_value \
                   .filter.return_value \
                   .order_by.return_value = op_revs
        ssrevs_mock.return_value.exclude.return_value \
                   .filter.return_value \
                   .order_by.return_value = op_revs

        rev = IssueRevision()
        rev.id = 1234
        rev.deleted = deleted

        assert rev._open_prereq_revisions() == op_revs

        if deleted:
            # Deletes exclude later revisions and take only open ones.
            ssrevs_mock.return_value.exclude.assert_has_calls([
                mock.call(id__lte=1234),
                mock.call().filter(committed=None),
                mock.call().filter().order_by(sort)])
        else:
            # Adds/moves exclude earlier revisions and additionally skip
            # revisions whose issue already exists.
            ssrevs_mock.return_value.exclude.assert_has_calls([
                mock.call(id__gte=1234),
                mock.call().filter(committed=None),
                mock.call().filter().filter(issue=None),
                mock.call().filter().filter().order_by(sort)])
def test_create_add_more(any_added_issue, any_indexer):
    """Issues added with after=None go first; after=<issue> sorts next."""
    first_changeset = Changeset.objects.create(
        indexer=any_indexer, state=states.OPEN,
        change_type=CTYPES['issue_add'])
    any_added_issue = Issue.objects.get(pk=any_added_issue.pk)

    # With after=None, should insert the issue at the beginning.
    first_rev = IssueRevision(changeset=first_changeset,
                              series=any_added_issue.series)
    first_rev.save()
    first_rev.commit_to_display()

    # It's necessary to re-fetch (not just refresh) the various issues
    # to get the calculated sort codes loaded.
    original_issue = Issue.objects.get(pk=any_added_issue.pk)
    first_issue = Issue.objects.get(pk=first_rev.issue.pk)
    assert first_issue.sort_code < original_issue.sort_code

    second_changeset = Changeset.objects.create(
        indexer=any_indexer, state=states.OPEN,
        change_type=CTYPES['issue_add'])
    second_rev = IssueRevision(changeset=second_changeset,
                               series=any_added_issue.series,
                               after=first_rev.issue)
    second_rev.save()
    second_rev.commit_to_display()

    # Again, re-fetch all the things.
    original_issue = Issue.objects.get(pk=any_added_issue.pk)
    first_issue = Issue.objects.get(pk=first_rev.issue.pk)
    second_issue = Issue.objects.get(pk=second_rev.issue.pk)
    assert second_issue.sort_code < original_issue.sort_code
    assert first_issue.sort_code < second_issue.sort_code
def multiple_issue_revs():
    """Fixture: a series with three committed issues (sort codes 0-2) and
    two IssueRevisions inserting new issues '2' and '3' between them,
    with the series-revision lookups mocked out."""
    with mock.patch('apps.oi.models.Issue.objects') as obj_mock, \
         mock.patch('%s._same_series_revisions' % IREV) as same_mock, \
         mock.patch('%s._same_series_open_with_after' % IREV) as after_mock:

        # No conflicting same-series revisions exist by default.
        same_mock.return_value.filter.return_value.exists.return_value = \
            False

        s = Series(name='Some Series')
        # Issues already created, so they have sort codes.
        i1 = Issue(number='1', series=s, sort_code=0)
        i4 = Issue(number='4', series=s, sort_code=1)
        i5 = Issue(number='5', series=s, sort_code=2)
        # Instance-level save mocks so tests can check which existing
        # issues get re-saved without touching the database.
        i1.save = mock.MagicMock()
        i4.save = mock.MagicMock()
        i5.save = mock.MagicMock()

        # Issues being created, no sort codes yet.
        i2 = Issue(number='2', series=s)
        i3 = Issue(number='3', series=s)

        c = Changeset()
        rev2 = IssueRevision(changeset=c, issue=i2, series=s,
                             revision_sort_code=1)
        rev3 = IssueRevision(changeset=c, issue=i3, series=s,
                             revision_sort_code=2)

        yield ((i1, i2, i3, i4, i5), (rev2, rev3),
               after_mock, obj_mock, same_mock)
def test_classification():
    """The field-classification helpers must partition Issue's fields
    into the expected regular / irregular / single / multi value sets."""
    gf = Issue._meta.get_field

    regular_names = [
        'number', 'title', 'no_title', 'volume', 'no_volume',
        'volume_not_printed', 'display_volume_with_number', 'variant_of',
        'variant_name', 'isbn', 'no_isbn', 'barcode', 'no_barcode',
        'rating', 'no_rating', 'publication_date', 'key_date',
        'on_sale_date_uncertain', 'indicia_frequency',
        'no_indicia_frequency', 'price', 'page_count',
        'page_count_uncertain', 'editing', 'no_editing', 'notes',
        'keywords', 'series', 'indicia_publisher',
        'indicia_pub_not_printed', 'brand', 'no_brand',
        'no_indicia_printer', 'indicia_printer',
    ]
    regular_fields = {name: gf(name) for name in regular_names}

    irregular_names = ['awards', 'valid_isbn', 'on_sale_date',
                       'sort_code', 'is_indexed']
    irregular_fields = {name: gf(name) for name in irregular_names}

    assert IssueRevision._get_regular_fields() == regular_fields
    assert IssueRevision._get_irregular_fields() == irregular_fields

    # Single-value fields are the regular fields minus the multi-value
    # ones (keywords and the indicia_printer many-to-many).
    single_value_fields = {
        name: field for name, field in regular_fields.items()
        if name not in ('keywords', 'indicia_printer')
    }
    assert IssueRevision._get_single_value_fields() == single_value_fields
    assert IssueRevision._get_multi_value_fields() == {
        'indicia_printer': gf('indicia_printer'),
    }
def any_added_issue_rev(any_adding_changeset, issue_add_values):
    """Fixture: a saved IssueRevision with its indicia printer attached.

    The many-to-many printer value cannot be passed to the constructor,
    so it is popped from the value dict and set() after the save.
    """
    printer = issue_add_values.pop('indicia_printer')
    revision = IssueRevision(changeset=any_adding_changeset,
                             **issue_add_values)
    revision.save()
    revision.indicia_printer.set([printer])
    return revision
def test_same_series_revisions():
    """_same_series_revisions() filters the changeset's issue revisions
    down to those for this revision's series."""
    with mock.patch('%s.issuerevisions' % CSET) as irevs_mock:
        series = Series()
        sentinel = mock.MagicMock()
        irevs_mock.filter.return_value = sentinel

        rev = IssueRevision(changeset=Changeset(), series=series)

        assert rev._same_series_revisions() == sentinel
        irevs_mock.filter.assert_called_once_with(series=series)
def test_same_series_open_with_after():
    """_same_series_open_with_after() keeps only open revisions that
    have an 'after' target set."""
    with mock.patch('%s._same_series_revisions' % IREV) as ssrevs_mock:
        sentinel = mock.MagicMock()
        ssrevs_mock.return_value.filter.return_value = sentinel

        assert IssueRevision()._same_series_open_with_after() == sentinel

        ssrevs_mock.return_value.filter.assert_called_once_with(
            after__isnull=False, committed=None)
def patched_edit(story_revs):
    """Fixture: an edit IssueRevision (has a previous revision) on a
    fully-indexed issue, with save/recency/story lookups patched."""
    with mock.patch(RECENT) as recent_mock, mock.patch(SAVE), \
            mock.patch('%s.storyrevisions' % CSET) as story_mock:
        story_mock.filter.return_value = story_revs

        indexed_issue = Issue(is_indexed=INDEXED['full'])
        previous = IssueRevision(changeset=Changeset(),
                                 issue=indexed_issue)
        rev = IssueRevision(changeset=Changeset(),
                            previous_revision=previous,
                            issue=indexed_issue)
        yield (rev, recent_mock)
def pre_commit_rev():
    """Fixture: an edit IssueRevision ready to commit, with the
    same-series queryset helpers patched out."""
    with mock.patch('%s._same_series_revisions' % IREV), \
            mock.patch('%s._same_series_open_with_after' % IREV):
        series = Series(name='Some Series')
        issue = Issue(number='1', series=series)
        yield IssueRevision(
            changeset=Changeset(), issue=issue, series=series,
            previous_revision=IssueRevision(changeset=Changeset(),
                                            issue=issue))
def test_handle_dependents_add(story_revs):
    """Adding an issue must attach it to the changeset's story revisions
    and save each of them."""
    with mock.patch(SAVE), \
            mock.patch('%s.storyrevisions' % CSET) as story_mock:
        story_mock.filter.return_value = story_revs

        rev = IssueRevision(changeset=Changeset(),
                            issue=Issue(is_indexed=INDEXED['full']))
        rev._handle_dependents({})

        for story_rev in story_revs:
            assert story_rev.issue == rev.issue
            story_rev.save.assert_called_once_with()
def test_fork_variant_for_cover_no_reserve(any_added_issue,
                                           any_editing_changeset,
                                           any_added_indicia_printer):
    """fork_variant() should copy the base issue's regular fields onto
    the new variant revision and build a matching cover sequence
    revision from the supplied cover revision."""
    # Make it a wraparound to test cover sequence page count logic.
    cover_rev = CoverRevision(changeset=any_editing_changeset,
                              is_wraparound=2)
    issue_rev, story_rev = IssueRevision.fork_variant(
        any_added_issue, any_editing_changeset,
        variant_name='any variant name',
        variant_cover_revision=cover_rev)

    for name in IssueRevision._get_regular_fields():
        if name == 'variant_of':
            assert issue_rev.variant_of == any_added_issue
        elif name == 'variant_name':
            assert issue_rev.variant_name == 'any variant name'
        elif name == 'on_sale_date':
            # The on-sale date is never carried over to a variant.
            assert issue_rev.year_on_sale is None
            assert issue_rev.month_on_sale is None
            assert issue_rev.day_on_sale is None
        elif name == 'indicia_printer':
            # Many-to-many field: compare the materialized printer list.
            indicia_printers = list(
                issue_rev.indicia_printer.order_by('id'))
            assert indicia_printers == [any_added_indicia_printer]
        elif name in EXCLUDED_FORK_FIELDS:
            assert getattr(issue_rev, name) == EXCLUDED_FORK_FIELDS[name]
        else:
            assert getattr(issue_rev, name) == getattr(any_added_issue,
                                                       name)

    assert issue_rev.add_after == any_added_issue
    assert issue_rev.reservation_requested is False

    assert story_rev.changeset == issue_rev.changeset
    assert story_rev.issue is None
    assert story_rev.sequence_number == 0
    # Wraparound covers count as two pages.
    assert story_rev.page_count == 2
    assert story_rev.type.name == 'cover'
    # Art credits default to '?', text credits to '' with the matching
    # "no_" flag set.
    assert story_rev.script == ''
    assert story_rev.no_script is True
    assert story_rev.inks == '?'
    assert story_rev.no_inks is False
    assert story_rev.colors == '?'
    assert story_rev.no_colors is False
    assert story_rev.letters == ''
    assert story_rev.no_letters is True
    assert story_rev.editing == ''
    assert story_rev.no_editing is True
    for name in ('pencils', 'inks', 'colors'):
        assert getattr(story_rev, name) == '?'
    for name in ('script', 'letters', 'editing'):
        assert getattr(story_rev, name) == ''
def test_series_changed(deleted, has_prev, changed):
    """series_changed is True exactly when a previous revision exists
    and names a different series."""
    series_one = Series(name='One')
    series_two = Series(name='Two') if changed else series_one

    with mock.patch('%s.previous_revision' % IREV,
                    new_callable=mock.PropertyMock) as prev_mock:
        rev = IssueRevision(series=series_two)
        rev.deleted = deleted
        prev_mock.return_value = (IssueRevision(series=series_one)
                                  if has_prev else None)

        assert rev.series_changed is changed
def test_post_save_no_series_changed(patch_for_optional_move):
    """Without a series change, first/last issues are recalculated only
    once (for the single series involved)."""
    patch_for_optional_move.return_value = False
    series = Series(name="Test Series")
    issue = Issue(series=series)
    rev = IssueRevision(
        changeset=Changeset(), issue=issue, series=series,
        previous_revision=IssueRevision(issue=issue, series=series))

    rev._post_save_object({})

    # If we fail the check, there will be two calls here instead of one.
    series.set_first_last_issues.assert_called_once_with()
def test_fork_variant_for_cover_no_reserve(any_added_issue,
                                           any_editing_changeset):
    """fork_variant() copies the base issue's data into a new variant
    revision and creates a matching cover sequence revision."""
    # Wraparound covers exercise the two-page cover sequence logic.
    wraparound_cover = CoverRevision(changeset=any_editing_changeset,
                                     is_wraparound=2)
    variant_rev, cover_seq_rev = IssueRevision.fork_variant(
        any_added_issue, any_editing_changeset,
        variant_name='any variant name',
        variant_cover_revision=wraparound_cover)

    for field in IssueRevision._get_regular_fields():
        if field == 'variant_of':
            assert variant_rev.variant_of == any_added_issue
        elif field == 'variant_name':
            assert variant_rev.variant_name == 'any variant name'
        elif field == 'on_sale_date':
            # The on-sale date is never carried over to a variant.
            assert variant_rev.year_on_sale is None
            assert variant_rev.month_on_sale is None
            assert variant_rev.day_on_sale is None
        elif field in EXCLUDED_FORK_FIELDS:
            assert getattr(variant_rev, field) == \
                EXCLUDED_FORK_FIELDS[field]
        else:
            assert getattr(variant_rev, field) == \
                getattr(any_added_issue, field)

    assert variant_rev.add_after == any_added_issue
    assert variant_rev.reservation_requested is False

    assert cover_seq_rev.changeset == variant_rev.changeset
    assert cover_seq_rev.issue is None
    assert cover_seq_rev.sequence_number == 0
    # Wraparound covers count as two pages.
    assert cover_seq_rev.page_count == 2
    assert cover_seq_rev.type.name == 'cover'
    # Art credits default to '?', text credits to '' with the matching
    # "no_" flag set.
    assert cover_seq_rev.script == ''
    assert cover_seq_rev.no_script is True
    assert cover_seq_rev.inks == '?'
    assert cover_seq_rev.no_inks is False
    assert cover_seq_rev.colors == '?'
    assert cover_seq_rev.no_colors is False
    assert cover_seq_rev.letters == ''
    assert cover_seq_rev.no_letters is True
    assert cover_seq_rev.editing == ''
    assert cover_seq_rev.no_editing is True
    for field in ('pencils', 'inks', 'colors'):
        assert getattr(cover_seq_rev, field) == '?'
    for field in ('script', 'letters', 'editing'):
        assert getattr(cover_seq_rev, field) == ''
def test_handle_dependents_to_singleton(year_began, key_date):
    """Turning a series into a singleton must create (but not commit)
    a single open '[nn]' issue revision for it."""
    with mock.patch('%s.save' % IREV) as save_mock, \
            mock.patch('%s.commit_to_display' % IREV) as commit_mock:
        # Make the IssueRevision that would be returned by the patched
        # constructor call.  Only patch the methods for this.
        s = Series()
        c = Changeset()
        ir_params = {
            'changeset': c,
            'series': s,
            'after': None,
            'number': '[nn]',
            'publication_date': year_began,
        }
        ir = IssueRevision(**ir_params)

        with mock.patch(IREV) as ir_class_mock:
            # Now patch the IssueRevision constructor itself.
            ir_class_mock.return_value = ir

            sr = SeriesRevision(changeset=c, series=s, is_singleton=True,
                                year_began=year_began)
            sr._handle_dependents({'to is_singleton': True})

            ir_class_mock.assert_called_once_with(**ir_params)
            # key_date is derived from year_began by the code under test.
            assert ir.key_date == key_date
            save_mock.assert_called_once_with()
            # The new revision stays open; committing is left to the
            # normal changeset workflow.
            assert not commit_mock.called
def test_handle_prerequisites_non_move_edit():
    """A plain (non-move, non-delete) edit has no prerequisites and must
    bail out before touching sort codes or prereq revisions."""
    with mock.patch('%s.edited' % IREV,
                    new_callable=mock.PropertyMock) as edited_prop, \
            mock.patch('%s.series_changed' % IREV,
                       new_callable=mock.PropertyMock) as changed_prop, \
            mock.patch('%s._ensure_sort_code_space' % IREV) as space_mock, \
            mock.patch('%s._open_prereq_revisions' % IREV) as prereq_mock:
        edited_prop.return_value = True
        changed_prop.return_value = False

        rev = IssueRevision(deleted=False)
        rev._handle_prerequisites({})

        # We should have returned back out immediately, no further calls.
        assert not space_mock.called
        assert not prereq_mock.called
def test_parent_field_tuples():
    """Parent objects whose cached counts an issue change can affect."""
    expected = {
        ('series',),
        ('series', 'publisher'),
        ('brand',),
        ('brand', 'group'),
        ('indicia_publisher',),
    }
    assert IssueRevision._get_parent_field_tuples() == expected
def convert(self, changeset):
    """Build, save, and return an IssueRevision for this legacy issue
    record as part of the given changeset.

    ISBN/barcode "none" flags are inferred from the series' end year
    (series ending before 1970/1974 respectively predate those fields).
    """
    volume = '' if self.volume is None else '%s' % self.volume

    display_series = Series.objects.get(id=self.series_id)
    ended = display_series.year_ended
    no_isbn = bool(ended and ended < 1970)
    no_barcode = bool(ended and ended < 1974)

    issue = Issue.objects.get(id=self.id)

    # Normalize missing text fields to empty strings.
    self.publication_date = self.publication_date or ''
    self.key_date = self.key_date or ''
    self.price = self.price or ''

    revision = IssueRevision(
        changeset=changeset, issue=issue, number=self.number,
        volume=volume, series=display_series,
        publication_date=self.publication_date,
        # Key dates use '-' separators in the revision model.
        key_date=self.key_date.replace('.', '-'),
        price=self.price, no_barcode=no_barcode, no_isbn=no_isbn,
        date_inferred=changeset.date_inferred)
    revision.save()
    return revision
def patch_for_move(patch_for_optional_move):
    """Fixture: a revision moving an issue from one series to another,
    with per-instance save / set_first_last_issues mocks so calls on
    each series can be tracked separately."""
    patch_for_optional_move.return_value = True
    old_series = Series(name="Old Test Series")
    new_series = Series(name="New Test Series")
    issue = Issue(series=old_series)
    rev = IssueRevision(
        changeset=Changeset(), issue=issue, series=new_series,
        previous_revision=IssueRevision(issue=issue, series=old_series))

    # Need to track these calls independently, so replace class-level
    # mock with instance-level mocks.
    for series in (old_series, new_series):
        series.save = mock.MagicMock()
        series.set_first_last_issues = mock.MagicMock()

    yield rev, issue, old_series, new_series
def test_create_variant_edit_revision(any_added_variant,
                                      variant_add_values,
                                      any_editing_changeset):
    """Cloning an existing variant copies all of its add values onto
    the new revision and links it to the source objects."""
    rev = IssueRevision.clone(data_object=any_added_variant,
                              changeset=any_editing_changeset)

    # BUG FIX: dict.iteritems() does not exist in Python 3; the sibling
    # test_create_edit_revision already uses .items().
    for field, value in variant_add_values.items():
        assert getattr(rev, field) == value

    assert rev.issue is any_added_variant
    assert rev.source is rev.issue
    assert rev.changeset is any_editing_changeset
    assert rev.date_inferred is False
def test_committed_prereq_revisions(deleted):
    """_committed_prereq_revisions() must exclude this revision, keep
    only committed revisions, and sort opposite to the open prereqs."""
    # We sort committed reversed from open so that we effectively append
    # to committed as we commit each revision as we walk through open.
    if deleted:
        sort = 'revision_sort_code'
    else:
        sort = '-revision_sort_code'
    with mock.patch('%s._same_series_revisions' % IREV) as ssrevs_mock:
        c_revs = mock.MagicMock()
        # Wire the exclude().filter().order_by() chain to a sentinel so
        # we can check it is returned unmodified.
        ssrevs_mock.return_value.exclude.return_value \
                   .filter.return_value \
                   .order_by.return_value = c_revs

        rev = IssueRevision()
        rev.id = 1234
        rev.deleted = deleted

        assert rev._committed_prereq_revisions() == c_revs

        ssrevs_mock.return_value.exclude.assert_has_calls([
            mock.call(id=1234),
            mock.call().filter(committed=True),
            mock.call().filter().order_by(sort)])
def test_conditional_field_mapping():
    """Fields whose presence depends on a flag of the related series."""
    assert IssueRevision._get_conditional_field_tuple_mapping() == {
        'volume': ('series', 'has_volume'),
        'no_volume': ('series', 'has_volume'),
        # NOTE(review): the Issue field used elsewhere in these tests is
        # named 'display_volume_with_number' -- confirm whether
        # 'display_volume_with_issue' here is intentional or a typo
        # mirrored from the production mapping.
        'display_volume_with_issue': ('series', 'has_volume'),
        'title': ('series', 'has_issue_title'),
        'no_title': ('series', 'has_issue_title'),
        'barcode': ('series', 'has_barcode'),
        'no_barcode': ('series', 'has_barcode'),
        'isbn': ('series', 'has_isbn'),
        'no_isbn': ('series', 'has_isbn'),
        'valid_isbn': ('series', 'has_isbn'),
        'indicia_frequency': ('series', 'has_indicia_frequency'),
        'no_indicia_frequency': ('series', 'has_indicia_frequency'),
    }
def test_fork_variant_reserve_no_cover_with_variants(
        any_added_issue, any_added_variant, any_editing_changeset):
    """Forking with a reservation and no cover revision produces no
    story revision, and sorts after the last existing variant."""
    variant_rev, cover_seq_rev = IssueRevision.fork_variant(
        any_added_issue, any_editing_changeset,
        variant_name='any variant name',
        reservation_requested=True)

    # Field copying is covered by the fork tests above; only check the
    # reservation and missing-cover-revision implications here.
    assert variant_rev.reservation_requested is True
    assert cover_seq_rev is None

    # Because we built a variant in the fixtures, the new variant
    # should be sorted just after the existing variant.
    assert variant_rev.add_after == any_added_variant
def test_create_edit_revision(any_added_issue, issue_add_values,
                              any_editing_changeset, keywords):
    """Cloning an issue copies its add values onto the new revision."""
    cloned = IssueRevision.clone(data_object=any_added_issue,
                                 changeset=any_editing_changeset)

    for field, value in issue_add_values.items():
        if field == 'keywords':
            # rev.###.keywords.names() gives wrong result for 'Bar', 'bar'
            names = sorted(kw.name for kw in cloned.issue.keywords.all())
            assert names == keywords['list']
        else:
            assert getattr(cloned, field) == value

    assert cloned.issue is any_added_issue
    assert cloned.source is cloned.issue
    assert cloned.changeset is any_editing_changeset
    assert cloned.date_inferred is False
def test_delete_issue(any_added_issue, any_deleting_changeset,
                      any_added_issue_rev):
    """Committing a delete must mark the issue deleted and decrement
    the issue counts on series, publisher, brand (and groups), and
    indicia publisher."""
    rev = IssueRevision.clone(data_object=any_added_issue,
                              changeset=any_deleting_changeset)
    rev.deleted = True

    # TODO: Pre-refactor code relies on the revision having been saved
    # at least once before committing.  Take this out after refactor?
    rev.save()
    # refresh_from_db() doesn't refresh linked objects, so this is easier.
    rev = IssueRevision.objects.get(pk=rev.pk)

    old_issue = Issue.objects.get(pk=rev.issue.pk)
    old_series_issue_count = rev.series.issue_count
    old_ind_pub_issue_count = rev.indicia_publisher.issue_count
    old_brand_issue_count = rev.brand.issue_count
    old_brand_group_counts = {
        group.pk: group.issue_count for group in rev.brand.group.all()
    }
    old_publisher_issue_count = rev.series.publisher.issue_count

    rev.commit_to_display()

    # Ensure new read of saved row.
    # refresh_from_db() doesn't refresh linked objects, so this is easier.
    rev = IssueRevision.objects.get(pk=rev.pk)
    assert rev.deleted is True
    assert rev.issue == old_issue
    assert rev.issue.deleted is True
    assert rev.previous_revision.issue == old_issue

    s = rev.issue.series
    assert s.issue_count == old_series_issue_count - 1
    assert s.publisher.issue_count == old_publisher_issue_count - 1
    assert rev.issue.brand.issue_count == old_brand_issue_count - 1
    # BUG FIX: dict.iteritems() does not exist in Python 3; use items().
    assert {
        group.pk: group.issue_count
        for group in rev.issue.brand.group.all()
    } == {k: v - 1 for k, v in old_brand_group_counts.items()}
    assert rev.issue.indicia_publisher.issue_count == \
        old_ind_pub_issue_count - 1
def test_delete_issue(any_added_issue, any_deleting_changeset,
                      any_added_issue_rev):
    """Committing a delete must mark the issue deleted and decrement
    the issue counts on series, publisher, brand (and groups), and
    indicia publisher."""
    rev = IssueRevision.clone(data_object=any_added_issue,
                              changeset=any_deleting_changeset)
    rev.deleted = True

    # TODO: Pre-refactor code relies on the revision having been saved
    # at least once before committing.  Take this out after refactor?
    rev.save()
    # refresh_from_db() doesn't refresh linked objects, so this is easier.
    rev = IssueRevision.objects.get(pk=rev.pk)

    old_issue = Issue.objects.get(pk=rev.issue.pk)
    old_series_issue_count = rev.series.issue_count
    old_ind_pub_issue_count = rev.indicia_publisher.issue_count
    old_brand_issue_count = rev.brand.issue_count
    old_brand_group_counts = {group.pk: group.issue_count
                              for group in rev.brand.group.all()}
    old_publisher_issue_count = rev.series.publisher.issue_count

    rev.commit_to_display()

    # Ensure new read of saved row.
    # refresh_from_db() doesn't refresh linked objects, so this is easier.
    rev = IssueRevision.objects.get(pk=rev.pk)
    assert rev.deleted is True
    assert rev.issue == old_issue
    assert rev.issue.deleted is True
    assert rev.previous_revision.issue == old_issue

    s = rev.issue.series
    assert s.issue_count == old_series_issue_count - 1
    assert s.publisher.issue_count == old_publisher_issue_count - 1
    assert rev.issue.brand.issue_count == old_brand_issue_count - 1
    # BUG FIX: dict.iteritems() does not exist in Python 3; use items().
    assert {
        group.pk: group.issue_count
        for group in rev.issue.brand.group.all()
    } == {k: v - 1 for k, v in old_brand_group_counts.items()}
    assert rev.issue.indicia_publisher.issue_count == \
        old_ind_pub_issue_count - 1
def test_noncomics_counts(any_added_series_rev, issue_add_values,
                          any_adding_changeset,
                          any_variant_adding_changeset,
                          any_deleting_changeset):
    """For a non-comics series, only the series' own issue count tracks
    issue adds/deletes; publisher, brand, group, and indicia publisher
    counts stay untouched, and variants affect nothing."""
    # This is written out in long form because while it could be broken
    # up into fixtures and separate cases, it is only this one specific
    # sequence of operations that needs any of this code right now.
    s_rev = any_added_series_rev
    s_rev.is_comics_publication = False
    s_rev.save()
    with mock.patch(UPDATE_ALL) as updater:
        s_rev.commit_to_display()
        # BUG FIX: Mock.has_calls() is not an assertion -- it merely
        # creates a child mock and always "passes".  assert_has_calls()
        # actually verifies the expected call sequence.
        updater.assert_has_calls([
            mock.call({}, language=None, country=None, negate=True),
            mock.call({'series': 1}, language=s_rev.series.language,
                      country=s_rev.series.country),
        ])
        assert updater.call_count == 2

    series = Series.objects.get(pk=s_rev.series.pk)
    issue_add_values['series'] = series
    i_rev = IssueRevision(changeset=any_adding_changeset,
                          **issue_add_values)
    i_rev.save()
    i_rev = IssueRevision.objects.get(pk=i_rev.pk)

    old_series_issue_count = i_rev.series.issue_count
    old_ind_pub_issue_count = i_rev.indicia_publisher.issue_count
    old_brand_issue_count = i_rev.brand.issue_count
    old_brand_group_counts = {group.pk: group.issue_count
                              for group in i_rev.brand.group.all()}
    old_publisher_issue_count = i_rev.series.publisher.issue_count

    with mock.patch(UPDATE_ALL) as updater:
        i_rev.commit_to_display()
        # Changeset must be approved so we can re-edit this later.
        i_rev.changeset.state = states.APPROVED
        i_rev.changeset.save()
        updater.assert_has_calls([
            mock.call({}, language=None, country=None, negate=True),
            mock.call({'stories': 0, 'covers': 0},
                      language=i_rev.series.language,
                      country=i_rev.series.country),
        ])
        assert updater.call_count == 2

    i_rev = IssueRevision.objects.get(pk=i_rev.pk)
    s = i_rev.issue.series
    # Non-comics issues do not affect the issue counts EXCEPT on the
    # series.
    assert s.issue_count == old_series_issue_count + 1
    assert s.publisher.issue_count == old_publisher_issue_count
    assert i_rev.issue.brand.issue_count == old_brand_issue_count
    assert {
        group.pk: group.issue_count
        for group in i_rev.issue.brand.group.all()
    } == old_brand_group_counts
    assert i_rev.issue.indicia_publisher.issue_count == \
        old_ind_pub_issue_count

    # Now do it all again with a variant added to the new issue.
    v_rev = IssueRevision(changeset=any_variant_adding_changeset,
                          number='100', variant_of=i_rev.issue,
                          variant_name='alternate cover',
                          series=i_rev.series, brand=i_rev.brand,
                          indicia_publisher=i_rev.indicia_publisher)
    v_rev.save()
    v_rev = IssueRevision.objects.get(pk=v_rev.pk)

    old_series_issue_count = v_rev.series.issue_count
    old_ind_pub_issue_count = v_rev.indicia_publisher.issue_count
    old_brand_issue_count = v_rev.brand.issue_count
    old_brand_group_counts = {group.pk: group.issue_count
                              for group in v_rev.brand.group.all()}
    old_publisher_issue_count = v_rev.series.publisher.issue_count

    with mock.patch(UPDATE_ALL) as updater:
        v_rev.commit_to_display()
        # Changeset must be approved so we can re-edit this later.
        v_rev.changeset.state = states.APPROVED
        v_rev.changeset.save()
        updater.assert_has_calls([
            mock.call({'stories': 0, 'covers': 0},
                      language=None, country=None, negate=True),
            mock.call({'stories': 0, 'covers': 0},
                      language=i_rev.series.language,
                      country=i_rev.series.country),
        ])
        assert updater.call_count == 2

    v_rev = IssueRevision.objects.get(pk=v_rev.pk)
    s = v_rev.issue.series
    # Non-comics variants do not affect the issue counts on anything.
    assert s.issue_count == old_series_issue_count
    assert s.publisher.issue_count == old_publisher_issue_count
    assert v_rev.issue.brand.issue_count == old_brand_issue_count
    assert {
        group.pk: group.issue_count
        for group in v_rev.issue.brand.group.all()
    } == old_brand_group_counts
    assert v_rev.issue.indicia_publisher.issue_count == \
        old_ind_pub_issue_count

    # Now delete the variant, should still have the same counts.
    del_v_rev = IssueRevision.clone(
        changeset=any_deleting_changeset,
        data_object=Issue.objects.get(pk=v_rev.issue.pk))
    del_v_rev.deleted = True
    del_v_rev.save()
    del_v_rev = IssueRevision.objects.get(pk=del_v_rev.pk)

    with mock.patch(UPDATE_ALL) as updater:
        del_v_rev.commit_to_display()
    del_v_rev = IssueRevision.objects.get(pk=del_v_rev.pk)
    updater.assert_has_calls([
        mock.call({'stories': 0, 'covers': 0},
                  language=None, country=None, negate=True),
        mock.call({'stories': 0, 'covers': 0},
                  language=i_rev.series.language,
                  country=i_rev.series.country),
    ])
    assert updater.call_count == 2

    s = Series.objects.get(pk=del_v_rev.series.pk)
    i = Issue.objects.get(pk=del_v_rev.issue.pk)
    assert s.issue_count == old_series_issue_count
    assert s.publisher.issue_count == old_publisher_issue_count
    assert i.brand.issue_count == old_brand_issue_count
    assert {group.pk: group.issue_count
            for group in i.brand.group.all()} == old_brand_group_counts
    assert i.indicia_publisher.issue_count == old_ind_pub_issue_count

    # Finally, delete the base issue, check for only series.issue_count
    deleting_variant_changeset = Changeset(
        state=states.OPEN, change_type=0,
        indexer=any_deleting_changeset.indexer)
    deleting_variant_changeset.save()
    del_i_rev = IssueRevision.clone(
        changeset=deleting_variant_changeset,
        data_object=Issue.objects.get(pk=i_rev.issue.pk))
    del_i_rev.deleted = True
    del_i_rev.save()
    del_i_rev = IssueRevision.objects.get(pk=del_i_rev.pk)

    with mock.patch(UPDATE_ALL) as updater:
        del_i_rev.commit_to_display()
    # BUG FIX: was re-fetched by del_v_rev.pk (copy-paste from the
    # variant block above); we want the base-issue delete revision here.
    del_i_rev = IssueRevision.objects.get(pk=del_i_rev.pk)
    updater.assert_has_calls([
        mock.call({'stories': 0, 'covers': 0},
                  language=None, country=None, negate=True),
        mock.call({'stories': 0, 'covers': 0},
                  language=i_rev.series.language,
                  country=i_rev.series.country),
    ])
    assert updater.call_count == 2

    s = Series.objects.get(pk=del_i_rev.series.pk)
    i = Issue.objects.get(pk=del_i_rev.issue.pk)
    # Series issue counts are adjusted even for non comics.
    assert s.issue_count == old_series_issue_count - 1
    assert s.publisher.issue_count == old_publisher_issue_count
    assert i.brand.issue_count == old_brand_issue_count
    assert {group.pk: group.issue_count
            for group in i.brand.group.all()} == old_brand_group_counts
    assert i.indicia_publisher.issue_count == old_ind_pub_issue_count
def any_added_issue_rev(any_adding_changeset, issue_add_values):
    """Fixture: a saved IssueRevision built from the standard values."""
    revision = IssueRevision(changeset=any_adding_changeset,
                             **issue_add_values)
    revision.save()
    return revision
def any_added_variant_rev(any_variant_adding_changeset,
                          variant_add_values):
    """Fixture: a saved variant IssueRevision, re-read from the DB so
    database-populated fields are loaded."""
    revision = IssueRevision(changeset=any_variant_adding_changeset,
                             **variant_add_values)
    revision.save()
    return IssueRevision.objects.get(pk=revision.pk)
def handle_uploaded_cover(request, cover, issue, variant=False,
                          revision_lock=None):
    '''Process the uploaded file and generate a CoverRevision.

    Validates the upload form, creates the cover changeset and revision
    (plus an IssueRevision/StoryRevision when uploading a variant),
    writes the scan into the new-scans directory and generates the
    scaled images, then hands off to finish_cover_revision().
    '''
    try:
        if variant:
            form = UploadVariantScanForm(request.POST, request.FILES)
        else:
            form = UploadScanForm(request.POST, request.FILES)
    except IOError:
        # sometimes uploads misbehave. connection dropped ?
        error_text = 'Something went wrong with the upload. ' + \
                     'Please <a href="' + request.path + '">try again</a>.'
        return render_error(request, error_text, redirect=False,
                            is_safe=True)

    if not form.is_valid():
        return _display_cover_upload_form(request, form, cover, issue,
                                          variant=variant)

    # process form
    if form.cleaned_data['is_gatefold']:
        return handle_gatefold_cover(request, cover, issue, form)
    scan = form.cleaned_data['scan']
    file_source = form.cleaned_data['source']
    marked = form.cleaned_data['marked']

    # create OI records
    changeset = Changeset(indexer=request.user, state=states.OPEN,
                          change_type=CTYPES['cover'])
    changeset.save()

    if cover:  # upload_type is 'replacement':
        revision = CoverRevision(changeset=changeset, issue=issue,
                                 cover=cover, file_source=file_source,
                                 marked=marked, is_replacement=True)
        revision_lock.changeset = changeset
        revision_lock.save()
        revision.previous_revision = cover.revisions.get(
            next_revision=None, changeset__state=states.APPROVED,
            committed=True)
    else:
        revision = CoverRevision(changeset=changeset, issue=issue,
                                 file_source=file_source, marked=marked)
    revision.save()

    # if uploading a variant, generate an issue_revision for
    # the variant issue and copy the values which would not change
    # TODO are these reasonable assumptions below ?
    if variant:
        current_variants = issue.variant_set.all().order_by('-sort_code')
        if current_variants:
            add_after = current_variants[0]
        else:
            add_after = issue
        issue_revision = IssueRevision(
            changeset=changeset,
            after=add_after,
            number=issue.number,
            title=issue.title,
            no_title=issue.no_title,
            volume=issue.volume,
            no_volume=issue.no_volume,
            display_volume_with_number=issue.display_volume_with_number,
            variant_of=issue,
            variant_name=form.cleaned_data['variant_name'],
            page_count=issue.page_count,
            page_count_uncertain=issue.page_count_uncertain,
            series=issue.series,
            editing=issue.editing,
            no_editing=issue.no_editing,
            reservation_requested=form.cleaned_data[
                'reservation_requested'])
        issue_revision.save()
        if form.cleaned_data['variant_artwork']:
            story_revision = StoryRevision(
                changeset=changeset,
                type=StoryType.objects.get(name='cover'),
                no_script=True,
                pencils='?',
                inks='?',
                colors='?',
                no_letters=True,
                no_editing=True,
                sequence_number=0,
                page_count=2 if form.cleaned_data['is_wraparound']
                else 1,
            )
            story_revision.save()

    # put new uploaded covers into
    # media/<LOCAL_NEW_SCANS>/<monthname>_<year>/
    # with name
    # <revision_id>_<date>_<time>.<ext>
    scan_name = str(revision.id) + os.path.splitext(scan.name)[1]
    upload_dir = settings.MEDIA_ROOT + LOCAL_NEW_SCANS + \
        changeset.created.strftime('%B_%Y').lower()
    destination_name = os.path.join(upload_dir, scan_name)
    try:  # essentially only needed at beginning of the month
        check_cover_dir(upload_dir)
    except IOError:
        changeset.delete()
        error_text = "Problem with file storage for uploaded " + \
                     "cover, please report an error."
        return render_error(request, error_text, redirect=False)

    # write uploaded file
    # BUG FIX: use a context manager so the destination file is closed
    # even if writing a chunk raises mid-way.
    with open(destination_name, 'wb') as destination:
        for chunk in scan.chunks():
            destination.write(chunk)

    try:
        # generate different sizes we are using
        im = pyImage.open(destination_name)
        large_enough = False
        if form.cleaned_data['is_wraparound']:
            # wraparounds need to have twice the width
            if im.size[0] >= 800 and im.size[1] >= 400:
                large_enough = True
        elif min(im.size) >= 400:
            large_enough = True
        if large_enough:
            if form.cleaned_data['is_wraparound']:
                revision.is_wraparound = True
                # The front (right) half of the wraparound is the cover.
                revision.front_left = im.size[0]/2
                revision.front_right = im.size[0]
                revision.front_bottom = im.size[1]
                revision.front_top = 0
                revision.save()
            generate_sizes(revision, im)
        else:
            changeset.delete()
            os.remove(destination_name)
            info_text = "Image is too small, only " + str(im.size) + \
                        " in size."
            return _display_cover_upload_form(request, form, cover, issue,
                                              info_text=info_text,
                                              variant=variant)
    except IOError:
        # just in case, django *should* have taken care of file type
        changeset.delete()
        os.remove(destination_name)
        info_text = 'Error: File "' + scan.name + \
                    '" is not a valid picture.'
        return _display_cover_upload_form(request, form, cover, issue,
                                          info_text=info_text,
                                          variant=variant)

    # all done, we can save the state
    return finish_cover_revision(request, revision, form.cleaned_data)
def handle_uploaded_cover(request, cover, issue, variant=False,
                          revision_lock=None):
    """
    Process an uploaded cover scan and generate a CoverRevision.

    Validates the upload form, creates an OPEN Changeset holding the
    CoverRevision (a replacement revision when ``cover`` is given, a new
    one otherwise), optionally creates an IssueRevision/StoryRevision pair
    for a new variant issue, writes the scan to the new-scans media
    directory, size-checks it with PIL and generates the scaled images.

    request -- the Django request carrying POST data and FILES.
    cover -- existing Cover being replaced, or None for a new cover.
    issue -- the Issue the scan belongs to.
    variant -- True when the upload creates a new variant issue.
    revision_lock -- lock object for the replaced cover; only used (and
        presumably only non-None) when ``cover`` is set — TODO confirm.

    Returns an HttpResponse: either the re-displayed upload form with an
    error/info message, an error page, or the finished-revision response.
    """
    try:
        if variant:
            form = UploadVariantScanForm(request.POST, request.FILES)
        else:
            form = UploadScanForm(request.POST, request.FILES)
    except IOError:
        # sometimes uploads misbehave. connection dropped ?
        error_text = 'Something went wrong with the upload. ' + \
                     'Please <a href="' + request.path + '">try again</a>.'
        return render_error(request, error_text, redirect=False,
                            is_safe=True)

    if not form.is_valid():
        return _display_cover_upload_form(request, form, cover, issue,
                                          variant=variant)

    # process form
    if form.cleaned_data['is_gatefold']:
        # Gatefold covers need extra cropping input; handled separately.
        return handle_gatefold_cover(request, cover, issue, form)
    scan = form.cleaned_data['scan']
    file_source = form.cleaned_data['source']
    marked = form.cleaned_data['marked']

    # create OI records
    changeset = Changeset(indexer=request.user, state=states.OPEN,
                          change_type=CTYPES['cover'])
    changeset.save()

    if cover:  # upload_type is 'replacement':
        revision = CoverRevision(changeset=changeset, issue=issue,
                                 cover=cover, file_source=file_source,
                                 marked=marked, is_replacement=True)
        # Transfer the edit lock for the replaced cover to this changeset.
        revision_lock.changeset = changeset
        revision_lock.save()
        revision.previous_revision = cover.revisions.get(
            next_revision=None, changeset__state=states.APPROVED,
            committed=True)
    else:
        revision = CoverRevision(changeset=changeset, issue=issue,
                                 file_source=file_source, marked=marked)
    revision.save()

    # if uploading a variant, generate an issue_revision for
    # the variant issue and copy the values which would not change
    # TODO are these reasonable assumptions below ?
    if variant:
        # Place the new variant after the last existing variant, or after
        # the base issue when there are none yet.
        current_variants = issue.variant_set.all().order_by('-sort_code')
        if current_variants:
            add_after = current_variants[0]
        else:
            add_after = issue
        issue_revision = IssueRevision(
            changeset=changeset,
            after=add_after,
            number=issue.number,
            title=issue.title,
            no_title=issue.no_title,
            volume=issue.volume,
            no_volume=issue.no_volume,
            display_volume_with_number=issue.display_volume_with_number,
            variant_of=issue,
            variant_name=form.cleaned_data['variant_name'],
            page_count=issue.page_count,
            page_count_uncertain=issue.page_count_uncertain,
            series=issue.series,
            editing=issue.editing,
            no_editing=issue.no_editing,
            reservation_requested=form.cleaned_data['reservation_requested'])
        issue_revision.save()
        if form.cleaned_data['variant_artwork']:
            # Variant-artwork-only uploads get a placeholder cover sequence.
            story_revision = StoryRevision(
                changeset=changeset,
                type=StoryType.objects.get(name='cover'),
                no_script=True,
                pencils='?',
                inks='?',
                colors='?',
                no_letters=True,
                no_editing=True,
                sequence_number=0,
                page_count=2 if form.cleaned_data['is_wraparound'] else 1,
            )
            story_revision.save()

    # put new uploaded covers into
    # media/<LOCAL_NEW_SCANS>/<monthname>_<year>/
    # with name
    # <revision_id>_<date>_<time>.<ext>
    scan_name = str(revision.id) + os.path.splitext(scan.name)[1]
    upload_dir = settings.MEDIA_ROOT + LOCAL_NEW_SCANS + \
        changeset.created.strftime('%B_%Y').lower()
    destination_name = os.path.join(upload_dir, scan_name)
    try:  # essentially only needed at beginning of the month
        check_cover_dir(upload_dir)
    except IOError:
        changeset.delete()
        error_text = "Problem with file storage for uploaded " + \
                     "cover, please report an error."
        return render_error(request, error_text, redirect=False)

    # write uploaded file; the context manager closes the handle even if
    # reading a chunk from the upload raises (the old code leaked it).
    with open(destination_name, 'wb') as destination:
        for chunk in scan.chunks():
            destination.write(chunk)

    try:
        # generate different sizes we are using
        im = pyImage.open(destination_name)
        large_enough = False
        if form.cleaned_data['is_wraparound']:
            # wraparounds need to have twice the width
            if im.size[0] >= 800 and im.size[1] >= 400:
                large_enough = True
        elif min(im.size) >= 400:
            large_enough = True
        if large_enough:
            if form.cleaned_data['is_wraparound']:
                # Default the front-cover crop to the right half.
                revision.is_wraparound = True
                revision.front_left = im.size[0] / 2
                revision.front_right = im.size[0]
                revision.front_bottom = im.size[1]
                revision.front_top = 0
                revision.save()
            generate_sizes(revision, im)
        else:
            # Too small: undo the OI records and the stored file.
            changeset.delete()
            os.remove(destination_name)
            info_text = "Image is too small, only " + str(im.size) + \
                        " in size."
            return _display_cover_upload_form(request, form, cover, issue,
                                              info_text=info_text,
                                              variant=variant)
    except IOError:
        # just in case, django *should* have taken care of file type
        changeset.delete()
        os.remove(destination_name)
        info_text = 'Error: File \"' + scan.name + \
                    '" is not a valid picture.'
        return _display_cover_upload_form(request, form, cover, issue,
                                          info_text=info_text,
                                          variant=variant)

    # all done, we can save the state
    return finish_cover_revision(request, revision, form.cleaned_data)
def test_stats_category_field_tuples():
    """The stats categories are the series' country and language."""
    expected = {
        ('series', 'country'),
        ('series', 'language'),
    }
    actual = IssueRevision._get_stats_category_field_tuples()
    assert actual == expected
def test_excluded_fields():
    """IssueRevision excludes exactly the base Revision exclusions."""
    base_excluded = Revision._get_excluded_field_names()
    assert IssueRevision._get_excluded_field_names() == base_excluded
def test_noncomics_counts(any_added_series_rev, issue_add_values,
                          any_adding_changeset, any_variant_adding_changeset,
                          any_deleting_changeset):
    """
    Walk a non-comics series through issue add, variant add, variant
    delete, and base-issue delete, checking issue counts at each step.

    Only the series' own issue_count moves for non-comics publications;
    publisher, brand, brand group, and indicia publisher counts stay put.
    """
    # This is written out in long form because while it could be broken
    # up into fixtures and separate cases, it is only this one specific
    # sequence of operations that needs any of this code right now.
    s_rev = any_added_series_rev
    s_rev.is_comics_publication = False
    s_rev.save()

    with mock.patch(UPDATE_ALL) as updater:
        s_rev.commit_to_display()
    # NOTE: the old has_calls() asserted nothing — MagicMock auto-creates
    # any attribute — assert_has_calls() actually checks the calls.
    updater.assert_has_calls([
        mock.call({}, language=None, country=None, negate=True),
        mock.call({'series': 1},
                  language=s_rev.series.language,
                  country=s_rev.series.country),
    ])
    assert updater.call_count == 2

    series = Series.objects.get(pk=s_rev.series.pk)
    issue_add_values['series'] = series
    i_rev = IssueRevision(changeset=any_adding_changeset, **issue_add_values)
    i_rev.save()
    i_rev = IssueRevision.objects.get(pk=i_rev.pk)

    old_series_issue_count = i_rev.series.issue_count
    old_ind_pub_issue_count = i_rev.indicia_publisher.issue_count
    old_brand_issue_count = i_rev.brand.issue_count
    old_brand_group_counts = {
        group.pk: group.issue_count for group in i_rev.brand.group.all()
    }
    old_publisher_issue_count = i_rev.series.publisher.issue_count

    with mock.patch(UPDATE_ALL) as updater:
        i_rev.commit_to_display()
        # Changeset must be approved so we can re-edit this later.
        i_rev.changeset.state = states.APPROVED
        i_rev.changeset.save()
    updater.assert_has_calls([
        mock.call({}, language=None, country=None, negate=True),
        mock.call({'stories': 0, 'covers': 0},
                  language=i_rev.series.language,
                  country=i_rev.series.country),
    ])
    assert updater.call_count == 2

    i_rev = IssueRevision.objects.get(pk=i_rev.pk)
    s = i_rev.issue.series
    # Non-comics issues do not affect the issue counts EXCEPT on the series.
    assert s.issue_count == old_series_issue_count + 1
    assert s.publisher.issue_count == old_publisher_issue_count
    assert i_rev.issue.brand.issue_count == old_brand_issue_count
    assert {
        group.pk: group.issue_count
        for group in i_rev.issue.brand.group.all()
    } == old_brand_group_counts
    assert i_rev.issue.indicia_publisher.issue_count == \
        old_ind_pub_issue_count

    # Now do it all again with a variant added to the new issue.
    v_rev = IssueRevision(changeset=any_variant_adding_changeset,
                          number='100',
                          variant_of=i_rev.issue,
                          variant_name='alternate cover',
                          series=i_rev.series,
                          brand=i_rev.brand,
                          indicia_publisher=i_rev.indicia_publisher)
    v_rev.save()
    v_rev = IssueRevision.objects.get(pk=v_rev.pk)

    old_series_issue_count = v_rev.series.issue_count
    old_ind_pub_issue_count = v_rev.indicia_publisher.issue_count
    old_brand_issue_count = v_rev.brand.issue_count
    old_brand_group_counts = {
        group.pk: group.issue_count for group in v_rev.brand.group.all()
    }
    old_publisher_issue_count = v_rev.series.publisher.issue_count

    with mock.patch(UPDATE_ALL) as updater:
        v_rev.commit_to_display()
        # Changeset must be approved so we can re-edit this later.
        v_rev.changeset.state = states.APPROVED
        v_rev.changeset.save()
    updater.assert_has_calls([
        mock.call({'stories': 0, 'covers': 0},
                  language=None, country=None, negate=True),
        mock.call({'stories': 0, 'covers': 0},
                  language=i_rev.series.language,
                  country=i_rev.series.country),
    ])
    assert updater.call_count == 2

    v_rev = IssueRevision.objects.get(pk=v_rev.pk)
    s = v_rev.issue.series
    # Non-comics variants do not affect the issue counts on anything.
    assert s.issue_count == old_series_issue_count
    assert s.publisher.issue_count == old_publisher_issue_count
    assert v_rev.issue.brand.issue_count == old_brand_issue_count
    assert {
        group.pk: group.issue_count
        for group in v_rev.issue.brand.group.all()
    } == old_brand_group_counts
    assert v_rev.issue.indicia_publisher.issue_count == \
        old_ind_pub_issue_count

    # Now delete the variant, should still have the same counts.
    del_v_rev = IssueRevision.clone(
        changeset=any_deleting_changeset,
        data_object=Issue.objects.get(pk=v_rev.issue.pk))
    del_v_rev.deleted = True
    del_v_rev.save()
    del_v_rev = IssueRevision.objects.get(pk=del_v_rev.pk)

    with mock.patch(UPDATE_ALL) as updater:
        del_v_rev.commit_to_display()
    del_v_rev = IssueRevision.objects.get(pk=del_v_rev.pk)
    updater.assert_has_calls([
        mock.call({'stories': 0, 'covers': 0},
                  language=None, country=None, negate=True),
        mock.call({'stories': 0, 'covers': 0},
                  language=i_rev.series.language,
                  country=i_rev.series.country),
    ])
    assert updater.call_count == 2

    s = Series.objects.get(pk=del_v_rev.series.pk)
    i = Issue.objects.get(pk=del_v_rev.issue.pk)
    assert s.issue_count == old_series_issue_count
    assert s.publisher.issue_count == old_publisher_issue_count
    assert i.brand.issue_count == old_brand_issue_count
    assert {group.pk: group.issue_count
            for group in i.brand.group.all()} == old_brand_group_counts
    assert i.indicia_publisher.issue_count == old_ind_pub_issue_count

    # Finally, delete the base issue, check for only series.issue_count
    base_issue_deleting_changeset = Changeset(
        state=states.OPEN, change_type=0,
        indexer=any_deleting_changeset.indexer)
    base_issue_deleting_changeset.save()
    del_i_rev = IssueRevision.clone(
        changeset=base_issue_deleting_changeset,
        data_object=Issue.objects.get(pk=i_rev.issue.pk))
    del_i_rev.deleted = True
    del_i_rev.save()
    del_i_rev = IssueRevision.objects.get(pk=del_i_rev.pk)

    with mock.patch(UPDATE_ALL) as updater:
        del_i_rev.commit_to_display()
    # Re-fetch the revision we just committed (the old code mistakenly
    # re-fetched by del_v_rev.pk here).
    del_i_rev = IssueRevision.objects.get(pk=del_i_rev.pk)
    updater.assert_has_calls([
        mock.call({'stories': 0, 'covers': 0},
                  language=None, country=None, negate=True),
        mock.call({'stories': 0, 'covers': 0},
                  language=i_rev.series.language,
                  country=i_rev.series.country),
    ])
    assert updater.call_count == 2

    s = Series.objects.get(pk=del_i_rev.series.pk)
    i = Issue.objects.get(pk=del_i_rev.issue.pk)
    # Series issue counts are adjusted even for non comics.
    assert s.issue_count == old_series_issue_count - 1
    assert s.publisher.issue_count == old_publisher_issue_count
    assert i.brand.issue_count == old_brand_issue_count
    assert {group.pk: group.issue_count
            for group in i.brand.group.all()} == old_brand_group_counts
    assert i.indicia_publisher.issue_count == old_ind_pub_issue_count
def test_pre_initial_save_with_date():
    """A full on-sale date is split into year/month/day fields."""
    source_issue = Issue(on_sale_date='2016-01-31')
    rev = IssueRevision(issue=source_issue)
    rev._pre_initial_save()
    assert (rev.year_on_sale, rev.month_on_sale, rev.day_on_sale) == \
        (2016, 1, 31)
def test_pre_initial_save_no_date():
    """Without an on-sale date, all three date parts stay None."""
    rev = IssueRevision(issue=Issue())
    rev._pre_initial_save()
    for part in (rev.year_on_sale, rev.month_on_sale, rev.day_on_sale):
        assert part is None