def fx_external_ids(fx_session):
    f = FixtureModule('fx_external_ids')
    f.jane_eyre = Work()
    f.jane_eyre.names.update({
        Name(nameable=f.jane_eyre,
             name='jane eyre',
             locale=Locale.parse('en_US'))
    })
    f.jane_eyre_wikipedia = WikipediaEntity(
        name='http://dbpedia.org/resource/Jane_Eyre',
        revision=606655259,
        label='Jane Eyre',
        country='England',
        last_crawled=now(),
        type='book',
    )
    f.jane_eyre_ex = ExternalId(
        work_id=f.jane_eyre.id,
        work=f.jane_eyre,
        wikipedia_id=f.jane_eyre_wikipedia.name,
        wikipedia=f.jane_eyre_wikipedia,  # link the entity instance, not the class
    )
    return f
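# Note: FixtureModule is a project-local helper that is not shown in this
# section. A minimal sketch of what it could look like, assuming it is only
# an attribute namespace that also supports merging other fixture modules
# via `+=` (as used in fx_works below); the project's real helper may differ.
class FixtureModule:

    def __init__(self, name):
        self.__name__ = name

    def __iadd__(self, other):
        # Copy public attributes from another fixture module into this one,
        # so that `f += fx_teams` exposes fx_teams' objects as f.<attr>.
        for key, value in vars(other).items():
            if not key.startswith('__'):
                setattr(self, key, value)
        return self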
def fx_tropes(fx_session):
    """Create *Attack on Titan* (Anime) with the *The Ace*, *Action Girl*, and
    *Behemoth Battle* tropes, and create *Dragon Ball Z* (Anime) with the
    *The Ace*, *Ass Kicking Pose*, and *Cute Is Evil* tropes.

    """
    f = FixtureModule('fx_tropes')
    f.attack_on_titan = Work(media_type='Anime')
    f.attack_on_titan.names.update({
        Name(nameable=f.attack_on_titan,
             name='Attack on Titan',
             locale=Locale.parse('en_US'))
    })
    f.the_ace = Trope(name='The Ace')
    f.action_girl = Trope(name='Action Girl')
    f.behemoth_battle = Trope(name='Behemoth Battle')
    f.attack_on_titan.tropes.update(
        {f.the_ace, f.action_girl, f.behemoth_battle})
    f.dragon_ball_z = Work(media_type='Anime')
    f.dragon_ball_z.names.update({
        Name(nameable=f.dragon_ball_z,
             name='Dragon Ball Z',
             locale=Locale.parse('en_US'))
    })
    f.ass_kicking_pose = Trope(name='Ass Kicking Pose')
    f.cute_is_evil = Trope(name='Cute Is Evil')
    f.dragon_ball_z.tropes.update(
        {f.the_ace, f.ass_kicking_pose, f.cute_is_evil})
    with fx_session.begin():
        fx_session.add_all([
            f.attack_on_titan, f.the_ace, f.action_girl, f.behemoth_battle,
            f.ass_kicking_pose, f.dragon_ball_z
        ])
    return f
def test_work_list(fx_session, fx_flask_client):
    # case 1: non-existent document
    rv = fx_flask_client.get('/work/')
    assert_contain_text('No contents here now.', 'tbody>tr>td', rv.data)

    # case 2: add a document
    work = Work(media_type='Literature')
    work.names.update({
        Name(nameable=work,
             name='Story of Your Life',
             locale=Locale.parse('en_US'))
    })
    with fx_session.begin():
        fx_session.add(work)
    rv = fx_flask_client.get('/work/')
    assert_contain_text('Story of Your Life', 'tbody>tr>td', rv.data)
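# Note: assert_contain_text is not defined in this section. A minimal sketch
# of what such a helper could look like, assuming it parses the response body
# with lxml and checks that an element matched by the CSS selector contains
# the expected text; the project's actual helper may differ.
from lxml.html import document_fromstring


def assert_contain_text(text, css_selector, data):
    # Parse the HTML response body, collect every element matching the CSS
    # selector, and assert that at least one of them contains the text.
    doc = document_fromstring(data)
    elements = doc.cssselect(css_selector)
    assert any(text in el.text_content() for el in elements)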
def fx_edges(fx_session):
    """Create correspondences."""
    f = FixtureModule('fx_corres')
    current_time = datetime.datetime.now(datetime.timezone.utc)
    f.tvtropes_entity = Entity(
        namespace='Film',
        name='Iron Man',
        url='http://tvtropes.org/pmwiki/pmwiki.php/Film/IronMan',
        type='Work',
        last_crawled=current_time)
    f.wikipedia_film = Film(name='Iron Man',
                            label='Iron Man',
                            last_crawled=current_time)
    f.cliche_work = Work(media_type='Film')
    f.cliche_work.names.update({
        Name(nameable=f.cliche_work,
             name='Iron Man',
             locale=Locale.parse('en_US'))
    })
    f.tvtropes_edge = ClicheTvtropesEdge(confidence=0.8)
    f.tvtropes_edge.cliche_work = f.cliche_work
    f.tvtropes_edge.tvtropes_entity = f.tvtropes_entity
    f.wikipedia_edge = ClicheWikipediaEdge(confidence=0.9)
    f.wikipedia_edge.cliche_work = f.cliche_work
    f.wikipedia_edge.wikipedia_work = f.wikipedia_film
    with fx_session.begin():
        fx_session.add_all([
            f.tvtropes_entity, f.wikipedia_film, f.cliche_work,
            f.tvtropes_edge, f.wikipedia_edge
        ])
    return f
def test_align_from_cliche_wikipedia_edges(fx_session):
    current_time = datetime.datetime.now(datetime.timezone.utc)
    wikipedia_works = []
    for i in range(1, 4):
        wikipedia_works.append(
            WikiWork(name='Entity_{}'.format(i),
                     label='Entity_{}'.format(i),
                     last_crawled=current_time))
    cliche_works = []
    for i in range(1, 4):
        w = Work(media_type='Work')
        w.names.update({
            Name(nameable=w,
                 name='Work_{}'.format(i),
                 locale=Locale.parse('en_US'))
        })
        cliche_works.append(w)

    def make_edge(wikipedia_index, cliche_index, confidence):
        w = wikipedia_works[wikipedia_index]
        c = cliche_works[cliche_index]
        e = ClicheWikipediaEdge(confidence=confidence)
        e.wikipedia_work = w
        e.cliche_work = c
        return e

    edges = []
    edge_examples(edges, make_edge)
    with fx_session.begin():
        fx_session.add_all(wikipedia_works + cliche_works + edges)
    confidences = {
        m.confidence for m in matching_from_cliche_wikipedia_edges()
    }
    assert confidences == example_matching_confidences()
def test_align_from_cliche_tvtrope_edges(fx_session):
    current_time = datetime.datetime.now(datetime.timezone.utc)
    tvtropes_entities = []
    for i in range(1, 4):
        tvtropes_entities.append(
            TvEntity(namespace='Main',
                     name='Entity_{}'.format(i),
                     url='http://tvtropes.org/Main/Entity_{}'.format(i),
                     type='Work',
                     last_crawled=current_time))
    cliche_works = []
    for i in range(1, 4):
        w = Work(media_type='Work')
        w.names.update({
            Name(nameable=w,
                 name='Work_{}'.format(i),
                 locale=Locale.parse('en_US'))
        })
        cliche_works.append(w)

    def make_edge(tvtropes_index, cliche_index, confidence):
        t = tvtropes_entities[tvtropes_index]
        c = cliche_works[cliche_index]
        e = ClicheTvtropesEdge(confidence=confidence)
        e.tvtropes_entity = t
        e.cliche_work = c
        return e

    edges = []
    edge_examples(edges, make_edge)
    with fx_session.begin():
        fx_session.add_all(tvtropes_entities + cliche_works + edges)
    confidences = {m.confidence for m in matching_from_cliche_tvtropes_edges()}
    assert confidences == example_matching_confidences()
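# Note: both matching tests above share edge_examples and
# example_matching_confidences, which are not shown in this section. The
# sketch below is purely illustrative of the pattern they follow: the index
# pairs and confidence values are made up and are NOT the project's actual
# example data.
def edge_examples(edges, make_edge):
    # Build a small bipartite set of candidate edges between external
    # entities (first index) and cliche works (second index).
    for external_index, cliche_index, confidence in [
        (0, 0, 0.9),
        (0, 1, 0.5),
        (1, 1, 0.8),
        (2, 2, 0.7),
    ]:
        edges.append(make_edge(external_index, cliche_index, confidence))


def example_matching_confidences():
    # Confidences of the edges expected to survive the matching step,
    # assuming each external entity and each cliche work is matched at
    # most once (so the 0.5 edge is dropped in favor of 0.9 and 0.8).
    return {0.9, 0.8, 0.7}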
def fx_works(fx_session, fx_teams, fx_genres, fx_franchises):
    """Create *Cardcaptor Sakura* (comic book), which was made by CLAMP
    members and belongs to the comic and romance genres.

    Create *The Lord of the Rings: The Fellowship of the Ring* (film),
    which was directed by Peter Jackson and belongs to the
    *The Lord of the Rings* franchise.

    Create *The Avengers* (film), which belongs to the *Iron Man*,
    *Captain America*, *Hulk*, and *Thor* franchises.

    Create *Iron Man* (film), which belongs to the *Iron Man* franchise.

    Create *Journey to the West* (novel), a Chinese novel published in the
    16th century.

    Create *Saiyuki* (comic book), a Japanese comic book series loosely
    based on *Journey to the West*.

    Create *날아라 슈퍼보드* (anime), a Korean animation series loosely
    based on *Journey to the West*.

    """
    f = FixtureModule('fx_works')
    f += fx_teams
    f += fx_genres
    f += fx_franchises

    # create the 'Cardcaptor Sakura' comic book series
    f.cardcaptor_sakura = Work(media_type='Comic Book',
                               published_at=datetime.date(1996, 11, 22))
    f.cardcaptor_sakura.genres.update({f.comic, f.romance})
    f.cardcaptor_sakura.names.update({
        Name(nameable=f.cardcaptor_sakura,
             name='Cardcaptor Sakura',
             locale=Locale.parse('en_US'))
    })
    fx_session.add(f.cardcaptor_sakura)
    f.skura_member_asso_1 = Credit(work=f.cardcaptor_sakura,
                                   person=f.clamp_member_1,
                                   team=f.clamp,
                                   role=Role.artist)
    fx_session.add(f.skura_member_asso_1)
    f.skura_member_asso_2 = Credit(work=f.cardcaptor_sakura,
                                   person=f.clamp_member_2,
                                   team=f.clamp,
                                   role=Role.artist)
    fx_session.add(f.skura_member_asso_2)
    f.skura_member_asso_3 = Credit(work=f.cardcaptor_sakura,
                                   person=f.clamp_member_3,
                                   team=f.clamp,
                                   role=Role.artist)
    fx_session.add(f.skura_member_asso_3)
    f.skura_member_asso_4 = Credit(work=f.cardcaptor_sakura,
                                   person=f.clamp_member_4,
                                   team=f.clamp,
                                   role=Role.artist)
    fx_session.add(f.skura_member_asso_4)

    # create 'The Lord of the Rings: The Fellowship of the Ring' film
    f.lord_of_rings_film = Work(media_type='Film')
    f.lord_of_rings_film.names.update({
        Name(nameable=f.lord_of_rings_film,
             name='The Lord of the Rings: The Fellowship of the Ring',
             locale=Locale.parse('en_US'))
    })
    f.lor_film_asso_1 = Credit(work=f.lord_of_rings_film,
                               person=f.peter_jackson,
                               role=Role.director)
    fx_session.add(f.lor_film_asso_1)
    f.lord_of_rings_film.franchises.update({f.lord_of_rings})
    fx_session.add(f.lord_of_rings_film)

    # create 'The Avengers' film
    f.avengers = Work(media_type='Film')
    f.avengers.names.update({
        Name(nameable=f.avengers,
             name='The Avengers',
             locale=Locale.parse('en_US'))
    })
    f.avengers.franchises.update(
        {f.iron_man, f.captain_america, f.hulk, f.thor})
    fx_session.add(f.avengers)

    # create 'Iron Man' film
    f.iron_man_film = Work(media_type='Film')
    f.iron_man_film.names.update({
        Name(nameable=f.iron_man_film,
             name='Iron Man',
             locale=Locale.parse('en_US'))
    })
    f.iron_man_film.franchises.update({f.iron_man})
    fx_session.add(f.iron_man_film)

    # create 'Journey to the West' novel
    f.journey_west = Work(media_type='Literature')
    f.journey_west.names.update({
        Name(nameable=f.journey_west,
             name='Journey to the West',
             locale=Locale.parse('en_US')),
        Name(nameable=f.journey_west,
             name='서유기',
             locale=Locale.parse('ko_KR'))
    })
    fx_session.add(f.journey_west)

    # create 'Saiyuki' comic book series
    f.saiyuki = Work(media_type='Comic Book')
    f.saiyuki.names.update({
        Name(nameable=f.saiyuki,
             name='Saiyuki',
             locale=Locale.parse('en_US')),
        Name(nameable=f.saiyuki,
             name='환상마전 최유기',
             locale=Locale.parse('ko_KR'))
    })
    fx_session.add(f.saiyuki)

    # create '날아라 슈퍼보드' animation series
    f.superboard = Work(media_type='Animation')
    f.superboard.names.update({
        Name(nameable=f.superboard,
             name='날아라 슈퍼보드',
             locale=Locale.parse('ko_KR'))
    })
    fx_session.add(f.superboard)
    fx_session.flush()
    return f
def test_work_page(fx_session, fx_flask_client):
    # case 1: non-existent document
    rv = fx_flask_client.get('/work/1/')
    assert rv.status_code == 404

    # case 2: add a document
    work = Work(media_type='Literature')
    work.names.update({
        Name(nameable=work,
             name='Story of Your Life',
             locale=Locale.parse('en_US'))
    })
    with fx_session.begin():
        fx_session.add(work)
    rv = fx_flask_client.get(
        '/work/{}/'.format(work.canonical_name(Locale.parse('en_US')))
    )
    assert_contain_text('Story of Your Life', 'h1', rv.data)
    assert_contain_text('Story of Your Life', 'tr.name>td', rv.data)

    # case 3: set attributes
    work.published_at = datetime.date(2010, 10, 26)
    with fx_session.begin():
        fx_session.add(work)
    rv = fx_flask_client.get(
        '/work/{}/'.format(work.canonical_name(Locale.parse('en_US')))
    )
    assert_contain_text('2010-10-26', 'tr.published_at>td', rv.data)

    work.genres.add(Genre(name='Short Stories'))
    with fx_session.begin():
        fx_session.add(work)
    rv = fx_flask_client.get(
        '/work/{}/'.format(work.canonical_name(Locale.parse('en_US')))
    )
    assert_contain_text('Short Stories', 'tr.genres>td', rv.data)

    work.genres.add(Genre(name='SF'))
    with fx_session.begin():
        fx_session.add(work)
    rv = fx_flask_client.get(
        '/work/{}/'.format(work.canonical_name(Locale.parse('en_US')))
    )
    assert_contain_text('Short Stories', 'tr.genres>td', rv.data)
    assert_contain_text('SF', 'tr.genres>td', rv.data)

    author = Person()
    author.names.update({
        Name(nameable=author,
             name='Ted Chiang',
             locale=Locale.parse('en_US'))
    })
    credit = Credit(person=author, work=work, role=Role.author)
    with fx_session.begin():
        fx_session.add(credit)
    rv = fx_flask_client.get(
        '/work/{}/'.format(work.canonical_name(Locale.parse('en_US')))
    )
    assert_contain_text('Ted Chiang',
                        'tr.credits>td>table>tbody>tr>td.name',
                        rv.data)
    assert_contain_text(Role.author.value,
                        'tr.credits>td>table>tbody>tr>td.role',
                        rv.data)
    assert document_fromstring(rv.data).xpath(
        '//tr/td[@class="name"]/a[text()="Ted Chiang"]'
        '/../../td[@class="role"]/a[text()="{}"]'.format(Role.author.value)
    )
def test_complex_credits(fx_session, fx_flask_client):
    """Data: http://www.animenewsnetwork.com/encyclopedia/anime.php?id=12376"""
    work = Work(media_type='Anime')
    work.names.update({
        Name(nameable=work, name='Fate/Zero', locale=Locale.parse('en_US'))
    })

    def make_with_name(cls, name):
        ins = cls()
        ins.names.update({
            Name(nameable=ins, name=name, locale=Locale.parse('en_US'))
        })
        return ins

    author_credit = Credit(person=make_with_name(Person, 'Akihiko Uda'),
                           work=work, role=Role.author)
    ufotable = make_with_name(Team, 'ufotable')
    easter = make_with_name(Team, 'Studio Easter')
    artist_credits = [
        Credit(person=make_with_name(Person, "Aki In'yama"),
               work=work, role=Role.artist, team=ufotable),
        Credit(person=make_with_name(Person, 'Erika Okazaki'),
               work=work, role=Role.artist, team=easter),
        Credit(person=make_with_name(Person, 'Eun Kyung Seo'),
               work=work, role=Role.artist, team=easter),
        Credit(person=make_with_name(Person, 'Jeong Ji Kim'),
               work=work, role=Role.artist, team=ufotable)
    ]
    with fx_session.begin():
        fx_session.add(work)
        fx_session.add(author_credit)
        fx_session.add_all(artist_credits)
    rv = fx_flask_client.get(
        '/work/{}/'.format(work.canonical_name(Locale.parse('en_US')))
    )
    assert document_fromstring(rv.data).xpath(
        '//tr/td[@class="name"]/a[text()="Akihiko Uda"]'
        '/../../td[@class="role"]/a[text()="{}"]'.format(Role.author.value)
    )
    assert document_fromstring(rv.data).xpath(
        '//tr/td[@class="name"]/a[text()="Aki In\'yama"]'
        '/../../td[@class="team"]/a[text()="ufotable"]'
        '/../../td[@class="role"]/a[text()="{}"]'.format(Role.artist.value)
    )
    assert document_fromstring(rv.data).xpath(
        '//tr/td[@class="name"]/a[text()="Jeong Ji Kim"]'
        '/../../td[@class="role"]/a[text()="{}"]'.format(Role.artist.value)
    )
    assert len(document_fromstring(rv.data).xpath(
        '//tr/td[@class="name"]/a[text()="Jeong Ji Kim"]'
        '/../../td')) == 2
    assert document_fromstring(rv.data).xpath(
        '//tr/td[@class="name"]/a[text()="Erika Okazaki"]'
        '/../../td[@class="team"]/a[text()="Studio Easter"]'
        '/../../td[@class="role"]/a[text()="{}"]'.format(Role.artist.value)
    )
    assert document_fromstring(rv.data).xpath(
        '//tr/td[@class="name"]/a[text()="Eun Kyung Seo"]'
        '/../../td[@class="role"]/a[text()="{}"]'.format(Role.artist.value)
    )
    assert len(document_fromstring(rv.data).xpath(
        '//tr/td[@class="name"]/a[text()="Eun Kyung Seo"]'
        '/../../td')) == 2