def test_view_tm_minimal_quality(client, locale_a, resource_a):
    """
    The view should return no entries when every potential match falls
    below the 70% minimum quality threshold.
    """
    entities = [
        EntityFactory(resource=resource_a, string="Entity %s" % i, order=i)
        for i in range(5)
    ]
    for entity in entities:
        TranslationMemoryFactory.create(
            entity=entity,
            source="source %s" % entity.string,
            target="target %s" % entity.string,
            locale=locale_a,
        )

    # Query text shares nothing with any stored source string.
    response = client.get(
        '/translation-memory/',
        {'text': 'no match', 'pk': entities[0].pk, 'locale': locale_a.code},
    )
    assert response.status_code == 200
    assert json.loads(response.content) == []
def test_view_translation_memory_best_quality_entry(
    client, locale_a, resource_a,
):
    """
    Translation memory should return result entries aggregated by
    translation string.
    """
    entities = [
        EntityFactory(resource=resource_a, string="Entity %s" % i, order=i)
        for i in range(3)
    ]
    tm = TranslationMemoryFactory.create(
        entity=entities[0],
        source="aaa",
        target="ccc",
        locale=locale_a,
    )
    TranslationMemoryFactory.create(
        entity=entities[1],
        source="aaa",
        target="ddd",
        locale=locale_a,
    )
    TranslationMemoryFactory.create(
        entity=entities[2],
        source="bbb",
        target="ccc",
        locale=locale_a,
    )

    response = client.get(
        "/translation-memory/",
        {"text": "aaa", "pk": tm.entity.pk, "locale": locale_a.code},
    )
    assert json.loads(response.content) == [
        {"count": 1, "source": u"aaa", "quality": u"100", "target": u"ddd"}
    ]
def test_view_translation_memory_translation_counts(
    client, locale_a, resource_a,
):
    """
    Translation memory should aggregate identical translation strings from
    different entities and count up their occurrences.
    """
    entities = [
        EntityFactory(resource=resource_a, string=x, order=i)
        for i, x in enumerate(["abaa", "abaa", "aaab", "aaab"])
    ]
    # One memory entry per entity; all share the same target string.
    tm = TranslationMemoryFactory.create(
        entity=entities[0],
        source=entities[0].string,
        target="ccc",
        locale=locale_a,
    )
    for entity in entities[1:]:
        TranslationMemoryFactory.create(
            entity=entity,
            source=entity.string,
            target="ccc",
            locale=locale_a,
        )

    response = client.get(
        "/translation-memory/",
        {"text": "aaaa", "pk": tm.entity.pk, "locale": locale_a.code},
    )
    result = json.loads(response.content)
    # Both "abaa" and "aaab" are 75% matches of "aaaa"; the original
    # membership tuple listed "aaab" twice by mistake.
    assert result[0].pop("source") in ("abaa", "aaab")
    assert result == [{u"count": 3, u"quality": u"75", u"target": u"ccc"}]
def test_get_translations(gt_mock, locale_b, resource_a, google_translate_locale):
    """
    get_translations() should prefer a 100% translation-memory match, fall
    back to Google Translate only for locales that support it, and return
    one suggestion per plural form for pluralized entities.
    """
    entities = [
        EntityFactory(resource=resource_a, string=x, order=i)
        for i, x in enumerate(["abaa", "abac", "aaab", "abab"])
    ]

    # Make entities 1 and 3 pluralized.
    entities[1].string_plural = entities[1].string
    entities[3].string_plural = entities[3].string
    entities[1].save()
    entities[3].save()

    google_translate_locale.cldr_plurals = "1, 2"
    google_translate_locale.save()

    # Seed translation memory for the first two entities in both locales.
    for entity in entities[0:2]:
        TranslationMemoryFactory.create(
            entity=entity,
            source=entity.string,
            target=entity.string,
            locale=locale_b,
        )
        TranslationMemoryFactory.create(
            entity=entity,
            source=entity.string,
            target=entity.string,
            locale=google_translate_locale,
        )

    # Mock the return value of get_google_translate_data.
    gt_mock.return_value = {
        "status": True,
        "translation": "gt_translation",
    }

    tm_user = User.objects.get(email="*****@*****.**")
    gt_user = User.objects.get(email="*****@*****.**")

    # 100% match exists in translation memory.
    response_a = get_translations(entities[0], locale_b)
    response_b = get_translations(entities[0], google_translate_locale)
    assert response_a == [(entities[0].string, None, tm_user)]
    assert response_b == [(entities[0].string, None, tm_user)]

    # No 100% match and locale.google_translate_code is None.
    response = get_translations(entities[2], locale_b)
    assert response == []

    # No 100% match and locale.google_translate_code is not None.
    response = get_translations(entities[2], google_translate_locale)
    assert response == [("gt_translation", None, gt_user)]

    # Entity.string_plural is not None.
    response_a = get_translations(entities[1], google_translate_locale)
    response_b = get_translations(entities[3], google_translate_locale)
    assert response_a == [
        (entities[1].string, 0, tm_user),
        (entities[1].string, 1, tm_user),
    ]
    assert response_b == [
        ("gt_translation", 0, gt_user),
        ("gt_translation", 1, gt_user),
    ]
def test_view_concordance_search(client, project_a, locale_a, resource_a):
    """Concordance search should match case-insensitively on source and
    target strings and report the names of the matching projects."""
    entities = [
        EntityFactory(resource=resource_a, string=x, order=i)
        for i, x in enumerate(["abaa", "aBaf", "aaAb", "aAab"])
    ]
    TranslationMemoryFactory.create(
        entity=entities[0],
        source=entities[0].string,
        target="ccc",
        locale=locale_a,
        project=project_a,
    )
    TranslationMemoryFactory.create(
        entity=entities[1],
        source=entities[1].string,
        target="cCDd",
        locale=locale_a,
        project=project_a,
    )

    # Case-insensitive match on the target string.
    response = client.get(
        "/concordance-search/",
        {"text": "cdd", "locale": locale_a.code},
    )
    assert json.loads(response.content) == {
        "results": [
            {"source": "aBaf", "target": "cCDd", "project_names": [project_a.name]}
        ],
        "has_next": False,
    }

    # Match on the source string.
    response = client.get(
        "/concordance-search/",
        {"text": "abaa", "locale": locale_a.code},
    )
    assert json.loads(response.content) == {
        "results": [
            {"source": "abaa", "target": "ccc", "project_names": [project_a.name]}
        ],
        "has_next": False,
    }
def test_lookup_collation(resource_a, locale_a):
    """
    Filter entities and translations according to collation.
    """
    entity = EntityFactory.create(resource=resource_a, string="string")
    entity_args = [
        {"string": u"First string", "comment": u"random Strıng"},
        {"string": u"Second strİng", "comment": u"random string"},
        {"string": u"Third Strıng", "comment": u"random strİng"},
    ]
    entities = [
        EntityFactory(resource=resource_a, string=x["string"], comment=x["comment"])
        for x in entity_args
    ]
    translation_args = [
        u"this is string",
        u"this is STRİNG",
        u"this is Strıng",
        u"this is StrInG",
        u"this is sTriNg",
    ]
    translations = [
        TranslationFactory(entity=entity, locale=locale_a, string=s)
        for s in translation_args
    ]

    # Check if 'Iı' and 'İi' are appropriately distinguished and filtered
    # according to Turkish (tr_tr) collation.
    assert set(
        resource_a.entities.filter(string__icontains_collate=(u"string", "tr_tr"))
    ) == set([entities[n] for n in [0, 1]] + [entity])
    assert set(
        resource_a.entities.filter(comment__icontains_collate=(u"strİng", "tr_tr"))
    ) == set(entities[n] for n in [1, 2])
    # NOTE: the original repeated the next assertion twice verbatim; the
    # duplicate has been removed.
    assert set(
        Translation.objects.filter(string__icontains_collate=(u"string", "tr_tr"))
    ) == set(translations[n] for n in [0, 1, 4])
    assert set(
        Translation.objects.filter(string__icontains_collate=(u"strİng", "tr_tr"))
    ) == set(translations[n] for n in [0, 1, 4])
    assert set(
        Translation.objects.filter(string__icontains_collate=(u"strıng", "tr_tr"))
    ) == set(translations[n] for n in [2, 3])

    # Check that differentiation fails without any collation (C).
    assert set(
        Translation.objects.filter(string__icontains_collate=(u"string", "C"))
    ) == set(translations[n] for n in [0, 3, 4])

    # Compare the icontains_collate query with a regular icontains query.
    assert list(Translation.objects.filter(string__icontains=u"string")) == [
        translations[n] for n in [0, 2, 3, 4]
    ]
def test_view_tm_translation_counts(
    client, locale_a, resource_a,
):
    """
    Translation memory should aggregate identical translation strings from
    different entities and count up their occurrences.
    """
    entities = [
        EntityFactory(resource=resource_a, string=x, order=i)
        for i, x in enumerate(['abaa', 'abaa', 'aaab', 'aaab'])
    ]
    # One memory entry per entity; all share the same target string.
    tm = TranslationMemoryFactory.create(
        entity=entities[0],
        source=entities[0].string,
        target='ccc',
        locale=locale_a,
    )
    for entity in entities[1:]:
        TranslationMemoryFactory.create(
            entity=entity,
            source=entity.string,
            target='ccc',
            locale=locale_a,
        )

    response = client.get(
        '/translation-memory/',
        {'text': 'aaaa', 'pk': tm.entity.pk, 'locale': locale_a.code},
    )
    result = json.loads(response.content)
    # Both 'abaa' and 'aaab' are 75% matches of 'aaaa'; the original
    # membership tuple listed 'aaab' twice by mistake.
    assert result[0].pop('source') in ('abaa', 'aaab')
    assert result == [{u'count': 3, u'quality': 75.0, u'target': u'ccc'}]
def test_view_concordance_search(client, project_a, locale_a, resource_a):
    """Concordance search should match entries on source and target
    strings and report a match quality plus the project name."""
    entities = [
        EntityFactory(resource=resource_a, string=x, order=i)
        for i, x in enumerate(["abaa", "abaf", "aaab", "aaab"])
    ]
    TranslationMemoryFactory.create(
        entity=entities[0],
        source=entities[0].string,
        target="ccc",
        locale=locale_a,
        project=project_a,
    )
    TranslationMemoryFactory.create(
        entity=entities[1],
        source=entities[1].string,
        target="ccdd",
        locale=locale_a,
        project=project_a,
    )

    # Match on the target string.
    response = client.get(
        "/concordance-search/",
        {"text": "cdd", "locale": locale_a.code},
    )
    assert json.loads(response.content) == [
        {
            u"project_name": project_a.name,
            u"quality": 86,
            u"source": u"abaf",
            u"target": u"ccdd",
        }
    ]

    # Match on the source string.
    response = client.get(
        "/concordance-search/",
        {"text": "abaa", "locale": locale_a.code},
    )
    assert json.loads(response.content) == [
        {
            u"project_name": project_a.name,
            u"quality": 100,
            u"source": u"abaa",
            u"target": u"ccc",
        }
    ]
def entity_test_models(translation_a, locale_b):
    """This fixture provides:

    - 2 translations of a plural entity
    - 1 translation of a non-plural entity
    - A subpage that contains the plural entity
    """
    entity_a = translation_a.entity
    locale_a = translation_a.locale
    project_a = entity_a.resource.project

    locale_a.cldr_plurals = "0,1"
    locale_a.save()

    # Make translation_a the active translation of the first plural form.
    translation_a.plural_form = 0
    translation_a.active = True
    translation_a.save()

    resource_x = ResourceFactory(project=project_a, path="resourceX.po")

    entity_a.string = "Entity zero"
    entity_a.key = entity_a.string
    entity_a.string_plural = "Plural %s" % entity_a.string
    entity_a.order = 0
    entity_a.save()

    entity_b = EntityFactory(
        resource=resource_x,
        string="entity_b",
        key="Key%sentity_b" % KEY_SEPARATOR,
        order=0,
    )

    translation_a_pl = TranslationFactory(
        entity=entity_a,
        locale=locale_a,
        plural_form=1,
        active=True,
        string="Plural %s" % translation_a.string,
    )
    translation_x = TranslationFactory(
        entity=entity_b,
        locale=locale_a,
        active=True,
        string="Translation %s" % entity_b.string,
    )

    subpage_x = SubpageFactory(project=project_a, name="Subpage")
    subpage_x.resources.add(entity_a.resource)

    return translation_a, translation_a_pl, translation_x, subpage_x
def test_view_concordance_search_remove_duplicates(
    client, project_a, locale_a, resource_a
):
    """Check Concordance search doesn't produce duplicated search results."""
    entities = [
        EntityFactory(resource=resource_a, string=x, order=i)
        for i, x in enumerate(["abaa", "abaf"])
    ]
    TranslationMemoryFactory.create(
        entity=entities[0],
        source=entities[0].string,
        target="ccc",
        locale=locale_a,
        project=project_a,
    )
    TranslationMemoryFactory.create(
        entity=entities[1],
        source=entities[1].string,
        target="ccc",
        locale=locale_a,
        project=project_a,
    )
    # Two identical memory entries: only one result should come back.
    for _ in range(2):
        TranslationMemoryFactory.create(
            entity=entities[1],
            source=entities[1].string,
            target="cccbbb",
            locale=locale_a,
            project=project_a,
        )

    response = client.get(
        "/concordance-search/",
        {"text": "ccc", "locale": locale_a.code},
    )
    assert json.loads(response.content) == {
        "results": [
            {"source": "abaa", "target": "ccc", "project_names": [project_a.name]},
            {"source": "abaf", "target": "ccc", "project_names": [project_a.name]},
            {"source": "abaf", "target": "cccbbb", "project_names": [project_a.name]},
        ],
        "has_next": False,
    }
def test_view_tm_best_quality_entry(
    client, locale_a, resource_a,
):
    """
    Translation memory should return result entries aggregated by
    translation string.
    """
    entities = [
        EntityFactory(resource=resource_a, string='Entity %s' % i, order=i)
        for i in range(3)
    ]
    tm = TranslationMemoryFactory.create(
        entity=entities[0],
        source='aaa',
        target='ccc',
        locale=locale_a,
    )
    TranslationMemoryFactory.create(
        entity=entities[1],
        source='aaa',
        target='ddd',
        locale=locale_a,
    )
    TranslationMemoryFactory.create(
        entity=entities[2],
        source='bbb',
        target='ccc',
        locale=locale_a,
    )

    response = client.get(
        '/translation-memory/',
        {'text': 'aaa', 'pk': tm.entity.pk, 'locale': locale_a.code},
    )
    assert json.loads(response.content) == [
        {'count': 1, 'source': 'aaa', 'quality': 100.0, 'target': 'ddd'}
    ]
def test_handle_term_update_definition(
    create_entity_mock,
    obsolete_entity_mock,
    update_terminology_project_stats_mock,
    localizable_term,
):
    """
    If a localizable term's part_of_speech, definition or usage changes,
    Entity.comment gets updated and no other changes are made.
    """
    entity = EntityFactory()
    localizable_term.entity = entity
    localizable_term.definition = "Changed definition"

    localizable_term.handle_term_update()

    assert localizable_term.entity.comment == "Part_of_speech. Changed definition."
    # None of the entity-management side effects should have fired.
    assert create_entity_mock.call_count == 0
    assert obsolete_entity_mock.call_count == 0
    assert update_terminology_project_stats_mock.call_count == 0
def test_view_concordance_search_pagination(client, project_a, locale_a, resource_a):
    """Concordance search results should paginate via `limit` and `page`."""
    entities = [
        EntityFactory(resource=resource_a, string=x, order=i)
        for i, x in enumerate(["abaa", "abaf"])
    ]
    TranslationMemoryFactory.create(
        entity=entities[0],
        source=entities[0].string,
        target="ccc",
        locale=locale_a,
        project=project_a,
    )
    # Duplicated entry — results are de-duplicated, so page 2 has one item.
    for _ in range(2):
        TranslationMemoryFactory.create(
            entity=entities[1],
            source=entities[1].string,
            target="cccbbb",
            locale=locale_a,
            project=project_a,
        )

    # First page of one result: more results remain.
    response = client.get(
        "/concordance-search/",
        {"text": "ccc", "locale": locale_a.code, "limit": 1},
    )
    assert json.loads(response.content) == {
        "results": [
            {"source": "abaa", "target": "ccc", "project_names": [project_a.name]},
        ],
        "has_next": True,
    }

    # Second (last) page.
    response = client.get(
        "/concordance-search/",
        {"text": "ccc", "locale": locale_a.code, "limit": 1, "page": 2},
    )
    assert json.loads(response.content) == {
        "results": [
            {"source": "abaf", "target": "cccbbb", "project_names": [project_a.name]},
        ],
        "has_next": False,
    }

    # Check a query that should return no results.
    response = client.get(
        "/concordance-search/",
        {"text": "TEST", "locale": locale_a.code, "limit": 1, "page": 2},
    )
    assert json.loads(response.content) == {
        "results": [],
        "has_next": False,
    }
def entity_test_search(resource_a, locale_a):
    """This fixture provides:

    - 7 translated entities
    - A lambda for searching for entities using Entity.for_project_locale
    """
    TranslatedResourceFactory.create(locale=locale_a, resource=resource_a)

    # (string, string_plural, comment) triples for the test entities.
    entity_args = [
        ("First entity string", "First plural string", "random notes"),
        ("Second entity string", "Second plural string", "random"),
        (
            u"Third entity string with some twist: ZAŻÓŁĆ GĘŚLĄ",
            "Third plural",
            "even more random notes",
        ),
        (
            "Entity with first string",
            "Entity with plural first string",
            "random notes",
        ),
        ("First Entity", "First plural entity", "random notes"),
        ("First Entity with string", "First plural entity", "random notes"),
        ('Entity with quoted "string"', "plural entity", "random notes"),
    ]
    entities = [
        EntityFactory(
            resource=resource_a,
            string=string,
            string_plural=string_plural,
            comment=comment,
            order=i,
        )
        for i, (string, string_plural, comment) in enumerate(entity_args)
    ]

    translation_strings = [
        "First translation",
        "Second translation",
        "Third translation",
        "Fourth translation",
        "Fifth translation",
        "Sixth translation",
        "Seventh translation",
    ]
    for entity, string in zip(entities, translation_strings):
        TranslationFactory.create(locale=locale_a, string=string, entity=entity)

    return (
        entities,
        lambda q: list(
            Entity.for_project_locale(resource_a.project, locale_a, search=q)
        ),
    )
def test_mgr_entity_reset_active_translations(resource_a, locale_a):
    """reset_active_translations() should pick the correct active
    translation per entity: approved > fuzzy > latest unreviewed, per
    plural form."""
    locale_a.cldr_plurals = '1,5'
    locale_a.save()

    entities = [
        EntityFactory.create(resource=resource_a, string="testentity%s" % i)
        for i in range(0, 4)
    ] + [
        EntityFactory(
            resource=resource_a,
            string='testentity4',
            string_plural='testentity4plural',
        )
    ]
    entities_qs = Entity.objects.filter(pk__in=[e.pk for e in entities])

    # Entity 0: no translations.

    # Entity 1: two unreviewed translations.
    TranslationFactory.create(
        locale=locale_a,
        entity=entities[1],
        string=entities[1].string + ' translation1',
    )
    TranslationFactory.create(
        locale=locale_a,
        entity=entities[1],
        string=entities[1].string + ' translation2',
    )

    # Entity 2: approved and unreviewed translations.
    TranslationFactory.create(
        locale=locale_a,
        entity=entities[2],
        string=entities[2].string + ' translation1',
        approved=True,
    )
    TranslationFactory.create(
        locale=locale_a,
        entity=entities[2],
        string=entities[2].string + ' translation2',
    )

    # Entity 3: fuzzy and unreviewed translations.
    TranslationFactory.create(
        locale=locale_a,
        entity=entities[3],
        string=entities[3].string + ' translation1',
    )
    TranslationFactory.create(
        locale=locale_a,
        entity=entities[3],
        string=entities[3].string + ' translation2',
        fuzzy=True,
    )

    # Entity 4 (pluralized): approved and unreviewed translations for the
    # first form, a single unreviewed translation for the second form.
    TranslationFactory.create(
        locale=locale_a,
        entity=entities[4],
        plural_form=0,
        string=entities[4].string + ' translation1',
        approved=True,
    )
    TranslationFactory.create(
        locale=locale_a,
        entity=entities[4],
        plural_form=0,
        string=entities[4].string + ' translation2',
    )
    TranslationFactory.create(
        locale=locale_a,
        entity=entities[4],
        plural_form=1,
        string=entities[4].string_plural + ' translation1plural',
    )

    entities_qs.reset_active_translations(locale=locale_a)

    # Entity 0: no active translations.
    assert entities[0].translation_set.filter(active=True).count() == 0

    # Entity 1: the latest translation is active.
    assert (
        entities[1].translation_set.get(active=True).string
        == entities[1].string + ' translation2'
    )

    # Entity 2: the approved translation is active.
    assert (
        entities[2].translation_set.get(active=True).string
        == entities[2].string + ' translation1'
    )

    # Entity 3: the fuzzy translation is active.
    assert (
        entities[3].translation_set.get(active=True).string
        == entities[3].string + ' translation2'
    )

    # Entity 4 (pluralized): the approved translation is active for the
    # first form, the single unreviewed translation for the second form.
    active = entities[4].translation_set.filter(active=True)
    assert active[0].string == entities[4].string + ' translation1'
    assert active[1].string == entities[4].string_plural + ' translation1plural'
def entity_test_search(resource_a, locale_a):
    """This fixture provides:

    - 7 translated entities
    - A lambda for searching for entities using Entity.for_project_locale
    """
    TranslatedResourceFactory.create(locale=locale_a, resource=resource_a)

    # (string, string_plural, comment) triples for the test entities.
    entity_args = [
        ('First entity string', 'First plural string', 'random notes'),
        ('Second entity string', 'Second plural string', 'random'),
        (
            u'Third entity string with some twist: ZAŻÓŁĆ GĘŚLĄ',
            'Third plural',
            'even more random notes',
        ),
        (
            'Entity with first string',
            'Entity with plural first string',
            'random notes',
        ),
        ('First Entity', 'First plural entity', 'random notes'),
        ('First Entity with string', 'First plural entity', 'random notes'),
        ('Entity with quoted "string"', 'plural entity', 'random notes'),
    ]
    entities = [
        EntityFactory(
            resource=resource_a,
            string=string,
            string_plural=string_plural,
            comment=comment,
            order=i,
        )
        for i, (string, string_plural, comment) in enumerate(entity_args)
    ]

    translation_strings = [
        'First translation',
        'Second translation',
        'Third translation',
        'Fourth translation',
        'Fifth translation',
        'Sixth translation',
        'Seventh translation',
    ]
    for entity, string in zip(entities, translation_strings):
        TranslationFactory.create(locale=locale_a, string=string, entity=entity)

    return (
        entities,
        lambda q: list(
            Entity.for_project_locale(resource_a.project, locale_a, search=q)
        ),
    )
def test_lookup_collation(resource_a, locale_a):
    """
    Filter entities and translations according to collation.
    """
    entity = EntityFactory.create(resource=resource_a, string="string")
    entity_args = [
        {'string': u'First string', 'comment': u'random Strıng'},
        {'string': u'Second strİng', 'comment': u'random string'},
        {'string': u'Third Strıng', 'comment': u'random strİng'},
    ]
    entities = [
        EntityFactory(resource=resource_a, string=x['string'], comment=x['comment'])
        for x in entity_args
    ]
    translation_args = [
        u'this is string',
        u'this is STRİNG',
        u'this is Strıng',
        u'this is StrInG',
        u'this is sTriNg',
    ]
    translations = [
        TranslationFactory(entity=entity, locale=locale_a, string=s)
        for s in translation_args
    ]

    # Check if 'Iı' and 'İi' are appropriately distinguished and filtered
    # according to Turkish (tr_tr) collation.
    assert set(
        Entity.objects.filter(string__icontains_collate=(u'string', 'tr_tr'))
    ) == set([entities[n] for n in [0, 1]] + [entity])
    assert set(
        Entity.objects.filter(comment__icontains_collate=(u'strİng', 'tr_tr'))
    ) == set(entities[n] for n in [1, 2])
    # NOTE: the original repeated the next assertion twice verbatim; the
    # duplicate has been removed.
    assert set(
        Translation.objects.filter(string__icontains_collate=(u'string', 'tr_tr'))
    ) == set(translations[n] for n in [0, 1, 4])
    assert set(
        Translation.objects.filter(string__icontains_collate=(u'strİng', 'tr_tr'))
    ) == set(translations[n] for n in [0, 1, 4])
    assert set(
        Translation.objects.filter(string__icontains_collate=(u'strıng', 'tr_tr'))
    ) == set(translations[n] for n in [2, 3])

    # Check that differentiation fails without any collation (C).
    assert set(
        Translation.objects.filter(string__icontains_collate=(u'string', 'C'))
    ) == set(translations[n] for n in [0, 3, 4])

    # Compare the icontains_collate query with a regular icontains query.
    assert list(Translation.objects.filter(string__icontains=u'string')) == [
        translations[n] for n in [0, 2, 3, 4]
    ]