def mark_changed_translation(changed_entities, locale):
    """Mark entities as changed, for later sync.

    :arg changed_entities: iterable of Entity instances whose translations
        changed.
    :arg locale: Locale the changes apply to.
    """
    # Materialize the existing (entity, locale) pairs into a set up front:
    # `key in <QuerySet>` performs an O(n) scan per check, while a set gives
    # O(1) membership tests.
    existing = set(
        ChangedEntityLocale.objects.values_list("entity", "locale").distinct()
    )

    # Keyed by (entity pk, locale pk) so duplicates *within* changed_entities
    # are collapsed too — otherwise bulk_create could still hit the unique
    # constraint this function is meant to protect against.
    to_create = {}
    for changed_entity in changed_entities:
        key = (changed_entity.pk, locale.pk)
        # Skip changes already recorded in the database.
        if key not in existing:
            to_create[key] = ChangedEntityLocale(
                entity=changed_entity, locale=locale
            )

    ChangedEntityLocale.objects.bulk_create(to_create.values())
def batch_edit_translations(request):
    """Perform a batch action on translations of the POSTed entities for a
    single locale.

    Expected POST parameters:
        locale      -- locale code
        action      -- one of 'approve', 'delete', 'replace'
        entities[]  -- list of entity pks
        find, replace -- only read for the 'replace' action

    Returns a JsonResponse with the number of affected translations (or an
    'error' key for a disallowed replace), HttpResponseBadRequest on missing
    parameters, HttpResponseForbidden when the user lacks the locale
    translate permission, or 404 when the locale does not exist.
    """
    try:
        l = request.POST['locale']
        action = request.POST['action']
        entity_pks = request.POST.getlist('entities[]')
    except MultiValueDictKeyError as e:
        return HttpResponseBadRequest('Bad Request: {error}'.format(error=e))

    locale = get_object_or_404(Locale, code=l)

    # Batch editing is only available to translators
    if not request.user.has_perm('base.can_translate_locale', locale):
        return HttpResponseForbidden(
            "Forbidden: You don't have permission for batch editing"
        )

    entities = (
        Entity.objects.filter(pk__in=entity_pks)
        .prefetch_resources_translations(locale)
    )

    # Collect the pk of the relevant translation for each entity; for
    # pluralized entities, one per plural form of the locale.
    translation_pks = set()
    for entity in entities:
        if entity.string_plural == "":
            translation_pks.add(entity.get_translation()['pk'])
        else:
            for plural_form in range(0, locale.nplurals or 1):
                translation_pks.add(entity.get_translation(plural_form)['pk'])

    # Drop the None placeholder (presumably produced by get_translation()
    # for entities without a translation — confirm against its definition).
    translation_pks.discard(None)
    translations = Translation.objects.filter(pk__in=translation_pks)

    # Must be executed before translations set changes, which is why
    # we need to force evaluate QuerySets by wrapping them inside list()
    def get_translations_info(translations):
        # Returns (count, translated resources, entities) snapshotted from
        # the current DB state for the given translations queryset.
        count = translations.count()
        translated_resources = list(translations.translated_resources(locale))
        changed_entities = list(Entity.objects.filter(translation__in=translations).distinct())
        return count, translated_resources, changed_entities

    if action == 'approve':
        translations = translations.filter(approved=False)
        count, translated_resources, changed_entities = get_translations_info(translations)
        translations.update(
            approved=True,
            approved_user=request.user,
            approved_date=timezone.now()
        )
    elif action == 'delete':
        count, translated_resources, changed_entities = get_translations_info(translations)
        translations.delete()
    elif action == 'replace':
        find = request.POST.get('find')
        replace = request.POST.get('replace')
        try:
            translations = translations.find_and_replace(find, replace, request.user)
        except Translation.NotAllowed:
            return JsonResponse({
                'error': 'Empty translations not allowed',
            })
        count, translated_resources, changed_entities = get_translations_info(translations)

    if count == 0:
        return JsonResponse({'count': 0})

    # Update stats
    for translated_resource in translated_resources:
        translated_resource.calculate_stats(save=False)
    bulk_update(translated_resources, update_fields=[
        'total_strings',
        'approved_strings',
        'fuzzy_strings',
        'translated_strings',
    ])

    # NOTE(review): `entity` is the last item of the entity loop above — this
    # assumes all POSTed entities belong to the same project; confirm with
    # callers before relying on it.
    project = entity.resource.project
    project.aggregate_stats()
    locale.aggregate_stats()
    ProjectLocale.objects.get(locale=locale, project=project).aggregate_stats()

    # Mark translations as changed
    changed_entities_array = []
    existing = ChangedEntityLocale.objects.values_list('entity', 'locale').distinct()
    for changed_entity in changed_entities:
        key = (changed_entity.pk, locale.pk)
        # Remove duplicate changes to prevent unique constraint violation
        if not key in existing:
            changed_entities_array.append(
                ChangedEntityLocale(entity=changed_entity, locale=locale)
            )
    ChangedEntityLocale.objects.bulk_create(changed_entities_array)

    return JsonResponse({
        'count': count
    })
def handle_upload_content(slug, code, part, f, user):
    """
    Update translations in the database from uploaded file.

    :arg str slug: Project slug.
    :arg str code: Locale code.
    :arg str part: Resource path or Subpage name.
    :arg UploadedFile f: UploadedFile instance.
    :arg User user: User uploading the file.
    """
    # Avoid circular import; someday we should refactor to avoid.
    from pontoon.sync import formats
    from pontoon.sync.changeset import ChangeSet
    from pontoon.sync.vcs.models import VCSProject
    from pontoon.base.models import (
        ChangedEntityLocale,
        Entity,
        Locale,
        Project,
        Resource,
        TranslatedResource,
        Translation,
    )

    relative_path = _get_relative_path_from_part(slug, part)
    project = get_object_or_404(Project, slug=slug)
    locale = get_object_or_404(Locale, code=code)
    resource = get_object_or_404(Resource, project__slug=slug, path=relative_path)

    # Store uploaded file to a temporary file and parse it
    extension = os.path.splitext(f.name)[1]
    with tempfile.NamedTemporaryFile(suffix=extension) as temp:
        for chunk in f.chunks():
            temp.write(chunk)
        # Parse inside the `with` block: the temp file is deleted on exit,
        # so flush and parse must happen while it still exists on disk.
        temp.flush()
        resource_file = formats.parse(temp.name)

    # Update database objects from file
    changeset = ChangeSet(project, VCSProject(project, locales=[locale]), timezone.now())
    # Two prefetches against the same relation, exposed under different
    # attribute names: all translations for the locale, and only those
    # approved before the upload started.
    entities_qs = Entity.objects.filter(
        resource__project=project,
        resource__path=relative_path,
        obsolete=False).prefetch_related(
            Prefetch('translation_set',
                     queryset=Translation.objects.filter(locale=locale),
                     to_attr='db_translations')).prefetch_related(
            Prefetch(
                'translation_set',
                queryset=Translation.objects.filter(
                    locale=locale, approved_date__lte=timezone.now()),
                to_attr='db_translations_approved_before_sync'))
    entities_dict = {entity.key: entity for entity in entities_qs}

    # Feed every uploaded translation with a matching DB entity into the
    # changeset; keys present only in the file are silently skipped.
    for vcs_translation in resource_file.translations:
        key = vcs_translation.key
        if key in entities_dict:
            entity = entities_dict[key]
            changeset.update_entity_translations_from_vcs(
                entity, locale.code, vcs_translation, user,
                entity.db_translations,
                entity.db_translations_approved_before_sync)

    changeset.bulk_create_translations()
    changeset.bulk_update_translations()
    # NOTE(review): "translaton" spelling presumably mirrors the ChangeSet
    # API — do not fix locally without renaming the ChangeSet method too.
    changeset.bulk_create_translaton_memory_entries()
    TranslatedResource.objects.get(resource=resource, locale=locale).calculate_stats()

    # Mark translations as changed
    changed_entities = {}
    existing = ChangedEntityLocale.objects.values_list('entity', 'locale').distinct()
    for t in changeset.changed_translations:
        key = (t.entity.pk, t.locale.pk)
        # Remove duplicate changes to prevent unique constraint violation
        if key not in existing:
            changed_entities[key] = ChangedEntityLocale(entity=t.entity, locale=t.locale)
    ChangedEntityLocale.objects.bulk_create(changed_entities.values())

    # Update latest translation
    if changeset.translations_to_create:
        changeset.translations_to_create[-1].update_latest_translation()
def pretranslate(self, project_pk, locales=None, entities=None):
    """
    Identifies strings without any translations and any suggestions.
    Engages TheAlgorithm (bug 1552796) to gather pretranslations.
    Stores pretranslations as suggestions (approved=False) to DB.

    :arg project_pk: the pk of the project to be pretranslated
    :arg Queryset locales: the locales for the project to be pretranslated
    :arg Queryset entites: the entities for the project to be pretranslated

    :returns: None
    """
    project = Project.objects.get(pk=project_pk)
    log.info("Fetching pretranslations for project {} started".format(
        project.name))

    if locales:
        locales = project.locales.filter(pk__in=locales)
    else:
        locales = project.locales

    # Read-only project locales are excluded from pretranslation.
    locales = (locales.filter(project_locale__readonly=False).distinct().
               prefetch_project_locale(project))

    if not entities:
        entities = Entity.objects.filter(
            resource__project=project,
            obsolete=False,
        ).prefetch_related("resource")

    # get available TranslatedResource pairs
    # Encoded as "locale_id-resource_id" strings for cheap membership tests.
    tr_pairs = (TranslatedResource.objects.filter(
        resource__project=project,
        locale__in=locales,
    ).annotate(locale_resource=Concat(
        "locale_id", V("-"), "resource_id",
        output_field=CharField())).values_list("locale_resource", flat=True).distinct())

    # Fetch all distinct locale-entity pairs for which translation exists
    translated_entities = (Translation.objects.filter(
        locale__in=locales,
        entity__in=entities,
    ).annotate(locale_entity=Concat(
        "locale_id", V("-"), "entity_id",
        output_field=CharField())).values_list("locale_entity", flat=True).distinct())
    # Force-evaluate once so the inner loop does not re-query the DB.
    translated_entities = list(translated_entities)

    translations = []

    # To keep track of changed TranslatedResources and their latest_translation
    tr_dict = {}
    tr_filter = []
    index = -1

    for locale in locales:
        log.info("Fetching pretranslations for locale {} started".format(
            locale.code))
        for entity in entities:
            locale_entity = "{}-{}".format(locale.id, entity.id)
            locale_resource = "{}-{}".format(locale.id, entity.resource.id)
            # Skip entities already translated in this locale, and entities
            # whose resource has no TranslatedResource for this locale.
            if locale_entity in translated_entities or locale_resource not in tr_pairs:
                continue

            strings = get_translations(entity, locale)

            if not strings:
                continue

            # One Translation per returned (string, plural_form, user) tuple,
            # stored as an unreviewed, fuzzy suggestion.
            for string, plural_form, user in strings:
                t = Translation(
                    entity=entity,
                    locale=locale,
                    string=string,
                    user=user,
                    approved=False,
                    fuzzy=True,
                    active=True,
                    plural_form=plural_form,
                )
                index += 1
                translations.append(t)

                if locale_resource not in tr_dict:
                    tr_dict[locale_resource] = index
                    # Add query for fetching respective TranslatedResource.
                    tr_filter.append(
                        Q(locale__id=locale.id) & Q(resource__id=entity.resource.id))

                # Update the latest translation index
                tr_dict[locale_resource] = index

        log.info("Fetching pretranslations for locale {} done".format(
            locale.code))

    if len(translations) == 0:
        return

    translations = Translation.objects.bulk_create(translations)

    # Run checks on all translations
    translation_pks = {translation.pk for translation in translations}
    bulk_run_checks(
        Translation.objects.for_checks().filter(pk__in=translation_pks))

    # Mark translations as changed
    changed_entities = {}
    existing = ChangedEntityLocale.objects.values_list("entity", "locale").distinct()
    for t in translations:
        key = (t.entity.pk, t.locale.pk)
        # Remove duplicate changes to prevent unique constraint violation
        if key not in existing:
            changed_entities[key] = ChangedEntityLocale(entity=t.entity, locale=t.locale)
    ChangedEntityLocale.objects.bulk_create(changed_entities.values())

    # Update latest activity and stats for changed instances.
    update_changed_instances(tr_filter, tr_dict, translations)

    log.info("Fetching pretranslations for project {} done".format(
        project.name))
def batch_edit_translations(request):
    """Perform a batch action on translations of the POSTed entities for a
    single locale.

    Expected POST parameters:
        locale   -- locale code
        action   -- one of 'approve', 'reject', 'replace'
        entities -- comma-separated entity pks (parsed via utils.split_ints)
        find, replace -- only read for the 'replace' action

    Returns a JsonResponse with the number of affected translations (or an
    'error' key for a disallowed replace), HttpResponseBadRequest on missing
    parameters, HttpResponseForbidden when the user cannot translate one of
    the affected projects, or 404 when the locale does not exist.
    """
    try:
        l = request.POST['locale']
        action = request.POST['action']
        entity_pks = utils.split_ints(request.POST.get('entities', ''))
    except MultiValueDictKeyError as e:
        return HttpResponseBadRequest('Bad Request: {error}'.format(error=e))

    locale = get_object_or_404(Locale, code=l)

    entities = (Entity.objects.filter(pk__in=entity_pks).prefetch_related(
        'resource').prefetch_translations(locale))

    if not entities.exists():
        return JsonResponse({'count': 0})

    # Batch editing is only available to translators.
    # Check if user has translate permissions for all of the projects in passed entities.
    projects = Project.objects.filter(pk__in=entities.values_list(
        'resource__project__pk', flat=True).distinct())
    for project in projects:
        if not request.user.can_translate(project=project, locale=locale):
            return HttpResponseForbidden(
                "Forbidden: You don't have permission for batch editing")

    # Collect the pk of the relevant translation for each entity; for
    # pluralized entities, one per plural form of the locale.
    translation_pks = set()
    for entity in entities:
        if entity.string_plural == "":
            translation_pks.add(entity.get_translation()['pk'])
        else:
            for plural_form in range(0, locale.nplurals or 1):
                translation_pks.add(entity.get_translation(plural_form)['pk'])

    # Drop the None placeholder (presumably produced by get_translation()
    # for entities without a translation — confirm against its definition).
    translation_pks.discard(None)
    translations = Translation.objects.filter(pk__in=translation_pks)

    latest_translation_pk = None
    changed_translation_pks = []

    # Must be executed before translations set changes, which is why
    # we need to force evaluate QuerySets by wrapping them inside list()
    def get_translations_info(translations, changed_entities=None):
        # Returns (count, translated resources, entities) snapshotted from
        # the current DB state; pass changed_entities explicitly to skip
        # the entity query (the 'reject' branch passes []).
        count = translations.count()
        translated_resources = list(translations.translated_resources(locale))
        if changed_entities is None:
            changed_entities = list(
                Entity.objects.filter(translation__in=translations).distinct())
        return count, translated_resources, changed_entities

    if action == 'approve':
        approved = translations.filter(approved=False)
        changed_translation_pks = list(approved.values_list('pk', flat=True))
        if changed_translation_pks:
            latest_translation_pk = approved.last().pk
        count, translated_resources, changed_entities = get_translations_info(
            approved)
        approved.update(
            approved=True,
            approved_user=request.user,
            approved_date=timezone.now(),
            rejected=False,
            rejected_user=None,
            rejected_date=None,
            fuzzy=False,
        )
        # Reject all other non-rejected translations.
        suggestions = Translation.objects.filter(
            locale=locale,
            entity__pk__in=entities,
            approved=False,
            rejected=False,
        )
        suggestions.update(
            rejected=True,
            rejected_user=request.user,
            rejected_date=timezone.now(),
            fuzzy=False,
        )
    elif action == 'reject':
        suggestions = Translation.objects.filter(locale=locale,
                                                 entity__pk__in=entities,
                                                 approved=False,
                                                 rejected=False)
        count, translated_resources, changed_entities = get_translations_info(
            suggestions, [])
        # Rejected suggestions must not linger in translation memory.
        TranslationMemoryEntry.objects.filter(
            translation__in=suggestions).delete()
        suggestions.update(
            rejected=True,
            rejected_user=request.user,
            rejected_date=timezone.now(),
            fuzzy=False,
        )
    elif action == 'replace':
        find = request.POST.get('find')
        replace = request.POST.get('replace')
        try:
            translations, changed_translations = translations.find_and_replace(
                find, replace, request.user)
            changed_translation_pks = [c.pk for c in changed_translations]
            if changed_translation_pks:
                latest_translation_pk = max(changed_translation_pks)
        except Translation.NotAllowed:
            return JsonResponse({
                'error': 'Empty translations not allowed',
            })
        count, translated_resources, changed_entities = get_translations_info(
            translations)

    if count == 0:
        return JsonResponse({'count': 0})

    # Update stats
    for translated_resource in translated_resources:
        translated_resource.calculate_stats(save=False)
    bulk_update(translated_resources, update_fields=[
        'total_strings',
        'approved_strings',
        'fuzzy_strings',
        'translated_strings',
    ])

    # NOTE(review): `entity` is the last item of the entity loop above — this
    # assumes all POSTed entities belong to the same project; confirm with
    # callers before relying on it.
    project = entity.resource.project
    project.aggregate_stats()
    locale.aggregate_stats()
    ProjectLocale.objects.get(locale=locale, project=project).aggregate_stats()

    # Mark translations as changed
    changed_entities_array = []
    existing = ChangedEntityLocale.objects.values_list('entity', 'locale').distinct()
    for changed_entity in changed_entities:
        key = (changed_entity.pk, locale.pk)
        # Remove duplicate changes to prevent unique constraint violation
        if key not in existing:
            changed_entities_array.append(
                ChangedEntityLocale(entity=changed_entity, locale=locale))
    ChangedEntityLocale.objects.bulk_create(changed_entities_array)

    # Update latest translation
    if latest_translation_pk:
        Translation.objects.get(
            pk=latest_translation_pk).update_latest_translation()

    # Update translation memory
    memory_entries = [
        TranslationMemoryEntry(
            source=t.entity.string,
            target=t.string,
            locale=locale,
            entity=t.entity,
            translation=t,
            project=project,
        )
        for t in (Translation.objects.filter(pk__in=changed_translation_pks).
                  prefetch_related('entity__resource'))
    ]
    TranslationMemoryEntry.objects.bulk_create(memory_entries)

    return JsonResponse({'count': count})
def handle_upload_content(slug, code, part, f, user):
    """
    Update translations in the database from uploaded file.

    :arg str slug: Project slug.
    :arg str code: Locale code.
    :arg str part: Resource path or Subpage name.
    :arg UploadedFile f: UploadedFile instance.
    :arg User user: User uploading the file.
    """
    # Avoid circular import; someday we should refactor to avoid.
    from pontoon.sync import formats
    from pontoon.sync.changeset import ChangeSet
    from pontoon.sync.vcs.models import VCSProject
    from pontoon.base.models import (
        ChangedEntityLocale,
        Entity,
        Locale,
        Project,
        Resource,
        TranslatedResource,
        Translation,
    )

    relative_path = _get_relative_path_from_part(slug, part)
    project = get_object_or_404(Project, slug=slug)
    locale = get_object_or_404(Locale, code=code)
    resource = get_object_or_404(Resource, project__slug=slug, path=relative_path)

    # Store uploaded file to a temporary file and parse it
    extension = os.path.splitext(f.name)[1]
    with tempfile.NamedTemporaryFile(
        # "strings" prefix for .xml files — presumably so the format parser
        # recognizes the file as Android strings.xml; confirm in formats.parse.
        prefix="strings" if extension == ".xml" else "",
        suffix=extension,
    ) as temp:
        for chunk in f.chunks():
            temp.write(chunk)
        # Parse inside the `with` block: the temp file is deleted on exit,
        # so flush and parse must happen while it still exists on disk.
        temp.flush()
        resource_file = formats.parse(temp.name)

    # Update database objects from file
    changeset = ChangeSet(project, VCSProject(project, locales=[locale]), timezone.now())
    # Two prefetches against the same relation, exposed under different
    # attribute names: all translations for the locale, and only those
    # approved before the upload started.
    entities_qs = (Entity.objects.filter(
        resource__project=project,
        resource__path=relative_path,
        obsolete=False).prefetch_related(
            Prefetch(
                "translation_set",
                queryset=Translation.objects.filter(locale=locale),
                to_attr="db_translations",
            )).prefetch_related(
            Prefetch(
                "translation_set",
                queryset=Translation.objects.filter(
                    locale=locale, approved_date__lte=timezone.now()),
                to_attr="db_translations_approved_before_sync",
            )))
    entities_dict = {entity.key: entity for entity in entities_qs}

    # Feed every uploaded translation with a matching DB entity into the
    # changeset; keys present only in the file are silently skipped.
    for vcs_translation in resource_file.translations:
        key = vcs_translation.key
        if key in entities_dict:
            entity = entities_dict[key]
            changeset.update_entity_translations_from_vcs(
                entity, locale.code, vcs_translation, user,
                entity.db_translations,
                entity.db_translations_approved_before_sync,
            )

    changeset.bulk_create_translations()
    changeset.bulk_update_translations()
    changeset.bulk_log_actions()

    if changeset.changed_translations:
        # Update 'active' status of all changed translations and their siblings,
        # i.e. translations of the same entity to the same locale.
        changed_pks = {t.pk for t in changeset.changed_translations}
        (Entity.objects.filter(
            translation__pk__in=changed_pks).reset_active_translations(
                locale=locale))

    # Run checks and create TM entries for translations that pass them
    valid_translations = changeset.bulk_check_translations()
    changeset.bulk_create_translation_memory_entries(valid_translations)

    # Remove any TM entries of translations that got rejected
    changeset.bulk_remove_translation_memory_entries()

    TranslatedResource.objects.get(resource=resource, locale=locale).calculate_stats()

    # Mark translations as changed
    changed_entities = {}
    existing = ChangedEntityLocale.objects.values_list("entity", "locale").distinct()
    for t in changeset.changed_translations:
        key = (t.entity.pk, t.locale.pk)
        # Remove duplicate changes to prevent unique constraint violation
        if key not in existing:
            changed_entities[key] = ChangedEntityLocale(entity=t.entity, locale=t.locale)
    ChangedEntityLocale.objects.bulk_create(changed_entities.values())

    # Update latest translation
    if changeset.translations_to_create:
        changeset.translations_to_create[-1].update_latest_translation()