import os
import tempfile

from django.db.models import Prefetch
from django.shortcuts import get_object_or_404
from django.utils import timezone


def handle_upload_content(slug, code, part, f, user):
    """
    Update translations in the database from an uploaded file.

    :arg str slug: Project slug.
    :arg str code: Locale code.
    :arg str part: Resource path or Subpage name.
    :arg UploadedFile f: UploadedFile instance.
    :arg User user: User uploading the file.
    """
    # Imported locally to avoid a circular import; this should eventually be
    # refactored away.
    from pontoon.sync import formats
    from pontoon.sync.changeset import ChangeSet
    from pontoon.sync.vcs.models import VCSProject
    from pontoon.base.models import (
        ChangedEntityLocale,
        Entity,
        Locale,
        Project,
        Resource,
        TranslatedResource,
        Translation,
    )

    relative_path = _get_relative_path_from_part(slug, part)
    project = get_object_or_404(Project, slug=slug)
    locale = get_object_or_404(Locale, code=code)
    resource = get_object_or_404(Resource, project__slug=slug, path=relative_path)

    # Store the uploaded file in a temporary file and parse it. XML files get
    # a "strings" prefix so the parser recognizes them as Android resources.
    extension = os.path.splitext(f.name)[1]
    with tempfile.NamedTemporaryFile(
        prefix="strings" if extension == ".xml" else "",
        suffix=extension,
    ) as temp:
        for chunk in f.chunks():
            temp.write(chunk)
        temp.flush()
        resource_file = formats.parse(temp.name)

    # Update database objects from the file.
    changeset = ChangeSet(project, VCSProject(project, locales=[locale]), timezone.now())
    entities_qs = (
        Entity.objects.filter(
            resource__project=project,
            resource__path=relative_path,
            obsolete=False,
        )
        .prefetch_related(
            Prefetch(
                "translation_set",
                queryset=Translation.objects.filter(locale=locale),
                to_attr="db_translations",
            )
        )
        .prefetch_related(
            Prefetch(
                "translation_set",
                queryset=Translation.objects.filter(
                    locale=locale, approved_date__lte=timezone.now()
                ),
                to_attr="db_translations_approved_before_sync",
            )
        )
    )
    entities_dict = {entity.key: entity for entity in entities_qs}

    for vcs_translation in resource_file.translations:
        key = vcs_translation.key
        if key in entities_dict:
            entity = entities_dict[key]
            changeset.update_entity_translations_from_vcs(
                entity,
                locale.code,
                vcs_translation,
                user,
                entity.db_translations,
                entity.db_translations_approved_before_sync,
            )

    changeset.bulk_create_translations()
    changeset.bulk_update_translations()
    changeset.bulk_log_actions()

    if changeset.changed_translations:
        # Update 'active' status of all changed translations and their siblings,
        # i.e. translations of the same entity to the same locale.
        changed_pks = {t.pk for t in changeset.changed_translations}
        Entity.objects.filter(translation__pk__in=changed_pks).reset_active_translations(
            locale=locale
        )

    # Run checks and create TM entries for translations that pass them.
    valid_translations = changeset.bulk_check_translations()
    changeset.bulk_create_translation_memory_entries(valid_translations)

    # Remove any TM entries of translations that got rejected.
    changeset.bulk_remove_translation_memory_entries()

    TranslatedResource.objects.get(resource=resource, locale=locale).calculate_stats()

    # Mark translations as changed.
    changed_entities = {}
    existing = ChangedEntityLocale.objects.values_list("entity", "locale").distinct()
    for t in changeset.changed_translations:
        key = (t.entity.pk, t.locale.pk)
        # Skip duplicate changes to prevent a unique constraint violation.
        if key not in existing:
            changed_entities[key] = ChangedEntityLocale(entity=t.entity, locale=t.locale)
    ChangedEntityLocale.objects.bulk_create(changed_entities.values())

    # Update the latest translation.
    if changeset.translations_to_create:
        changeset.translations_to_create[-1].update_latest_translation()
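
# For context, a minimal sketch of how handle_upload_content might be wired up
# from a Django view. This is illustrative, not Pontoon's actual upload view:
# the view name, form field names, and redirect target are assumptions.
from django.contrib import messages
from django.http import HttpResponseBadRequest
from django.shortcuts import redirect
from django.views.decorators.http import require_POST


@require_POST
def upload(request):
    """Hypothetical view: update translations from a file posted by the user."""
    slug = request.POST.get("slug")
    code = request.POST.get("code")
    part = request.POST.get("part")
    uploadfile = request.FILES.get("uploadfile")

    if not all((slug, code, part, uploadfile)):
        return HttpResponseBadRequest("Missing upload parameters.")

    handle_upload_content(slug, code, part, uploadfile, request.user)
    messages.success(request, "Translations updated from uploaded file.")

    # Send the user back to the page they uploaded from.
    return redirect(request.META.get("HTTP_REFERER", "/"))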