def __init__(self, vcs_project, path, locales=None):
    """
    Load the resource file for each enabled locale and store its
    translations in VCSEntity instances.

    :param vcs_project: VCSProject this resource belongs to.
    :param str path: Path of the resource, relative to the project root.
    :param locales: Iterable of Locale objects to load translations for.
        Defaults to no locales when omitted.
    """
    from pontoon.base.models import Locale
    from pontoon.sync import formats  # Avoid circular import.

    self.vcs_project = vcs_project
    self.path = path
    self.locales = locales or []
    self.files = {}
    self.entities = {}

    # Create entities using resources from the source directory.
    source_resource_path = os.path.join(vcs_project.source_directory_path, self.path)
    source_resource_path = locale_to_source_path(source_resource_path)
    source_resource_file = formats.parse(
        source_resource_path, locale=Locale.objects.get(code='en-US'))
    for index, translation in enumerate(source_resource_file.translations):
        vcs_entity = VCSEntity(
            resource=self,
            key=translation.key,
            string=translation.source_string,
            string_plural=translation.source_string_plural,
            comments=translation.comments,
            source=translation.source,
            # Fall back to file position when the format has no order.
            order=translation.order or index)
        self.entities[vcs_entity.key] = vcs_entity

    # Fill in translations from the locale resources.
    # Fix: iterate the normalized attribute rather than the raw argument,
    # so that calling with locales=None (the default) no longer raises
    # TypeError here.
    for locale in self.locales:
        locale_directory = self.vcs_project.locale_directory_paths[locale.code]
        resource_path = os.path.join(locale_directory, self.path)
        log.debug('Parsing resource file: %s', resource_path)
        try:
            resource_file = formats.parse(resource_path, source_resource_path, locale)
        except (IOError, ParseError):
            continue  # File doesn't exist or is invalid, let's move on

        self.files[locale] = resource_file
        log.debug('Discovered %s translations.', len(resource_file.translations))
        for translation in resource_file.translations:
            try:
                self.entities[translation.key].translations[locale.code] = translation
            except KeyError:
                # If the source is missing an entity, we consider it
                # deleted and don't add it.
                pass
def __init__(self, vcs_project, path, locales=None):
    """
    Load the resource file for each enabled locale and store its
    translations in VCSEntity instances.

    :param vcs_project: VCSProject this resource belongs to.
    :param str path: Path of the resource, relative to the project root.
    :param locales: Iterable of Locale objects to load translations for.
        Defaults to no locales when omitted.
    """
    from pontoon.base.models import Locale
    from pontoon.sync import formats  # Avoid circular import.

    self.vcs_project = vcs_project
    self.path = path
    self.locales = locales or []
    self.files = {}
    self.entities = {}

    # Create entities using resources from the source directory.
    source_resource_path = os.path.join(vcs_project.source_directory_path, self.path)
    source_resource_path = locale_to_source_path(source_resource_path)
    source_resource_file = formats.parse(
        source_resource_path, locale=Locale.objects.get(code='en-US')
    )
    for index, translation in enumerate(source_resource_file.translations):
        vcs_entity = VCSEntity(
            resource=self,
            key=translation.key,
            string=translation.source_string,
            string_plural=translation.source_string_plural,
            comments=translation.comments,
            source=translation.source,
            # Fall back to file position when the format has no order.
            order=translation.order or index
        )
        self.entities[vcs_entity.key] = vcs_entity

    # Fill in translations from the locale resources.
    # Fix: use self.locales (already normalized to a list) instead of the
    # raw locales argument, which is None by default and not iterable.
    for locale in self.locales:
        locale_directory = self.vcs_project.locale_directory_paths[locale.code]
        resource_path = os.path.join(locale_directory, self.path)
        log.debug('Parsing resource file: %s', resource_path)
        try:
            resource_file = formats.parse(resource_path, source_resource_path, locale)
        except (IOError, ParseError):
            continue  # File doesn't exist or is invalid, let's move on

        self.files[locale] = resource_file
        log.debug('Discovered %s translations.', len(resource_file.translations))
        for translation in resource_file.translations:
            try:
                self.entities[translation.key].translations[locale.code] = translation
            except KeyError:
                # If the source is missing an entity, we consider it
                # deleted and don't add it.
                pass
def __init__(self, vcs_project, path):
    """
    Load the resource file for each enabled locale and store its
    translations in VCSEntity instances.

    :param vcs_project: VCSProject this resource belongs to.
    :param str path: Path of the resource, relative to the project root.
    """
    from pontoon.sync import formats  # Avoid circular import.

    self.vcs_project = vcs_project
    self.path = path
    self.files = {}
    self.entities = {}

    # Create entities using resources from the source directory.
    source_resource_path = os.path.join(vcs_project.source_directory_path(), self.path)
    # Special case: Source files for pofiles are actually .pot.
    # Fix: match the '.po' extension explicitly; a bare endswith('po')
    # also matched unrelated paths merely ending in the letters "po"
    # (e.g. 'foo.repo'), wrongly turning them into '...pot'.
    if source_resource_path.endswith('.po'):
        source_resource_path += 't'
    source_resource_file = formats.parse(source_resource_path)
    for index, translation in enumerate(source_resource_file.translations):
        vcs_entity = VCSEntity(
            resource=self,
            key=translation.key,
            string=translation.source_string,
            string_plural=translation.source_string_plural,
            comments=translation.comments,
            source=translation.source,
            # Fall back to file position when the format has no order.
            order=translation.order or index
        )
        self.entities[vcs_entity.key] = vcs_entity

    # Fill in translations from the locale resources.
    for locale in vcs_project.db_project.locales.all():
        resource_path = os.path.join(
            vcs_project.locale_directory_path(locale.code), self.path
        )
        try:
            resource_file = formats.parse(resource_path, source_resource_path)
        except IOError:
            continue  # File doesn't exist, let's move on

        self.files[locale] = resource_file
        for translation in resource_file.translations:
            try:
                self.entities[translation.key].translations[locale.code] = translation
            except KeyError:
                # If the source is missing an entity, we consider it
                # deleted and don't add it.
                pass
def handle_upload_content(slug, code, part, f, user):
    """
    Update translations in the database from uploaded file.

    :arg str slug: Project slug.
    :arg str code: Locale code.
    :arg str part: Resource path or Subpage name.
    :arg UploadedFile f: UploadedFile instance.
    :arg User user: User uploading the file.
    """
    # Avoid circular import; someday we should refactor to avoid.
    from pontoon.sync import formats
    from pontoon.sync.changeset import ChangeSet
    from pontoon.sync.vcs.models import VCSProject
    from pontoon.base.models import (
        ChangedEntityLocale,
        Entity,
        Locale,
        Project,
        Resource,
        TranslatedResource,
        Translation,
    )

    relative_path = _get_relative_path_from_part(slug, part)
    project = get_object_or_404(Project, slug=slug)
    locale = get_object_or_404(Locale, code=code)
    resource = get_object_or_404(Resource, project__slug=slug, path=relative_path)

    # Store uploaded file to a temporary file and parse it
    extension = os.path.splitext(f.name)[1]
    with tempfile.NamedTemporaryFile(suffix=extension) as temp:
        for chunk in f.chunks():
            temp.write(chunk)
        temp.flush()
        resource_file = formats.parse(temp.name)

    # Update database objects from file
    changeset = ChangeSet(project, VCSProject(project, locales=[locale]), timezone.now())
    entities_qs = Entity.objects.filter(
        resource__project=project,
        resource__path=relative_path,
        obsolete=False
    ).prefetch_related(
        Prefetch(
            'translation_set',
            queryset=Translation.objects.filter(locale=locale),
            to_attr='db_translations'
        )
    ).prefetch_related(
        Prefetch(
            'translation_set',
            queryset=Translation.objects.filter(locale=locale, approved_date__lte=timezone.now()),
            to_attr='db_translations_approved_before_sync'
        )
    )
    entities_dict = {entity.key: entity for entity in entities_qs}
    for vcs_translation in resource_file.translations:
        key = vcs_translation.key
        if key in entities_dict:
            entity = entities_dict[key]
            changeset.update_entity_translations_from_vcs(
                entity,
                locale.code,
                vcs_translation,
                user,
                entity.db_translations,
                entity.db_translations_approved_before_sync
            )

    changeset.bulk_create_translations()
    changeset.bulk_update_translations()
    # NOTE(review): renamed from misspelled 'bulk_create_translaton_memory_entries'
    # to match the spelling used by the other revisions of this function —
    # confirm ChangeSet defines the corrected name.
    changeset.bulk_create_translation_memory_entries()
    TranslatedResource.objects.get(resource=resource, locale=locale).calculate_stats()

    # Mark translations as changed
    changed_entities = {}
    existing = ChangedEntityLocale.objects.values_list('entity', 'locale').distinct()
    for t in changeset.changed_translations:
        key = (t.entity.pk, t.locale.pk)
        # Remove duplicate changes to prevent unique constraint violation
        if key not in existing:
            changed_entities[key] = ChangedEntityLocale(entity=t.entity, locale=t.locale)
    ChangedEntityLocale.objects.bulk_create(changed_entities.values())

    # Update latest translation
    if changeset.translations_to_create:
        changeset.translations_to_create[-1].update_latest_translation()
def get_download_content(slug, code, part):
    """
    Get content of the file to be downloaded.

    Returns a (content, filename) tuple, or (None, None) when the
    required files cannot be downloaded.

    :arg str slug: Project slug.
    :arg str code: Locale code.
    :arg str part: Resource path or Subpage name.
    """
    # Avoid circular import; someday we should refactor to avoid.
    from pontoon.sync import formats
    from pontoon.sync.vcs.models import VCSProject
    from pontoon.base.models import Entity, Locale, Project, Resource

    project = get_object_or_404(Project, slug=slug)
    locale = get_object_or_404(Locale, code=code)

    # Download a ZIP of all files if project has > 1 and < 10 resources
    resources = Resource.objects.filter(project=project, translatedresources__locale=locale)
    isZipable = 1 < len(resources) < 10
    if isZipable:
        # In-memory buffer backing the ZIP archive built below.
        s = StringIO.StringIO()
        zf = zipfile.ZipFile(s, "w")

    # Download a single file if project has 1 or >= 10 resources
    else:
        relative_path = _get_relative_path_from_part(slug, part)
        resources = [
            get_object_or_404(Resource, project__slug=slug, path=relative_path)
        ]

    for resource in resources:
        # Get locale file: only repositories whose permalink varies per
        # locale can contain a localized copy.
        locale_prefixes = (
            project.repositories.filter(permalink_prefix__contains='{locale_code}')
            .values_list('permalink_prefix', flat=True)
            .distinct()
        )
        # Try both dash and underscore locale-code spellings on disk.
        dirnames = set([locale.code, locale.code.replace('-', '_')])
        locale_path = _download_file(locale_prefixes, dirnames, resource.path)
        if not locale_path and not resource.is_asymmetric:
            return None, None

        # Get source file if needed: asymmetric formats require the
        # source file to serialize correctly.
        source_path = None
        if resource.is_asymmetric:
            source_prefixes = (
                project.repositories
                .values_list('permalink_prefix', flat=True)
                .distinct()
            )
            dirnames = VCSProject.SOURCE_DIR_NAMES
            source_path = _download_file(source_prefixes, dirnames, resource.path)
            if not source_path:
                return None, None

        # If locale file doesn't exist, create it as an empty temp file
        # so the parser/serializer below has something to write into.
        if not locale_path:
            extension = os.path.splitext(resource.path)[1]
            with tempfile.NamedTemporaryFile(suffix=extension, delete=False) as temp:
                temp.flush()
            locale_path = temp.name

        # Update file from database
        resource_file = formats.parse(locale_path, source_path)
        entities_dict = {}
        # Only entities flagged as changed for this locale are refreshed
        # from the database; everything else keeps the repository values.
        entities_qs = Entity.objects.filter(
            changedentitylocale__locale=locale,
            resource__project=project,
            resource__path=resource.path,
            obsolete=False
        )
        for e in entities_qs:
            entities_dict[e.key] = e.translation_set.filter(approved=True, locale=locale)
        for vcs_translation in resource_file.translations:
            key = vcs_translation.key
            if key in entities_dict:
                entity = entities_dict[key]
                vcs_translation.update_from_db(entity)
        resource_file.save(locale)

        # NOTE(review): locale_path appears to always be set by this point
        # (downloaded or created above) — confirm this guard is reachable.
        if not locale_path:
            return None, None

        if isZipable:
            zf.write(locale_path, resource.path)
        else:
            with codecs.open(locale_path, 'r', 'utf-8') as f:
                content = f.read()
            filename = os.path.basename(resource.path)

        # Remove temporary files
        os.remove(locale_path)
        if source_path:
            os.remove(source_path)

    if isZipable:
        zf.close()
        content = s.getvalue()
        filename = project.slug + '.zip'

    return content, filename
def __init__(self, vcs_project, path, locales=None):
    """
    Load the resource file for each enabled locale and store its
    translations in VCSEntity instances.

    :param vcs_project: VCSProject this resource belongs to.
    :param str path: Path of the resource, relative to the project root.
    :param locales: Iterable of Locale objects to load translations for.
        Defaults to no locales when omitted.
    """
    from pontoon.base.models import Locale
    from pontoon.sync import formats  # Avoid circular import.

    self.vcs_project = vcs_project
    self.path = path
    self.locales = locales or []
    self.files = {}
    self.entities = {}

    # Create entities using resources from the source directory.
    source_resource_path = os.path.join(vcs_project.source_directory_path, self.path)
    source_resource_path = locale_to_source_path(source_resource_path)
    source_resource_file = formats.parse(
        source_resource_path, locale=Locale.objects.get(code="en-US"))
    for index, translation in enumerate(source_resource_file.translations):
        vcs_entity = VCSEntity(
            resource=self,
            key=translation.key,
            string=translation.source_string,
            string_plural=translation.source_string_plural,
            comments=translation.comments,
            # Not every format parser exposes group/resource comments.
            group_comments=(
                translation.group_comments
                if hasattr(translation, "group_comments")
                else None
            ),
            resource_comments=(
                translation.resource_comments
                if hasattr(translation, "resource_comments")
                else None
            ),
            source=translation.source,
            # Fall back to file position when the format has no order.
            order=translation.order or index,
        )
        self.entities[vcs_entity.key] = vcs_entity

    # Fill in translations from the locale resources.
    # Fix: iterate self.locales (already normalized to a list) instead of
    # the raw locales argument, which is None by default and not iterable.
    for locale in self.locales:
        locale_directory = self.vcs_project.locale_directory_paths[locale.code]

        if self.vcs_project.configuration:
            # Some resources might not be available for this locale
            resource_path = self.vcs_project.configuration.l10n_path(
                locale,
                source_resource_path,
            )
            if resource_path is None:
                continue
        else:
            resource_path = os.path.join(locale_directory, self.path)

        log.debug("Parsing resource file: %s", resource_path)
        try:
            resource_file = formats.parse(resource_path, source_resource_path, locale)
        # File doesn't exist or is invalid: log it and move on
        except (IOError, ParseError) as err:
            log.error(
                u"Skipping resource {path} due to {type}: {err}".format(
                    path=path, type=type(err).__name__, err=err))
            continue

        self.files[locale] = resource_file
        log.debug("Discovered %s translations.", len(resource_file.translations))
        for translation in resource_file.translations:
            try:
                self.entities[translation.key].translations[locale.code] = translation
            except KeyError:
                # If the source is missing an entity, we consider it
                # deleted and don't add it.
                pass
def handle_upload_content(slug, code, part, f, user):
    """
    Update translations in the database from uploaded file.

    :param str slug: Project slug.
    :param str code: Locale code.
    :param str part: Resource path or Subpage name.
    :param UploadedFile f: UploadedFile instance.
    :param User user: User uploading the file.
    """
    # Avoid circular import; someday we should refactor to avoid.
    from pontoon.sync import formats
    from pontoon.sync.changeset import ChangeSet
    from pontoon.sync.vcs.models import VCSProject
    from pontoon.base.models import (
        ChangedEntityLocale,
        Entity,
        Locale,
        Project,
        Resource,
        TranslatedResource,
        Translation,
    )

    relative_path = _get_relative_path_from_part(slug, part)
    project = get_object_or_404(Project, slug=slug)
    locale = get_object_or_404(Locale, code__iexact=code)
    resource = get_object_or_404(Resource, project__slug=slug, path=relative_path)

    # Store uploaded file to a temporary file and parse it
    extension = os.path.splitext(f.name)[1]
    with tempfile.NamedTemporaryFile(suffix=extension) as temp:
        for chunk in f.chunks():
            temp.write(chunk)
        temp.flush()
        resource_file = formats.parse(temp.name)

    # Update database objects from file
    changeset = ChangeSet(
        project,
        VCSProject(project, locales=[locale]),
        timezone.now()
    )
    entities_qs = Entity.objects.filter(
        resource__project=project,
        resource__path=relative_path,
        obsolete=False
    ).prefetch_related(
        Prefetch(
            'translation_set',
            queryset=Translation.objects.filter(locale=locale),
            to_attr='db_translations'
        )
    ).prefetch_related(
        Prefetch(
            'translation_set',
            queryset=Translation.objects.filter(locale=locale, approved_date__lte=timezone.now()),
            to_attr='old_translations'
        )
    )
    entities_dict = {entity.key: entity for entity in entities_qs}
    for vcs_translation in resource_file.translations:
        key = vcs_translation.key
        if key in entities_dict:
            entity = entities_dict[key]
            changeset.update_entity_translations_from_vcs(
                entity,
                locale.code,
                vcs_translation,
                user,
                entity.db_translations,
                entity.old_translations
            )

    changeset.bulk_create_translations()
    changeset.bulk_update_translations()
    TranslatedResource.objects.get(resource=resource, locale=locale).calculate_stats()

    # Mark translations as changed
    changed_entities = {}
    existing = ChangedEntityLocale.objects.values_list('entity', 'locale').distinct()
    for t in changeset.translations_to_create + changeset.translations_to_update:
        key = (t.entity.pk, t.locale.pk)
        # Remove duplicate changes to prevent unique constraint violation
        # Fix: use the idiomatic 'not in' operator instead of 'not key in'.
        if key not in existing:
            changed_entities[key] = ChangedEntityLocale(entity=t.entity, locale=t.locale)
    ChangedEntityLocale.objects.bulk_create(changed_entities.values())
def get_download_content(slug, code, part):
    """
    Get content of the file to be downloaded.

    Returns a (content, filename) tuple, or (None, None) when the
    required files cannot be downloaded.

    :param str slug: Project slug.
    :param str code: Locale code.
    :param str part: Resource path or Subpage name.
    """
    # Avoid circular import; someday we should refactor to avoid.
    from pontoon.sync import formats
    from pontoon.sync.vcs.models import VCSProject
    from pontoon.base.models import Entity, Locale, Project, Resource

    project = get_object_or_404(Project, slug=slug)
    locale = get_object_or_404(Locale, code__iexact=code)

    # Download a ZIP of all files if project has > 1 and < 10 resources
    resources = Resource.objects.filter(project=project, translatedresources__locale=locale)
    isZipable = 1 < len(resources) < 10
    if isZipable:
        # In-memory buffer backing the ZIP archive built below.
        s = StringIO.StringIO()
        zf = zipfile.ZipFile(s, "w")

    # Download a single file if project has 1 or >= 10 resources
    else:
        relative_path = _get_relative_path_from_part(slug, part)
        resources = [get_object_or_404(Resource, project__slug=slug, path=relative_path)]

    for resource in resources:
        # Get locale file: only repositories whose permalink varies per
        # locale can contain a localized copy.
        locale_prefixes = (
            project.repositories.filter(permalink_prefix__contains='{locale_code}')
            .values_list('permalink_prefix', flat=True)
            .distinct()
        )
        # Try both dash and underscore locale-code spellings on disk.
        dirnames = set([locale.code, locale.code.replace('-', '_')])
        locale_path = _download_file(locale_prefixes, dirnames, resource.path)
        if not locale_path and not resource.is_asymmetric:
            return None, None

        # Get source file if needed: asymmetric formats require the
        # source file to serialize correctly.
        source_path = None
        if resource.is_asymmetric:
            source_prefixes = (
                project.repositories
                .values_list('permalink_prefix', flat=True)
                .distinct()
            )
            dirnames = VCSProject.SOURCE_DIR_NAMES
            source_path = _download_file(source_prefixes, dirnames, resource.path)
            if not source_path:
                return None, None

        # If locale file doesn't exist, create it as an empty temp file
        # so the parser/serializer below has something to write into.
        if not locale_path:
            extension = os.path.splitext(resource.path)[1]
            with tempfile.NamedTemporaryFile(suffix=extension, delete=False) as temp:
                temp.flush()
            locale_path = temp.name

        # Update file from database
        resource_file = formats.parse(locale_path, source_path)
        entities_dict = {}
        # Only entities flagged as changed for this locale are refreshed
        # from the database; everything else keeps the repository values.
        entities_qs = Entity.objects.filter(
            changedentitylocale__locale=locale,
            resource__project=project,
            resource__path=resource.path,
            obsolete=False
        )
        for e in entities_qs:
            entities_dict[e.key] = e.translation_set.filter(approved=True, locale=locale)
        for vcs_translation in resource_file.translations:
            key = vcs_translation.key
            if key in entities_dict:
                entity = entities_dict[key]
                vcs_translation.update_from_db(entity)
        resource_file.save(locale)

        # NOTE(review): locale_path appears to always be set by this point
        # (downloaded or created above) — confirm this guard is reachable.
        if not locale_path:
            return None, None

        if isZipable:
            zf.write(locale_path, resource.path)
        else:
            with codecs.open(locale_path, 'r', 'utf-8') as f:
                content = f.read()
            filename = os.path.basename(resource.path)

        # Remove temporary files
        os.remove(locale_path)
        if source_path:
            os.remove(source_path)

    if isZipable:
        zf.close()
        content = s.getvalue()
        filename = project.slug + '.zip'

    return content, filename
def get_download_content(slug, code, part):
    """
    Get content of the file to be downloaded.

    Returns a (content, relative_path) tuple, or (None, None) when the
    required files cannot be downloaded.

    :param str slug: Project slug.
    :param str code: Locale code.
    :param str part: Resource path or Subpage name.
    """
    # Avoid circular import; someday we should refactor to avoid.
    from pontoon.sync import formats
    from pontoon.sync.vcs_models import VCSProject
    from pontoon.base.models import (
        Entity,
        Locale,
        Project,
        Resource,
    )

    relative_path = _get_relative_path_from_part(slug, part)
    project = get_object_or_404(Project, slug=slug)
    locale = get_object_or_404(Locale, code__iexact=code)
    resource = get_object_or_404(Resource, project__slug=slug, path=relative_path)

    # Fetch the locale file from repositories whose permalink varies per
    # locale; try both dash and underscore locale-code spellings on disk.
    localized_prefixes = (
        project.repositories
        .filter(permalink_prefix__contains='{locale_code}')
        .values_list('permalink_prefix', flat=True)
        .distinct()
    )
    candidate_dirs = set([locale.code, locale.code.replace('-', '_')])
    locale_path = _download_file(localized_prefixes, candidate_dirs, relative_path)
    if not locale_path:
        return None, None

    # Asymmetric formats also need the source file to serialize correctly.
    source_path = None
    if resource.is_asymmetric:
        all_prefixes = (
            project.repositories
            .values_list('permalink_prefix', flat=True)
            .distinct()
        )
        source_path = _download_file(all_prefixes, VCSProject.SOURCE_DIR_NAMES, relative_path)
        if not source_path:
            return None, None

    # Overwrite the downloaded file's translations with the approved ones
    # currently stored in the database for entities changed in this locale.
    resource_file = formats.parse(locale_path, source_path)
    approved_by_key = {
        entity.key: entity.translation_set.filter(approved=True, locale=locale)
        for entity in Entity.objects.filter(
            changedentitylocale__locale=locale,
            resource__project=project,
            resource__path=relative_path,
            obsolete=False,
        )
    }
    for vcs_translation in resource_file.translations:
        db_translations = approved_by_key.get(vcs_translation.key)
        if db_translations is not None:
            vcs_translation.update_from_db(db_translations)
    resource_file.save(locale)

    # Read download content
    with codecs.open(locale_path, 'r', 'utf-8') as downloaded:
        content = downloaded.read()

    # Remove temporary files
    os.remove(locale_path)
    if source_path:
        os.remove(source_path)

    return content, relative_path
def handle_upload_content(slug, code, part, f, user):
    """
    Update translations in the database from uploaded file.

    :arg str slug: Project slug.
    :arg str code: Locale code.
    :arg str part: Resource path or Subpage name.
    :arg UploadedFile f: UploadedFile instance.
    :arg User user: User uploading the file.
    """
    # Avoid circular import; someday we should refactor to avoid.
    from pontoon.sync import formats
    from pontoon.sync.changeset import ChangeSet
    from pontoon.sync.vcs.models import VCSProject
    from pontoon.base.models import (
        ChangedEntityLocale,
        Entity,
        Locale,
        Project,
        Resource,
        TranslatedResource,
        Translation,
    )

    relative_path = _get_relative_path_from_part(slug, part)
    project = get_object_or_404(Project, slug=slug)
    locale = get_object_or_404(Locale, code=code)
    resource = get_object_or_404(Resource, project__slug=slug, path=relative_path)

    # Persist the upload to disk so the format parsers can read it.
    extension = os.path.splitext(f.name)[1]
    with tempfile.NamedTemporaryFile(
        prefix='strings' if extension == '.xml' else '',
        suffix=extension,
    ) as temp:
        for chunk in f.chunks():
            temp.write(chunk)
        temp.flush()
        parsed_upload = formats.parse(temp.name)

    # Update database objects from file.
    changeset = ChangeSet(
        project,
        VCSProject(project, locales=[locale]),
        timezone.now()
    )

    # Prefetch each entity's current translations and the ones approved
    # before now, under the attribute names the changeset expects.
    current_prefetch = Prefetch(
        'translation_set',
        queryset=Translation.objects.filter(locale=locale),
        to_attr='db_translations'
    )
    approved_prefetch = Prefetch(
        'translation_set',
        queryset=Translation.objects.filter(locale=locale, approved_date__lte=timezone.now()),
        to_attr='db_translations_approved_before_sync'
    )
    entities_by_key = {
        entity.key: entity
        for entity in Entity.objects.filter(
            resource__project=project,
            resource__path=relative_path,
            obsolete=False
        ).prefetch_related(current_prefetch).prefetch_related(approved_prefetch)
    }

    for vcs_translation in parsed_upload.translations:
        entity = entities_by_key.get(vcs_translation.key)
        if entity is not None:
            changeset.update_entity_translations_from_vcs(
                entity,
                locale.code,
                vcs_translation,
                user,
                entity.db_translations,
                entity.db_translations_approved_before_sync
            )

    changeset.bulk_create_translations()
    changeset.bulk_update_translations()

    if changeset.changed_translations:
        # Update 'active' status of all changed translations and their siblings,
        # i.e. translations of the same entity to the same locale.
        changed_pks = {t.pk for t in changeset.changed_translations}
        Entity.objects.filter(
            translation__pk__in=changed_pks
        ).reset_active_translations(locale=locale)

        # Run checks and create TM entries for translations that pass them.
        changeset.bulk_create_translation_memory_entries(
            changeset.bulk_check_translations()
        )

    TranslatedResource.objects.get(resource=resource, locale=locale).calculate_stats()

    # Mark translations as changed, skipping (entity, locale) pairs already
    # recorded so the unique constraint isn't violated.
    already_recorded = ChangedEntityLocale.objects.values_list('entity', 'locale').distinct()
    pending_changes = {}
    for t in changeset.changed_translations:
        pair = (t.entity.pk, t.locale.pk)
        if pair not in already_recorded:
            pending_changes[pair] = ChangedEntityLocale(entity=t.entity, locale=t.locale)
    ChangedEntityLocale.objects.bulk_create(pending_changes.values())

    # Update latest translation
    if changeset.translations_to_create:
        changeset.translations_to_create[-1].update_latest_translation()
def get_download_content(slug, code, part):
    """
    Get content of the file to be downloaded.

    Returns a (content, relative_path) tuple, or (None, None) when the
    required files cannot be downloaded.

    :param str slug: Project slug.
    :param str code: Locale code.
    :param str part: Resource path or Subpage name.
    """
    # Avoid circular import; someday we should refactor to avoid.
    from pontoon.sync import formats
    from pontoon.sync.vcs_models import VCSProject
    from pontoon.base.models import (
        Entity,
        Locale,
        Project,
        Resource,
    )

    relative_path = _get_relative_path_from_part(slug, part)
    project = get_object_or_404(Project, slug=slug)
    locale = get_object_or_404(Locale, code__iexact=code)
    resource = get_object_or_404(Resource, project__slug=slug, path=relative_path)

    # Get locale file: only repositories whose permalink varies per
    # locale can contain a localized copy.
    locale_prefixes = (
        project.repositories.filter(permalink_prefix__contains='{locale_code}')
        .values_list('permalink_prefix', flat=True)
        .distinct()
    )
    # Try both dash and underscore locale-code spellings on disk.
    dirnames = set([locale.code, locale.code.replace('-', '_')])
    locale_path = _download_file(locale_prefixes, dirnames, relative_path)
    if not locale_path:
        return None, None

    # Get source file if needed: asymmetric formats require the source
    # file to serialize correctly.
    source_path = None
    if resource.is_asymmetric:
        source_prefixes = (
            project.repositories
            .values_list('permalink_prefix', flat=True)
            .distinct()
        )
        dirnames = VCSProject.SOURCE_DIR_NAMES
        source_path = _download_file(source_prefixes, dirnames, relative_path)
        if not source_path:
            return None, None

    # Update file from database: only entities flagged as changed for
    # this locale are refreshed; everything else keeps the repo values.
    resource_file = formats.parse(locale_path, source_path)
    entities_dict = {}
    entities_qs = Entity.objects.filter(
        changedentitylocale__locale=locale,
        resource__project=project,
        resource__path=relative_path,
        obsolete=False
    )
    for e in entities_qs:
        entities_dict[e.key] = e.translation_set.filter(approved=True, locale=locale)
    for vcs_translation in resource_file.translations:
        key = vcs_translation.key
        if key in entities_dict:
            entity = entities_dict[key]
            vcs_translation.update_from_db(entity)
    resource_file.save(locale)

    # Read download content
    with codecs.open(locale_path, 'r', 'utf-8') as f:
        content = f.read()

    # Remove temporary files
    os.remove(locale_path)
    if source_path:
        os.remove(source_path)

    return content, relative_path
def handle_upload_content(slug, code, part, f, user):
    """
    Update translations in the database from uploaded file.

    :arg str slug: Project slug.
    :arg str code: Locale code.
    :arg str part: Resource path or Subpage name.
    :arg UploadedFile f: UploadedFile instance.
    :arg User user: User uploading the file.
    """
    # Avoid circular import; someday we should refactor to avoid.
    from pontoon.sync import formats
    from pontoon.sync.changeset import ChangeSet
    from pontoon.sync.vcs.models import VCSProject
    from pontoon.base.models import (
        ChangedEntityLocale,
        Entity,
        Locale,
        Project,
        Resource,
        TranslatedResource,
        Translation,
    )

    relative_path = _get_relative_path_from_part(slug, part)
    project = get_object_or_404(Project, slug=slug)
    locale = get_object_or_404(Locale, code=code)
    resource = get_object_or_404(Resource, project__slug=slug, path=relative_path)

    # Store uploaded file to a temporary file and parse it
    extension = os.path.splitext(f.name)[1]
    with tempfile.NamedTemporaryFile(
        prefix="strings" if extension == ".xml" else "",
        suffix=extension,
    ) as temp:
        for chunk in f.chunks():
            temp.write(chunk)
        temp.flush()
        resource_file = formats.parse(temp.name)

    # Update database objects from file
    changeset = ChangeSet(project, VCSProject(project, locales=[locale]), timezone.now())
    # Prefetch each entity's current translations and the ones approved
    # before now, under the attribute names the changeset expects.
    entities_qs = (Entity.objects.filter(
        resource__project=project,
        resource__path=relative_path,
        obsolete=False).prefetch_related(
            Prefetch(
                "translation_set",
                queryset=Translation.objects.filter(locale=locale),
                to_attr="db_translations",
            )).prefetch_related(
                Prefetch(
                    "translation_set",
                    queryset=Translation.objects.filter(
                        locale=locale, approved_date__lte=timezone.now()),
                    to_attr="db_translations_approved_before_sync",
                )))
    entities_dict = {entity.key: entity for entity in entities_qs}
    for vcs_translation in resource_file.translations:
        key = vcs_translation.key
        if key in entities_dict:
            entity = entities_dict[key]
            changeset.update_entity_translations_from_vcs(
                entity,
                locale.code,
                vcs_translation,
                user,
                entity.db_translations,
                entity.db_translations_approved_before_sync,
            )

    changeset.bulk_create_translations()
    changeset.bulk_update_translations()
    changeset.bulk_log_actions()

    if changeset.changed_translations:
        # Update 'active' status of all changed translations and their siblings,
        # i.e. translations of the same entity to the same locale.
        changed_pks = {t.pk for t in changeset.changed_translations}
        (Entity.objects.filter(
            translation__pk__in=changed_pks).reset_active_translations(
                locale=locale))

        # Run checks and create TM entries for translations that pass them
        valid_translations = changeset.bulk_check_translations()
        changeset.bulk_create_translation_memory_entries(valid_translations)

        # Remove any TM entries of translations that got rejected
        changeset.bulk_remove_translation_memory_entries()

    TranslatedResource.objects.get(resource=resource, locale=locale).calculate_stats()

    # Mark translations as changed
    changed_entities = {}
    existing = ChangedEntityLocale.objects.values_list("entity", "locale").distinct()
    for t in changeset.changed_translations:
        key = (t.entity.pk, t.locale.pk)
        # Remove duplicate changes to prevent unique constraint violation
        if key not in existing:
            changed_entities[key] = ChangedEntityLocale(entity=t.entity, locale=t.locale)
    ChangedEntityLocale.objects.bulk_create(changed_entities.values())

    # Update latest translation
    if changeset.translations_to_create:
        changeset.translations_to_create[-1].update_latest_translation()