def export_as_type(request, store, filetype):
    """Export given file to xliff for offline translation."""
    from pootle_store.filetypes import factory_classes, is_monolingual

    target_class = factory_classes.get(filetype, None)
    # refuse unknown formats, monolingual formats, and exporting to the
    # store's own format
    if (target_class is None or is_monolingual(target_class) or
            store.pootle_path.endswith(filetype)):
        raise ValueError

    base, _ext = os.path.splitext(store.real_path)
    export_path = os.path.join('POOTLE_EXPORT',
                               base + os.path.extsep + filetype)
    abs_export_path = absolute_real_path(export_path)

    cache_key = iri_to_uri("%s:export_as_%s" % (store.pootle_path, filetype))
    cached_mtime = cache.get(cache_key)
    fresh = (cached_mtime and cached_mtime == store.get_mtime() and
             os.path.isfile(abs_export_path))
    if not fresh:
        from pootle_app.project_tree import ensure_target_dir_exists
        from pootle_misc import ptempfile as tempfile
        import shutil

        ensure_target_dir_exists(abs_export_path)
        converted = store.convert(target_class)
        # write to a temp file first, then move into place atomically
        handle, scratch_path = tempfile.mkstemp(
            prefix=store.name, suffix=os.path.extsep + filetype)
        os.close(handle)
        converted.savefile(scratch_path)
        shutil.move(scratch_path, abs_export_path)
        cache.set(cache_key, store.get_mtime(), settings.OBJECT_CACHE_TIMEOUT)
    return redirect('/export/' + export_path)
def mergefile(self, newfile, profile, allownewstrings, suggestions, notranslate, obsoletemissing): """make sure each msgid is unique ; merge comments etc from duplicates into original""" if not newfile.units: return monolingual = is_monolingual(type(newfile)) self.clean_stale_lock() # must be done before locking the file in case it wasn't already parsed self.require_units() if self.state == LOCKED: # file currently being updated #FIXME: shall we idle wait for lock to be released first? what about stale locks? logging.info(u"attemped to merge %s while locked", self.pootle_path) return logging.debug(u"merging %s", self.pootle_path) # lock store oldstate = self.state self.state = LOCKED self.save() try: if suggestions and isinstance(newfile, poheader.poheader): try: mtime = newfile.parseheader().get('X-POOTLE-MTIME', None) if mtime: mtime = datetime.datetime.fromtimestamp(float(mtime)) except Exception, e: logging.debug("failed to parse mtime: %s", e) mtime = None else:
def require_units(self):
    """Make sure the file is parsed and units are created."""
    # already parsed, or units exist in the db — nothing to do
    if self.state >= PARSED or self.unit_set.count() != 0:
        return
    tp = self.translation_project
    from_templates = (self.file and
                      is_monolingual(type(self.file.store)) and
                      not tp.is_template_project)
    if from_templates:
        # monolingual stores get their structure from the templates project
        tp.update_from_templates(pootle_path=self.pootle_path)
    else:
        self.parse()
def export_as_type(request, pootle_path, filetype):
    """Export given file to xliff for offline translation."""
    if pootle_path[0] != '/':
        pootle_path = '/' + pootle_path
    store = get_object_or_404(Store, pootle_path=pootle_path)

    output_class = factory_classes.get(filetype, None)
    # refuse unknown formats, monolingual formats, and exporting to the
    # store's own format
    unsupported = (not output_class or is_monolingual(output_class) or
                   pootle_path.endswith(filetype))
    if unsupported:
        raise ValueError

    base, _ext = os.path.splitext(store.real_path)
    export_path = os.path.join('POOTLE_EXPORT',
                               base + os.path.extsep + filetype)
    abs_export_path = absolute_real_path(export_path)

    cache_key = iri_to_uri("%s:export_as_%s" % (pootle_path, filetype))
    previous_mtime = cache.get(cache_key)
    up_to_date = (previous_mtime and previous_mtime == store.get_mtime() and
                  os.path.isfile(abs_export_path))
    if not up_to_date:
        ensure_target_dir_exists(abs_export_path)
        converted = store.convert(output_class)
        # write to a temp file first, then move into place
        handle, scratch_path = tempfile.mkstemp(
            prefix=store.name, suffix=os.path.extsep + filetype)
        os.close(handle)
        converted.savefile(scratch_path)
        shutil.move(scratch_path, abs_export_path)
        cache.set(cache_key, store.get_mtime(), settings.OBJECT_CACHE_TIMEOUT)
    return redirect('/export/' + export_path)
def export_as_type(request, pootle_path, filetype):
    """Export given file to xliff for offline translation."""
    if pootle_path[0] != '/':
        pootle_path = '/' + pootle_path
    store = get_object_or_404(Store, pootle_path=pootle_path)

    export_class = factory_classes.get(filetype, None)
    # bail out for unknown/monolingual formats or same-format exports
    if (not export_class or is_monolingual(export_class)
            or pootle_path.endswith(filetype)):
        raise ValueError

    root, _ext = os.path.splitext(store.real_path)
    export_path = os.path.join('POOTLE_EXPORT',
                               root + os.path.extsep + filetype)
    abs_export_path = absolute_real_path(export_path)

    cache_key = iri_to_uri("%s:export_as_%s" % (pootle_path, filetype))
    last_mtime = cache.get(cache_key)
    stale = not (last_mtime and last_mtime == store.get_mtime()
                 and os.path.isfile(abs_export_path))
    if stale:
        ensure_target_dir_exists(abs_export_path)
        output_store = store.convert(export_class)
        # build in a temp file, then move over the export target
        descriptor, scratch = tempfile.mkstemp(
            prefix=store.name, suffix=os.path.extsep + filetype)
        os.close(descriptor)
        output_store.savefile(scratch)
        shutil.move(scratch, abs_export_path)
        cache.set(cache_key, store.get_mtime(), settings.OBJECT_CACHE_TIMEOUT)
    return redirect('/export/' + export_path)
def require_units(self):
    """Make sure the file is parsed and units are created."""
    # already parsed, or units exist in the db — nothing to do
    if self.state >= PARSED or self.unit_set.count() != 0:
        return
    tp = self.translation_project
    # monolingual stores (outside the templates project itself) are
    # populated from the templates project when one exists
    use_templates = (self.file and
                     is_monolingual(type(self.file.store)) and
                     not tp.is_template_project and
                     tp.project.get_template_translationproject())
    if use_templates:
        tp.update_from_templates(pootle_path=self.pootle_path)
    else:
        self.update(update_structure=True, update_translation=True,
                    conservative=False)
def fix_monolingual(oldunit, newunit, monolingual=None):
    """Hackish workaround for monolingual files always having only source
    and no target.

    Compare the monolingual unit with the corresponding bilingual unit; if
    the sources differ, assume the monolingual "source" is actually the
    translation and shuffle it into the target.
    """
    if monolingual is None:
        monolingual = is_monolingual(type(newunit._store))
    if not monolingual:
        return
    if newunit.source == oldunit.source:
        return
    # the incoming source is really a translation: move it to target and
    # restore the canonical source from the bilingual unit
    newunit.target = newunit.source
    newunit.source = oldunit.source
def sync(self, update_structure=False, update_translation=False,
         conservative=True, create=False, profile=None):
    """Sync file with translations from db.

    :param update_structure: add db-only units to the file and handle
        units missing from the db.
    :param update_translation: push db translations into matching file
        units.
    :param conservative: when True, translated units missing from the db
        are left alone instead of being made obsolete.
    :param create: create the on-disk file if it does not exist yet.
    :param profile: passed through to ``update_store_header``.
    """
    if not self.file:
        if create:
            # file doesn't exist let's create it
            logging.debug("Creating file %s", self.pootle_path)
            storeclass = self.get_file_class()
            store_path = os.path.join(
                self.translation_project.abs_real_path, self.name)
            store = self.convert(storeclass)
            store.savefile(store_path)
            self.file = store_path
            self.save()
            self.update_store_header(profile=profile)
            self.file.savestore()
        # no on-disk file to sync against (unless we just created one)
        return
    logging.debug("Syncing %s", self.pootle_path)
    self.require_dbid_index(update=True)
    # ids present in the on-disk file vs ids present in the db
    old_ids = set(self.file.store.getids())
    new_ids = set(self.dbid_index.keys())
    if update_structure:
        # units that exist in the file but no longer in the db
        obsolete_units = (self.file.store.findid(uid)
                          for uid in old_ids - new_ids)
        for unit in obsolete_units:
            if not unit.istranslated():
                # NOTE(review): 'del unit' only unbinds the loop variable;
                # it does not remove the unit from the file store — confirm
                # whether store.removeunit(unit) was intended.
                del unit
            elif not conservative:
                unit.makeobsolete()
                if not unit.isobsolete():
                    # obsoleting didn't take effect — same NOTE as above
                    del unit
        # units that exist in the db but not yet in the file
        new_dbids = [self.dbid_index.get(uid) for uid in new_ids - old_ids]
        for unit in self.findid_bulk(new_dbids):
            newunit = unit.convert(self.file.store.UnitClass)
            self.file.store.addunit(newunit)
    monolingual = is_monolingual(type(self.file.store))
    if update_translation:
        shared_dbids = [self.dbid_index.get(uid)
                        for uid in old_ids & new_ids]
        for unit in self.findid_bulk(shared_dbids):
            #FIXME: use a better mechanism for handling states and different formats
            if monolingual and not unit.istranslated():
                continue
            match = self.file.store.findid(unit.getid())
            if match is not None:
                unit.sync(match)
    self.update_store_header(profile=profile)
    self.file.savestore()
def sync(self, update_structure=False, update_translation=False,
         conservative=True, create=False, profile=None):
    """Sync file with translations from db.

    A ``"<pootle_path>:sync"`` cache key records the store mtime of the
    last sync so conservative syncs can short-circuit when nothing has
    changed; the file is only written out when something actually changed.

    :param update_structure: add db-only units to the file and handle
        units missing from the db.
    :param update_translation: push db translations into matching file
        units.
    :param conservative: when True, skip work if the mtime matches the
        last sync and leave translated db-missing units alone.
    :param create: create the on-disk file if it does not exist yet.
    :param profile: passed through to ``update_store_header``.
    """
    key = "%s:sync" % self.pootle_path
    last_sync = cache.get(key)
    # nothing changed since last sync — bail out early
    if conservative and last_sync and last_sync == self.get_mtime():
        return
    if not self.file:
        if create:
            # file doesn't exist let's create it
            logging.debug(u"Creating file %s", self.pootle_path)
            storeclass = self.get_file_class()
            store_path = os.path.join(
                self.translation_project.abs_real_path, self.name)
            store = self.convert(storeclass)
            store.savefile(store_path)
            self.file = store_path
            self.save()
            self.update_store_header(profile=profile)
            self.file.savestore()
            cache.set(key, self.get_mtime(), settings.OBJECT_CACHE_TIMEOUT)
        return
    if self.translation_project.is_template_project:
        #FIXME: should we do this on conservative == True only?
        # don't save to templates
        return
    logging.debug(u"Syncing %s", self.pootle_path)
    self.require_dbid_index(update=True)
    # ids present in the on-disk file vs ids present in the db
    old_ids = set(self.file.store.getids())
    new_ids = set(self.dbid_index.keys())
    file_changed = False
    if update_structure:
        # units that exist in the file but no longer in the db
        obsolete_units = (self.file.store.findid(uid)
                          for uid in old_ids - new_ids)
        for unit in obsolete_units:
            if not unit.istranslated():
                # NOTE(review): 'del unit' only unbinds the loop variable;
                # it does not remove the unit from the file store — confirm
                # whether store.removeunit(unit) was intended.
                del unit
            elif not conservative:
                unit.makeobsolete()
                if not unit.isobsolete():
                    # obsoleting didn't take effect — same NOTE as above
                    del unit
                file_changed = True
        # units that exist in the db but not yet in the file
        new_dbids = [self.dbid_index.get(uid) for uid in new_ids - old_ids]
        for unit in self.findid_bulk(new_dbids):
            newunit = unit.convert(self.file.store.UnitClass)
            self.file.store.addunit(newunit)
            file_changed = True
    monolingual = is_monolingual(type(self.file.store))
    if update_translation:
        shared_dbids = [self.dbid_index.get(uid)
                        for uid in old_ids & new_ids]
        for unit in self.findid_bulk(shared_dbids):
            #FIXME: use a better mechanism for handling states and different formats
            if monolingual and not unit.istranslated():
                continue
            match = self.file.store.findid(unit.getid())
            if match is not None:
                changed = unit.sync(match)
                if changed:
                    file_changed = True
    # only touch the file on disk when something actually changed
    if file_changed:
        self.update_store_header(profile=profile)
        self.file.savestore()
    cache.set(key, self.get_mtime(), settings.OBJECT_CACHE_TIMEOUT)
def update(self, update_structure=False, update_translation=False,
           conservative=True, store=None, fuzzy=False):
    """Update db with units from file.

    :param update_structure: obsolete db units missing from the file and
        add file-only units to the db.
    :param update_translation: merge translations of shared units into
        the db.
    :param conservative: when True, translated units missing from the
        file are kept instead of being made obsolete.
    :param store: parsed store to update from; defaults to this store's
        own file.
    :param fuzzy: attempt fuzzy-matching translations for untranslated
        units via the matcher.
    """
    if self.state == LOCKED:
        # file currently being updated
        #FIXME: shall we idle wait for lock to be released first? what about stale locks?
        logging.info(u"attempted to update %s while locked", self.pootle_path)
        return
    elif self.state < PARSED:
        # file has not been parsed before
        logging.debug(u"attempted to update unparsed file %s", self.pootle_path)
        self.parse(store=store)
        return
    if store is None:
        store = self.file.store
    # same cache key the sync() path uses to record the last-synced mtime
    key = "%s:sync" % self.pootle_path
    # lock store
    logging.debug(u"Updating %s", self.pootle_path)
    oldstate = self.state
    self.state = LOCKED
    self.save()
    try:
        if fuzzy:
            matcher = self.get_matcher()
        monolingual = is_monolingual(type(store))
        self.require_dbid_index(update=True)
        # ids present in the db vs ids present in the incoming file
        old_ids = set(self.dbid_index.keys())
        new_ids = set(store.getids())
        if update_structure:
            # db units no longer present in the file
            obsolete_dbids = [self.dbid_index.get(uid)
                              for uid in old_ids - new_ids]
            for unit in self.findid_bulk(obsolete_dbids):
                if not unit.istranslated() or not conservative:
                    #FIXME: make obselete instead?
                    unit.makeobsolete()
                    unit.save()
            # file units not yet in the db
            new_units = (store.findid(uid) for uid in new_ids - old_ids)
            for unit in new_units:
                newunit = self.addunit(unit, unit.index)
                # no non-empty target strings yet — try a fuzzy match
                # (py2 filter() drops falsy values)
                if fuzzy and not filter(None, newunit.target.strings):
                    match_unit = newunit.fuzzy_translate(matcher)
                    if match_unit:
                        newunit.save()
                        self._remove_obsolete(match_unit.source, store=store)
        if update_translation:
            shared_dbids = [self.dbid_index.get(uid)
                            for uid in old_ids & new_ids]
            for unit in self.findid_bulk(shared_dbids):
                newunit = store.findid(unit.getid())
                if monolingual and not self.translation_project.is_template_project:
                    # monolingual "source" may actually be a translation
                    fix_monolingual(unit, newunit, monolingual)
                changed = unit.update(newunit)
                if update_structure and unit.index != newunit.index:
                    unit.index = newunit.index
                    changed = True
                if fuzzy and not filter(None, unit.target.strings):
                    match_unit = unit.fuzzy_translate(matcher)
                    if match_unit:
                        changed = True
                        self._remove_obsolete(match_unit.source, store=store)
                if changed:
                    unit.save()
    finally:
        # unlock store
        self.state = oldstate
        self.save()
    # a full non-conservative update leaves db and file in agreement, so
    # record the mtime to let conservative sync() short-circuit
    if update_structure and update_translation and not conservative:
        cache.set(key, self.get_mtime(), settings.OBJECT_CACHE_TIMEOUT)
def mergefile(self, newfile, profile, allownewstrings, suggestions,
              notranslate, obsoletemissing):
    """Make sure each msgid is unique; merge comments etc from duplicates
    into original.

    :param newfile: parsed store whose units are merged into this store.
    :param profile: profile credited for added suggestions and passed to
        the final sync.
    :param allownewstrings: add units that only exist in ``newfile``.
    :param suggestions: record conflicting translations of already
        translated units as suggestions instead of overwriting.
    :param notranslate: never overwrite existing units; only add
        suggestions.
    :param obsoletemissing: obsolete (or delete untranslated) db units
        missing from ``newfile``.
    """
    if not newfile.units:
        return
    monolingual = is_monolingual(type(newfile))
    if self.state == LOCKED:
        # file currently being updated
        #FIXME: shall we idle wait for lock to be released first? what about stale locks?
        logging.info(u"attemped to merge %s while locked", self.pootle_path)
        return
    # must be done before locking the file in case it wasn't already parsed
    self.require_units()
    logging.debug(u"merging %s", self.pootle_path)
    # lock store
    oldstate = self.state
    self.state = LOCKED
    self.save()
    try:
        self.require_dbid_index(update=True)
        old_ids = set(self.dbid_index.keys())
        # same-format files can use plain ids; otherwise qualify ids with
        # this store's filename
        if issubclass(self.translation_project.project.get_file_class(),
                      newfile.__class__):
            new_ids = set(newfile.getids())
        else:
            new_ids = set(newfile.getids(self.name))
        # new units only come from templates for monolingual projects
        if (not monolingual or
                self.translation_project.is_template_project) and allownewstrings:
            new_units = (newfile.findid(uid) for uid in new_ids - old_ids)
            for unit in new_units:
                self.addunit(unit)
        if obsoletemissing:
            obsolete_dbids = [self.dbid_index.get(uid)
                              for uid in old_ids - new_ids]
            for unit in self.findid_bulk(obsolete_dbids):
                if unit.istranslated():
                    # keep translated units around as obsolete
                    unit.makeobsolete()
                    unit.save()
                else:
                    unit.delete()
        shared_dbids = [self.dbid_index.get(uid) for uid in old_ids & new_ids]
        for oldunit in self.findid_bulk(shared_dbids):
            newunit = newfile.findid(oldunit.getid())
            if monolingual and not self.translation_project.is_template_project:
                # monolingual "source" may actually be a translation
                fix_monolingual(oldunit, newunit, monolingual)
            # note operator precedence: notranslate or (istranslated and suggestions)
            if notranslate or oldunit.istranslated() and suggestions:
                if newunit.istranslated():
                    #FIXME: add a user argument
                    oldunit.add_suggestion(newunit.target, profile)
            else:
                changed = oldunit.merge(newunit)
                if changed:
                    oldunit.save()
        # structure changed — push the merged state back out to the file
        if allownewstrings or obsoletemissing:
            self.sync(update_structure=True, update_translation=True,
                      conservative=False, create=False, profile=profile)
    finally:
        # unlock store
        self.state = oldstate
        self.save()
def is_monolingual(self):
    """Returns ``True`` if this project is monolingual."""
    file_class = self.get_file_class()
    return is_monolingual(file_class)
def is_monolingual(self):
    """Is this a monolingual project?"""
    klass = self.get_file_class()
    return is_monolingual(klass)
def is_monolingual(self):
    """Return ``True`` if this project is monolingual."""
    storage_class = self.get_file_class()
    return is_monolingual(storage_class)
def update(self, update_structure=False, update_translation=False,
           conservative=True):
    """Update db with units from file.

    :param update_structure: delete db units missing from the file and
        add file-only units to the db.
    :param update_translation: merge translations of shared units into
        the db.
    :param conservative: when True, translated units missing from the
        file are kept instead of being deleted.
    """
    if self.state == LOCKED:
        # file currently being updated
        #FIXME: shall we idle wait for lock to be released first? what about stale locks?
        logging.info("attemped to update %s while locked", self.pootle_path)
        return
    if self.state < PARSED:
        logging.debug("Parsing %s", self.pootle_path)
        # no existing units in db, file hasn't been parsed before
        # no point in merging, add units directly
        oldstate = self.state
        self.state = LOCKED
        self.save()
        try:
            for index, unit in enumerate(self.file.store.units):
                if unit.istranslatable():
                    self.addunit(unit, index)
        # NOTE(review): bare except also catches KeyboardInterrupt/SystemExit,
        # but it re-raises after rolling back, so state stays consistent
        except:
            # something broke, delete any units that got created
            # and return store state to its original value
            self.unit_set.all().delete()
            self.state = oldstate
            self.save()
            raise
        self.state = PARSED
        self.save()
        return
    # lock store
    logging.debug("Updating %s", self.pootle_path)
    oldstate = self.state
    self.state = LOCKED
    self.save()
    try:
        monolingual = is_monolingual(type(self.file.store))
        self.require_dbid_index(update=True)
        # ids present in the db vs ids present in the file
        old_ids = set(self.dbid_index.keys())
        new_ids = set(self.file.store.getids())
        if update_structure:
            # db units no longer present in the file
            obsolete_dbids = [self.dbid_index.get(uid)
                              for uid in old_ids - new_ids]
            for unit in self.findid_bulk(obsolete_dbids):
                if not unit.istranslated() or not conservative:
                    #FIXME: make obselete instead?
                    unit.delete()
            # file units not yet in the db
            new_units = (self.file.store.findid(uid)
                         for uid in new_ids - old_ids)
            for unit in new_units:
                self.addunit(unit, unit.index)
        if update_translation:
            shared_dbids = [self.dbid_index.get(uid)
                            for uid in old_ids & new_ids]
            for unit in self.findid_bulk(shared_dbids):
                newunit = self.file.store.findid(unit.getid())
                if monolingual and not self.translation_project.is_template_project:
                    # monolingual "source" may actually be a translation
                    fix_monolingual(unit, newunit, monolingual)
                changed = unit.update(newunit)
                if update_structure and unit.index != newunit.index:
                    unit.index = newunit.index
                    changed = True
                if changed:
                    unit.save()
    finally:
        # unlock store
        self.state = oldstate
        self.save()