def save(self, user=None, force_insert=False, force_update=False, commit=True):
    """Save the form's Resource instance.

    When ``commit`` is True and a source file was uploaded, the file is
    copied into a new StorageFile, parsed for validity, and its strings
    are imported into the database as the resource's source language
    (attributed to ``user``). The resource's i18n type is then guessed
    from the uploaded filename on a best-effort basis.

    Parser/import errors propagate to the caller so an invalid upload
    fails the save loudly.

    Returns the (possibly unsaved, if ``commit`` is False) model instance.
    """
    m = super(ResourceForm, self).save(commit=False)
    if commit:
        cleaned_data = self.cleaned_data
        language = cleaned_data['source_language']
        # dict.get replaces the deprecated has_key() lookup and returns
        # None when no source file was uploaded.
        file = cleaned_data.get('sourcefile')
        if file:
            sf = StorageFile()
            sf.uuid = str(uuid4())
            sf.name = file.name
            # Copy the uploaded content into the storage area; the
            # try/finally guarantees the handle is closed even if
            # write() raises.
            file.seek(0)
            fh = open(sf.get_storage_path(), 'wb')
            try:
                fh.write(file.read())
                fh.flush()
            finally:
                fh.close()
            sf.size = file.size
            sf.language = language
            sf.update_props()
            sf.save()
            parser = sf.find_parser()
            # Do an actual parse to see if the file is valid; any error
            # here propagates (the former `try: ... except: raise` wrapper
            # was a no-op and has been dropped).
            fhandler = parser(filename=sf.get_storage_path())
            fhandler.set_language(language)
            fhandler.bind_resource(self.instance)
            fhandler.contents_check(fhandler.filename)
            fhandler.parse_file(is_source=True)
            fhandler.save2db(is_source=True, user=user)
            # Try to set the i18n type. Problem is that we only check
            # filename instead of mime type. we should probably update the
            # function to use python magic as well.
            try:
                m.i18n_type = get_i18n_type_from_file(file.name)
            except Exception:
                # Best effort only: an unrecognized extension leaves
                # i18n_type untouched (narrowed from a bare except).
                pass
        m.save()
    return m
def handle(self, *args, **options):
    """Migrate JSON resource mappings into Resource objects.

    For each JSONMap (all of them, or those whose project slug is in
    ``args``), pushes every mapped resource: creates the Resource for the
    source language, imports the cached source PO file, then imports the
    cached translation files. Maps whose meta has ``_migrated`` set are
    skipped unless ``--force`` is given.

    Raises CommandError when DEBUG is on or when no mapping matches.
    """
    # OMG!1! Dirty fix for circular importing issues. Didn't want to dig
    # into it because it's probably not worth, once it's a tmp code.
    from transifex.resources.formats import get_i18n_type_from_file
    from transifex.resources.formats.pofile import POHandler
    from transifex.languages.models import Language
    from transifex.projects.models import Project
    from transifex.resources.models import Resource

    force = options.get('force')
    if settings.DEBUG:
        msg = "You are running this command with DEBUG=True. Please " \
              "change it to False in order to avoid problems with " \
              "allocating memory."
        raise CommandError(msg)

    msg = None
    if len(args) == 0:
        jsonmaps = JSONMap.objects.all()
    else:
        jsonmaps = JSONMap.objects.filter(project__slug__in=args)
        if not jsonmaps:
            msg = "No mapping found for given project slug(s): %s" % ', '.join(args)
    if not jsonmaps:
        raise CommandError(msg or "No mapping found in the database.")

    for jsonmap in jsonmaps:
        jm = jsonmap.loads(True)
        # Check whether the map was already migrated or not
        if jm['meta'].get('_migrated', None) and not force:
            logger.debug("Project '%s' was already migrated."
                % jsonmap.project)
            continue
        for r in jm['resources']:
            logger.debug("Pushing resource: %s" % r.get('resource_slug'))
            project = jsonmap.project
            # Path for cached files of project.component
            path = os.path.join(settings.MSGMERGE_DIR,
                '%s.%s' % (project.slug, jsonmap.slug))
            if os.path.exists(path):
                resource_slug = r['resource_slug']
                language = Language.objects.by_code_or_alias_or_none(
                    r['source_lang'])
                # Create resource and load source language
                if language:
                    resource, created = Resource.objects.get_or_create(
                        slug = resource_slug,
                        source_language = language,
                        project = project)
                    if created:
                        resource.name = '%s - %s' % (jsonmap.slug,
                            r['source_file'])
                    source_file = os.path.join(path, r['source_file'])
                    # i18n type is guessed from the cached file's name
                    # (should be PO, since POHandler is used below).
                    resource.i18n_type = get_i18n_type_from_file(source_file)
                    resource.save()
                    resource.url_info = URLInfo(
                        source_file_url=r['autofetch_url'])
                    logger.debug("Inserting source strings from %s (%s) to "
                        "'%s'." % (r['source_file'], language.code,
                        resource.full_name))
                    fhandler = POHandler(filename=source_file)
                    fhandler.bind_resource(resource)
                    fhandler.set_language(language)
                    try:
                        fhandler.contents_check(fhandler.filename)
                        fhandler.parse_file(True)
                        strings_added, strings_updated = fhandler.save2db(True)
                    except Exception, e:
                        # Source import failed: roll back the resource so a
                        # later run can retry from scratch.
                        resource.delete()
                        sys.stdout.write((u"Resource not created! Could not import "
                            "file '%s': %s.\n" % (source_file,
                            str(e))).encode('UTF-8'))
                        # Skip adding translations, as the resource
                        # wasn't created.
                        continue
                    logger.debug("Inserting translations for '%s' (%s)."
                        % (resource.slug, project))
                    # Load translations
                    for code, f in r['translations'].items():
                        language = Language.objects.by_code_or_alias_or_none(code)
                        if language:
                            translation_file = os.path.join(path, f['file'])
                            try:
                                fhandler = POHandler(filename=translation_file)
                                fhandler.set_language(language)
                                fhandler.bind_resource(resource)
                                fhandler.contents_check(fhandler.filename)
                                fhandler.parse_file()
                                strings_added, strings_updated = fhandler.save2db()
                            except Exception, e:
                                # A broken translation file is reported but
                                # does not abort the remaining languages.
                                sys.stdout.write((u"Could not import file '%s': %s"
                                    % (translation_file,
                                    str(e))).encode('UTF-8'))
            else:
                logger.debug("Mapping '%s' does not have cached files "
                    "under %s." % (jsonmap, path))
tr.save() strings_updated += 1 except Exception, e: logger.error("There was problem while importing the entries " "into the database. Entity: '%s'. Error: '%s'." % (j.source_entity, str(e))) transaction.rollback() return 0,0 else: if is_source: strings_deleted = len(original_sources) t, created = Template.objects.get_or_create(resource = self.resource) t.content = self.template t.save() if created: self.resource.i18n_type = get_i18n_type_from_file(self.filename) self.resource.save() # See how many iterations we need for this iterations = len(original_sources)*len(new_entities) # If it's not over the limit, then do it if iterations < settings.MAX_STRING_ITERATIONS: for se in original_sources: for ne in new_entities: try: old_trans = Translation.objects.get(source_entity=se, language=se.resource.source_language, rule=5) new_trans = Translation.objects.get(source_entity=ne, language=se.resource.source_language, rule=5) except Translation.DoesNotExist: # Source language translation should always exist # but just in case...
def update_source_file(self, fake=False): """ Fetch source file from remote url and import it, updating existing entries. """ try: file = urllib2.urlopen(self.source_file_url) except: logger.error("Could not pull source file for resource %s (%s)" % (self.resource.full_name, self.source_file_url)) raise filename = '' if file.info().has_key('Content-Disposition'): # If the response has Content-Disposition, we try to take # filename from it content = file.info()['Content-Disposition'] if 'filename' in content: filename = content.split('filename')[1] filename = filename.replace('"', '').replace("'", "" ).replace("=", "").replace('/', '-').strip() if filename == '': parts = urlparse.urlsplit(self.source_file_url) #FIXME: This still might end empty filename = parts.path.split('/')[-1] sf = StorageFile() sf.uuid = str(uuid4()) sf.name = filename fh = open(sf.get_storage_path(), 'wb') fh.write(file.read()) fh.flush() fh.close() sf.size = os.path.getsize(sf.get_storage_path()) sf.language = self.resource.source_language sf.update_props() sf.save() try: if self.resource.i18n_type: parser = get_i18n_handler_from_type(self.resource.i18n_type) else: parser = sf.find_parser() assert parser, "Could not find a suitable handler for this file." i18n_type = get_i18n_type_from_file(sf.get_storage_path()) self.resource.i18n_type = i18n_type self.resource.save() language = sf.language fhandler = parser(filename=sf.get_storage_path()) fhandler.set_language(language) fhandler.bind_resource(self.resource) fhandler.contents_check(fhandler.filename) fhandler.parse_file(is_source=True) strings_added, strings_updated = 0, 0 if not fake: strings_added, strings_updated = fhandler.save2db(is_source=True) except Exception,e: logger.error("Error importing source file for resource %s.%s (%s): %s" % ( self.resource.project.slug, self.resource.slug, self.source_file_url, str(e))) raise