def app_xml(app_id):
    try:
        app = db.session.query(App).filter_by(unique_id=app_id, composer = 'adapt').first()
        if app is None:
            return "App not found", 404
        data = json.loads(app.data)
        if data['adaptor_type'] != 'jsconfig':
            return "App deprecated", 404
        url = data['url'].strip()
        contents = get_cached_session().get(url).text
        # If the user hasn't clicked on "Save" yet, do not replace configuration script
        if data.get('configuration_name'):
            contents = replace_default_configuration_script(contents, url_for('.configuration', app_id=app_id, _external=True))
        contents = inject_absolute_urls(contents, url)
        xmldoc = minidom.parseString(contents)
        inject_original_url_in_xmldoc(xmldoc, url)
        inject_absolute_locales_in_xmldoc(xmldoc, url)
        contents = xmldoc.toprettyxml()
    except Exception as e:
        traceback.print_exc()
        # TODO: some bootstrap magic
        return "Could not convert the application. %s" % str(e)
    else:
        return Response(contents, mimetype='text/xml')
def get_contents(lang, trials=2):
    if lang == 'en':
        resource_id = '560410f1f0e1b09f6c8117ec'
        requests = get_cached_session()
        request_url = "http://graasp.eu/resources/{0}/raw".format(resource_id)
        try:
            r = requests.get(request_url, timeout=(10,10))
            r.raise_for_status()
        except Exception:
            raise TimeoutError("Timeout")
        try:
            return json.JSONDecoder(object_pairs_hook=OrderedDict).decode(r.text)
        except ValueError as ve:
            if len(r.text) == 0:
                if trials == 0:
                    raise ValueError("{}: {} returned empty result!".format(ve, request_url))
                return get_contents(lang, trials-1)
            if len(r.text) >= 20:
                response = '{!r}...'.format(r.text[:20])
            else:
                response = r.text
            raise ValueError("{}: {}: {!r}".format(ve, request_url, response))
    else:
        return None
def app_xml(app_id):
    try:
        app = db.session.query(App).filter_by(unique_id=app_id, composer='adapt').first()
        if app is None:
            return "App not found", 404
        data = json.loads(app.data)
        if data['adaptor_type'] != 'jsconfig':
            return "App deprecated", 404
        url = data['url'].strip()
        contents = get_cached_session().get(url).text
        # If the user hasn't clicked on "Save" yet, do not replace configuration script
        if data.get('configuration_name'):
            contents = replace_default_configuration_script(
                contents, url_for('.configuration', app_id=app_id, _external=True))
        contents = inject_absolute_urls(contents, url)
        xmldoc = minidom.parseString(contents)
        inject_original_url_in_xmldoc(xmldoc, url)
        inject_absolute_locales_in_xmldoc(xmldoc, url)
        contents = xmldoc.toprettyxml()
    except Exception as e:
        traceback.print_exc()
        # TODO: some bootstrap magic
        return "Could not convert the application. %s" % str(e)
    else:
        return Response(contents, mimetype='text/xml')
def get_contents(lang, trials=2):
    if lang == 'en':
        resource_id = '5a2ee559256f60731402f4da'
        requests = get_cached_session()
        request_url = "http://graasp.eu/resources/{0}/raw".format(resource_id)
        try:
            r = requests.get(request_url, timeout=(10,10))
            r.raise_for_status()
        except Exception:
            raise TimeoutError("Timeout")
        try:
            return json.JSONDecoder(object_pairs_hook=OrderedDict).decode(r.text)
        except ValueError as ve:
            if len(r.text) == 0:
                if trials == 0:
                    raise ValueError("{}: {} returned empty result!".format(ve, request_url))
                return get_contents(lang, trials-1)
            if len(r.text) >= 20:
                response = '{!r}...'.format(r.text[:20])
            else:
                response = r.text
            raise ValueError("{}: {}: {!r}".format(ve, request_url, response))
    else:
        return None
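# --- Illustrative sketch (not part of the codebase) ---
# get_contents() above decodes the downloaded JSON with
# json.JSONDecoder(object_pairs_hook=OrderedDict) so that the key order of the Graasp
# resource is preserved. A minimal standalone example of that decoding technique
# (the sample document below is made up):
import json
from collections import OrderedDict

sample_text = '{"title": "Welcome", "body": "Hello", "footer": "Bye"}'
decoded = json.JSONDecoder(object_pairs_hook=OrderedDict).decode(sample_text)
assert isinstance(decoded, OrderedDict)
assert list(decoded.keys()) == ["title", "body", "footer"]  # insertion order is kept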
def __init__(self, app_url, force_reload):
    threading.Thread.__init__(self)
    self.cached_requests = get_cached_session()
    self.app_url = app_url
    self.force_reload = force_reload
    self.finished = False
    self.failing = False
    self.metadata_information = None
def get_languages():
    return ['en']
    # NOTE: the early return above hard-codes English; the code below is currently unreachable.
    requests = get_cached_session()
    languages = []
    for line in requests.get(
            "http://go-lab.gw.utwente.nl/production/common/languages/list.txt"
    ).text.splitlines():
        languages.append(line.split("_")[1])
    return languages
def synchronize_apps_no_cache():
    """Force obtaining the results and checking everything again to avoid
    inconsistencies. This should be run once a day."""
    sync_id = start_synchronization()
    try:
        cached_requests = get_cached_session()
        synced_apps = _sync_golab_translations(cached_requests, force_reload = True)
        _sync_regular_apps(cached_requests, synced_apps, force_reload = True)
    finally:
        end_synchronization(sync_id)
def __init__(self, app_url, force_reload):
    threading.Thread.__init__(self)
    self.cached_requests = trutils.get_cached_session(
        caching=not force_reload)
    self.app_url = app_url
    self.force_reload = force_reload
    self.finished = False
    self.failing = False
    self.metadata_information = None
def synchronize_apps_cache():
    """Force obtaining the results and checking everything again to avoid
    inconsistencies. This can safely be run every few minutes, since most
    applications will be in the cache."""
    sync_id = start_synchronization()
    try:
        cached_requests = get_cached_session()
        synced_apps = _sync_golab_translations(cached_requests, force_reload = False)
        _sync_regular_apps(cached_requests, synced_apps, force_reload = False)
    finally:
        end_synchronization(sync_id)
def locale(language):
    requests = get_cached_session()
    if language not in get_languages():
        return "Language not found", 404
    # xml_response = requests.get('http://go-lab.gw.utwente.nl/production/commons/commons_en_ALL.xml')
    xml_response = requests.get('http://go-lab.gw.utwente.nl/production/commons/languages/common_{0}_ALL.xml'.format(language))
    response = make_response(get_text_from_response(xml_response))
    response.content_type = 'application/xml'
    return response
def synchronize_apps_no_cache(source, single_app_url = None):
    """Force obtaining the results and checking everything again to avoid
    inconsistencies. This should be run once a day."""
    sync_id = start_synchronization(source = source, cached = False, single_app_url = single_app_url)
    number = 0
    try:
        cached_requests = trutils.get_cached_session(caching = False)
        synced_apps = []
        all_golab_apps = _get_golab_translations(cached_requests)
        all_golab_apps.extend(get_other_apps())
        number += _sync_translations(cached_requests, "Go-Lab apps", synced_apps, all_golab_apps, force_reload = True, single_app_url = single_app_url)
        number += _sync_regular_apps(cached_requests, synced_apps, force_reload = True, single_app_url = single_app_url)
    finally:
        end_synchronization(sync_id, number)
def run(self):
    self.failing = False
    cached_requests = trutils.get_cached_session(caching = not self.force_reload)
    try:
        self.metadata_information = extract_metadata_information(self.app_url, self.preview_link, cached_requests, self.force_reload, app_format=self.app_format)
    except Exception:
        logger.warning("Error extracting information from %s" % self.app_url, exc_info = True)
        if DEBUG_VERBOSE:
            print("Error extracting information from %s" % self.app_url)
            traceback.print_exc()
        self.metadata_information = {}
        self.failing = True
    else:
        self.failing = self.metadata_information.get('failing', False)
    self.finished = True
def locale(language):
    requests = get_cached_session()
    if language not in get_languages():
        return "Language not found", 404
    # xml_response = requests.get('http://go-lab.gw.utwente.nl/production/commons/commons_en_ALL.xml')
    try:
        xml_response = requests.get(
            'http://go-lab.gw.utwente.nl/production/common/languages/common_{0}_ALL.xml'
            .format(language),
            timeout=(10, 10))
    except Exception:
        return "Error requesting external resource", 502
    else:
        response = make_response(get_text_from_response(xml_response))
        response.content_type = 'application/xml'
        return response
def synchronize_apps_no_cache(source, single_app_url=None):
    """Force obtaining the results and checking everything again to avoid
    inconsistencies. This should be run once a day."""
    sync_id = start_synchronization(source=source, cached=False, single_app_url=single_app_url)
    number = 0
    try:
        cached_requests = trutils.get_cached_session(caching=False)
        synced_apps = []
        all_golab_apps = _get_golab_translations(cached_requests)
        all_golab_apps.extend(get_other_apps())
        number += _sync_translations(cached_requests, "Go-Lab apps", synced_apps, all_golab_apps, force_reload=True, single_app_url=single_app_url)
        number += _sync_regular_apps(cached_requests, synced_apps, force_reload=True, single_app_url=single_app_url)
    finally:
        end_synchronization(sync_id, number)
def run(self):
    self.failing = False
    cached_requests = trutils.get_cached_session(
        caching=not self.force_reload)
    try:
        self.metadata_information = extract_metadata_information(
            self.app_url,
            self.preview_link,
            cached_requests,
            self.force_reload,
            app_format=self.app_format)
    except Exception:
        logger.warning("Error extracting information from %s" % self.app_url,
                       exc_info=True)
        if DEBUG_VERBOSE:
            print("Error extracting information from %s" % self.app_url)
            traceback.print_exc()
        self.metadata_information = {}
        self.failing = True
    else:
        self.failing = self.metadata_information.get('failing', False)
    self.finished = True
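# --- Illustrative sketch (not part of the codebase) ---
# The __init__/run methods above belong to a threading.Thread subclass: a caller creates one
# task per app URL, starts it, and later checks the 'finished'/'failing' flags and the
# 'metadata_information' attribute. The class name and body below are hypothetical placeholders
# used only to show that pattern in a self-contained, runnable way.
import threading

class _ExampleMetadataTask(threading.Thread):
    def __init__(self, app_url):
        threading.Thread.__init__(self)
        self.app_url = app_url
        self.finished = False
        self.failing = False
        self.metadata_information = None

    def run(self):
        try:
            # A real task would call extract_metadata_information() here.
            self.metadata_information = {'url': self.app_url}
        except Exception:
            self.metadata_information = {}
            self.failing = True
        self.finished = True

if __name__ == '__main__':
    tasks = [_ExampleMetadataTask(url) for url in ('http://example.org/a.xml', 'http://example.org/b.xml')]
    for task in tasks:
        task.start()
    for task in tasks:
        task.join()
        print(task.app_url, task.failing, task.metadata_information)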
def extract_metadata_information(app_url, preview_link, cached_requests = None, force_reload = False, app_format=None):
    if cached_requests is None:
        cached_requests = get_cached_session()

    if not app_format or app_format == 'opensocial':
        app_information = _extract_information_opensocial(app_url, cached_requests)
    elif app_format == 'html':
        app_information = _extract_information_html(app_url, cached_requests)
    else:
        raise TranslatorError('Invalid app format: {}'.format(app_format))

    original_translations = {}
    original_translation_urls = {}
    default_translations = {}
    default_translation_url = None
    default_metadata = {}

    if len(app_information.locales) == 0:
        translatable = False
    else:
        translatable = True

    default_locale = None
    for locale in app_information.locales:
        lang = locale['lang']
        country = locale['country']
        messages_url = locale['messages']
        if lang and messages_url and lang.lower() != 'all':
            if country:
                lang = u'{}_{}'.format(lang, country)
            elif len(lang) == 2:
                lang = u'%s_ALL' % lang
            if lang.lower() == 'zh_all':
                lang = 'zh_CN'
            if '_' in lang and lang.split('_')[0].lower() == lang.split('_')[1].lower():
                # es_ES is es_ALL, fr_FR is fr_ALL
                lang = '{}_ALL'.format(lang.split('_')[0])
            try:
                absolute_url, messages, metadata, locale_contents = _retrieve_messages_from_relative_url(app_url, messages_url, cached_requests)
            except TranslatorError as e:
                logging.warning(u"Could not load %s translation for app URL: %s Reason: %s" % (lang, app_url, e), exc_info = True)
                continue
            else:
                new_messages = {}
                if messages:
                    for key, value in messages.iteritems():
                        new_messages[key] = value['text']
                original_translations[lang] = new_messages
                original_translation_urls[lang] = absolute_url

        if (lang is None or lang.lower() == 'all') and messages_url:
            # Process this later. This way we can force we get the results for the default translation
            default_locale = locale

    if default_locale is not None:
        messages_url = default_locale['messages']
        absolute_url, messages, metadata, locale_contents = _retrieve_messages_from_relative_url(app_url, messages_url, cached_requests)
        default_translations = messages
        default_translation_url = absolute_url
        default_metadata = metadata

        # No English? Default is always English!
        if 'en_ALL' not in original_translations:
            lang = 'en_ALL'
            new_messages = {}
            if messages:
                for key, value in messages.iteritems():
                    new_messages[key] = value['text']
            original_translations[lang] = new_messages
            original_translation_urls[lang] = absolute_url

    check_urls = app_information.check_urls
    if preview_link:
        check_urls.append(preview_link)

    metadata = {
        'translatable' : translatable,
        'check_urls' : check_urls,
        'uses_proxy': app_information.uses_proxy,
        'adaptable' : False, # Not supported anymore (all 0 in the database right now)
        'original_translations' : original_translations,
        'original_translation_urls' : original_translation_urls,
        'default_translations' : default_translations,
        'default_translation_url' : default_translation_url,
        'default_metadata' : default_metadata,
    }
    translation_hash, serialized = _calculate_translations_hash(original_translations, default_translations)
    metadata['translation_hash'] = translation_hash
    metadata['check_urls_hash'] = unicode(zlib.crc32(json.dumps(sorted(check_urls))))
    return metadata
def extract_metadata_information(app_url, preview_link, cached_requests=None, force_reload=False, app_format=None):
    if cached_requests is None:
        cached_requests = get_cached_session()

    if not app_format or app_format == 'opensocial':
        app_information = _extract_information_opensocial(
            app_url, cached_requests)
    elif app_format == 'html':
        app_information = _extract_information_html(app_url, cached_requests)
    else:
        raise TranslatorError('Invalid app format: {}'.format(app_format))

    original_translations = {}
    original_translation_urls = {}
    default_translations = {}
    default_translation_url = None
    default_metadata = {}

    if len(app_information.locales) == 0:
        translatable = False
    else:
        translatable = True

    default_locale = None
    for locale in app_information.locales:
        lang = locale['lang']
        country = locale['country']
        messages_url = locale['messages']
        if lang and messages_url and lang.lower() != 'all':
            if country:
                lang = u'{}_{}'.format(lang, country)
            elif len(lang) == 2:
                lang = u'%s_ALL' % lang
            if lang.lower() == 'zh_all':
                lang = 'zh_CN'
            if '_' in lang and lang.split('_')[0].lower() == lang.split(
                    '_')[1].lower():
                # es_ES is es_ALL, fr_FR is fr_ALL
                lang = '{}_ALL'.format(lang.split('_')[0])
            try:
                absolute_url, messages, metadata, locale_contents = _retrieve_messages_from_relative_url(
                    app_url, messages_url, cached_requests)
            except TranslatorError as e:
                logging.warning(
                    u"Could not load %s translation for app URL: %s Reason: %s"
                    % (lang, app_url, e),
                    exc_info=True)
                continue
            else:
                new_messages = {}
                if messages:
                    for key, value in messages.iteritems():
                        new_messages[key] = value['text']
                original_translations[lang] = new_messages
                original_translation_urls[lang] = absolute_url

        if (lang is None or lang.lower() == 'all') and messages_url:
            # Process this later. This way we can force we get the results for the default translation
            default_locale = locale

    if default_locale is not None:
        messages_url = default_locale['messages']
        absolute_url, messages, metadata, locale_contents = _retrieve_messages_from_relative_url(
            app_url, messages_url, cached_requests)
        default_translations = messages
        default_translation_url = absolute_url
        default_metadata = metadata

        # No English? Default is always English!
        if 'en_ALL' not in original_translations:
            lang = 'en_ALL'
            new_messages = {}
            if messages:
                for key, value in messages.iteritems():
                    new_messages[key] = value['text']
            original_translations[lang] = new_messages
            original_translation_urls[lang] = absolute_url

    check_urls = app_information.check_urls
    if preview_link:
        check_urls.append(preview_link)

    metadata = {
        'translatable': translatable,
        'check_urls': check_urls,
        'uses_proxy': app_information.uses_proxy,
        'offline': app_information.offline,
        'adaptable': False,  # Not supported anymore (all 0 in the database right now)
        'original_translations': original_translations,
        'original_translation_urls': original_translation_urls,
        'default_translations': default_translations,
        'default_translation_url': default_translation_url,
        'default_metadata': default_metadata,
    }
    translation_hash, serialized = _calculate_translations_hash(
        original_translations, default_translations)
    metadata['translation_hash'] = translation_hash
    metadata['check_urls_hash'] = unicode(
        zlib.crc32(json.dumps(sorted(check_urls))))
    return metadata
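# --- Illustrative sketch (not part of the codebase) ---
# The loop in extract_metadata_information() normalises locale identifiers: a country suffix is
# appended when present, bare two-letter codes become '<lang>_ALL', 'zh' maps to 'zh_CN', and
# redundant pairs such as 'es_ES' or 'fr_FR' collapse to '<lang>_ALL'. The helper below is a
# hypothetical standalone rewrite of just those rules, for clarity:
def _normalize_lang_example(lang, country=None):
    if country:
        lang = '{}_{}'.format(lang, country)
    elif len(lang) == 2:
        lang = '%s_ALL' % lang
    if lang.lower() == 'zh_all':
        lang = 'zh_CN'
    if '_' in lang and lang.split('_')[0].lower() == lang.split('_')[1].lower():
        lang = '{}_ALL'.format(lang.split('_')[0])
    return lang

assert _normalize_lang_example('es', 'ES') == 'es_ALL'
assert _normalize_lang_example('fr') == 'fr_ALL'
assert _normalize_lang_example('zh') == 'zh_CN'
assert _normalize_lang_example('pt', 'BR') == 'pt_BR'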
def extract_local_translations_url(app_url, force_local_cache=False):
    redis_key = 'appcomposer:fast-cache:{}'.format(app_url)
    if force_local_cache:
        # Under some situations (e.g., updating a single message), it is better to have a cache
        # than contacting the foreign server. Only if requested, this method will try to check
        # in a local cache in Redis.
        cached = redis_store.get(redis_key)
        if cached:
            translation_url, original_messages, metadata = json.loads(cached)
            if metadata is not None:
                original_messages_loaded = json.loads(original_messages)
                metadata_loaded = json.loads(metadata)
                return translation_url, original_messages_loaded, metadata_loaded

    cached_requests = get_cached_session()

    repository_app = db.session.query(RepositoryApp).filter_by(
        url=app_url).first()
    if repository_app is not None:
        guessing = False
        app_format = repository_app.app_format
    else:
        guessing = True
        app_url_path = urlparse.urlparse(app_url).path
        if app_url_path.endswith('.xml'):
            app_format = 'opensocial'
        elif app_url_path.endswith('.html'):
            app_format = 'html'
        else:
            r = cached_requests.get(app_url)
            if 'xml' in (r.headers.get('Content-Type') or ''):
                app_format = 'opensocial'
            else:
                app_format = 'html'

    if app_format == 'opensocial' or not app_format:  # Default to opensocial
        try:
            app_information = _extract_information_opensocial(
                app_url, cached_requests)
        except TranslatorError as err:
            if not guessing:
                raise
            try:
                app_information = _extract_information_html(
                    app_url, cached_requests)
            except TranslatorError as err2:
                raise TranslatorError(
                    "Error trying both opensocial and html: {} / {}".format(
                        err, err2))
    else:
        try:
            app_information = _extract_information_html(
                app_url, cached_requests)
        except TranslatorError as err:
            try:
                app_information = _extract_information_opensocial(
                    app_url, cached_requests)
            except TranslatorError as err2:
                raise TranslatorError(
                    "Error trying both html and opensocial: {} / {}".format(
                        err, err2))

    locales_without_lang = [
        locale for locale in app_information.locales
        if not locale['lang'] or locale['lang'].lower() == 'all'
    ]
    if not locales_without_lang:
        raise TranslatorError(
            "That application does not provide any default locale. The application has probably not been adopted to be translated."
        )

    relative_translation_url = locales_without_lang[0]['messages']
    if not relative_translation_url:
        raise TranslatorError("Default Locale not provided message attribute")

    absolute_translation_url, messages, metadata, contents = _retrieve_messages_from_relative_url(
        app_url, relative_translation_url, cached_requests)

    redis_value = json.dumps(
        [absolute_translation_url, json.dumps(messages), json.dumps(metadata)])
    redis_store.setex(name=redis_key, time=10 * 60, value=redis_value)  # For 10 minutes

    return absolute_translation_url, messages, metadata
def get_languages():
    requests = get_cached_session()
    languages = []
    for line in requests.get("http://go-lab.gw.utwente.nl/production/commons/languages/list.txt").text.splitlines():
        languages.append(line.split("_")[1])
    return languages
def index():
    requests = get_cached_session()
    languages = get_languages()
    response = make_response(render_template('graasp_i18n.xml', languages = languages, title = "Twente commons"))
    response.content_type = 'application/xml'
    return response
def extract_local_translations_url(app_url, force_local_cache = False):
    redis_key = 'appcomposer:fast-cache:{}'.format(app_url)
    if force_local_cache:
        # Under some situations (e.g., updating a single message), it is better to have a cache
        # than contacting the foreign server. Only if requested, this method will try to check
        # in a local cache in Redis.
        cached = redis_store.get(redis_key)
        if cached:
            translation_url, original_messages, metadata = json.loads(cached)
            if metadata is not None:
                original_messages_loaded = json.loads(original_messages)
                metadata_loaded = json.loads(metadata)
                return translation_url, original_messages_loaded, metadata_loaded

    cached_requests = get_cached_session()

    repository_app = db.session.query(RepositoryApp).filter_by(url=app_url).first()
    if repository_app is not None:
        guessing = False
        app_format = repository_app.app_format
    else:
        guessing = True
        app_url_path = urlparse.urlparse(app_url).path
        if app_url_path.endswith('.xml'):
            app_format = 'opensocial'
        elif app_url_path.endswith('.html'):
            app_format = 'html'
        else:
            r = cached_requests.get(app_url)
            if 'xml' in (r.headers.get('Content-Type') or ''):
                app_format = 'opensocial'
            else:
                app_format = 'html'

    if app_format == 'opensocial' or not app_format:  # Default to opensocial
        try:
            app_information = _extract_information_opensocial(app_url, cached_requests)
        except TranslatorError as err:
            if not guessing:
                raise
            try:
                app_information = _extract_information_html(app_url, cached_requests)
            except TranslatorError as err2:
                raise TranslatorError("Error trying both opensocial and html: {} / {}".format(err, err2))
    else:
        try:
            app_information = _extract_information_html(app_url, cached_requests)
        except TranslatorError as err:
            try:
                app_information = _extract_information_opensocial(app_url, cached_requests)
            except TranslatorError as err2:
                raise TranslatorError("Error trying both html and opensocial: {} / {}".format(err, err2))

    locales_without_lang = [ locale for locale in app_information.locales if not locale['lang'] or locale['lang'].lower() == 'all' ]
    if not locales_without_lang:
        raise TranslatorError("That application does not provide any default locale. The application has probably not been adopted to be translated.")

    relative_translation_url = locales_without_lang[0]['messages']
    if not relative_translation_url:
        raise TranslatorError("Default Locale not provided message attribute")

    absolute_translation_url, messages, metadata, contents = _retrieve_messages_from_relative_url(app_url, relative_translation_url, cached_requests)

    redis_value = json.dumps([ absolute_translation_url, json.dumps(messages), json.dumps(metadata) ])
    redis_store.setex(name=redis_key, time=10 * 60, value=redis_value)  # For 10 minutes

    return absolute_translation_url, messages, metadata
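# --- Illustrative sketch (not part of the codebase) ---
# extract_local_translations_url() caches its result in Redis as a JSON list whose second and
# third elements are themselves JSON-encoded strings, stored with a 10-minute TTL (setex) and
# decoded in two steps on the way back. The helpers below are hypothetical and only illustrate
# that pack/unpack layout:
import json

def _pack_fast_cache(translation_url, messages, metadata):
    return json.dumps([translation_url, json.dumps(messages), json.dumps(metadata)])

def _unpack_fast_cache(cached):
    translation_url, messages_raw, metadata_raw = json.loads(cached)
    return translation_url, json.loads(messages_raw), json.loads(metadata_raw)

value = _pack_fast_cache('http://example.org/languages/default.xml', {'greeting': 'Hello'}, {'mails': []})
assert _unpack_fast_cache(value) == ('http://example.org/languages/default.xml', {'greeting': 'Hello'}, {'mails': []})
# In the real function this value would be stored with redis_store.setex(name=redis_key, time=10 * 60, value=value)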