def get_avatar_image(request, user, size):
    """Return avatar image from cache (if available) or download it."""
    key = '-'.join(('avatar-img', user.username, str(size)))
    # Prefer a dedicated avatar cache when one is configured.
    try:
        storage = caches['avatar']
    except InvalidCacheBackendError:
        storage = caches['default']
    cached = storage.get(key)
    if cached is not None:
        return cached
    try:
        fetched = download_avatar_image(user, size)
        storage.set(key, fetched)
    except (IOError, CertificateError) as error:
        report_error(
            error,
            sys.exc_info(),
            request,
            extra_data={'avatar': user.username},
            level='debug',
        )
        LOGGER.error(
            'Failed to fetch avatar for %s: %s', user.username, str(error)
        )
        return get_fallback_avatar(size)
    return fetched
def import_json(cls, request, fileobj, category=None, origin=None):
    """Import translation memory entries from a JSON file.

    Each list entry must be a dict carrying all schema fields; entries
    with missing or empty fields are skipped. Returns the number of
    entries queued for import; raises MemoryImportError on malformed
    JSON.
    """
    from weblate.memory.tasks import update_memory_task
    content = fileobj.read()
    try:
        data = json.loads(force_text(content))
    except (ValueError, UnicodeDecodeError) as error:
        report_error(error, request)
        raise MemoryImportError(_('Failed to parse JSON file!'))
    fields = cls.SCHEMA().names()
    updates = {}
    if category:
        updates = {
            'category': category,
            'origin': origin,
        }
    found = 0
    if isinstance(data, list):
        for entry in data:
            if not isinstance(entry, dict):
                continue
            # Apply overrides
            entry.update(updates)
            # Skip entries missing any schema field.  The original inner
            # loop used "continue", which only advanced the field loop and
            # never actually skipped the entry.
            if any(not entry.get(field) for field in fields):
                continue
            # Ensure there are no extra fields
            record = {field: entry[field] for field in fields}
            update_memory_task.delay(**record)
            found += 1
    return found
def upload_dictionary(request, project, lang):
    """Handle glossary file upload and redirect back to the dictionary."""
    prj = get_project(request, project)
    language = get_object_or_404(Language, code=lang)
    form = DictUploadForm(request.POST, request.FILES)
    if not form.is_valid():
        messages.error(request, _('Failed to process form!'))
    else:
        try:
            count = Dictionary.objects.upload(
                request,
                prj,
                language,
                request.FILES['file'],
                form.cleaned_data['method'],
            )
            import_message(
                request,
                count,
                _('No words to import found in file.'),
                ungettext(
                    'Imported %d word from the uploaded file.',
                    'Imported %d words from the uploaded file.',
                    count
                )
            )
        except Exception as error:
            report_error(error, sys.exc_info(), request)
            messages.error(
                request, _('File upload has failed: %s') % force_text(error)
            )
    return redirect('show_dictionary', project=prj.slug, lang=language.code)
def report_error(self, exc, request, message):
    """Wrapper for handling error situations"""
    # NOTE: the bare report_error() below resolves to the module-level
    # helper, not to this method.
    details = {
        'mt_url': self.request_url,
        'mt_params': self.request_params,
    }
    report_error(exc, request, details, prefix='Machine translation error')
    LOGGER.error(message, self.name)
    LOGGER.info(
        'Last URL: %s, params: %s', details['mt_url'], details['mt_params']
    )
def send_mails(mails):
    """Send multiple mails in single connection."""
    try:
        connection = get_connection()
        outgoing = [mail for mail in mails if mail is not None]
        connection.send_messages(outgoing)
    except SMTPException as error:
        LOGGER.error('Failed to send email: %s', error)
        report_error(error, sys.exc_info())
def is_valid_base_for_new(cls, base, monolingual):
    """Check whether base is valid."""
    if not base:
        # Without a base file only monolingual formats that can create
        # new translations qualify.
        return monolingual and cls.new_translation is not None
    try:
        cls.parse_store(base)
    except Exception as error:
        report_error(error, prefix='File parse error')
        return False
    return True
def is_valid_base_for_new(cls, base, monolingual):
    """Check whether base is valid."""
    if not base:
        # Empty base is always acceptable for this format.
        return True
    try:
        AppStoreParser(base)
    except Exception as error:
        report_error(error, prefix='File parse error')
        return False
    return True
def run_git_http(request, obj, path): """Git HTTP backend execution wrapper.""" # Find Git HTTP backend git_http_backend = find_git_http_backend() if git_http_backend is None: return HttpResponseServerError('git-http-backend not found') # Invoke Git HTTP backend query = request.META.get('QUERY_STRING', '') process_env = { 'REQUEST_METHOD': request.method, 'PATH_TRANSLATED': os.path.join(obj.full_path, path), 'GIT_HTTP_EXPORT_ALL': '1', 'CONTENT_TYPE': request.META.get('CONTENT_TYPE', ''), 'QUERY_STRING': query, 'HTTP_CONTENT_ENCODING': request.META.get('HTTP_CONTENT_ENCODING', ''), } process = subprocess.Popen( [git_http_backend], env=process_env, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, ) output, output_err = process.communicate(request.body) retcode = process.poll() # Log error if output_err: try: raise Exception('Git http backend error: {}'.format( force_text(output_err).splitlines()[0] )) except Exception as error: report_error(error, request) # Handle failure if retcode: return HttpResponseServerError(output_err) headers, content = output.split(b'\r\n\r\n', 1) message = message_from_string(headers.decode('utf-8')) # Handle status in response if 'status' in message: return HttpResponse( status=int(message['status'].split()[0]) ) # Send content response = HttpResponse( content_type=message['content-type'] ) response.write(content) return response
def main(argv=None):
    """Entry point for the Weblate management command wrapper."""
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "weblate.settings")
    os.environ['DJANGO_IS_MANAGEMENT_COMMAND'] = '1'
    from django.core.management import execute_from_command_line
    args = sys.argv if argv is None else argv
    try:
        execute_from_command_line(args)
    except Exception as error:
        from weblate.utils.errors import report_error
        report_error(error, sys.exc_info())
def get_words(self, unit):
    """Return list of word pairs for an unit."""
    extracted = set()
    source_language = unit.translation.component.project.source_language
    # Prepare analyzers:
    # - simple analyzer just splits words based on regexp
    # - language analyzer if available (it is for English)
    analyzers = [
        SimpleAnalyzer(expression=SPLIT_RE, gaps=True),
        LanguageAnalyzer(source_language.base_code),
    ]
    # Add ngram analyzer for languages like Chinese or Japanese
    if source_language.uses_ngram():
        analyzers.append(NgramAnalyzer(4))
    # Extract words from all plurals and from context
    flags = unit.all_flags
    for text in unit.get_source_plurals() + [unit.context]:
        text = strip_string(text, flags).lower()
        for analyzer in analyzers:
            # Some Whoosh analyzers break on unicode
            tokens = []
            try:
                tokens = [token.text for token in analyzer(text)]
            except (UnicodeDecodeError, IndexError) as error:
                report_error(error)
            extracted.update(tokens)
            # Cap the extraction to keep the regex below manageable
            if len(extracted) > 1000:
                break
        if len(extracted) > 1000:
            break
    extracted.discard('')
    if not extracted:
        # No extracted words, no dictionary
        return self.none()
    # Build the query for fetching the words;
    # we want case insensitive lookup
    return self.filter(
        project=unit.translation.component.project,
        language=unit.translation.language,
        source__iregex=r'(^|[ \t\n\r\f\v])({0})($|[ \t\n\r\f\v])'.format(
            '|'.join([re_escape(word) for word in islice(extracted, 1000)])
        )
    )
def get_last_content_change(self, request, silent=False):
    """Wrapper to get last content change metadata.

    Used when committing pending changes; needs to handle and report
    inconsistencies from past releases.
    """
    from weblate.auth.models import get_anonymous
    try:
        last = self.change_set.content().order_by('-timestamp')[0]
    except IndexError as error:
        # No recorded content change; fall back to anonymous author now
        if not silent:
            report_error(error, request, level='error')
        return get_anonymous(), timezone.now()
    return last.author or get_anonymous(), last.timestamp
def import_tmx(cls, request, fileobj, langmap=None, category=None, origin=None):
    """Import translation memory entries from a TMX file.

    Returns the number of translation pairs queued for import; raises
    MemoryImportError when the file cannot be parsed.
    """
    from weblate.memory.tasks import update_memory_task
    if category is None:
        category = CATEGORY_FILE
    try:
        storage = tmxfile.parsefile(fileobj)
    except (SyntaxError, AssertionError) as error:
        # Typo fixed in report prefix ('Failes' -> 'Failed')
        report_error(error, request, prefix='Failed to parse')
        raise MemoryImportError(_('Failed to parse TMX file!'))
    # The TMX header carries the source language of all units
    header = next(
        storage.document.getroot().iterchildren(
            storage.namespaced("header")
        )
    )
    source_language_code = header.get('srclang')
    source_language = cls.get_language_code(source_language_code, langmap)
    # Cache of raw language code -> normalized code
    languages = {}
    found = 0
    for unit in storage.units:
        # Parse translations (translate-toolkit does not care about
        # languages here, it just picks first and second XML elements)
        translations = {}
        for node in unit.getlanguageNodes():
            lang, text = get_node_data(unit, node)
            if not lang or not text:
                continue
            translations[lang] = text
            if lang not in languages:
                languages[lang] = cls.get_language_code(lang, langmap)
        try:
            source = translations.pop(source_language_code)
        except KeyError:
            # Skip if source language is not present
            continue
        for lang, text in translations.items():
            update_memory_task.delay(
                source_language=source_language,
                target_language=languages[lang],
                source=source,
                target=text,
                origin=origin,
                category=category,
            )
            found += 1
    return found
def report_error(self, exc, message):
    """Wrapper for handling error situations"""
    # NOTE: the bare report_error() below resolves to the module-level
    # helper, not to this method.
    details = {'mt_url': self.request_url, 'mt_params': self.request_params}
    report_error(exc, sys.exc_info(), details)
    LOGGER.error(message, self.name)
    LOGGER.error(
        'Last fetched URL: %s, params: %s',
        self.request_url,
        self.request_params,
    )
def execute_locked(request, obj, message, call, *args, **kwargs):
    """Helper function to catch possible lock exception."""
    try:
        outcome = call(*args, **kwargs)
    except Timeout as error:
        messages.error(
            request,
            _('Failed to lock the repository, another operation is in progress.')
        )
        report_error(error, request)
    else:
        # With False the call is supposed to show errors on its own
        if outcome is None or outcome:
            messages.success(request, message)
    return redirect_param(obj, '#repository')
def get_words(self, unit):
    """Return list of word pairs for an unit."""
    extracted = set()
    source_language = unit.translation.component.project.source_language
    # Prepare analyzers:
    # - simple analyzer just splits words based on regexp
    # - language analyzer if available (it is for English)
    analyzers = [
        SimpleAnalyzer(expression=SPLIT_RE, gaps=True),
        LanguageAnalyzer(source_language.base_code()),
    ]
    # Add ngram analyzer for languages like Chinese or Japanese
    if source_language.uses_ngram():
        analyzers.append(NgramAnalyzer(4))
    # Extract words from all plurals and from context
    flags = unit.all_flags
    for text in unit.get_source_plurals() + [unit.context]:
        text = strip_string(text, flags).lower()
        for analyzer in analyzers:
            # Some Whoosh analyzers break on unicode
            tokens = []
            try:
                tokens = [token.text for token in analyzer(text)]
            except (UnicodeDecodeError, IndexError) as error:
                report_error(error, sys.exc_info())
            extracted.update(tokens)
    extracted.discard('')
    if not extracted:
        # No extracted words, no dictionary
        return self.none()
    # Build the query for fetching the words;
    # we want case insensitive lookup
    return self.filter(
        project=unit.translation.component.project,
        language=unit.translation.language,
        source__iregex=r'(^|[ \t\n\r\f\v])({0})($|[ \t\n\r\f\v])'.format(
            '|'.join([re_escape(word) for word in extracted])
        )
    )
def saml_metadata(request):
    """Serve the SAML service-provider metadata XML."""
    if "social_core.backends.saml.SAMLAuth" not in settings.AUTHENTICATION_BACKENDS:
        raise Http404
    complete_url = reverse("social:complete", args=("saml",))
    saml_backend = social_django.utils.load_backend(
        load_strategy(request), "saml", complete_url
    )
    metadata, errors = saml_backend.generate_metadata_xml()
    # Handle errors
    if errors:
        add_breadcrumb(category="auth", message="SAML errors", errors=errors)
        report_error(level="error", cause="SAML metadata")
        return HttpResponseServerError(content=", ".join(errors))
    return HttpResponse(content=metadata, content_type="text/xml")
def get_uri_error(uri):
    """Return error for fetching the URL or None if it works."""
    if uri.startswith("https://nonexisting.weblate.org/"):
        # Intentionally broken URL used by the test suite
        return "Non existing test URL"
    cache_key = f"uri-check-{uri}"
    if cache.get(cache_key):
        LOGGER.debug("URL check for %s, cached success", uri)
        return None
    try:
        with request("get", uri, stream=True):
            # Remember success for an hour to avoid re-probing
            cache.set(cache_key, True, 3600)
            LOGGER.debug("URL check for %s, tested success", uri)
            return None
    except requests.exceptions.RequestException as error:
        report_error(cause="URL check failed")
        return str(error)
def report_spam(text, user_ip, user_agent):
    """Submit a message to Akismet as spam (best effort, no-op without key)."""
    if not settings.AKISMET_API_KEY:
        return
    from akismet import Akismet, ProtocolError
    client = Akismet(settings.AKISMET_API_KEY, get_site_url())
    try:
        client.submit_spam(
            user_ip,
            user_agent,
            comment_content=text,
            comment_type='comment',
        )
    except ProtocolError as error:
        report_error(error)
def activate(request):
    """Process the support activation form."""
    form = ActivateForm(request.POST)
    if not form.is_valid():
        show_form_errors(request, form)
        return redirect('manage')
    support = SupportStatus(**form.cleaned_data)
    try:
        support.refresh()
        support.save()
    except Exception as error:
        report_error(error, request)
        messages.error(
            request,
            _('The activation failed. Please check your activation token.')
        )
    else:
        messages.success(request, _('Activation completed.'))
    return redirect('manage')
def commit_pending(self, reason, user, skip_push=False, force=False, signals=True):
    """Commit any pending changes.

    Groups pending units by last author and creates one commit per
    author. Returns True when commits were performed, False when there
    was nothing to commit or the file could not be parsed.
    """
    if not force and not self.needs_commit():
        return False
    self.log_info("committing pending changes (%s)", reason)
    try:
        store = self.store
    except FileParseError as error:
        report_error(cause="Failed to parse file on commit")
        self.log_error("skipping commit due to error: %s", error)
        return False
    with self.component.repository.lock:
        # Lock the pending rows for the duration of the commit
        units = (
            self.unit_set.filter(pending=True)
            .prefetch_recent_content_changes()
            .select_for_update()
        )
        for unit in units:
            # We reuse the queryset, so pending units might reappear here
            if not unit.pending:
                continue
            # Get last change metadata
            author, timestamp = unit.get_last_content_change()
            author_name = author.get_author_name()
            # Flush pending units for this author
            self.update_units(units, store, author_name, author.id)
            # Commit changes
            self.git_commit(
                user, author_name, timestamp, skip_push=skip_push, signals=signals
            )
        # Remove the pending flag
        units.update(pending=False)
    # Update stats (the translated flag might have changed)
    self.invalidate_cache()
    return True
def is_spam(text, request):
    """Generic spam checker interface."""
    if not settings.AKISMET_API_KEY:
        return False
    from akismet import Akismet
    client = Akismet(settings.AKISMET_API_KEY, get_site_url())
    try:
        return client.comment_check(
            get_ip_address(request),
            request.META.get("HTTP_USER_AGENT", ""),
            comment_content=text,
            comment_type="comment",
        )
    except OSError as error:
        # Treat an unreachable Akismet service as spam to stay safe
        report_error(error)
        return True
def is_valid_base_for_new(
    cls,
    base: str,
    monolingual: bool,
    errors: Optional[List] = None,
    fast: bool = False,
) -> bool:
    """Check whether base is valid."""
    if not base:
        # Empty base is always acceptable for this format.
        return True
    if fast:
        # Fast mode skips the expensive parse entirely.
        return True
    try:
        AppStoreParser(base)
    except Exception:
        report_error(cause="File parse error")
        return False
    return True
def execute_locked(request, obj, message, call, *args, **kwargs):
    """Helper function to catch possible lock exception."""
    try:
        outcome = call(*args, **kwargs)
        # With False the call is supposed to show errors on its own
        if outcome is None or outcome:
            messages.success(request, message)
    except Timeout as error:
        messages.error(
            request,
            _('Failed to lock the repository, another operation in progress.')
        )
        report_error(error, sys.exc_info())
    return redirect_param(obj, '#repository')
def handle_missing_parameter(request, backend, error):
    """Map an AuthMissingParameter error to a user-facing response."""
    report_error(error, request)
    parameter = error.parameter
    if backend != 'email' and parameter == 'email':
        return auth_fail(
            request,
            _('Got no e-mail address from third party authentication service.')
            + ' '
            + _('Please register using e-mail instead.'),
        )
    if parameter in ('email', 'user', 'expires'):
        return auth_redirect_token(request)
    if parameter in ('state', 'code'):
        return auth_redirect_state(request)
    if parameter == 'demo':
        return auth_fail(request, _('Can not change authentication for demo!'))
    if parameter == 'disabled':
        return auth_fail(request, _('New registrations are turned off.'))
    # Unknown parameter: let the caller decide
    return None
def import_tmx(cls, fileobj, langmap=None, category=None, origin=None):
    """Import translation memory entries from a TMX file.

    Returns the number of translation pairs queued for import; raises
    MemoryImportError when the file cannot be parsed.
    """
    from weblate.memory.tasks import update_memory_task
    if category is None:
        category = CATEGORY_FILE
    try:
        storage = tmxfile.parsefile(fileobj)
    except SyntaxError as error:
        report_error(error)
        raise MemoryImportError(_('Failed to parse TMX file!'))
    # The TMX header carries the source language of all units
    header = next(storage.document.getroot().iterchildren(
        storage.namespaced("header")))
    source_language_code = header.get('srclang')
    source_language = cls.get_language_code(source_language_code, langmap)
    # Cache of raw language code -> normalized code
    languages = {}
    found = 0
    for unit in storage.units:
        # Parse translations (translate-toolkit does not care about
        # languages here, it just picks first and second XML elements)
        translations = {}
        for node in unit.getlanguageNodes():
            lang, text = get_node_data(unit, node)
            if not lang or not text:
                continue
            translations[lang] = text
            if lang not in languages:
                languages[lang] = cls.get_language_code(lang, langmap)
        try:
            source = translations.pop(source_language_code)
        except KeyError:
            # Skip if source language is not present
            continue
        for lang, text in translations.items():
            update_memory_task.delay(
                source_language=source_language,
                target_language=languages[lang],
                source=source,
                target=text,
                origin=origin,
                category=category,
            )
            found += 1
    return found
def get_words(self, unit):
    """
    Returns list of word pairs for an unit.
    """
    words = set()
    # Prepare analyzers
    # - standard analyzer simply splits words
    # - stemming extracts stems, to catch things like plurals
    analyzers = [StandardAnalyzer(), StemmingAnalyzer()]
    source_language = unit.translation.subproject.project.source_language
    lang_code = source_language.base_code()
    # Add per language analyzer if Whoosh has it
    if has_stemmer(lang_code):
        analyzers.append(LanguageAnalyzer(lang_code))
    # Add ngram analyzer for languages like Chinese or Japanese
    if source_language.uses_ngram():
        analyzers.append(NgramAnalyzer(4))
    # Extract words from all plurals and from context
    for text in unit.get_source_plurals() + [unit.context]:
        for analyzer in analyzers:
            # Some Whoosh analyzers break on unicode
            try:
                words.update(
                    [token.text for token in analyzer(force_text(text))]
                )
            except (UnicodeDecodeError, IndexError) as error:
                report_error(error, sys.exc_info())
    # Grab all words in the dictionary
    dictionary = self.filter(
        project=unit.translation.subproject.project,
        language=unit.translation.language
    )
    if len(words) == 0:
        # No extracted words, no dictionary
        dictionary = dictionary.none()
    else:
        # Build the query for fetching the words
        # Can not use __in as we want case insensitive lookup
        query = Q()
        for word in words:
            query |= Q(source__iexact=word)
        # Filter dictionary
        dictionary = dictionary.filter(query)
    return dictionary
def activate(request):
    """Process the support activation form."""
    form = ActivateForm(request.POST)
    if not form.is_valid():
        show_form_errors(request, form)
        return redirect("manage")
    support = SupportStatus(**form.cleaned_data)
    try:
        support.refresh()
        support.save()
    except Exception:
        report_error()
        messages.error(
            request,
            _(
                "Could not activate your installation. "
                "Please ensure your activation token is correct."
            ),
        )
    else:
        messages.success(request, _("Activation completed."))
    return redirect("manage")
def do_update(self, component, query):
    """Regenerate the generated translation for the configured component.

    Disables the add-on when its configured source or target translation
    no longer exists.
    """
    try:
        source_translation = component.translation_set.get(
            pk=self.instance.configuration["source"]
        )
        target_translation = component.translation_set.get(
            pk=self.instance.configuration["target"]
        )
    except Translation.DoesNotExist:
        # Uninstall misconfigured add-on
        report_error(cause="add-on error")
        # Fixed typo: self.intance -> self.instance (raised AttributeError
        # instead of disabling the add-on)
        self.instance.disable()
        return
    self.generate_translation(
        source_translation,
        target_translation,
        prefix=self.instance.configuration["prefix"],
        suffix=self.instance.configuration["suffix"],
        query=query,
    )
def icon(name):
    """Inlines SVG icon.

    Inlining is necessary to be able to apply CSS styles on the path.
    """
    if not name:
        raise ValueError("Empty icon name")
    if name in CACHE:
        return CACHE[name]
    icon_file = os.path.join(settings.STATIC_ROOT, "icons", name)
    try:
        with open(icon_file, "r") as handle:
            CACHE[name] = mark_safe(handle.read())
    except OSError as error:
        report_error(error, prefix="Failed to load icon")
        return ""
    return CACHE[name]
def get_gpg_public_key():
    """Return the armored public part of the GPG signing key, cached for a week."""
    key = get_gpg_sign_key()
    if key is None:
        return None
    data = cache.get("gpg-key-public")
    if data:
        return data
    try:
        data = subprocess.check_output(
            ["gpg", "--batch", "-armor", "--export", key],
            stderr=subprocess.STDOUT,
            env=get_clean_env(),
        ).decode("utf-8")
        cache.set("gpg-key-public", data, 7 * 86400)
        delete_configuration_error("GPG key public")
    except (subprocess.CalledProcessError, OSError) as exc:
        report_error(exc, prefix="GPG key public")
        add_configuration_error("GPG key public", force_text(exc))
        return None
    return data
def tools(request):
    """Render the management tools page and handle the test-mail form."""
    emailform = TestMailForm(initial={"email": request.user.email})
    if request.method == "POST" and "email" in request.POST:
        emailform = TestMailForm(request.POST)
        if emailform.is_valid():
            try:
                send_test_mail(**emailform.cleaned_data)
            except Exception as error:
                report_error(error, request)
                messages.error(
                    request, _("Could not send test e-mail: %s") % error
                )
            else:
                messages.success(request, _("Test e-mail sent."))
    return render(
        request,
        "manage/tools.html",
        {"menu_items": MENU, "menu_page": "tools", "email_form": emailform},
    )
def auth_complete(self, *args, **kwargs):
    """Wrap e-mail auth completion to handle expired verification tokens."""
    try:
        return super(EmailAuth, self).auth_complete(*args, **kwargs)
    except AuthMissingParameter as error:
        if error.parameter != 'email':
            raise
        messages.error(
            self.strategy.request,
            _(
                'Failed to verify your registration! '
                'Probably the verification token has expired. '
                'Please try the registration again.'
            )
        )
        report_error(error, sys.exc_info(), extra_data=self.data)
        return redirect(reverse('login'))
def __call__(self, request):
    """Middleware entry point handling reverse-proxy client addresses."""
    # Fake HttpRequest attribute to inject configured
    # site name into build_absolute_uri
    request._current_scheme_host = get_site_url()
    # Actual proxy handling
    if settings.IP_BEHIND_REVERSE_PROXY:
        forwarded = request.META.get(settings.IP_PROXY_HEADER)
        if forwarded:
            # X_FORWARDED_FOR returns client1, proxy1, proxy2,...
            candidate = forwarded.split(", ")[settings.IP_PROXY_OFFSET].strip()
            try:
                validate_ipv46_address(candidate)
            except ValidationError:
                report_error(cause="Invalid IP address")
            else:
                request.META["REMOTE_ADDR"] = candidate
    return self.get_response(request)
def is_valid_base_for_new(
    cls,
    base: str,
    monolingual: bool,
    errors: Optional[List] = None,
    fast: bool = False,
) -> bool:
    """Check whether base is valid."""
    if not base:
        # Without a base only monolingual formats that can create new
        # translations qualify.
        return monolingual and cls.new_translation is not None
    try:
        if not fast:
            cls.parse_store(base)
    except Exception as exception:
        # Collect the parse failure for the caller when requested
        if errors is not None:
            errors.append(exception)
        report_error(cause="File parse error")
        return False
    return True
def commit_pending(self, reason, user, skip_push=False, force=False, signals=True):
    """Commit any pending changes.

    Repeatedly picks the oldest pending unit and commits all changes by
    its author, so each author gets a separate commit. Returns True when
    commits were performed, False when there was nothing to commit or
    the file could not be parsed.
    """
    if not force and not self.needs_commit():
        return False
    self.log_info("committing pending changes (%s)", reason)
    try:
        store = self.store
    except FileParseError as error:
        report_error(cause="Failed to parse file on commit")
        self.log_error("skipping commit due to error: %s", error)
        return False
    with self.component.repository.lock, transaction.atomic():
        while True:
            # Find oldest change, break loop if there is none left
            try:
                unit = (
                    self.unit_set.filter(pending=True)
                    .annotate(Max("change__timestamp"))
                    .order_by("change__timestamp__max")[0]
                )
            except IndexError:
                break
            # Get last change metadata
            author, timestamp = unit.get_last_content_change()
            author_name = author.get_author_name()
            # Flush pending units for this author
            self.update_units(store, author_name, author.id)
            # Commit changes
            self.git_commit(
                user, author_name, timestamp, skip_push=skip_push, signals=signals
            )
    # Update stats (the translated flag might have changed)
    self.invalidate_cache()
    return True
def borg(cmd, env=None):
    """Wrapper to execute borgbackup."""
    with backup_lock():
        SSH_WRAPPER.create()
        full_command = ["borg", "--rsh", SSH_WRAPPER.filename] + cmd
        try:
            return subprocess.check_output(
                full_command,
                stderr=subprocess.STDOUT,
                env=get_clean_env(env),
                universal_newlines=True,
            )
        except subprocess.CalledProcessError as error:
            add_breadcrumb(
                category="backup", message="borg output", stdout=error.stdout
            )
            report_error()
            raise BackupError(error.stdout)
        except OSError as error:
            report_error()
            raise BackupError(f"Could not execute borg program: {error}")
def main(argv=None, developer_mode: bool = False):
    """Run the Weblate management utility."""
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "weblate.settings")
    from weblate.utils.management import WeblateManagementUtility
    args = sys.argv if argv is None else argv
    try:
        # This is essentially Django's execute_from_command_line
        utility = WeblateManagementUtility(argv=args, developer_mode=developer_mode)
        utility.execute()
    except Exception:
        # Best-effort reporting; never mask the original exception
        try:
            from weblate.utils.errors import report_error
            report_error()
        except ImportError:
            pass
        raise
def translate(request, unit_id, service):
    """AJAX handler for translating."""
    unit = get_object_or_404(Unit, pk=int(unit_id))
    request.user.check_access(unit.translation.component.project)
    if not request.user.has_perm('machinery.view', unit.translation):
        raise PermissionDenied()
    if service not in MACHINE_TRANSLATION_SERVICES:
        raise SuspiciousOperation('Invalid service specified')
    translation_service = MACHINE_TRANSLATION_SERVICES[service]
    # Error response skeleton, overwritten on success
    response = {
        'responseStatus': 500,
        'service': translation_service.name,
        'responseDetails': '',
        'translations': [],
        'lang': unit.translation.language.code,
        'dir': unit.translation.language.direction,
    }
    try:
        translated = translation_service.translate(
            unit.translation.language.code,
            unit.get_source_plurals()[0],
            unit,
            request
        )
    except Exception as exc:
        # Known machine translation failures are expected; only report
        # unexpected ones
        if not isinstance(exc, MachineTranslationError):
            report_error(exc, request)
        response['responseDetails'] = '{0}: {1}'.format(
            exc.__class__.__name__, str(exc)
        )
    else:
        response['translations'] = translated
        response['responseStatus'] = 200
    return JsonResponse(data=response)
def get_avatar_image(user, size: int):
    """Return avatar image from cache (if available) or download it."""
    username = user.username
    cache_key = "-".join(("avatar-img", username, str(size)))
    # Try using avatar specific cache if available
    try:
        storage = caches["avatar"]
    except InvalidCacheBackendError:
        storage = caches["default"]
    image = storage.get(cache_key)
    if image is not None:
        return image
    try:
        image = download_avatar_image(user.email, size)
        storage.set(cache_key, image)
    except (OSError, CertificateError):
        report_error(cause=f"Failed to fetch avatar for {username}")
        return get_fallback_avatar(size)
    return image
def social_complete(request, backend):
    """Wrapper around social_django.views.complete.

    - blocks access for demo user
    - gracefuly handle backend errors
    """
    try:
        return complete(request, backend)
    except InvalidEmail:
        return auth_redirect_token(request)
    except AuthMissingParameter as error:
        return handle_missing_parameter(request, backend, error)
    except (AuthStateMissing, AuthStateForbidden) as error:
        report_error(error, request)
        return auth_redirect_state(request)
    except AuthFailed as error:
        report_error(error, request)
        failure = _(
            'Authentication has failed, probably due to expired token '
            'or connection error.'
        )
        return auth_fail(request, failure)
    except AuthCanceled:
        return auth_fail(request, _('Authentication has been cancelled.'))
    except AuthForbidden as error:
        report_error(error, request)
        return auth_fail(
            request, _('Authentication has been forbidden by server.')
        )
    except AuthAlreadyAssociated:
        duplicate = _(
            'Could not complete registration. The supplied authentication, '
            'email or username is already in use for another account.'
        )
        return auth_fail(request, duplicate)
def send_mails(mails):
    """Send multiple mails in single connection."""
    # Images inlined into the HTML bodies via Content-ID references
    images = []
    for name in ('email-logo.png', 'email-logo-footer.png'):
        filename = os.path.join(settings.STATIC_ROOT, name)
        with open(filename, 'rb') as handle:
            image = MIMEImage(handle.read())
        image.add_header('Content-ID', '<{}@cid.weblate.org>'.format(name))
        image.add_header('Content-Disposition', 'inline', filename=name)
        images.append(image)
    connection = get_connection()
    try:
        connection.open()
    except Exception as error:
        report_error(error, prefix='Failed to send notifications')
        connection.close()
        return
    html2text = HTML2Text(bodywidth=78)
    html2text.unicode_snob = True
    html2text.ignore_images = True
    html2text.pad_tables = True
    try:
        for mail in mails:
            message = EmailMultiAlternatives(
                settings.EMAIL_SUBJECT_PREFIX + mail['subject'],
                html2text.handle(mail['body']),
                to=[mail['address']],
                headers=mail['headers'],
                connection=connection,
            )
            message.mixed_subtype = 'related'
            for image in images:
                message.attach(image)
            message.attach_alternative(mail['body'], 'text/html')
            message.send()
    finally:
        connection.close()
def social_complete(request, backend):
    """Wrapper around social_django.views.complete.

    - blocks access for demo user
    - gracefuly handle backend errors
    """
    try:
        return complete(request, backend)
    except InvalidEmail:
        return auth_redirect_token(request)
    except AuthMissingParameter as error:
        return handle_missing_parameter(request, backend, error)
    except (AuthStateMissing, AuthStateForbidden) as error:
        report_error(error, request)
        return auth_redirect_state(request)
    except AuthFailed as error:
        report_error(error, request)
        failure = _(
            'Authentication has failed, probably due to expired token '
            'or connection error.'
        )
        return auth_fail(request, failure)
    except AuthCanceled:
        return auth_fail(request, _('Authentication has been cancelled.'))
    except AuthForbidden as error:
        report_error(error, request)
        return auth_fail(
            request, _('Authentication has been forbidden by server.')
        )
    except AuthAlreadyAssociated:
        duplicate = _(
            'Could not complete registration. The supplied authentication, '
            'email or username is already in use for another account.'
        )
        return auth_fail(request, duplicate)
def send_mails(mails):
    """Send multiple mails in single connection."""
    # Images inlined into the HTML bodies via Content-ID references
    inline_images = []
    for name in ("email-logo.png", "email-logo-footer.png"):
        filename = os.path.join(settings.STATIC_ROOT, name)
        with open(filename, "rb") as handle:
            attachment = MIMEImage(handle.read())
        attachment.add_header("Content-ID", "<{}@cid.weblate.org>".format(name))
        attachment.add_header("Content-Disposition", "inline", filename=name)
        inline_images.append(attachment)
    connection = get_connection()
    try:
        connection.open()
    except Exception as error:
        report_error(error, prefix="Failed to send notifications")
        connection.close()
        return
    html2text = HTML2Text(bodywidth=78)
    html2text.unicode_snob = True
    html2text.ignore_images = True
    html2text.pad_tables = True
    try:
        for mail in mails:
            message = EmailMultiAlternatives(
                settings.EMAIL_SUBJECT_PREFIX + mail["subject"],
                html2text.handle(mail["body"]),
                to=[mail["address"]],
                headers=mail["headers"],
                connection=connection,
            )
            message.mixed_subtype = "related"
            for attachment in inline_images:
                message.attach(attachment)
            message.attach_alternative(mail["body"], "text/html")
            message.send()
    finally:
        connection.close()
def social_complete(request, backend):
    """Wrapper around social_django.views.complete.

    - Blocks access for demo user
    - Handles backend errors gracefully
    """
    try:
        return complete(request, backend)
    except InvalidEmail:
        return auth_redirect_token(request)
    except AuthMissingParameter as error:
        result = handle_missing_parameter(request, backend, error)
        if result:
            return result
        raise
    except (AuthStateMissing, AuthStateForbidden) as error:
        report_error(error, request)
        return auth_redirect_state(request)
    except AuthFailed as error:
        report_error(error, request)
        failure = _(
            'Could not authenticate, probably due to an expired token '
            'or connection error.'
        )
        return auth_fail(request, failure)
    except AuthCanceled:
        return auth_fail(request, _('Authentication cancelled.'))
    except AuthForbidden as error:
        report_error(error, request)
        return auth_fail(
            request, _('The server does not allow authentication.')
        )
    except AuthAlreadyAssociated:
        duplicate = _(
            'Could not complete registration. The supplied authentication, '
            'e-mail or username is already in use for another account.'
        )
        return auth_fail(request, duplicate)
def request(self, method: str, credentials: Dict, url: str, json: Dict):
    """Perform an authenticated GitHub API request.

    Returns a tuple of (parsed JSON payload, aggregated error message);
    raises RepositoryException on network or JSON decoding failures.
    """
    try:
        response = requests.request(
            method,
            url,
            headers={
                "Accept": "application/vnd.github.v3+json",
                "Authorization": "token {}".format(credentials["token"]),
            },
            json=json,
        )
    except OSError as error:
        report_error(cause="request")
        raise RepositoryException(0, str(error))
    self.add_response_breadcrumb(response)
    try:
        data = response.json()
    except JSONDecodeError as error:
        report_error(cause="request json decoding")
        # Surface HTTP errors first, fall back to the decoding error
        response.raise_for_status()
        raise RepositoryException(0, str(error))
    # Log and parse all errors. Sometimes GitHub returns the error
    # messages in an errors list instead of the message. Sometimes, there
    # is no errors list. Hence the different logics
    error_message = ""
    if "message" in data:
        error_message = data["message"]
        self.log(data["message"], level=logging.INFO)
    if "errors" in data:
        messages = []
        for error in data["errors"]:
            line = error.get("message", str(error))
            messages.append(line)
            self.log(line, level=logging.WARNING)
        if error_message:
            error_message += ": "
        error_message += ", ".join(messages)
    return data, error_message
def handle_missing_parameter(request, backend, error):
    """Translate an AuthMissingParameter error into a user-facing response.

    Returns an HTTP response when the missing parameter is recognized;
    re-raises the active exception otherwise. Must be called from within
    an ``except AuthMissingParameter`` handler (the final bare ``raise``
    relies on the active exception).
    """
    report_error(error, request)
    parameter = error.parameter

    # Third-party backend failed to hand over an e-mail address
    if parameter == 'email' and backend != 'email':
        return auth_fail(
            request,
            _('Got no e-mail address from third party authentication service!')
        )

    # Stale or incomplete confirmation data: restart the relevant flow
    if parameter in ('email', 'user', 'expires'):
        return auth_redirect_token(request)
    if parameter in ('state', 'code'):
        return auth_redirect_state(request)

    if parameter == 'demo':
        return auth_fail(
            request,
            _('Can not change authentication for demo!')
        )
    if parameter == 'disabled':
        return auth_fail(
            request,
            _('New registrations are disabled!')
        )

    # Unrecognized parameter: propagate the original exception
    raise
def handle_machinery(request, service, unit, source):
    """Run a machine translation service for a unit and return JSON.

    Checks project access and the service-specific permission, then
    returns a JSON response in the legacy machinery format with either
    the translations (status 200) or error details (status 500).
    """
    request.user.check_access(unit.translation.component.project)

    # Translation memory has its own permission, other services share one
    required_perm = (
        'memory.view'
        if service == 'weblate-translation-memory'
        else 'machinery.view'
    )
    if not request.user.has_perm(required_perm, unit.translation):
        raise PermissionDenied()

    machinery = MACHINE_TRANSLATION_SERVICES[service]
    language = unit.translation.language

    # Start from an error payload; fields are overwritten on success
    response = {
        'responseStatus': 500,
        'service': machinery.name,
        'responseDetails': '',
        'translations': [],
        'lang': language.code,
        'dir': language.direction,
    }

    try:
        response['translations'] = machinery.translate(
            language.code, source, unit, request.user
        )
        response['responseStatus'] = 200
    except MachineTranslationError as exc:
        # Expected service failure: pass the message through as-is
        response['responseDetails'] = str(exc)
    except Exception as exc:
        # Unexpected failure: report it and expose the exception type
        report_error(exc, request)
        response['responseDetails'] = '{0}: {1}'.format(
            exc.__class__.__name__, str(exc)
        )

    return JsonResponse(data=response)
def tools(request):
    """Render the management tools page and handle its POST actions.

    Supports three独立 actions keyed by POST field presence: sending a
    test e-mail, raising a test exception for error reporting, and
    posting an announcement.
    """
    email_form = TestMailForm(initial={"email": request.user.email})
    announce_form = AnnouncementForm()

    if request.method == "POST":
        post = request.POST

        if "email" in post:
            email_form = TestMailForm(post)
            if email_form.is_valid():
                try:
                    send_test_mail(**email_form.cleaned_data)
                    messages.success(request, _("Test e-mail sent."))
                except Exception as error:
                    report_error()
                    messages.error(
                        request, _("Could not send test e-mail: %s") % error
                    )

        if "sentry" in post:
            # Deliberately trigger the error-reporting pipeline
            try:
                raise Exception("Test exception")
            except Exception:
                report_error()

        if "message" in post:
            announce_form = AnnouncementForm(post)
            if announce_form.is_valid():
                Announcement.objects.create(
                    user=request.user, **announce_form.cleaned_data
                )

    context = {
        "menu_items": MENU,
        "menu_page": "tools",
        "email_form": email_form,
        "announce_form": announce_form,
    }
    return render(request, "manage/tools.html", context)
def background_hook(method):
    """Run a hook callable, reporting instead of propagating failures.

    Any exception raised by *method* is swallowed after being passed to
    the error reporter, so background execution never crashes the caller.
    """
    try:
        method()
    except Exception as exc:
        report_error(exc, sys.exc_info())
def vcs_service_hook(request, service):
    """Shared code between VCS service hooks.

    Currently used for bitbucket_hook, github_hook and gitlab_hook, but
    should be usable for other VCS services (Google Code, custom coded
    sites, etc.) too.

    Returns an HTTP response: 400 on malformed payloads, otherwise a
    hook_response describing whether any components were updated.
    """
    # Hooks can be globally disabled in the settings
    if not settings.ENABLE_HOOKS:
        return HttpResponseNotAllowed(())

    # Check if we got payload
    try:
        data = parse_hook_payload(request)
    except (ValueError, KeyError, UnicodeError):
        return HttpResponseBadRequest('Could not parse JSON payload!')

    # Get service helper
    hook_helper = HOOK_HANDLERS[service]

    # Send the request data to the service handler.
    try:
        service_data = hook_helper(data)
    except Exception as error:
        LOGGER.error('failed to parse service %s data', service)
        report_error(error, sys.exc_info())
        return HttpResponseBadRequest('Invalid data in json payload!')

    # This happens on ping request upon installation
    if service_data is None:
        return hook_response('Hook working')

    # Unpack the normalized data produced by the service handler
    service_long_name = service_data['service_long_name']
    repos = service_data['repos']
    repo_url = service_data['repo_url']
    branch = service_data['branch']
    full_name = service_data['full_name']

    # Generate filter matching any of the repository URL variants
    spfilter = Q(repo__in=repos) | Q(repo__iendswith=full_name)

    # We need to match also URLs which include username and password,
    # i.e. https://user:pass@host/path for each https://host/path repo
    for repo in repos:
        if not repo.startswith('https://'):
            continue
        spfilter = spfilter | (
            Q(repo__startswith='https://') &
            Q(repo__endswith='@{0}'.format(repo[8:]))
        )

    all_components = Component.objects.filter(spfilter)

    # Restrict to the pushed branch when the service reported one
    if branch is not None:
        all_components = all_components.filter(branch=branch)

    # Only process components whose project has hooks enabled
    components = all_components.filter(project__enable_hooks=True)

    LOGGER.info(
        'received %s notification on repository %s, branch %s, '
        '%d matching components, %d to process',
        service_long_name, repo_url, branch,
        all_components.count(), components.count(),
    )

    # Trigger updates
    updates = 0
    for obj in components:
        updates += 1
        LOGGER.info(
            '%s notification will update %s',
            service_long_name,
            obj
        )
        perform_update(obj)

    if updates == 0:
        return hook_response('No matching repositories found!', 'failure')

    return hook_response('Update triggered: {}'.format(
        ', '.join([obj.log_prefix for obj in components])
    ))
def save_backend(self, request, propagate=True, change_action=None, user=None):
    """Store unit to backend.

    Optional user parameter defines authorship of a change; when it is
    None (or anonymous while a request is available), the request user
    is used instead.

    This should always be called in a transaction with the updated unit
    locked for update.

    Returns True when a change was stored, False when the unit was
    unchanged (propagation may still have happened in that case).
    """
    # For case when authorship specified, use user from request
    if user is None or (user.is_anonymous and request):
        user = request.user
    # Commit possible previous changes on this unit
    if self.pending:
        try:
            change = self.change_set.content().order_by('-timestamp')[0]
        except IndexError as error:
            # This is probably bug in the change data, fallback by using
            # any change entry
            report_error(error, sys.exc_info(), request)
            change = self.change_set.all().order_by('-timestamp')[0]
        # NOTE(review): this compares against request.user rather than the
        # resolved `user` above — confirm that is intentional for the
        # explicit-authorship case.
        if change.author_id != request.user.id:
            self.translation.commit_pending(request)
    # Return if there was no change
    # We have to explicitly check for fuzzy flag change on monolingual
    # files, where we handle it ourselves without storing to backend
    if (self.old_unit.state == self.state and
            self.old_unit.target == self.target):
        # Propagate if we should
        if propagate:
            self.propagate(request, change_action)
        return False
    # Propagate to other projects
    # This has to be done before changing source/content_hash for template
    if propagate:
        self.propagate(request, change_action)
    # For templates the target is the canonical source text
    if self.translation.is_template:
        self.source = self.target
        self.content_hash = calculate_hash(self.source, self.context)

    # Unit is pending for write
    self.pending = True

    # Update translated flag (not fuzzy and at least one translation)
    translation = bool(max(self.get_target_plurals()))
    if self.state == STATE_TRANSLATED and not translation:
        self.state = STATE_EMPTY
    elif self.state == STATE_EMPTY and translation:
        self.state = STATE_TRANSLATED

    # Save updated unit to database
    self.save(backend=True)

    # Snapshot the stat before invalidation so completion can be detected
    old_translated = self.translation.stats.translated

    if change_action not in (Change.ACTION_UPLOAD, Change.ACTION_AUTO):
        # Update translation stats
        self.translation.invalidate_cache()

        # Update user stats
        user.profile.translated += 1
        user.profile.save()

    # Notify subscribed users about new translation
    from weblate.accounts.notifications import notify_new_translation
    notify_new_translation(self, self.old_unit, user)

    # Generate Change object for this change
    self.generate_change(request, user, change_action)

    # Force committing on completing translation
    translated = self.translation.stats.translated
    if (old_translated < translated and
            translated == self.translation.stats.all):
        Change.objects.create(
            translation=self.translation,
            action=Change.ACTION_COMPLETE,
            user=user,
            author=user
        )
        self.translation.commit_pending(request)

    # Update related source strings if working on a template
    if self.translation.is_template:
        self.update_source_units(self.old_unit.source, user)

    return True
def upload_translation(request, project, component, lang):
    """Handle a translation file upload for a single translation.

    Validates permissions and the upload form, performs the merge, and
    reports the outcome via the messages framework before redirecting
    back to the translation.
    """
    obj = get_translation(request, project, component, lang)

    if not request.user.has_perm('upload.perform', obj):
        raise PermissionDenied()

    # Uploads are refused while the component is locked
    if obj.component.locked:
        messages.error(request, _('Access denied.'))
        return redirect(obj)

    # Form handler depends on the user's permissions
    form = get_upload_form(request.user, obj, request.POST, request.FILES)

    if not form.is_valid():
        messages.error(request, _('Please fix errors in the form.'))
        show_form_errors(request, form)
        return redirect(obj)

    data = form.cleaned_data

    # Custom authorship only when permitted and both fields filled in
    author = None
    if (request.user.has_perm('upload.authorship', obj)
            and data['author_name'] != ''
            and data['author_email'] != ''):
        author = '{0} <{1}>'.format(data['author_name'], data['author_email'])

    # Overwriting existing translations needs its own permission
    overwrite = False
    if request.user.has_perm('upload.overwrite', obj):
        overwrite = data['upload_overwrite']

    try:
        not_found, skipped, accepted, total = obj.merge_upload(
            request,
            request.FILES['file'],
            overwrite,
            author,
            merge_header=data['merge_header'],
            method=data['method'],
            fuzzy=data['fuzzy'],
        )
        if total == 0:
            message = _('No strings were imported from the uploaded file.')
        else:
            message = ungettext(
                'Processed {0} string from the uploaded files '
                '(skipped: {1}, not found: {2}, updated: {3}).',
                'Processed {0} strings from the uploaded files '
                '(skipped: {1}, not found: {2}, updated: {3}).',
                total
            ).format(total, skipped, not_found, accepted)
        # Warn when the file yielded no accepted strings at all
        notify = messages.warning if accepted == 0 else messages.success
        notify(request, message)
    except Exception as error:
        messages.error(
            request,
            _('File content merge failed: %s') % force_text(error)
        )
        report_error(error, sys.exc_info(), request)

    return redirect(obj)
def upload_translation(request, project, subproject, lang):
    """Handle a translation file upload (legacy subproject-based view).

    Checks permissions, validates the upload form, merges the uploaded
    file into the translation and reports the result via messages.
    """
    obj = get_translation(request, project, subproject, lang)

    if not can_upload_translation(request.user, obj):
        raise PermissionDenied()

    # Uploads are refused while the translation is locked for this user
    if obj.is_locked(request.user):
        messages.error(request, _('Access denied.'))
        return redirect(obj)

    # Form class depends on the user's permissions within the project
    form_class = get_upload_form(request.user, obj.subproject.project)
    form = form_class(request.POST, request.FILES)

    if not form.is_valid():
        messages.error(request, _('Please fix errors in the form.'))
        return redirect(obj)

    data = form.cleaned_data

    # Custom authorship only when permitted and both fields filled in
    author = None
    if (can_author_translation(request.user, obj.subproject.project)
            and data['author_name'] != ''
            and data['author_email'] != ''):
        author = '%s <%s>' % (data['author_name'], data['author_email'])

    # Overwriting existing translations needs its own permission
    overwrite = False
    if can_overwrite_translation(request.user, obj.subproject.project):
        overwrite = data['overwrite']

    try:
        ret, count = obj.merge_upload(
            request,
            request.FILES['file'],
            overwrite,
            author,
            merge_header=data['merge_header'],
            merge_comments=data['merge_comments'],
            method=data['method'],
            fuzzy=data['fuzzy'],
        )
        import_message(
            request, count,
            _('No strings were imported from the uploaded file.'),
            ungettext(
                'Processed %d string from the uploaded files.',
                'Processed %d strings from the uploaded files.',
                count
            )
        )
        if not ret:
            messages.warning(
                request,
                _('There were no new strings in uploaded file!')
            )
    except Exception as error:
        messages.error(
            request,
            _('File content merge failed: %s') % force_text(error)
        )
        report_error(error, sys.exc_info(), request)

    return redirect(obj)