def check_origin_zips(app_configs: AppConfig = None, **kwargs) -> List[CheckMessage]:
    """Check that the user has supplied at least one valid origin zip (nexus).

    Args:
        app_configs (AppConfig, optional): Defaults to None.

    Returns:
        List[checks.CheckMessage]: List of Django CheckMessages
    """
    from . import settings as tax_settings

    messages = []
    if not tax_settings.NEXUSES:
        msg = "Could not find a Nexus."
        hint = "Add at least one TAXTEA_NEXUSES to your settings."
        messages.append(Critical(msg, hint=hint, id="tax.C004"))
        # If there is no TAX_NEXUS, the tuple check below would raise IndexError
        return messages
    state, zip_code = tax_settings.NEXUSES[0]
    # BUG FIX: the original used `and`, which only flagged a tuple when BOTH
    # elements were falsy; a nexus tuple is invalid if EITHER part is missing.
    if not state or not zip_code:
        msg = "Could not find a valid Nexus tuple."
        hint = "Add at least one Nexus tuple ('STATE', 'ZIPCODE') to your settings."
        messages.append(Critical(msg, hint=hint, id="tax.C005"))
    return messages
def check_Avalara_api_auth(app_configs: AppConfig = None, **kwargs) -> List[CheckMessage]:
    """Verify that Avalara API credentials (user and password) are configured.

    Args:
        app_configs (AppConfig, optional): Defaults to None.

    Returns:
        List[checks.CheckMessage]: List of Django CheckMessages
    """
    from . import settings as tax_settings

    messages = []
    # Each entry: (configured value, error message, hint, check id)
    credential_checks = (
        (tax_settings.AVALARA_USER,
         "Could not find a Avalara User.",
         "Add TAXTEA_AVALARA_USER to your settings.",
         "tax.C002"),
        (tax_settings.AVALARA_PASSWORD,
         "Could not find a Avalara Password.",
         "Add TAXTEA_AVALARA_PASSWORD to your settings.",
         "tax.C003"),
    )
    for value, msg, hint, check_id in credential_checks:
        if not value:
            messages.append(Critical(msg, hint=hint, id=check_id))
    return messages
def check_celery(app_configs, **kwargs):
    # Django system check validating the Celery setup: eager mode, broker
    # choice, queue backlog, worker responsiveness and the beat scheduler.
    errors = []
    if settings.CELERY_TASK_ALWAYS_EAGER:
        errors.append(
            Error(
                "Celery is configured in the eager mode",
                hint=get_doc_url("admin/install", "celery"),
                id="weblate.E005",
            ))
    elif settings.CELERY_BROKER_URL == "memory://":
        errors.append(
            Critical(
                "Celery is configured to store queue in local memory",
                hint=get_doc_url("admin/install", "celery"),
                id="weblate.E026",
            ))
    else:
        if is_celery_queue_long():
            errors.append(
                Critical(
                    "The Celery tasks queue is too long, either the worker "
                    "is not running or is too slow.",
                    hint=get_doc_url("admin/install", "celery"),
                    id="weblate.E009",
                ))
        # Round-trip a ping task to confirm a worker is consuming tasks.
        result = ping.delay()
        try:
            result.get(timeout=10, disable_sync_subtasks=False)
        except TimeoutError:
            errors.append(
                Critical(
                    "The Celery does not process tasks or is too slow "
                    "in processing them.",
                    hint=get_doc_url("admin/install", "celery"),
                    id="weblate.E019",
                ))
        except NotImplementedError:
            # result.get() raises this when no result backend is configured.
            errors.append(
                Critical(
                    "The Celery is not configured to store results, "
                    "CELERY_RESULT_BACKEND is probably not set.",
                    hint=get_doc_url("admin/install", "celery"),
                    id="weblate.E020",
                ))
    # Beat watchdog: warn only once Celery has been loaded for over a minute
    # and no heartbeat was recorded within the last 10 minutes.
    heartbeat = cache.get("celery_heartbeat")
    loaded = cache.get("celery_loaded")
    now = time.time()
    if loaded and now - loaded > 60 and (not heartbeat or now - heartbeat > 600):
        errors.append(
            Critical(
                "The Celery beats scheduler is not executing periodic tasks "
                "in a timely manner.",
                hint=get_doc_url("admin/install", "celery"),
                id="weblate.C030",
            ))
    return errors
def check_celery(app_configs, **kwargs):
    # Django system check for Celery: eager mode, broker choice, queue
    # backlog, worker responsiveness and the beat scheduler heartbeat.
    errors = []
    if settings.CELERY_TASK_ALWAYS_EAGER:
        errors.append(
            Error(
                'Celery is configured in the eager mode',
                hint=get_doc_url('admin/install', 'celery'),
                id='weblate.E005',
            ))
    elif settings.CELERY_BROKER_URL == 'memory://':
        errors.append(
            Critical(
                'Celery is configured to store queue in local memory',
                hint=get_doc_url('admin/install', 'celery'),
                id='weblate.E026',
            ))
    else:
        stats = get_queue_stats()
        # NOTE(review): the 'search' queue gets a far larger threshold —
        # presumably high-volume tasks; confirm against queue semantics.
        if stats['celery'] > 50 or stats['search'] > 10000:
            errors.append(
                Critical(
                    'The Celery tasks queue is too long, either the worker '
                    'is not running or is too slow.',
                    hint=get_doc_url('admin/install', 'celery'),
                    id='weblate.E009',
                ))
        # Round-trip a ping task to confirm a worker is consuming tasks.
        result = ping.delay()
        try:
            result.get(timeout=10, disable_sync_subtasks=False)
        except TimeoutError:
            errors.append(
                Critical(
                    'The Celery does not process tasks or is too slow '
                    'in processing them.',
                    hint=get_doc_url('admin/install', 'celery'),
                    id='weblate.E019',
                ))
        except NotImplementedError:
            # result.get() raises this when no result backend is configured.
            errors.append(
                Critical(
                    'The Celery is not configured to store results, '
                    'CELERY_RESULT_BACKEND is probably not set.',
                    hint=get_doc_url('admin/install', 'celery'),
                    id='weblate.E020',
                ))
    # Beat watchdog: no heartbeat recorded within the last 10 minutes.
    heartbeat = cache.get('celery_heartbeat')
    if not heartbeat or time.time() - heartbeat > 600:
        errors.append(
            Critical(
                'The Celery beats scheduler is not executing periodic tasks '
                'in a timely manner.',
                hint=get_doc_url('admin/install', 'celery'),
                id='weblate.C030',
            ))
    return errors
def test_public_schema_on_extra_search_paths(self):
    """Best-practice check must reject public and tenant schemas in PG_EXTRA_SEARCH_PATHS."""
    tenant_model = get_tenant_model()
    for schema in ("demo1", "demo2"):
        tenant_model.objects.create(
            schema_name=schema,
            domain_url="{}.example.com".format(schema),
        )
    expected = [
        Critical("public can not be included on PG_EXTRA_SEARCH_PATHS."),
        Critical("Do not include tenant schemas (demo1, demo2) on PG_EXTRA_SEARCH_PATHS."),
    ]
    self.assertBestPractice(expected)
def check_settings(app_configs, **kwargs):
    """Check for sane settings"""
    errors = []
    # Admins must be configured and must not contain the placeholder address.
    admin_addresses = [x[1] for x in settings.ADMINS]
    if not admin_addresses or '*****@*****.**' in admin_addresses:
        errors.append(
            Error(
                'The site admins seem to be wrongly configured',
                hint=get_doc_url('admin/install', 'production-admins'),
                id='weblate.E011',
            )
        )
    # Each entry: (failure condition, message, doc anchor, check id)
    critical_checks = (
        (settings.SERVER_EMAIL in DEFAULT_MAILS,
         'The server email has default value', 'production-email', 'weblate.E012'),
        (settings.DEFAULT_FROM_EMAIL in DEFAULT_MAILS,
         'The default from email has default value', 'production-email', 'weblate.E013'),
        (settings.SECRET_KEY == settings_example.SECRET_KEY,
         'The cookie secret key has default value', 'production-secret', 'weblate.E014'),
        (not settings.ALLOWED_HOSTS,
         'The allowed hosts are not configured', 'production-hosts', 'weblate.E015'),
    )
    for failed, msg, anchor, check_id in critical_checks:
        if failed:
            errors.append(
                Critical(
                    msg,
                    hint=get_doc_url('admin/install', anchor),
                    id=check_id,
                )
            )
    return errors
def test_configuration_health_check(self):
    """Exercise configuration_health_check with internal, remote and empty checks."""
    # Run checks internally
    configuration_health_check()
    # List of triggered checks remotely
    remote_checks = [
        Critical(msg="Error", id="weblate.E001"),
        Critical(msg="Test Error", id="weblate.E002"),
    ]
    configuration_health_check(remote_checks)
    stored = ConfigurationError.objects.all()
    self.assertEqual(len(stored), 1)
    first = stored[0]
    self.assertEqual(first.name, "weblate.E002")
    self.assertEqual(first.message, "Test Error")
    # No triggered checks
    configuration_health_check([])
    self.assertEqual(ConfigurationError.objects.count(), 0)
def check_settings(app_configs, **kwargs):
    """Check for sane settings."""
    errors = []
    # Admins must exist and must not contain the placeholder address.
    admin_mails = [address for _name, address in settings.ADMINS]
    if not admin_mails or "*****@*****.**" in admin_mails:
        errors.append(
            Error(
                "The site admins seem to be wrongly configured",
                hint=get_doc_url("admin/install", "production-admins"),
                id="weblate.E011",
            )
        )
    # Each entry: (failure condition, message, doc anchor, check id)
    misconfigurations = (
        (settings.SERVER_EMAIL in DEFAULT_MAILS,
         "The server email has default value", "production-email", "weblate.E012"),
        (settings.DEFAULT_FROM_EMAIL in DEFAULT_MAILS,
         "The default from email has default value", "production-email", "weblate.E013"),
        (settings.SECRET_KEY == settings_example.SECRET_KEY,
         "The cookie secret key has default value", "production-secret", "weblate.E014"),
        (not settings.ALLOWED_HOSTS,
         "The allowed hosts are not configured", "production-hosts", "weblate.E015"),
    )
    for failed, msg, anchor, check_id in misconfigurations:
        if failed:
            errors.append(
                Critical(
                    msg,
                    hint=get_doc_url("admin/install", anchor),
                    id=check_id,
                )
            )
    return errors
def check_perms(app_configs=None, **kwargs):
    """Check we can write to data dir."""
    errors = []
    owner_uid = os.getuid()
    template = 'Path {} is not writable, check your DATA_DIR settings.'
    for root, subdirs, files in os.walk(settings.DATA_DIR):
        for entry in chain(subdirs, files):
            full_path = os.path.join(root, entry)
            try:
                info = os.lstat(full_path)
            except OSError as error:
                if error.errno != errno.ENOENT:
                    raise
                # File was removed meanwhile
                continue
            # Anything not owned by the current user is reported as critical.
            if info.st_uid != owner_uid:
                errors.append(
                    Critical(
                        template.format(full_path),
                        hint=get_doc_url('admin/install', 'file-permissions'),
                        id='weblate.E002',
                    )
                )
    return errors
def check_cache(app_configs, **kwargs):
    """Check for sane caching.

    Reports a Critical when the default cache backend is not in GOOD_CACHE
    and an Error when avatars are enabled without a dedicated avatar cache.
    """
    errors = []
    # Renamed from `cache`: sibling checks in this file use the Django cache
    # object under that name, so the local variable shadowed it.
    cache_backend = settings.CACHES['default']['BACKEND'].split('.')[-1]
    if cache_backend not in GOOD_CACHE:
        errors.append(
            Critical(
                'The configured cache backend will lead to serious '
                'performance or consistency issues.',
                hint=get_doc_url('admin/install', 'production-cache'),
                id='weblate.E007',
            )
        )
    if settings.ENABLE_AVATARS and 'avatar' not in settings.CACHES:
        errors.append(
            Error(
                'Please configure separate avatar caching to reduce pressure '
                'on the default cache',
                hint=get_doc_url('admin/install', 'production-cache-avatar'),
                id='weblate.E008',
            )
        )
    return errors
def check_data_writable(app_configs=None, **kwargs):
    """Check we can write to data dir."""
    errors = []
    required = [settings.DATA_DIR]
    required.extend(
        data_dir(*parts)
        for parts in (
            ("home",), ("ssh",), ("vcs",), ("celery",),
            ("backups",), ("fonts",), ("cache", "fonts"),
        )
    )
    message = "Path {} is not writable, check your DATA_DIR settings."
    for path in required:
        if not os.path.exists(path):
            # Missing directories are created rather than reported.
            os.makedirs(path)
        elif not os.access(path, os.W_OK):
            errors.append(
                Critical(
                    message.format(path),
                    hint=get_doc_url("admin/install", "file-permissions"),
                    id="weblate.E002",
                ))
    return errors
def check_cache(app_configs, **kwargs):
    """Check for sane caching."""
    errors = []
    # Compare only the trailing class name of the configured backend path.
    backend_name = settings.CACHES["default"]["BACKEND"].rsplit(".", 1)[-1]
    if backend_name not in GOOD_CACHE:
        errors.append(
            Critical(
                "The configured cache backend will lead to serious "
                "performance or consistency issues.",
                hint=get_doc_url("admin/install", "production-cache"),
                id="weblate.E007",
            )
        )
    avatar_cache_missing = (
        settings.ENABLE_AVATARS and "avatar" not in settings.CACHES
    )
    if avatar_cache_missing:
        errors.append(
            Error(
                "Please configure separate avatar caching to reduce pressure "
                "on the default cache",
                hint=get_doc_url("admin/install", "production-cache-avatar"),
                id="weblate.E008",
            )
        )
    return errors
def check_data_writable(app_configs=None, **kwargs):
    """Check we can write to data dir."""
    errors = []
    message = 'Path {} is not writable, check your DATA_DIR settings.'
    subdirs = (
        ('home',), ('whoosh',), ('ssh',), ('vcs',), ('memory',),
        ('celery',), ('backups',), ('fonts',), ('cache', 'fonts'),
    )
    paths = [settings.DATA_DIR] + [data_dir(*parts) for parts in subdirs]
    for path in paths:
        if os.path.exists(path):
            if not os.access(path, os.W_OK):
                errors.append(
                    Critical(
                        message.format(path),
                        hint=get_doc_url('admin/install', 'file-permissions'),
                        id='weblate.E002',
                    ))
        else:
            # Missing directories are created rather than reported.
            os.makedirs(path)
    return errors
def check_version(app_configs=None, **kwargs):
    """Check whether a newer Weblate release is available.

    Returns an Info message for any newer release and escalates to Critical
    once the installed version is more than 180 days behind the latest one.
    Returns an empty list when version metadata cannot be fetched or parsed.
    """
    try:
        latest = get_latest_version()
    except (ValueError, IOError):
        # Version metadata unavailable (network or parse failure); skip check.
        return []
    if LooseVersion(latest.version) > LooseVersion(VERSION_BASE):
        # With a release every two months, this gets triggered after three releases
        if latest.timestamp + timedelta(days=180) < datetime.now():
            return [
                Critical(
                    # Fixed typo in user-facing message: "You" -> "Your"
                    "Your Weblate version is outdated, please upgrade to {}.".format(
                        latest.version),
                    hint=get_doc_url("admin/upgrade"),
                    id="weblate.C031",
                )
            ]
        return [
            Info(
                "New Weblate version is available, please upgrade to {}.".format(
                    latest.version),
                hint=get_doc_url("admin/upgrade"),
                id="weblate.I031",
            )
        ]
    return []
def check_backups(app_configs, **kwargs):
    """Check that backups are configured and that the last run succeeded.

    Emits an Info when no backup service is enabled and a Critical when the
    most recent log entry of any enabled service is an error.
    """
    from weblate.wladmin.models import BackupService

    errors = []
    if not BackupService.objects.filter(enabled=True).exists():
        errors.append(
            Info(
                "Backups are not configured, "
                "it is highly recommended for production use",
                hint=get_doc_url("admin/backup"),
                id="weblate.I028",
            ))
    for service in BackupService.objects.filter(enabled=True):
        try:
            last_entry = service.last_logs()[0]
            last_event, last_detail = last_entry.event, last_entry.log
        except IndexError:
            # No log entries yet; treat as a failed backup run.
            last_event, last_detail = "error", "no backup logs found"
        if last_event == "error":
            # BUG FIX: the original formatted `last_log.log` where last_log
            # was already a string (the event name or the literal "error"),
            # raising AttributeError instead of reporting the failure.
            errors.append(
                Critical(
                    "There was error while performing backups: {}".format(
                        last_detail),
                    hint=get_doc_url("admin/backup"),
                    id="weblate.C029",
                ))
            break
    return errors
def check_celery(app_configs, **kwargs):
    # Django system check for Celery: eager mode, queue backlog and worker
    # responsiveness.
    errors = []
    if settings.CELERY_TASK_ALWAYS_EAGER:
        errors.append(
            Error(
                'Celery is configured in the eager mode',
                hint=get_doc_url('admin/install', 'celery'),
                id='weblate.E005',
            )
        )
    else:
        # NOTE(review): the 'search' queue gets a far larger threshold —
        # presumably high-volume tasks; confirm against queue semantics.
        if get_queue_length() > 50 or get_queue_length('search') > 10000:
            errors.append(
                Critical(
                    'The Celery tasks queue is too long, either the worker '
                    'is not running or is too slow.',
                    hint=get_doc_url('admin/install', 'celery'),
                    id='weblate.E009',
                )
            )
        # Round-trip a ping task to confirm a worker is consuming tasks.
        result = ping.delay()
        try:
            result.get(timeout=10, disable_sync_subtasks=False)
        except TimeoutError:
            errors.append(
                Critical(
                    'The Celery does not process tasks or is too slow '
                    'in processing them.',
                    hint=get_doc_url('admin/install', 'celery'),
                    id='weblate.E019',
                )
            )
        except NotImplementedError:
            # result.get() raises this when no result backend is configured.
            errors.append(
                Critical(
                    'The Celery is not configured to store results, '
                    'CELERY_RESULT_BACKEND is probably not set.',
                    hint=get_doc_url('admin/install', 'celery'),
                    id='weblate.E020',
                )
            )
    return errors
def check_git_backend(app_configs, **kwargs):
    """Verify that the git-http-backend helper can be located."""
    if find_git_http_backend() is not None:
        return []
    return [
        Critical(
            "Failed to find git-http-backend, "
            "the git exporter will not work.",
            hint=get_doc_url("admin/optionals", "git-exporter"),
            id="weblate.E022",
        )
    ]
def check_git_backend(app_configs, **kwargs):
    """Report a critical issue when git-http-backend cannot be located."""
    errors = []
    if find_git_http_backend() is None:
        errors.append(
            Critical(
                'Failed to find git-http-backend, '
                'the git exporter will not work.',
                hint=get_doc_url('admin/optionals', 'git-exporter'),
                id='weblate.E022',
            ))
    return errors
def check_site(app_configs, **kwargs):
    """Validate the configured site domain."""
    from weblate.utils.site import get_site_domain, check_domain

    if check_domain(get_site_domain()):
        return []
    return [
        Critical(
            'Configure correct site domain',
            hint=get_doc_url('admin/install', 'production-site'),
            id='weblate.E017',
        )
    ]
def check_db_engine(app_configs, **kwargs):
    """Ensure the default database engine is one of the supported backends."""
    errors = []
    # Broken or missing DATABASES configuration counts as unsupported.
    try:
        engine_supported = (
            settings.DATABASES["default"]["ENGINE"] in SUPPORTED_ENGINES
        )
    except (AttributeError, KeyError):
        engine_supported = False
    if not engine_supported:
        errors.append(
            Critical(msg="Misago requires PostgreSQL database.", id="misago.001"))
    return errors
def timeline_password_initial_check(app_configs, **kwargs):
    """Ensure the initial timeline password is configured."""
    # Only useful before applying first migration.
    if settings.PRIVAGAL_TIMELINE_INITIAL_PASSWORD:
        return []
    return [
        Critical(
            "PRIVAGAL_TIMELINE_INITIAL_PASSWORD must be set in config.",
            id='privagal_timeline_password_initial',
        )
    ]
def check_encoding(app_configs=None, **kwargs):
    """Check there is encoding is utf-8."""
    encodings = (sys.getfilesystemencoding(), sys.getdefaultencoding())
    if all(encoding == "utf-8" for encoding in encodings):
        return []
    return [
        Critical(
            "System encoding is not utf-8, processing non-ASCII strings will break",
            hint=get_doc_url("admin/install", "production-encoding"),
            id="weblate.C023",
        )
    ]
def check_fonts(app_configs=None, **kwargs):
    """Check font rendering."""
    try:
        # Rendering a sample string exercises the Pango stack end to end.
        render_size("DejaVu Sans", Pango.Weight.NORMAL, 11, 0, "test")
    except Exception as error:
        return [
            Critical(
                "Failed to use Pango: {}".format(error),
                hint=get_doc_url("admin/install", "pangocairo"),
                id="weblate.C024",
            )
        ]
    return []
def sx_check(app_configs, **kwargs):
    """Smoketest the connection with SX cluster.

    Returns a list containing a single Critical message when the cluster
    cannot be reached, otherwise an empty list.
    """
    errors = []
    try:
        sx.listUsers.call()
    except SXClientException as e:
        logger.critical(
            "Couldn't initialize sx console. Error message: " + e.message)
        # A 403 in the error message suggests a permissions problem.
        hint = (
            # Fixed typo in user-facing hint: "priveleges" -> "privileges"
            "Check if your sx user has admin privileges."
            if '403' in e.message else None
        )
        errors.append(
            # Fixed typo in user-facing message: "ocurred" -> "occurred"
            Critical("SXClient error occurred: {}".format(e), hint=hint))
    return errors
def example_check(app_configs, **kwargs):
    """Fail the check unless the default database uses the psycopg2 backend."""
    errors = []
    supported_driver = 'django.db.backends.postgresql_psycopg2'
    # Broken or missing DATABASES configuration counts as a mismatch.
    try:
        driver_matches = (
            settings.DATABASES['default']['ENGINE'] == supported_driver
        )
    except (AttributeError, KeyError):
        driver_matches = False
    if not driver_matches:
        errors.append(
            Critical(msg='Misago requires PostgreSQL database.', id='misago.001'))
    return errors
def check_youtube_dl_is_installed(app_configs, **kwargs):
    """Check that the `youtube-dl` executable is available on PATH.

    Returns a single-item list with a Critical message when it is missing or
    fails to run, otherwise an empty list.
    """
    error = Critical(
        '`youtube-dl` is not installed.',
        hint='Head to `https://rg3.github.io/youtube-dl/` to get it.',
        id='downloader.E_YOUTUBE_DL_NOT_INSTALLED',
    )
    try:
        # BUG FIX: a missing executable raises OSError (FileNotFoundError),
        # which is NOT a subprocess.SubprocessError — the original crashed in
        # exactly the situation this check exists to report.
        subprocess.check_call(
            ['youtube-dl', '--version'], stdout=subprocess.DEVNULL)
    except (OSError, subprocess.SubprocessError):
        return [error]
    return []
def check_unapplied_migrations(app_configs, **kwargs):
    """Emit a Critical message for every migration that is not yet applied."""
    return [
        Critical(
            'migration {}.{} is not applied'.format(
                migration.app_label, migration.name),
            hint=None,
            obj='Critical',
            id='{}.W001'.format('unapplied_migrations'),
        )
        for migration, _ in get_unapplied_migrations()
    ]
def check_docker_images(app_configs=None, **kwargs):
    """Check that the configured docker images can be pulled from the hub.

    Skipped entirely when DEBUG is enabled. Any failure (connection, auth,
    unknown image) is reported as a single Critical message.
    """
    errors = []
    if not settings.DEBUG:
        try:
            api = docker.from_env().api
            for name in settings.CONTAINER_NAMES.values():
                # BUG FIX: `print api.pull(...)` was a Python 2 print
                # statement — a SyntaxError under Python 3.
                print(api.pull(name, 'latest'))
        except Exception:
            errors.append(
                Critical(
                    "Cannot pull docker images from hub. Check internet connection or images names.",
                )
            )
    return errors
def check_mail_connection(app_configs, **kwargs):
    """Verify that the outgoing mail connection can be opened."""
    template = 'Can not send email ({}), please check EMAIL_* settings.'
    try:
        # Opening and closing the connection exercises the EMAIL_* settings.
        connection = get_connection()
        connection.open()
        connection.close()
    except Exception as error:
        return [
            Critical(
                template.format(error),
                hint=get_doc_url('admin/install', 'out-mail'),
                id='weblate.E003',
            )
        ]
    return []
def check_avatars(app_configs, **kwargs):
    """Check that avatar downloading works for the anonymous user."""
    from weblate.auth.models import get_anonymous
    from weblate.accounts.avatar import download_avatar_image

    if not settings.ENABLE_AVATARS:
        return []
    errors = []
    try:
        download_avatar_image(get_anonymous(), 32)
    except (IOError, CertificateError) as error:
        errors.append(
            Critical(
                'Failed to download avatar: {}'.format(error),
                hint=get_doc_url('admin/optionals', 'avatars'),
                id='weblate.E018',
            ))
    return errors