def cleanup_stale_repos():
    """Remove stale VCS checkouts no longer owned by any component."""
    prefix = data_dir('vcs')
    cutoff = time() - 86400
    for path in glob(os.path.join(prefix, '*', '*')):
        if not os.path.isdir(path):
            continue
        # Leave anything touched within the last day alone
        if os.path.getmtime(path) > cutoff:
            continue
        # Directory layout is <prefix>/<project>/<component>
        project_slug, component_slug = os.path.split(path[len(prefix) + 1:])
        matches = Component.objects.filter(
            slug=component_slug, project__slug=project_slug
        ).exclude(
            repo__startswith='weblate:'
        )
        # Nothing owns this checkout anymore, wipe it
        if not matches.exists():
            rmtree(path, onerror=remove_readonly)
def filename(self):
    """Calculates unique wrapper filename.

    It is based on template and DATA_DIR settings.
    """
    digest = hashlib.md5(self.SSH_WRAPPER_TEMPLATE.encode('utf-8'))
    digest.update(data_dir('ssh').encode('utf-8'))
    return ssh_file('ssh-weblate-wrapper-{0}'.format(digest.hexdigest()))
def check_data_writable(app_configs=None, **kwargs):
    """Check we can write to data dir."""
    errors = []
    message = 'Path {} is not writable, check your DATA_DIR settings.'
    paths = [settings.DATA_DIR]
    paths.extend(
        data_dir(name)
        for name in ('home', 'whoosh', 'ssh', 'vcs', 'memory', 'celery', 'backups')
    )
    for path in paths:
        if not os.path.exists(path):
            # Missing directories are simply created
            os.makedirs(path)
        elif not os.access(path, os.W_OK):
            errors.append(
                Critical(
                    message.format(path),
                    hint=get_doc_url('admin/install', 'file-permissions'),
                    id='weblate.E002',
                )
            )
    return errors
def __init__(self):
    """Open the translation memory index and prepare query tooling."""
    storage = FileStorage(data_dir('memory'))
    self.index = storage.open_index()
    self.parser = qparser.QueryParser(
        'source',
        schema=self.index.schema,
        group=qparser.OrGroup.factory(0.9),
        termclass=query.FuzzyTerm,
    )
    # Searcher is created lazily elsewhere
    self.searcher = None
    self.comparer = Comparer()
def ready(self):
    """Configure merge driver for Gettext PO.

    Performed behind a lock to avoid errors when several servers start
    in parallel.
    """
    lockfile = FileLock(os.path.join(data_dir('home'), 'gitlock'))
    with lockfile:
        try:
            GitRepository.global_setup()
            delete_configuration_error('Git global setup')
        except RepositoryException as error:
            add_configuration_error(
                'Git global setup',
                'Failed to do git setup: {0}'.format(error)
            )

        # Use it for *.po by default
        conf_dir = os.path.join(data_dir('home'), '.config', 'git')
        conf_file = os.path.join(conf_dir, 'attributes')
        if not os.path.exists(conf_file):
            if not os.path.exists(conf_dir):
                os.makedirs(conf_dir)
            with open(conf_file, 'w') as handle:
                handle.write('*.po merge=weblate-merge-gettext-po\n')
def get_clean_env(extra=None):
    """Return cleaned up environment for subprocess execution."""
    environ = {'LANG': 'en_US.UTF-8', 'HOME': data_dir('home')}
    if extra is not None:
        environ.update(extra)
    # Propagate only a whitelisted set of variables from the real environment
    for name in ('PATH', 'LD_LIBRARY_PATH', 'SystemRoot'):
        if name in os.environ:
            environ[name] = os.environ[name]
    # Python 2 on Windows doesn't handle Unicode objects in environment
    # even if they can be converted to ASCII string, let's fix it here
    if six.PY2 and sys.platform == 'win32':
        return {str(key): str(val) for key, val in environ.items()}
    return environ
def test_rename_project(self):
    """Renaming a project updates its slug, URLs and component pages."""
    # Remove stale dir from previous tests
    target = os.path.join(data_dir('vcs'), 'xxxx')
    if os.path.exists(target):
        shutil.rmtree(target)
    self.make_manager()
    # The rename form is offered on the project page
    self.assertContains(
        self.client.get(reverse('project', kwargs=self.kw_project)),
        '#rename',
    )
    response = self.client.post(
        reverse('rename', kwargs=self.kw_project), {'slug': 'xxxx'}
    )
    self.assertRedirects(response, '/projects/xxxx/')
    # The slug change is persisted in the database
    project = Project.objects.get(pk=self.project.pk)
    self.assertEqual(project.slug, 'xxxx')
    # Components still have a working repository and use the new URLs
    for component in project.component_set.all():
        self.assertIsNotNone(component.repository.last_remote_revision)
        response = self.client.get(component.get_absolute_url())
        self.assertContains(response, '/projects/xxxx/')
def get_clean_env(extra=None):
    """Return cleaned up environment for subprocess execution."""
    environ = {'LANG': 'en_US.UTF-8', 'HOME': data_dir('home')}
    if extra is not None:
        environ.update(extra)
    keep = (
        # Keep PATH setup
        'PATH',
        # Keep linker configuration
        'LD_LIBRARY_PATH',
        'LD_PRELOAD',
        # Needed by Git on Windows
        'SystemRoot',
        # Pass proxy configuration
        'http_proxy',
        'https_proxy',
        'HTTPS_PROXY',
        'NO_PROXY',
        # Needed for OpenShift 3 deployments using nss_wrapper, see
        # https://docs.openshift.com/enterprise/3.2/creating_images/guidelines.html
        'NSS_WRAPPER_GROUP',
        'NSS_WRAPPER_PASSWD',
    )
    for name in keep:
        if name in os.environ:
            environ[name] = os.environ[name]
    # Python 2 on Windows doesn't handle Unicode objects in environment
    # even if they can be converted to ASCII string, let's fix it here
    if six.PY2 and sys.platform == 'win32':
        return {str(key): str(val) for key, val in environ.items()}
    return environ
def storage(self):
    """Return whoosh file storage rooted at this class' data directory."""
    location = data_dir(self.LOCATION)
    return FileStorage(location)
def ssh_file(filename):
    """Generate full path to SSH configuration file."""
    base = data_dir('ssh')
    return os.path.join(base, filename)
def cleanup(cls):
    """Delete the on-disk storage directory if it is present."""
    target = data_dir(cls.LOCATION)
    if os.path.exists(target):
        shutil.rmtree(target)
def database_backup():
    """Dump the default database into the backups data dir.

    PostgreSQL dumps go through pg_dump (which compresses on its own when
    asked); anything else is dumped with mysqldump.  When DATABASE_BACKUP is
    "compressed" and the dump tool did not compress itself, the plain SQL
    file is gzipped afterwards.
    """
    # Backups may be disabled entirely in settings
    if settings.DATABASE_BACKUP == "none":
        return
    # Serialize backup runs across workers
    with backup_lock():
        database = settings.DATABASES["default"]
        env = get_clean_env()
        compress = settings.DATABASE_BACKUP == "compressed"
        out_compressed = data_dir("backups", "database.sql.gz")
        out_plain = data_dir("backups", "database.sql")
        if using_postgresql():
            cmd = ["pg_dump", "--dbname", database["NAME"]]
            if database["HOST"]:
                cmd.extend(["--host", database["HOST"]])
            if database["PORT"]:
                cmd.extend(["--port", database["PORT"]])
            if database["USER"]:
                cmd.extend(["--username", database["USER"]])
            if settings.DATABASE_BACKUP == "compressed":
                # pg_dump writes gzip output itself, skip the gzip step below
                cmd.extend(["--file", out_compressed])
                cmd.extend(["--compress", "6"])
                compress = False
            else:
                cmd.extend(["--file", out_plain])
            # Password is passed via environment, not on the command line
            env["PGPASSWORD"] = database["PASSWORD"]
        else:
            cmd = [
                "mysqldump",
                "--result-file",
                out_plain,
                "--single-transaction",
                "--skip-lock-tables",
            ]
            if database["HOST"]:
                cmd.extend(["--host", database["HOST"]])
            if database["PORT"]:
                cmd.extend(["--port", database["PORT"]])
            if database["USER"]:
                cmd.extend(["--user", database["USER"]])
            cmd.extend(["--databases", database["NAME"]])
            env["MYSQL_PWD"] = database["PASSWORD"]
        try:
            subprocess.run(
                cmd,
                env=env,
                capture_output=True,
                stdin=subprocess.DEVNULL,
                check=True,
                text=True,
            )
        except subprocess.CalledProcessError as error:
            # Attach the dump tool output to the error report, then re-raise
            add_breadcrumb(
                category="backup",
                message="database dump output",
                stdout=error.stdout,
                stderr=error.stderr,
            )
            report_error()
            raise
        if compress:
            # gzip the plain dump and drop the uncompressed file
            with open(out_plain, "rb") as f_in:
                with gzip.open(out_compressed, "wb") as f_out:
                    shutil.copyfileobj(f_in, f_out)
            os.unlink(out_plain)
def test_settings_backup(self):
    """Expanded settings dump must mention the configured DATA_DIR."""
    settings_backup()
    dump = data_dir("backups", "settings-expanded.py")
    with open(dump) as handle:
        self.assertIn(settings.DATA_DIR, handle.read())
def memory_backup(indent=2):
    """Serialize translation memory as JSON into the backups data dir."""
    target = os.path.join(data_dir('backups'), 'memory.json')
    memory = TranslationMemory()
    with open(target, 'w') as handle:
        memory.dump(handle, indent)
class Migration(migrations.Migration):
    """Squashed initial migration for the fonts app.

    Creates Font, FontGroup and FontOverride models.  The help_text of the
    FontGroup ``name`` field was split by a stray control character in the
    broken source; it is reconstructed here as a single string literal.
    """

    replaces = [
        ("fonts", "0001_initial"),
        ("fonts", "0002_fontgroup_fontoverride"),
        ("fonts", "0003_auto_20190517_1249"),
        ("fonts", "0004_auto_20190517_1421"),
        ("fonts", "0005_auto_20190517_1450"),
        ("fonts", "0006_auto_20190517_1900"),
        ("fonts", "0007_auto_20190517_1907"),
    ]

    initial = True

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ("trans", "0026_alert_change"),
        ("trans", "0027_auto_20190517_1125"),
        ("lang", "0002_auto_20190516_1245"),
    ]

    operations = [
        migrations.CreateModel(
            name="Font",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                (
                    "family",
                    models.CharField(
                        blank=True, max_length=100, verbose_name="Font family"
                    ),
                ),
                (
                    "style",
                    models.CharField(
                        blank=True, max_length=100, verbose_name="Font style"
                    ),
                ),
                (
                    "font",
                    models.FileField(
                        help_text="OpenType and TrueType fonts are supported.",
                        storage=django.core.files.storage.FileSystemStorage(
                            location=data_dir("fonts")
                        ),
                        upload_to="",
                        validators=[weblate.fonts.validators.validate_font],
                        verbose_name="Font file",
                    ),
                ),
                ("timestamp", models.DateTimeField(auto_now_add=True)),
                (
                    "project",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        to="trans.Project",
                    ),
                ),
                (
                    "user",
                    models.ForeignKey(
                        blank=True,
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
            ],
            options={"unique_together": {("family", "style", "project")}},
            bases=(models.Model, weblate.trans.mixins.UserDisplayMixin),
        ),
        migrations.CreateModel(
            name="FontGroup",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                (
                    "name",
                    models.SlugField(
                        help_text=(
                            "Identifier you will use in checks to select "
                            "this font group. Avoid whitespace or special "
                            "characters."
                        ),
                        max_length=100,
                        verbose_name="Font group name",
                    ),
                ),
                (
                    "font",
                    models.ForeignKey(
                        help_text=(
                            "Default font is used unless per language "
                            "override matches."
                        ),
                        on_delete=django.db.models.deletion.CASCADE,
                        to="fonts.Font",
                        verbose_name="Default font",
                    ),
                ),
                (
                    "project",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        to="trans.Project",
                    ),
                ),
            ],
            options={"unique_together": {("name", "project")}},
        ),
        migrations.CreateModel(
            name="FontOverride",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                (
                    "font",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        to="fonts.Font",
                        verbose_name="Font",
                    ),
                ),
                (
                    "group",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        to="fonts.FontGroup",
                    ),
                ),
                (
                    "language",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        to="lang.Language",
                        verbose_name="Language",
                    ),
                ),
            ],
            options={"unique_together": {("group", "language")}},
        ),
    ]
def setup_index():
    """Create whoosh storage for translation memory and return its index."""
    store = FileStorage(data_dir('memory'))
    store.create()
    return store.create_index(TMSchema())
def clean_indexes():
    """Clean all indexes."""
    whoosh_dir = data_dir('whoosh')
    shutil.rmtree(whoosh_dir)
    create_index()
def storage(self):
    """Whoosh file storage located under this class' LOCATION data dir."""
    path = data_dir(self.LOCATION)
    return FileStorage(path)
def database_backup():
    """Dump the default database into the backups data dir.

    Supports PostgreSQL via pg_dump and MySQL via mysqldump; any other
    engine is silently skipped.  When DATABASE_BACKUP is "compressed" and
    the dump tool did not compress itself, the plain SQL file is gzipped
    afterwards.
    """
    # Backups may be disabled entirely in settings
    if settings.DATABASE_BACKUP == "none":
        return
    ensure_backup_dir()
    database = settings.DATABASES["default"]
    env = get_clean_env()
    compress = settings.DATABASE_BACKUP == "compressed"
    out_compressed = data_dir("backups", "database.sql.gz")
    out_plain = data_dir("backups", "database.sql")
    if database["ENGINE"] == "django.db.backends.postgresql":
        cmd = ["pg_dump", "--dbname", database["NAME"]]
        if database["HOST"]:
            cmd.extend(["--host", database["HOST"]])
        if database["PORT"]:
            cmd.extend(["--port", database["PORT"]])
        if database["USER"]:
            cmd.extend(["--username", database["USER"]])
        if settings.DATABASE_BACKUP == "compressed":
            # pg_dump writes gzip output itself, skip the gzip step below
            cmd.extend(["--file", out_compressed])
            cmd.extend(["--compress", "6"])
            compress = False
        else:
            cmd.extend(["--file", out_plain])
        # Password is passed via environment, not on the command line
        env["PGPASSWORD"] = database["PASSWORD"]
    elif database["ENGINE"] == "django.db.backends.mysql":
        cmd = [
            "mysqldump",
            "--result-file",
            out_plain,
            "--single-transaction",
            "--skip-lock-tables",
        ]
        if database["HOST"]:
            cmd.extend(["--host", database["HOST"]])
        if database["PORT"]:
            cmd.extend(["--port", database["PORT"]])
        if database["USER"]:
            cmd.extend(["--user", database["USER"]])
        cmd.extend(["--databases", database["NAME"]])
        env["MYSQL_PWD"] = database["PASSWORD"]
    else:
        # Unsupported engine: nothing to back up
        return
    try:
        subprocess.run(
            cmd,
            env=env,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            stdin=subprocess.DEVNULL,
            check=True,
            universal_newlines=True,
        )
    except subprocess.CalledProcessError as error:
        # Include the dump tool output in the error report, then re-raise
        report_error(extra_data={
            "stdout": error.stdout,
            "stderr": error.stderr
        })
        raise
    if compress:
        # gzip the plain dump and drop the uncompressed file
        with open(out_plain, "rb") as f_in:
            with gzip.open(out_compressed, "wb") as f_out:
                shutil.copyfileobj(f_in, f_out)
        os.unlink(out_plain)
from django.conf import settings from django.core.exceptions import ValidationError from django.core.files.storage import FileSystemStorage from django.db import models from django.urls import reverse from django.utils.translation import gettext_lazy as _ from weblate.fonts.utils import get_font_name from weblate.fonts.validators import validate_font from weblate.lang.models import Language from weblate.trans.mixins import UserDisplayMixin from weblate.trans.models import Project from weblate.utils.data import data_dir FONT_STORAGE = FileSystemStorage(location=data_dir("fonts")) class Font(models.Model, UserDisplayMixin): family = models.CharField(verbose_name=_("Font family"), max_length=100, blank=True) style = models.CharField(verbose_name=_("Font style"), max_length=100, blank=True) font = models.FileField( verbose_name=_("Font file"), validators=[validate_font], storage=FONT_STORAGE, help_text=_("OpenType and TrueType fonts are supported."), )
def download_multi(translations, fmt=None):
    """Bundle the files of the given translations into a zip download."""
    names = (translation.get_filename() for translation in translations)
    # Translations without a backing file yield an empty name and are skipped
    return zip_download(data_dir("vcs"), [name for name in names if name])
def _get_path(self):
    """Filesystem location of this object's VCS checkout."""
    root = data_dir('vcs')
    return os.path.join(root, self.slug)
def _get_path(self):
    """Return path of the VCS checkout for this object's slug."""
    vcs_root = data_dir('vcs')
    return os.path.join(vcs_root, self.slug)
from whoosh.filedb.filestore import FileStorage
from whoosh.query import Or, Term
from whoosh.writing import AsyncWriter, BufferedWriter
from whoosh import qparser

from django.conf import settings
from django.dispatch import receiver
from django.db.models.signals import post_migrate
from django.db.utils import IntegrityError
from django.utils.encoding import force_text
from django.db import transaction

from weblate.lang.models import Language
from weblate.utils.data import data_dir

# Shared whoosh storage backing all fulltext indexes
STORAGE = FileStorage(data_dir('whoosh'))


class TargetSchema(SchemaClass):
    """Fulltext index schema for target strings."""

    # Primary key of the indexed unit; stored so search hits map back to rows
    pk = NUMERIC(stored=True, unique=True)
    target = TEXT()
    comment = TEXT()


class SourceSchema(SchemaClass):
    """Fulltext index schema for source and context strings."""

    # Primary key of the indexed unit; stored so search hits map back to rows
    pk = NUMERIC(stored=True, unique=True)
    source = TEXT()
    context = TEXT()
    location = TEXT()
def ensure_backup_dir():
    """Make sure the backups data directory exists.

    The original exists()/makedirs() pair raced when several workers started
    at once: both could see the directory missing and one makedirs() call
    would then fail.  Attempt creation and tolerate the already-exists case.
    """
    backup_dir = data_dir("backups")
    try:
        os.makedirs(backup_dir)
    except OSError:
        # Another worker created it concurrently; re-raise real failures
        if not os.path.isdir(backup_dir):
            raise
def ssh_file(filename):
    """Return the absolute path of an SSH configuration file."""
    ssh_dir = data_dir('ssh')
    return os.path.join(ssh_dir, filename)
def cleanup(cls):
    """Remove the class' storage directory when it exists."""
    path = data_dir(cls.LOCATION)
    if os.path.exists(path):
        shutil.rmtree(path)
def test_memory_backup(self):
    """Backing up an empty translation memory yields an empty JSON list."""
    memory_backup()
    backup_file = data_dir("backups", "memory.json")
    with open(backup_file) as handle:
        self.assertEqual(json.load(handle), [])