def configure(self):
    """Render a single logrotate.conf covering all rotated logfiles on this host."""
    # Collect every RotatedLogfile provided for this host and order them
    # by path so the generated config is deterministic.
    self.logfiles = self.require(RotatedLogfile.key, host=self.host)
    self.logfiles.sort(key=lambda logfile: logfile.path)
    rendered = self.common_config + self.logrotate_template
    self.logrotate_conf = File("logrotate.conf", content=rendered)
    self += self.logrotate_conf
def configure(self):
    """Set up the application server: ini file, buildout and supervised program."""
    # Default template reloading to the debug flag unless explicitly set.
    if self.reload_templates is None:
        self.reload_templates = self.debug
    self.address = Address(self.listen_host, self.listen_port)
    self += File('appserver.ini')
    self += Buildout(additional_config=[
        Directory('profiles', source='profiles'),
    ])
    # (name, value) pairs exported into the program's environment.
    # Currently empty; kept as an extension point.
    env_vars = []
    environment = ','.join(
        '{}={}'.format(name, value) for name, value in env_vars)
    self += Program(
        'appserver',
        priority=20,
        options=dict(
            startsecs=20,
            environment=environment,
        ),
        command=self.map('bin/pserve'),
        args='appserver.ini',
    )
def configure(self):
    """Wire distribution checkouts from the Source component into buildout."""
    try:
        self.source = self.require_one('source', self.host)
    except SilentConfigurationError:
        # No Source component in the environment; proceed without dists.
        have_dists = False
        self.source = None
    else:
        have_dists = len(self.source.distributions)
    if have_dists:
        # Sort by name for stable ordering; dist_names stays a tuple.
        self.dist_names, clones = list(
            zip(*sorted(self.source.distributions.items())))
        self.dist_paths = [clone.target for clone in clones]
    else:
        self.dist_names = []
        self.dist_paths = []
    # A directory for eggs shared by buildouts within the deployment is
    # created for the service user. Assuming that a user cannot have
    # multiple non-sandboxed deployments and that development deployments
    # are sandboxed, eggs are never shared across deployments or users.
    self.eggs_directory = Directory(self.eggs_directory)
    self += self.eggs_directory
    self.overrides = File(
        'buildout_overrides.cfg',
        source=pkg_resources.resource_filename(
            'batou_scm', 'resources/buildout_overrides.cfg'))
    self.additional_config += (self.overrides, )
    super(Buildout, self).configure()
def test_configure_verifies_against_success_file(root):
    """Deploying twice runs ./configure only once: the success marker
    (config.status) short-circuits the second run."""
    root.component += File("foo/config.status", content="", leading=True)
    configure = Configure("foo")
    configure.cmd = mock.Mock()
    root.component += configure
    root.component.deploy()
    root.component.deploy()
    assert configure.cmd.call_count == 1
def configure(self):
    """Expose the rendered logrotate.conf under /var/spool/logrotate/<user>."""
    user = self.host.service_user
    link_path = os.path.join("/var/spool/logrotate/", user)
    self += File(
        link_path,
        ensure="symlink",
        link_to=self.parent.logrotate_conf.path,
    )
def test_group_calls_chown(chown, stat, root):
    """A File with an explicit group triggers os.chown on deploy."""
    # Fake stat result claims gid 0 / mode 0 so the group differs from
    # the requested one and a chown is required.
    fake_stat = Mock()
    fake_stat.st_gid = 0
    fake_stat.st_mode = 0
    os.stat.return_value = fake_stat
    target = File("asdf", group=current_group(), content="")
    root.component += target
    root.component.deploy()
    assert os.chown.called
def test_owner_calls_chown(chown, stat, root):
    """A File with an explicit owner triggers os.chown on deploy."""
    # Fake stat result claims uid 0 / mode 0 so the owner differs from
    # the requested one and a chown is required.
    fake_stat = Mock()
    fake_stat.st_uid = 0
    fake_stat.st_mode = 0
    os.stat.return_value = fake_stat
    target = File("asdf", owner=getpass.getuser(), content="")
    root.component += target
    root.component.deploy()
    assert os.chown.called
def configure(self):
    """Configure the PM2-managed frontend: resolve dependencies, render
    the PM2 ecosystem file and restart all tasks."""
    self.voltoapp = self.require_one('voltoapp')
    self.varnish = self.require_one('varnish:http')
    self.zopecommon = self.require_one('zopecommon')
    self.elasticsearch = self.require_one('elasticsearch')
    self += File('website.pm2.config.js', source='website.pm2.config.js')
    self += RestartTasks('all')
def configure(self):
    """Install and configure Grafana from the upstream apt repository,
    backed by its own database and fronted by nginx."""
    self += ServiceDatabase('grafana', password=self.db_password)
    # The upstream repo is served over https.
    self += Package('apt-transport-https')
    self += AptRepository(
        'grafana',
        line='deb https://packages.grafana.com/oss/deb stable main',
        key='https://packages.grafana.com/gpg.key')
    self += Package('grafana')
    self += File('/etc/grafana/grafana.ini')
    # Restart whenever the ini file changes.
    self += Service('grafana-server', action='restart', deps=self._)
    self += File('/srv/prometheus/grafana-nginx.conf',
                 source='nginx.conf', is_template=False)
    self += VHost(self._)
def test_no_dist_sources_configured_does_not_break(root):
    """A Source without any dist_sources must not break buildout config."""
    root.component += Source()
    root.component += Buildout(config=File('buildout.cfg', content=''))
    root.component.configure()
    buildout = root.component._
    # No develop lines are injected, but the source reference is kept.
    assert 'develop +=' not in buildout.overrides.content
    assert buildout.source is not None
def test_make_verifies_against_success_file(root):
    """Deploying twice runs make only once: the Makefile success marker
    short-circuits the second run."""
    root.component += File("foo/Makefile", content="", leading=True)
    make = Make("foo")
    make.cmd = mock.Mock()
    root.component += make
    root.component.deploy()
    root.component.deploy()
    assert make.cmd.call_count == 1
def configure(self):
    """Deploy the webpasswd CGI: virtualenv, sudo rule and web server glue."""
    self += VirtualEnv(path='/srv/cgiserv/passwd')
    self._ += Requirements(source='passwd.txt')
    # Allow the cgiserv user to change passwords as root, but only via
    # the dedicated helper binary.
    self += File(
        '/etc/sudoers.d/webpasswd',
        content='cgiserv ALL=(root) NOPASSWD: /srv/cgiserv/passwd/bin/webpasswd-change\n')
    self += File(
        '/srv/cgiserv/apache.d/passwd.conf',
        content='ScriptAlias /passwd /srv/cgiserv/passwd/bin/webpasswd-cgi\n'
    )
    self += CGI(self._)
    self += File(
        '/srv/cgiserv/nginx.d/passwd.conf',
        content='location /passwd { proxy_pass http://cgi; }\n')
    self += VHost(self._)
def configure(self):
    """Render the per-service-user nagios config from all provided services."""
    # Sort by (host, description) for a stable config file.
    self.services = list(self.require(Service.key))
    self.services.sort(key=lambda x: (x.host.name, x.description))
    self += File(
        self.expand('nagios-server-{{environment.service_user}}.cfg'),
        source=self.nagios_cfg,
        mode=0o644)
def configure(self):
    """Render the haproxy configuration.

    Reads the system resolvers from /etc/resolv.conf (so the generated
    haproxy.cfg can reference them) and collects all varnish backends
    provided in the environment.
    """
    self.nameservers = []
    # Fix: the original leaked the file handle (open() without close);
    # use a context manager. Also split on arbitrary whitespace instead
    # of re.split(' +'), so tab-separated resolv.conf entries work too.
    with open('/etc/resolv.conf') as resolv:
        for line in resolv:
            if line.startswith('nameserver'):
                parts = line.split()
                self.nameservers.append(parts[1])
    self.varnish_hosts = self.require('varnish:http')
    self += File('haproxy.cfg')
def configure(self):
    """Deploy supervisord via buildout, including log rotation, the
    service wrapper and an optional Nagios check."""
    self.provide('supervisor', self)
    buildout_cfg = File('buildout.cfg', source=self.buildout_cfg)
    self += Buildout('buildout',
                     version='2.13.3',
                     setuptools='46.1.3',
                     config=buildout_cfg,
                     python='3')
    # Directory where dependent components drop program snippets.
    self.program_config_dir = Directory('etc/supervisor.d', leading=True)
    self += self.program_config_dir
    self += File('etc/supervisord.conf', source=self.supervisor_conf)
    self.logdir = Directory('var/log', leading=True)
    self += self.logdir
    # USR2 makes supervisord reopen its logfiles after rotation.
    postrotate = self.expand(
        'kill -USR2 $({{component.workdir}}/bin/supervisorctl pid)')
    if self.logrotate:
        self += RotatedLogfile('var/log/*.log', postrotate=postrotate)
    self += Service('bin/supervisord', pidfile=self.pidfile)
    supervisor_service = self._
    if self.enable:
        self += RunningSupervisor(supervisor_service)
    else:
        self += StoppedSupervisor(supervisor_service)
    # Nagios check
    if self.nagios:
        self += File('check_supervisor',
                     mode=0o755,
                     source=os.path.join(
                         os.path.dirname(__file__), 'resources',
                         'check_supervisor.py.in'))
        self += ServiceCheck(
            'Supervisor programs',
            nrpe=True,
            contact_groups=self.check_contact_groups,
            command=self.expand('{{component.workdir}}/check_supervisor'))
def configure(self):
    """Install and configure exim4 with a database-backed user schema,
    system filters and restart/reload hooks on config changes."""
    self += Package('exim4-daemon-heavy')
    if self.exim_user:
        # Handle case "package is not installed yet"
        # NOTE(review): extent of this branch reconstructed from data
        # flow (schema is only used here) — confirm against history.
        self += File('/etc/exim4/schema.sql')
        schema = self._
        self += ServiceDatabase(
            self.db_name,
            username=self.db_username,
            password=self.db_password,
            schema=schema.path)
        self += Service('exim4', action='restart', deps=schema)
    self += DisableDebconf()
    self += File('/etc/exim4/domains', ensure='directory')
    self += Symlink(
        '/etc/exim4/system-filter',
        source='/home/wosc/.dot/mail/.filter-system')
    # Per-mailbox filter only if the mailbox already exists.
    if os.path.exists('/var/mail/[email protected]'):
        self += Symlink('/var/mail/[email protected]/filter',
                        source='/home/wosc/.dot/mail/.filter')
    self += File('/etc/aliases', is_template=False)
    self += File('/etc/email-addresses', content='wosc: [email protected]')
    self += File('/etc/default/exim4',
                 source='exim4.default', is_template=False)
    self += Service('exim4', action='restart', deps=self._)
    self += File('/etc/exim4/exim4.conf')
    # Main config only needs a reload, not a restart.
    self += Service('exim4', action='reload', deps=self._)
    self += File('/var/mail', ensure='directory', group='Debian-exim')
def configure(self):
    """Deploy the thyrida application: user, virtualenv, pserve program
    and nginx vhost."""
    self += User('thyrida')
    self += VirtualEnv(path='/srv/thyrida/deployment')
    self._ += Requirements()
    reqs = self._
    # paste.ini holds credentials, hence the restrictive mode.
    self += File(
        '/srv/thyrida/paste.ini',
        owner='thyrida',
        group='thyrida',
        mode=0o640)
    self += Program(
        'thyrida',
        command='/srv/thyrida/deployment/bin/pserve /srv/thyrida/paste.ini',
        user='******',
        dependencies=[reqs, self._])
    self += File('/srv/thyrida/nginx.conf', is_template=False)
    self += VHost(self._)
def configure(self):
    """Install a requirements file into the parent VirtualEnv.

    Raises TypeError when not nested inside a VirtualEnv component.
    """
    if not isinstance(self.parent, VirtualEnv):
        raise TypeError('Requirements must be added to a VirtualEnv')
    # Work relative to the virtualenv's own path when it has one.
    if self.parent.path:
        self.workdir = self.map(self.parent.path)
    self += File(self.filename, source=self.source)
    self.requirements = self._.path
    self.dependencies = [self.requirements]
def buildout(root):
    """Fixture: a configured Buildout component with two hg dist sources."""
    dist_sources = repr([
        'hg+https://example.com/foo',
        'hg+https://example.com/bar',
    ])
    root.component += Source(dist_sources=dist_sources)
    root.component += Buildout(config=File('buildout.cfg', content=''))
    root.component.configure()
    return root.component._
def test_content_source_unclear(root):
    """A File without content or source fails with a helpful message."""
    target = File("path")
    with pytest.raises(ValueError) as e:
        root.component += target
    expected = ("Missing implicit template file {}/path. "
                "Or did you want to create an empty file? "
                "Then use File('path', content='').".format(root.defdir))
    assert str(e.value) == expected
def configure(self):
    """Install the tick.sh script and register it as a supervised program."""
    tick = File('tick.sh', source='tick.sh', mode=0o755)
    self += tick
    self.provide(
        'programs',
        dict(name='tick', path=tick.path, priority=10))
def configure(self):
    """Install the mail-related node_exporter collectors: mail queue,
    exim stats and the mail/caldav roundtrip checks."""
    # Allow running `mailq`
    self += GroupMember('Debian-exim', user='******')
    self += File('/srv/prometheus/bin/node_exporter-mailq',
                 source='mailq.sh', is_template=False, mode=0o755)
    self += CronJob('/srv/prometheus/bin/node_exporter-mailq',
                    user='******', timing='* * * * *')
    # Allow reading exim mainlog
    self += GroupMember('adm', user='******')
    self += File('/srv/prometheus/bin/node_exporter-eximstats',
                 source='eximstats.sh', is_template=False, mode=0o755)
    self += CronJob('/srv/prometheus/bin/node_exporter-eximstats',
                    user='******', timing='*/5 * * * *')
    self += VirtualEnv()
    self._ += Requirements(source='mailcheck.txt')
    # One roundtrip checker per protocol, each with its own config file.
    for name in ['mail', 'caldav']:
        self += Symlink('/srv/prometheus/bin/%s-check-roundtrip' % name,
                        source=self.map('bin/%s-check-roundtrip' % name))
        # Config contains credentials, hence the restrictive mode.
        self += File('/srv/prometheus/%scheck.conf' % name,
                     owner='prometheus', group='prometheus', mode=0o640)
    self += File('/srv/prometheus/bin/node_exporter-mailcheck',
                 source='mailcheck.sh', is_template=False, mode=0o755)
    self += CronJob('/srv/prometheus/bin/node_exporter-mailcheck',
                    user='******', timing='*/5 * * * *')
    self += File('/srv/prometheus/conf.d/alert-mailcheck.yml',
                 is_template=False)
    self.provide('prom:rule', self._)
def configure(self):
    """Install MySQL packages plus backup/restore helper scripts and a
    daily backup cron entry."""
    for name in self.packages:
        self += Package(name)
    for name in ['backup', 'restore']:
        self += File('/usr/local/bin/mysql-%s' % name,
                     is_template=False, mode=0o755)
    self += Symlink('/etc/cron.daily/mysql-backup',
                    source='/usr/local/bin/mysql-backup')
def test_content_with_unicode_requires_encoding(root):
    """Non-ASCII template content fails with an ascii encoding but
    round-trips correctly with utf-8."""
    path = 'path'
    root.component.foobar = 'äsdf'
    bad = File(path, content='örks {{component.foobar}}', encoding='ascii')
    with pytest.raises(UnicodeEncodeError):
        root.component |= bad
    good = File(path, content='örks {{component.foobar}}', encoding='utf-8')
    root.component += good
    root.component.deploy()
    with open(good.path, encoding=good.encoding) as f:
        result = f.read()
    # XXX pytest reporting breaks if this fails. :(
    assert result == 'örks äsdf'
def configure(self):
    """Register a supervised program.

    Requires a supervisor component on the same host and a command.
    Raises ValueError when no command was given.
    """
    if not self.command:
        raise ValueError('command is required')
    # Ensure a supervisor exists on this host before writing config.
    self.require_one('supervisor', host=self.host)
    if self.dependencies is None:
        self.dependencies = (self.parent, )
    self += File('/etc/supervisor/conf.d/%s.conf' % self.name,
                 source=here + 'program.conf')
def configure(self):
    """Assemble the crontab from all CronJob components on this host.

    Raises ConfigurationError when jobs exist despite purge mode, or
    when no jobs exist outside purge mode.
    """
    self.jobs = self.require(CronJob.key, host=self.host, strict=False)
    if self.purge and self.jobs:
        raise ConfigurationError(
            'Found cron jobs, but expecting an empty crontab.')
    elif not self.purge and not self.jobs:
        raise ConfigurationError('No cron jobs found.', self)
    # Stable ordering keeps the rendered crontab deterministic.
    self.jobs.sort(key=lambda job: job.command + ' ' + job.args)
    self.crontab = File('crontab', source=self.crontab_template)
    self += self.crontab
def configure(self):
    """Install an init.d wrapper script for the parent service."""
    self.executable = self.parent.executable
    self.pidfile = self.parent.pidfile
    # Anchor relative pidfiles at the deployment's working directory.
    if not os.path.isabs(self.pidfile):
        self.pidfile = os.path.join(self.root.workdir, self.pidfile)
    target = "/var/spool/init.d/{0}/{1}".format(
        self.host.service_user,
        os.path.basename(self.parent.executable))
    init_source = os.path.join(
        os.path.dirname(__file__), "resources", "init.sh")
    self += File(target, source=init_source, mode=0o755, leading=True)
def configure(self):
    """Set up letsencrypt: service user, shared directories, patched
    simp_le client, nginx/sudo glue and per-domain data directories."""
    self += User('letsencrypt')
    # Daemons that serve certificates need read access via the group.
    for user in self.daemons:
        self += GroupMember('letsencrypt', user=user)
    self += File('/srv/letsencrypt/public', ensure='directory',
                 owner='letsencrypt', group='letsencrypt')
    self += File('/srv/letsencrypt/data', ensure='directory',
                 owner='letsencrypt', group='letsencrypt', mode=0o770)
    self += VirtualEnv(path='/srv/letsencrypt/deployment')
    self._ += Requirements()
    self += Patch(
        '/srv/letsencrypt/deployment/lib/python%s/site-packages'
        '/simp_le.py' % VirtualEnv.version,
        target='wosc patched',
        file='logging.patch')
    self += File(
        '/etc/nginx/snippets/letsencrypt.conf',
        source='nginx.conf', is_template=False)
    # Allow the letsencrypt user to bounce the affected daemons.
    self += File(
        '/etc/sudoers.d/letsencrypt',
        content='letsencrypt ALL=(root) NOPASSWD: /etc/init.d/nginx, /etc/init.d/exim4, /etc/init.d/courier-imap-ssl\n')
    self += File(
        '/srv/letsencrypt/update-letsencrypt',
        source='update.sh', is_template=False, mode=0o755)
    self += CronJob(
        '/srv/letsencrypt/update-letsencrypt',
        user='******', timing='15 2 * * *')
    for domain in self.domains:
        self += File(
            '/srv/letsencrypt/public/%s' % domain, ensure='directory',
            owner='letsencrypt', group='letsencrypt')
        self += File(
            '/srv/letsencrypt/data/%s' % domain, ensure='directory',
            owner='letsencrypt', group='letsencrypt')
        # Install per-domain files only when a matching source exists.
        for item in self.files:
            if not os.path.exists('%s/data/%s.%s' % (
                    self.defdir, domain, item['source'])):
                continue
            self += File(
                '/srv/letsencrypt/data/%s/%s' % (domain, item['target']),
                source='%s.%s' % (domain, item['source']),
                is_template=False,
                mode=item['mode'],
                owner='letsencrypt',
                group='letsencrypt')
def configure(self):
    """Install and configure Matomo: packages, database, tarball
    download, PHP/nginx serving and log-import cron jobs."""
    for name in self.packages:
        self += Package(name)
    self += User('matomo')
    # Allow reading accesslogs
    self += GroupMember('adm', user='******')
    self += ServiceDatabase('matomo', password=self.db_password)
    # Contains credentials, hence the restrictive mode.
    self += File('/srv/matomo/setup/install.json',
                 owner='matomo', group='matomo', mode=0o640)
    self += Setup()
    self += Download(
        self.url.format(version=self.version),
        checksum=self.checksum,
        requests_kwargs={'headers': {'accept-encoding': '', 'accept': ''}})
    self += Extract(
        self._.target,
        target='/srv/matomo', strip=1, owner='matomo', group='matomo')
    self += PHP('matomo', user='******')
    self += File('/srv/matomo/nginx.conf', is_template=False)
    self += VHost(self._)
    self += CronJob(
        self.import_logs.format(id=1, domain='wosc.de'),
        user='******', timing='0 8 * * *')
    self += CronJob(
        self.import_logs.format(id=2, domain='grmusik.de'),
        user='******', timing='30 8 * * *')
    self += CronJob(
        'php /srv/matomo/console core:archive '
        '--url=https://pharos.wosc.de/logs/ > /dev/null',
        user='******', timing='0 9 * * *')
    self += File(
        '/etc/sudoers.d/matomo-geoip',
        content='matomo ALL=(root) NOPASSWD: /usr/sbin/update-geoip-database\n')
def configure(self):
    """Render the NRPE config from all NRPE services on this host."""
    # Only NRPE services are relevant; sort by name for stable output.
    all_services = self.require(Service.key, host=self.host)
    self.services = sorted(
        (service for service in all_services
         if isinstance(service, NRPEService)),
        key=lambda service: service.name)
    self += File(self.expand('/etc/nagios/nrpe/local/'
                             '{{environment.service_user}}.cfg'),
                 source=self.nrpe_cfg,
                 mode=0o644)