def test_directory_last_updated_reflects_file_changes(root):
    os.mkdir('source')
    open('source/one', 'w').close()
    open('source/two', 'w').close()
    d = Directory('target', source='source')
    root.component += d
    root.component.deploy()
    assert d.last_updated() == os.stat(
        os.path.join(root.workdir, 'target', 'two')).st_mtime
def test_directory_last_updated_reflects_file_changes(root): os.mkdir("source") open("source/one", "w").close() open("source/two", "w").close() d = Directory("target", source="source") root.component += d root.component.deploy() assert (d.last_updated() == os.stat( os.path.join(root.workdir, "target", "two")).st_mtime)
def configure(self):
    self.provide("supervisor", self)
    buildout_cfg = File("buildout.cfg", source=self.buildout_cfg)
    self += Buildout(
        version="2.13.3",
        setuptools="46.1.3",
        config=buildout_cfg,
        python="3",
    )
    self.program_config_dir = Directory("etc/supervisor.d", leading=True)
    self += self.program_config_dir
    self += File("etc/supervisord.conf", source=self.supervisor_conf)
    self.logdir = Directory("var/log", leading=True)
    self += self.logdir
    postrotate = self.expand(
        "kill -USR2 $({{component.workdir}}/bin/supervisorctl pid)")
    if self.logrotate:
        self += RotatedLogfile("var/log/*.log", postrotate=postrotate)
    self += Service("bin/supervisord", pidfile=self.pidfile)
    service = self._
    if self.enable:
        self += RunningSupervisor(service)
    else:
        self += StoppedSupervisor()
    # Nagios check
    if self.nagios:
        self += File(
            "check_supervisor",
            mode=0o755,
            source=os.path.join(
                os.path.dirname(__file__),
                "resources",
                "check_supervisor.py.in",
            ),
        )
        self += ServiceCheck(
            "Supervisor programs",
            nrpe=True,
            contact_groups=self.check_contact_groups,
            command=self.expand("{{component.workdir}}/check_supervisor"),
        )
def test_directory_copies_all_files(root):
    os.mkdir('source')
    open('source/one', 'w').close()
    open('source/two', 'w').close()
    root.component += Directory('target', source='source')
    root.component.deploy()
    assert sorted(os.listdir('work/mycomponent/target')) == ['one', 'two']
def test_directory_creates_directory(root):
    path = "dir"
    d = Directory(path)
    root.component += d
    assert not os.path.isdir(d.path)
    root.component.deploy()
    assert os.path.isdir(d.path)
def configure(self):
    self.provide('zopecommon', self)
    self.common = self.require_one('common', host=self.host)
    self.zope_instances = self.require('zope:http')
    self.zope_instances.sort(key=lambda s: s.script_id)
    self.backupsdir = self.backupsdir or self.expand(
        '{{component.workdir}}/var/backup')
    config = File('buildout.cfg',
                  source='buildout.cfg',
                  template_context=self)
    buildout_general = File('buildout_general.cfg',
                            source='buildout_general.cfg',
                            template_context=self)
    additional_config = [
        buildout_general,
        Directory('profiles', source='profiles'),
    ]
    self += Buildout(python='3.8',
                     version=self.common.zc_buildout,
                     setuptools=self.common.setuptools,
                     config=config,
                     additional_config=additional_config)
    self += InstallPythonPackages()
    # ElasticSearch and Celery configuration
    self += File('elasticsearch-mappings.json',
                 source='elasticsearch-mappings.json')
    self += File('elasticsearch-preprocessings.json',
                 source='elasticsearch-preprocessings.json')
    self += File('.env', source='_env', template_context=self)
def configure(self):
    if self.reload_templates is None:
        self.reload_templates = self.debug
    self.address = Address(self.listen_host, self.listen_port)
    self += File('appserver.ini')
    self += Buildout(additional_config=[
        Directory('profiles', source='profiles'),
    ])
    env = [
        # (name, value)
    ]
    self += Program(
        'appserver',
        priority=20,
        options=dict(
            startsecs=20,
            environment=','.join('{}={}'.format(name, value)
                                 for name, value in env),
        ),
        command=self.map('bin/pserve'),
        args='appserver.ini',
    )
def test_directory_copies_all_files(root): os.mkdir("source") open("source/one", "w").close() open("source/two", "w").close() root.component += Directory("target", source="source") root.component.deploy() assert sorted(os.listdir("work/mycomponent/target")) == ["one", "two"]
def configure(self):
    if (not self.revision_or_branch) or (self.revision and self.branch):
        raise ValueError(
            "Clone(%s) needs exactly one of revision or branch" % self.url)
    self.target = self.map(self.target)
    self += Directory(self.target)
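# Hedged usage sketch for the revision/branch validation above. Clone is
# the component whose configure() is shown; the keyword arguments used
# here (branch, revision, target) are assumptions read off the attributes
# that configure() accesses, not a verified constructor signature, and the
# URL is purely illustrative.
from batou.component import Component


class DemoCheckout(Component):
    """Hypothetical component demonstrating the exactly-one rule."""

    def configure(self):
        # Valid: exactly one of revision or branch is set.
        self += Clone('https://example.com/repo', branch='default',
                      target='src')
        # Invalid: both (or neither) set -- the configure() above raises
        # ValueError during the configure phase.
        # self += Clone('https://example.com/repo', branch='default',
        #               revision='abc123', target='src')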
def configure(self):
    if self.provide_itself:
        self.provide('sshkeypair', self)
    self += Directory('~/.ssh', mode=0o700)
    # RSA
    if self.id_rsa:
        self += File('~/.ssh/id_rsa', content=self.id_rsa, mode=0o600)
    elif self.purge_unmanaged_keys:
        self += Purge('~/.ssh/id_rsa')
    if self.id_rsa_pub:
        self += File('~/.ssh/id_rsa.pub', content=self.id_rsa_pub)
    # ED25519
    if self.id_ed25519:
        self += File('~/.ssh/id_ed25519',
                     content='{}\n'.format(self.id_ed25519),
                     mode=0o600)
    elif self.purge_unmanaged_keys:
        self += Purge('~/.ssh/id_ed25519')
    if self.id_ed25519_pub:
        self += File('~/.ssh/id_ed25519.pub', content=self.id_ed25519_pub)
    # ScanHost
    for host in self.scan_hosts:
        self += ScanHost(host)
def configure(self):
    try:
        self.source = self.require_one('source', self.host)
    except SilentConfigurationError:
        have_dists = False
        self.source = None
    else:
        have_dists = len(self.source.distributions)
    if have_dists:
        self.dist_names, distributions = list(zip(
            *sorted(self.source.distributions.items())))
        self.dist_paths = [clone.target for clone in distributions]
    else:
        self.dist_names = []
        self.dist_paths = []
    # A directory for eggs shared by buildouts within the deployment is
    # created for the service user. Assuming that a user cannot have
    # multiple non-sandboxed deployments and that development deployments
    # are sandboxed, eggs are never shared across deployments or users.
    self.eggs_directory = Directory(self.eggs_directory)
    self += self.eggs_directory
    self.overrides = File(
        'buildout_overrides.cfg',
        source=pkg_resources.resource_filename(
            'batou_scm', 'resources/buildout_overrides.cfg'))
    self.additional_config += (self.overrides,)
    super(Buildout, self).configure()
def configure(self):
    self += File('~/.hgrc', source='hgrc', is_template=True)
    self.provide('hgrc', self)
    if self.eggs_directory:
        self += Directory(self.eggs_directory)
        self.provide('eggs-directory', self.map(self.eggs_directory))
def configure(self):
    self.provide('supervisor', self)
    buildout_cfg = File('buildout.cfg', source=self.buildout_cfg)
    self += Buildout('buildout',
                     version='2.13.3',
                     setuptools='46.1.3',
                     config=buildout_cfg,
                     python='3')
    self.program_config_dir = Directory('etc/supervisor.d', leading=True)
    self += self.program_config_dir
    self += File('etc/supervisord.conf', source=self.supervisor_conf)
    self.logdir = Directory('var/log', leading=True)
    self += self.logdir
    postrotate = self.expand(
        'kill -USR2 $({{component.workdir}}/bin/supervisorctl pid)')
    if self.logrotate:
        self += RotatedLogfile('var/log/*.log', postrotate=postrotate)
    self += Service('bin/supervisord', pidfile=self.pidfile)
    service = self._
    if self.enable:
        self += RunningSupervisor(service)
    else:
        self += StoppedSupervisor(service)
    # Nagios check
    if self.nagios:
        self += File('check_supervisor',
                     mode=0o755,
                     source=os.path.join(
                         os.path.dirname(__file__),
                         'resources',
                         'check_supervisor.py.in'))
        self += ServiceCheck(
            'Supervisor programs',
            nrpe=True,
            contact_groups=self.check_contact_groups,
            command=self.expand('{{component.workdir}}/check_supervisor'))
def test_directory_does_not_copy_excluded_files(root):
    os.mkdir('source')
    open('source/one', 'w').close()
    open('source/two', 'w').close()
    p = Directory('target', source='source', exclude=('two',))
    root.component += p
    root.component.deploy()
    assert len(os.listdir('work/mycomponent/target')) == 1
def test_directory_does_not_copy_excluded_files(root): os.mkdir("source") open("source/one", "w").close() open("source/two", "w").close() p = Directory("target", source="source", exclude=("two",)) root.component += p root.component.deploy() assert len(os.listdir("work/mycomponent/target")) == 1
def configure(self):
    if self.strip and not self._supports_strip:
        raise ValueError("Strip is not supported by {}".format(
            self.__class__.__name__))
    if self.create_target_dir:
        if self.target is None:
            self.target = self.extract_base_name(self.archive)
        if not self.target:
            raise AttributeError(
                "Target not given and not derivable from archive name "
                "({}).".format(self.archive))
        d = Directory(self.target, leading=True)
        self += d
        self.target = d.path
    else:
        self.target = self.map('.')
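# Hedged usage sketch of the target handling above, assuming
# create_target_dir is enabled by default (an assumption, not verified
# here): the target directory is then derived from the archive name via
# extract_base_name(), or, when given explicitly, created through the
# Directory(..., leading=True) above. The archive path and component name
# are hypothetical; the call style (archive positional, target keyword)
# mirrors the Roundcube configure() below.
from batou.component import Component
from batou.lib.archive import Extract


class DemoUnpack(Component):

    def configure(self):
        # Target directory derived from the archive name.
        self += Extract('download/app-1.0.tar.gz')
        # Explicit target directory, created with leading directories.
        self += Extract('download/app-1.0.tar.gz', target='app')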
def configure(self):
    self.db = self.require_one('roundcube::database')
    postfix = self.require_one('postfix')
    self.imap_host = postfix.connect.host
    self.smtp_server = postfix.connect.host
    self.smtp_port = postfix.connect.port
    self.basedir = self.map('roundcube')
    self.provide('roundcube', self)

    self += Directory('download')
    download = Download(
        'http://downloads.sourceforge.net/project/roundcubemail/'
        'roundcubemail/{}/roundcubemail-{}-complete.tar.gz'.format(
            self.release, self.release),
        target='download/roundcube-{}.tar.gz'.format(self.release),
        checksum=self.checksum)
    self += download
    self += Extract(download.target, target='roundcube.orig')
    self += SyncDirectory(
        self.basedir,
        source=self.map(
            'roundcube.orig/roundcubemail-{}'.format(self.release)))

    self.db_dsnw = '{}://{}:{}@{}/{}'.format(
        self.db.dbms, self.db.username, self.db.password,
        self.db.address.connect.host, self.db.database)
    self += File(
        self.basedir + '/config/config.inc.php', source=self.config)

    self.fpm = FPM('roundcube', address=self.address)
    self += self.fpm
    self += RoundcubeInit(self)
def configure(self): self += File("default.vcl", source="default.vcl", is_template="true") self += Directory('vcl_includes') self += File("vcl_includes/acl.vcl", source="vcl_includes/acl.vcl", is_template="true") self += File("vcl_includes/backends.vcl", source="vcl_includes/backends.vcl", is_template="true") self += File("vcl_includes/backend_response.vcl", source="vcl_includes/backend_response.vcl", is_template="true") self += File("vcl_includes/backend_fetch.vcl", source="vcl_includes/backend_fetch.vcl", is_template="true") self += File("vcl_includes/deliver.vcl", source="vcl_includes/deliver.vcl", is_template="true") self += File("vcl_includes/main.vcl", source="vcl_includes/main.vcl", is_template="true") self += File("vcl_includes/recv.vcl", source="vcl_includes/recv.vcl", is_template="true") self += File("vcl_includes/synth.vcl", source="vcl_includes/synth.vcl", is_template="true") self += File("vcl_includes/hit_miss_pass.vcl", source="vcl_includes/hit_miss_pass.vcl", is_template="true") self += File("vcl_includes/test.vcl", source="vcl_includes/test.vcl", is_template="true") self.provide('varnish_dir', self.workdir) # XXX port duplicated from zeit-app-cache cookbook default attribute self.provide('varnish:http', self.host.fqdn)
def configure(self):
    self += Directory(self.target)
def test_directory_creates_leading_directories_if_configured(root):
    path = "directory/path"
    d = Directory(path, leading=True)
    root.component += d
    root.component.deploy()
    assert os.path.isdir(d.path)
def test_directory_doesnt_create_leading_directories_by_default(root):
    path = "directory/path"
    root.component += Directory(path)
    with pytest.raises(OSError):
        root.component.deploy()
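# Hedged configure() sketch tying the two tests above together: a single
# path segment directly below the workdir needs no options, while a nested
# path requires leading=True, otherwise deploy() fails with OSError. The
# component name and paths are hypothetical; Directory is the class
# exercised by the tests.
from batou.component import Component
from batou.lib.file import Directory


class DemoDirs(Component):

    def configure(self):
        self += Directory('var')                         # parent (workdir) exists
        self += Directory('var/log/app', leading=True)   # also creates var/ and var/log/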
def add_symlinks_to(self, other):
    other += Directory(other.sources_dir)
    for name in self.source_names:
        other += Symlink(os.path.join(other.sources_dir, name),
                         source=os.path.join(self.workdir, name))
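# Hedged call-site sketch for add_symlinks_to() above. The Consumer class
# and the 'source' resource name are hypothetical; the only assumptions
# taken from the method itself are the attributes it reads: source_names
# and workdir on the provider, sources_dir on the consumer.
from batou.component import Component


class Consumer(Component):

    sources_dir = 'sources'

    def configure(self):
        source = self.require_one('source', self.host)
        # For each name in source.source_names this yields
        # <consumer workdir>/sources/<name> -> <source workdir>/<name>
        source.add_symlinks_to(self)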