def send_deploy_event():
    newrelic_key = blueprint.get('newrelic_key', None)
    app_name = blueprint.get('app_name', None)

    if newrelic_key and app_name:
        url = 'https://api.newrelic.com/deployments.xml'
        headers = {'x-api-key': newrelic_key}

        with cd(python_path()):
            tag = run('git describe --tags')
            commit_hash = run('git rev-parse HEAD')

        deployer = git.get_local_commiter()

        payload = {
            'deployment[app_name]': app_name,
            # 'application_id': '1234567',
            'deployment[description]': tag,
            'deployment[revision]': commit_hash,
            # 'deployment[changelog]': changes,
            'deployment[user]': deployer,
        }

        response = requests.post(url, data=payload, headers=headers)
        info(response.text)
    else:
        info('No key found')

    # http://gc-taylor.com/blog/2013/02/11/fabric-task-for-notifying-new-relic-code-deploy/#sthash.5AnhN3An.sfju

def install():
    with sudo():
        info('Installing Ruby v1.9.3')
        debian.apt_get('install', 'ruby1.9.3')

        info('Installing Bundler')
        gem('install', 'bundler')

def update_modules(beat):
    changes = []

    # Get desired state
    desired_modules = set(blueprint.get('{}.modules'.format(beat), []))

    # Get current state
    with silent():
        module_files = run('find /etc/{}/modules.d -iname "*.yml"'.format(beat)).split()
        enabled_modules = {os.path.basename(module).split('.')[0]
                           for module in module_files}

    # Disable extra modules
    for extra in enabled_modules - desired_modules:
        info('Disabling {} module: {}', beat, extra)
        changes.append(extra)
        with silent(), sudo():
            run('{} modules disable {}'.format(beat, extra))

    # Enable missing modules
    for missing in desired_modules - enabled_modules:
        info('Enabling {} module: {}', beat, missing)
        changes.append(missing)
        with silent(), sudo():
            run('{} modules enable {}'.format(beat, missing))

    return changes

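# Hedged usage sketch (not from the original source): assumes the blueprint
# exposes a "<beat>.modules" list, e.g. filebeat.modules = ['system', 'nginx'].
# The call below would then disable any other module found under
# /etc/filebeat/modules.d, enable the missing ones, and return what changed.
changed = update_modules('filebeat')
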
def reset(branch, repository_path=None, **kwargs):
    """
    Fetch, reset, clean and checkout repository branch.

    :return: commit
    """
    if not repository_path:
        repository_path = debian.pwd()

    with cd(repository_path):
        name = os.path.basename(repository_path)
        info('Resetting git repository: {}@{}', name, branch)
        commands = [
            'git fetch origin',       # Fetch branches and tags
            'git reset --hard HEAD',  # Make hard reset to HEAD
            'git clean -fdx',         # Remove untracked files: *.pyc, *~ etc.
            'git checkout HEAD',      # Checkout HEAD
            'git reset refs/remotes/origin/{} --hard'.format(branch)  # Reset to branch
        ]
        with silent():
            output = run(' && '.join(commands))
        # Note: str.lstrip() strips a set of characters, not a prefix,
        # so slice off the literal "HEAD is now at " prefix instead.
        output = output.split(os.linesep)[-1][len('HEAD is now at '):]
        commit = output.split()[0]
        info('HEAD is now at: {}', output)

    return commit

def install_postgis(v=None):
    if not v:
        v = version()

    info('Installing postgis...')
    debian.apt_get('install', 'postgis',
                   'postgresql-{}-postgis-scripts'.format(v))

def enable(conf, weight, do_restart=True):
    """
    Enable logstash input/output provider

    :param conf: Input or output provider config file
    :param weight: Weight of provider
    :param do_restart: Restart service
    :return: Got enabled?
    """
    enabled = False
    conf = conf if conf.endswith('.conf') else '{}.conf'.format(conf)

    with sudo():
        available_conf = os.path.join(conf_available_path, conf)
        if not files.exists(available_conf):
            warn('Invalid conf: {}'.format(conf))
        else:
            with cd(conf_enabled_path):
                weight = str(weight).zfill(2)
                conf = '{}-{}'.format(weight, conf)
                if not files.exists(conf):
                    info('Enabling conf: {}', conf)
                    with silent():
                        debian.ln(available_conf, conf)
                        enabled = True

                    if do_restart:
                        restart('server')

    return enabled

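# Hedged usage sketch (not from the original source): assuming a
# "syslog-input.conf" file exists in conf_available_path, the call below would
# create a "10-syslog-input.conf" symlink in conf_enabled_path and restart
# the logstash server service.
enable('syslog-input', weight=10)
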
def enable(program, do_reload=True):
    """
    Enable program.

    :param program: Program to enable
    :param do_reload: Reload supervisor
    :return: Got enabled?
    """
    enabled = False
    program = (program
               if program.endswith('.conf') or program == 'default'
               else '{}.conf'.format(program))

    with sudo():
        available_program = os.path.join(programs_available_path, program)
        if not files.exists(available_program):
            warn('Invalid program: {}'.format(program))
        else:
            with cd(programs_enabled_path):
                if not files.exists(program):
                    info('Enabling program: {}', program)
                    with silent():
                        debian.ln(available_program, program)
                        enabled = True

                    if do_reload:
                        reload()

    return enabled

def configure_web(self):
    """
    Render and upload web.ini vassal to <project>.ini.

    :return: Updated vassals
    """
    destination = self.get_config_path()
    context = self.get_context()
    ini = self.get_web_vassal()
    template = os.path.join('uwsgi', ini)

    default_templates = uwsgi.blueprint.get_default_template_root()
    with settings(template_dirs=[default_templates]):
        # Check if a specific web vassal has been created, otherwise use the default
        if template not in blueprint.get_template_loader().list_templates():
            # Upload default web vassal
            info(indent('...using default web vassal'))
            template = os.path.join('uwsgi', 'default', 'web.ini')

        uploads = blueprint.upload(template, os.path.join(destination, ini),
                                   context=context)
        if uploads:
            self.updates.extend(uploads)

        # Upload remaining (local) vassals
        user_vassals = blueprint.upload('uwsgi/', destination, context=context)  # TODO: skip subdirs
        if user_vassals:
            self.updates.extend(user_vassals)

    return self.updates

def flush():
    """
    Clear varnish cache
    """
    info('Flushing Varnish...')
    varnishadm('ban.url .*')
    info('Down the drain!')

def info(scope=''):
    """
    Get runtime information from redis itself
    """
    with silent(), hide_prefix():
        output = api.run('redis-cli info ' + scope)
        api.info(output)

def enable(site, do_reload=True):
    """
    Enable site

    :param site: Site to enable
    :param do_reload: Reload nginx service
    :return: Got enabled?
    """
    enabled = False
    site = (site
            if site.endswith('.conf') or site == 'default'
            else '{}.conf'.format(site))

    with sudo():
        available_site = os.path.join(sites_available_path, site)
        if not files.exists(available_site):
            warn('Invalid site: {}'.format(site))
        else:
            with cd(sites_enabled_path):
                if not files.exists(site):
                    info('Enabling site: {}', site)
                    with silent():
                        debian.ln(available_site, site)
                        enabled = True

                    if do_reload:
                        reload()

    return enabled

def install_forwarder():
    """
    TODO: Build from github
        - wget https://storage.googleapis.com/golang/go1.4.1.linux-amd64.tar.gz
        - gunzip <go>
        - mv go /usr/local/
        - apt-get install unzip make ruby ruby-dev
        - wget https://github.com/elasticsearch/logstash-forwarder/archive/master.zip
        - unzip <forwarder>
        - cd <forwarder>
        - go build
        - gem install fpm
        - make deb
        - dpkg -i <forwarder>
    """
    with sudo():
        info('Adding apt repository for {}', 'logstash forwarder')
        debian.add_apt_repository('http://packages.elasticsearch.org/logstashforwarder/debian stable main')

        info('Installing {}', 'logstash forwarder')
        debian.apt_get('update')
        debian.apt_get('install', 'logstash-forwarder')

        # Upload init script
        blueprint.upload('forwarder/init.d/logstash-forwarder', '/etc/init.d/')
        debian.chmod('/etc/init.d/logstash-forwarder', mode=755)

        # Enable on boot
        debian.add_rc_service('logstash-forwarder')

def setup_schemas(drop=False):
    """
    Create database schemas and grant user privileges

    :param drop: Drop existing schemas before creation
    """
    schemas = blueprint.get('schemas', {})
    with sudo('postgres'):
        for schema, config in schemas.iteritems():
            user, password = config['user'], config.get('password')
            info('Creating user {}', user)
            if password:
                _client_exec("CREATE ROLE %(user)s WITH PASSWORD '%(password)s' LOGIN",
                             user=user, password=password)
            else:
                _client_exec("CREATE ROLE %(user)s LOGIN", user=user)

            if drop:
                info('Dropping schema {}', schema)
                _client_exec('DROP DATABASE %(name)s', name=schema)

            info('Creating schema {}', schema)
            _client_exec('CREATE DATABASE %(name)s', name=schema)

            info('Granting user {} to schema {}'.format(user, schema))
            _client_exec("GRANT ALL PRIVILEGES ON DATABASE %(schema)s to %(user)s",
                         schema=schema, user=user)

            for ext in blueprint.get('extensions', []):
                info('Creating extension {}'.format(ext))
                _client_exec("CREATE EXTENSION IF NOT EXISTS %(ext)s",
                             ext=ext, schema=schema)

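# Hedged configuration sketch (not from the original source): setup_schemas()
# expects the blueprint "schemas" setting to map database names to role
# credentials, plus an optional "extensions" list, roughly like this:
example_settings = {
    'schemas': {
        'myapp': {                 # database name
            'user': 'myapp',       # role created and granted privileges
            'password': 's3cret',  # optional; omit to create the role without a password
        },
    },
    'extensions': ['postgis'],     # created per schema if configured
}
# setup_schemas(drop=True) would then drop and recreate the "myapp" database.
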
def generate_nginx_conf(role='www'):
    """
    Generate nginx config for reverse proxying Kibana application
    """
    info('Generating kibana config to nginx@{}...'.format(role))
    context = {
        'domain': blueprint.get('domain', '_'),
        'elasticsearch_host': blueprint.get('elasticsearch_host', '127.0.0.1')
    }
    template = 'nginx/kibana.conf'
    conf = blueprint.render_template(template, context)
    pwd = os.path.dirname(env['real_fabfile'])

    for _dir, _conf in [('sites-available', 'kibana.conf'),
                        ('includes', 'kibana-locations.conf')]:
        conf_dir = os.path.join(pwd, 'templates', role, 'nginx', _dir)
        conf_path = os.path.join(conf_dir, _conf)

        if not os.path.exists(conf_dir):
            os.makedirs(conf_dir)

        with open(conf_path, 'w+') as f:
            f.write(conf)

    info('Select username and password...')
    passwd_dir = os.path.join(pwd, 'templates', role, 'nginx', 'conf.d')
    passwd_path = os.path.join(passwd_dir, 'kibana.htpasswd')

    if not os.path.exists(passwd_dir):
        os.makedirs(passwd_dir)

    username = prompt('Username:', default='kibana')
    local('htpasswd -c {filename} {username}'.format(filename=passwd_path,
                                                     username=username))

def add_repository():
    name = debian.lsb_codename()
    info('Adding postgres {} apt repository...', name)
    repo = 'deb https://apt.postgresql.org/pub/repos/apt/ {}-pgdg main'.format(name)
    debian.add_apt_key('https://www.postgresql.org/media/keys/ACCC4CF8.asc')
    debian.add_apt_repository(repository=repo, src=True)
    debian.apt_get_update()

def setup():
    """
    Setup Logstash server
    """
    from .elasticsearch import add_elastic_repo

    with sudo():
        branch = blueprint.get('branch', '6.x')
        add_elastic_repo(branch)

        version = blueprint.get('version', 'latest')
        info('Installing {} version {}', 'logstash', version)
        package = 'logstash' + ('={}'.format(version) if version != 'latest' else '')
        debian.apt_get('install', package)

        # Enable on boot
        debian.add_rc_service('logstash')

        # Prep custom folders
        debian.mkdir(conf_available_path)
        debian.mkdir(conf_enabled_path)

        # Install plugins
        plugins = blueprint.get('plugins', [])
        for plugin in plugins:
            info('Installing logstash "{}" plugin...', plugin)
            install_plugin(plugin)

        configure()

def update_filters():
    changes = []

    # Generate desired state as enabled_name => source_name
    config = blueprint.get('config', {})
    filters = {
        '{}-{}.conf'.format(str(weight).zfill(2), conf): '{}.conf'.format(conf)
        for weight, conf in config.iteritems()
    }

    # Get current state
    with silent():
        enabled_filters = run('ls {}'.format(conf_enabled_path)).split()

    # Disable extra services
    if blueprint.get('auto_disable_conf', True):
        for link in set(enabled_filters) - set(filters.keys()):
            info('Disabling conf: {}', link)
            changes.append(link)
            with silent(), sudo(), cd(conf_enabled_path):
                debian.rm(link)

    # Enable services
    for target in set(filters.keys()) - set(enabled_filters):
        source = os.path.join(conf_available_path, filters[target])
        info('Enabling conf: {}', target)
        changes.append(source)
        with silent(), sudo(), cd(conf_enabled_path):
            debian.ln(source, target)

    return changes

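# Hedged configuration sketch (not from the original source): update_filters()
# reads a "config" mapping of weight -> provider name from the blueprint,
# for example:
example_config = {
    10: 'syslog-input',          # -> 10-syslog-input.conf -> syslog-input.conf
    90: 'elasticsearch-output',  # -> 90-elasticsearch-output.conf -> elasticsearch-output.conf
}
# Any other symlink found in conf_enabled_path is removed as long as
# auto_disable_conf keeps its default value of True.
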
def get_context(self):
    """
    Build jinja context for web.ini vassal.

    :return: context
    """
    context = super(UWSGIProvider, self).get_context()

    # Memory optimized options
    cpu_count = blueprint.get('web.max_cores', debian.nproc())
    total_memory = int(round(debian.total_memory() / 1024.0 / 1024.0 / 1024.0))
    total_memory = blueprint.get('web.max_memory', default=total_memory)
    workers = blueprint.get('web.workers', default=uwsgi.get_worker_count(cpu_count))
    gevent = blueprint.get('web.gevent', default=0)

    info('Generating uWSGI conf based on {} core(s), {} GB memory and {} worker(s)',
         cpu_count, total_memory, workers)

    # TODO: Handle different loop engines (gevent)
    context.update({
        'cpu_affinity': uwsgi.get_cpu_affinity(cpu_count, workers),
        'workers': workers,
        'max_requests': int(uwsgi.get_max_requests(total_memory)),
        'reload_on_as': int(uwsgi.get_reload_on_as(total_memory)),
        'reload_on_rss': int(uwsgi.get_reload_on_rss(total_memory)),
        'limit_as': int(uwsgi.get_limit_as(total_memory)),
        'gevent': gevent,
    })

    # Override context defaults with blueprint settings
    context.update(blueprint.get('web'))

    return context

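# Hedged configuration sketch (not from the original source): the computed
# defaults above can be overridden per project through the blueprint "web"
# section, since its values are merged into the context last, e.g.:
example_web_settings = {
    'max_cores': 4,    # cap the core count used for cpu_affinity
    'max_memory': 8,   # GB; drives the max_requests / reload_on_* / limit_as heuristics
    'workers': 9,      # bypass the worker-count heuristic entirely
    'gevent': 100,     # assumed to enable the gevent loop engine; 0 leaves it off
}
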
def configure_infra():
    with sudo():
        info('Adding license key to config')
        context = {'newrelic_key': blueprint.get('newrelic_key', None)}
        blueprint.upload('newrelic-infra.yml', '/etc/newrelic-infra.yml',
                         context=context)

def reset(branch, repository_path=None, **kwargs):
    """
    Fetch, reset, clean and checkout repository branch.

    :return: commit
    """
    commit = None

    if not repository_path:
        repository_path = debian.pwd()

    with cd(repository_path):
        name = os.path.basename(repository_path)
        info('Resetting git repository: {}@{}', name, branch)

        with silent('warnings'):
            commands = [
                'git fetch origin',       # Fetch branches and tags
                'git reset --hard HEAD',  # Make hard reset to HEAD
                'git clean -fdx',         # Remove untracked files: *.pyc, *~ etc.
                'git checkout HEAD',      # Checkout HEAD
                'git reset refs/remotes/origin/{} --hard'.format(branch)  # Reset to branch
            ]
            output = run(' && '.join(commands))

        if output.return_code != 0:
            warn('Failed to reset repository "{}", probably permission denied!'.format(name))
        else:
            output = output.split(os.linesep)[-1][len('HEAD is now at '):]
            commit = output.split()[0]
            info('HEAD is now at: {}', output)

    return commit

def clone(url, branch=None, repository_path=None, **kwargs):
    """
    Clone repository and branch.

    :param url: Git url to clone
    :param branch: Branch to checkout
    :param repository_path: Destination
    :param kwargs: Not used but here for easier kwarg passing
    :return: (destination, got_cloned bool)
    """
    repository = parse_url(url, branch=branch)
    name = repository['name']
    branch = repository['branch']
    cloned = False

    if not repository_path:
        repository_path = os.path.join('.', name)

    if not files.exists(os.path.join(repository_path, '.git')):
        info('Cloning {}@{} into {}', url, branch, repository_path)
        with silent('warnings'):
            cmd = 'git clone -b {branch} {remote} {name}'.format(branch=branch,
                                                                 remote=url,
                                                                 name=name)
            output = run(cmd)

        if output.return_code != 0:
            warn('Failed to clone repository "{}", probably permission denied!'.format(name))
            cloned = None
        else:
            cloned = True
    else:
        info('Git repository already cloned: {}', name)

    return repository_path, cloned

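# Hedged usage sketch (not from the original source): parse_url() is assumed to
# derive the repository name (and default branch) from the remote URL, so the
# call below would clone into ./myproject and report whether a fresh clone
# happened (True), was skipped (False) or failed (None).
path, cloned = clone('git@github.com:example/myproject.git', branch='develop')
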
def install():
    with sudo():
        info('Install python dependencies')
        debian.apt_get('install', 'python-dev', 'python-setuptools')
        run('easy_install pip')
        run('touch {}'.format(pip_log_file))
        debian.chmod(pip_log_file, mode=777)
        pip('install', 'setuptools', '--upgrade')

def flush():
    """
    Delete all cached keys
    """
    info('Flushing Memcached...')
    with sudo(), silent():
        run('echo "flush_all" | /bin/netcat -q 2 127.0.0.1 11211')
    info('Down the drain!')

def configure():
    """
    Configure newrelic server
    """
    with sudo():
        info('Adding license key to config')
        newrelic_key = blueprint.get('newrelic_key', None)
        run('nrsysmond-config --set license_key={}'.format(newrelic_key))

def create_server_ssl_cert():
    with sudo():
        info('Generating SSL certificate...')
        debian.mkdir('/etc/pki/tls/certs')
        debian.mkdir('/etc/pki/tls/private')
        with cd('/etc/pki/tls'):
            key = 'private/logstash-forwarder.key'
            crt = 'certs/logstash-forwarder.crt'
            run('openssl req -x509 -batch -nodes -days 3650 -newkey rsa:2048 '
                '-keyout {} -out {}'.format(key, crt))

def install(install_java=True):
    with sudo():
        if install_java:
            from blues import java
            java.install()

        version = blueprint.get('version', '0.13.3')
        info('Downloading Metabase v%s' % version)
        run('mkdir -p /etc/metabase/ && cd /etc/metabase/ && curl -O '
            'http://downloads.metabase.com/v%s/metabase.jar' % version)

def add_elastic_repo(branch):
    with sudo():
        info('Adding apt repository for {} branch {}', 'elastic.co', branch)
        repository = 'https://artifacts.elastic.co/packages/{}/apt stable main'.format(branch)
        debian.add_apt_repository(repository)

        info('Adding apt key for {}', repository)
        debian.add_apt_key('https://artifacts.elastic.co/GPG-KEY-elasticsearch')
        debian.apt_get_update()

def install():
    ver = blueprint.get('version') or ''
    package_name = 'ruby{}'.format(ver)

    with sudo():
        info('Installing {}', package_name)
        debian.apt_get('install', package_name, '{}-dev'.format(package_name))

        info('Installing Bundler')
        install_gem('bundler')

def ctl(command, program=''):
    """
    Run supervisorctl :[command],[program]

    :param command: The command to run
    :param program: The program to run command against
    """
    with silent():
        output = supervisorctl(command, program=program)
    with hide_prefix():
        info(output)

def create(path):
    options = ''

    if python.requested_version() >= (3,):
        options += ' -p /usr/bin/python3'

    if not files.exists(path):
        info('Creating virtualenv: {}', path)
        run('virtualenv{options} {}'.format(path, options=options))
    else:
        info('Virtualenv already exists: {}', path)

def reload(vassal_path=None):
    """
    Reload uwsgi or reload specific vassal @ path, via touch.

    :param vassal_path: The absolute path to vassal ini to reload.
                        If not given, the uwsgi service will reload.
    """
    if not vassal_path:
        debian.service('uwsgi', 'reload', check_status=False)
    else:
        vassal_name = os.path.splitext(os.path.basename(vassal_path))[0]
        with sudo(), silent():
            info('Reloading {} uWSGI vassal', vassal_name)
            run('touch {}'.format(vassal_path))

def install():
    with sudo():
        from blues import java
        java.install()

        version = blueprint.get('version', '1.0')
        info('Adding apt repository for {} version {}', 'elasticsearch', version)
        repository = 'http://packages.elasticsearch.org/elasticsearch/{0}/debian stable main'.format(version)
        debian.add_apt_repository(repository)

        info('Adding apt key for {}', repository)
        debian.add_apt_key('http://packages.elasticsearch.org/GPG-KEY-elasticsearch')
        debian.apt_get('update')

        # Install elasticsearch (and java)
        info('Installing {} version {}', 'elasticsearch', version)
        debian.apt_get('install', 'elasticsearch')

        # Install plugins
        plugins = blueprint.get('plugins', [])
        for plugin in plugins:
            info('Installing elasticsearch "{}" plugin...', plugin)
            install_plugin(plugin)

        # Enable on boot
        debian.add_rc_service('elasticsearch', priorities='defaults 95 10')

def install():
    with sudo():
        info('Downloading wowza')
        version = blueprint.get('wowza_version', '4.1.2')
        binary = 'WowzaStreamingEngine-{}.deb.bin'.format(version)
        version_path = version.replace('.', '-')
        url = 'http://www.wowza.com/downloads/WowzaStreamingEngine-{}/{}'.format(version_path, binary)
        run('wget -P /tmp/ {url}'.format(url=url))
        debian.chmod('/tmp/{}'.format(binary), '+x')

        info('Installing wowza')
        run('/tmp/{}'.format(binary))

def migrate():
    """
    Migrate database
    """
    info('Migrate database')
    options = env.get('django__migrate', '')

    if version() >= (1, 7):
        manage('migrate ' + options)
    elif blueprint.get('use_south', True):
        manage('syncdb --noinput')  # TODO: Remove?
        manage('migrate --merge ' + options)  # TODO: Remove --merge?
    else:
        manage('syncdb --noinput')

def install():
    with sudo():
        lbs_release = debian.lbs_release()

        if lbs_release == '12.04':
            debian.add_apt_ppa('webupd8team/java')
            debian.debconf_set_selections('shared/accepted-oracle-license-v1-1 select true',
                                          'shared/accepted-oracle-license-v1-1 seen true')
            package = 'oracle-java7-installer'
        else:
            package = 'java7-jdk'

        info('Install Java 7 JDK')
        debian.apt_get('install', package)

def install_server():
    with sudo():
        version = blueprint.get('server.version', '1.4')
        info('Adding apt repository for {} version {}', 'logstash', version)
        debian.add_apt_repository('http://packages.elasticsearch.org/logstash/{}/debian stable main'.format(version))

        info('Installing {} version {}', 'logstash', version)
        debian.apt_get('update')
        debian.apt_get('install', 'logstash')

        # Enable on boot
        debian.add_rc_service('logstash')

        # Create and download SSL cert
        create_server_ssl_cert()
        download_server_ssl_cert()

def clone(url, branch=None, repository_path=None, **kwargs):
    repository = parse_url(url, branch=branch)
    name = repository['name']
    branch = repository['branch']

    if not repository_path:
        repository_path = os.path.join('.', name)

    if not files.exists(os.path.join(repository_path, '.git')):
        info('Cloning {}@{} into {}', url, branch, repository_path)
        cmd = 'git clone -b {branch} {remote} {name}'.format(branch=branch,
                                                             remote=url,
                                                             name=name)
        run(cmd)
    else:
        info('Git repository already cloned: {}', name)

    return repository_path

def install():
    with sudo():
        # Ensure python (pip) is installed
        python.install()

        # PIP install system wide uWSGI
        package = 'uwsgi'
        version = blueprint.get('version')
        if version:
            package += '=={}'.format(version)
        info('Installing: {} ({})', 'uWSGI', version if version else 'latest')
        python.pip('install', package)
        python.pip('install', 'uwsgitop', 'gevent')

        # Create group
        debian.groupadd('app-data', gid_min=10000)

        # Create directories
        debian.mkdir(log_path, owner='root', group='app-data', mode=1775)
        debian.mkdir(tmpfs_path, owner='root', group='app-data', mode=1775)

def install():
    with sudo():
        info('Downloading kibana')
        version = blueprint.get('version', '3.1.0')
        tar_file = 'kibana-{}.tar.gz'.format(version)
        run('wget -P /tmp/ https://download.elasticsearch.org/kibana/kibana/{f}'.format(f=tar_file))

        # Extract and soft link kibana in web root
        web_root = '/srv/www/'
        debian.mkdir(web_root, mode=1775, owner='www-data', group='www-data')
        run('tar xzf /tmp/{f} -C {web_root}'.format(f=tar_file, web_root=web_root))
        src_root = os.path.join(web_root, 'kibana-{version}'.format(version=version))
        debian.chown(src_root, owner='www-data', group='www-data', recursive=True)
        debian.ln(src_root, '/srv/www/kibana')

def install():
    with sudo():
        # Ensure python (pip) is installed
        python.install()

        # PIP install system wide Supervisor
        package = 'supervisor'
        version = blueprint.get('version')
        if version:
            package += '=={}'.format(version)
        info('Installing: {} ({})', 'Supervisor', version if version else 'latest')
        python.pip('install', package)

        # Create group
        debian.groupadd('app-data', gid_min=10000)

        # Create directories
        for d in (programs_available_path, programs_enabled_path, log_path, tmpfs_path):
            debian.mkdir(d, owner='root', group='app-data', mode=1775)

def install_solr():
    with sudo():
        version = blueprint.get('version')
        version_tuple = tuple(map(int, version.split('.')))
        archive = 'solr-{}.tgz'.format(version)
        if version_tuple < (4, 1, 0):
            archive = 'apache-{}'.format(archive)

        url = 'https://archive.apache.org/dist/lucene/solr/{}/{}'.format(version, archive)

        with cd('/tmp'):
            info('Download {} ({})', 'Solr', version)
            run('wget {}'.format(url))

            info('Extracting archive...')
            with silent():
                run('tar xzf {}'.format(archive))
                debian.mv(os.path.splitext(archive)[0], solr_home)
                debian.chmod(solr_home, 755, 'solr', 'solr', recursive=True)
                debian.rm(archive)

def setup_shared_memory():
    """
    http://leopard.in.ua/2013/09/05/postgresql-sessting-shared-memory/
    """
    sysctl_path = '/etc/sysctl.conf'
    shmmax_configured = files.contains(sysctl_path, 'kernel.shmmax')
    shmall_configured = files.contains(sysctl_path, 'kernel.shmall')

    if not any([shmmax_configured, shmall_configured]):
        page_size = debian.page_size()
        phys_pages = debian.phys_pages()
        shmall = phys_pages / 2
        shmmax = shmall * page_size

        shmmax_str = 'kernel.shmmax = {}'.format(shmmax)
        shmall_str = 'kernel.shmall = {}'.format(shmall)

        with sudo():
            files.append(sysctl_path, shmmax_str, partial=True)
            files.append(sysctl_path, shmall_str, partial=True)
            run('sysctl -p')

        info('Added **{}** to {}', shmmax_str, sysctl_path)
        info('Added **{}** to {}', shmall_str, sysctl_path)

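# Worked example (assumed numbers, not from the original source): on a host
# with 4 GB of RAM and a 4096-byte page size, phys_pages is 1048576, so
#   shmall = 1048576 / 2   = 524288 pages (half of physical memory)
#   shmmax = 524288 * 4096 = 2147483648 bytes (a 2 GB max shared memory segment)
# i.e. PostgreSQL may allocate at most half the machine's memory as shared memory.
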
def install():
    with sudo():
        # TODO: Change to java.install (openjdk)?
        debian.add_apt_ppa('webupd8team/java')
        debian.debconf_set_selections('shared/accepted-oracle-license-v1-1 select true',
                                      'shared/accepted-oracle-license-v1-1 seen true')

        version = blueprint.get('version', '1.0')
        info('Adding apt repository for {} version {}', 'elasticsearch', version)
        repository = 'http://packages.elasticsearch.org/elasticsearch/{0}/debian stable main'.format(version)
        debian.add_apt_repository(repository)

        info('Adding apt key for {}', repository)
        debian.add_apt_key('http://packages.elasticsearch.org/GPG-KEY-elasticsearch')
        debian.apt_get('update')

        # Install elasticsearch (and java)
        info('Installing {} version {}', 'elasticsearch', version)
        debian.apt_get('install', 'oracle-java7-installer', 'elasticsearch')

        # Enable on boot
        debian.add_rc_service('elasticsearch', priorities='defaults 95 10')

def install_solr():
    with sudo():
        version = blueprint.get('version')
        version_tuple = tuple(map(int, version.split('.')))
        archive = 'solr-{}.tgz'.format(version)
        if version_tuple < (4, 1, 0):
            archive = 'apache-{}'.format(archive)

        url = 'https://archive.apache.org/dist/lucene/solr/{}/{}'.format(version, archive)

        with cd('/tmp'):
            info('Download {} ({})', 'Solr', version)
            run('wget {}'.format(url))

            info('Extracting archive...')
            with silent():
                run('tar xzf {}'.format(archive))
                solr_version_dir = os.path.splitext(archive)[0]
                solr_version_path = os.path.join('/usr', 'share', solr_version_dir)
                debian.chmod(solr_version_dir, 755, 'solr', 'solr', recursive=True)

                if files.exists(solr_version_path):
                    info('Found same existing version, removing it...')
                    debian.rm(solr_version_path, recursive=True)

                debian.mv(solr_version_dir, '/usr/share/')
                debian.ln(solr_version_path, solr_home)
                debian.rm(archive)

def dump(schema=None):
    """
    Dump and download all configured, or given, schemas.

    :param schema: Specific schema to dump and download.
    """
    if not schema:
        schemas = blueprint.get('schemas', {}).keys()
        for i, schema in enumerate(schemas, start=1):
            print("{i}. {schema}".format(i=i, schema=schema))
        valid_indices = '[1-{}]+'.format(len(schemas))
        schema_choice = prompt('Select schema to dump:', default='1',
                               validate=valid_indices)
        schema = schemas[int(schema_choice) - 1]

    with sudo('postgres'):
        now = datetime.now().strftime('%Y-%m-%d')
        output_file = '/tmp/{}_{}.backup'.format(schema, now)
        filename = os.path.basename(output_file)

        options = dict(format='tar', output_file=output_file, schema=schema)

        info('Dumping schema {}...', schema)
        run('pg_dump -c -F {format} -f {output_file} {schema}'.format(**options))

        info('Downloading dump...')
        local_file = '~/{}'.format(filename)
        files.get(output_file, local_file)

    with sudo(), silent():
        debian.rm(output_file)

    info('New smoking hot dump at {}', local_file)
