def tunnel(local_port, remote_port):
    """
    Opens an SSH tunnel forwarding a local port to a port on the remote host.
    Blocks until interrupted (ssh -N runs no remote command).
    """
    env.tunnel_local_port = local_port
    env.tunnel_remote_port = remote_port
    cmd = ' ssh -i %(key_filename)s -L %(tunnel_local_port)s:localhost:%(tunnel_remote_port)s %(user)s@%(host_string)s -N' % env
    local_or_dryrun(cmd)
def generate_self_signed_certificate(domain='', r=None):
    """
    Generates a self-signed certificate for use in an internal development
    environment for testing SSL pages.

    domain: the certificate's common name; falls back to env.ssl_domain.
    r: role whose roles/<role>/ssl directory receives the key/crt pair;
       falls back to env.ROLE or ALL.
    """
    env.ssl_domain = domain or env.ssl_domain
    assert env.ssl_domain, 'No SSL domain defined.'
    role = r or env.ROLE or ALL
    ssl_dst = 'roles/%s/ssl' % (role,)
    if not os.path.isdir(ssl_dst):
        os.makedirs(ssl_dst)
    env.ssl_base_dst = '%s/%s' % (ssl_dst, env.ssl_domain)
    # Modeled on:
    # http://almostalldigital.wordpress.com/2013/03/07/self-signed-ssl-certificate-for-ec2-load-balancer/
    # openssl req -new -newkey rsa:4096 -days 365 -nodes -x509 -subj "..." -keyout ... -out ...
    # BUGFIX: the previous command string was corrupted (an unterminated
    # 'openssl genrsa -des3 -passout' fragment spliced into the req command,
    # leaving invalid Python syntax). Reconstructed as a single self-signed
    # `openssl req -x509` call mirroring the subject used by generate_csr().
    cmd = ('openssl req -new -newkey rsa:%(ssl_length)s -days 365 -nodes -x509 '
           '-subj "/C=%(ssl_country)s/ST=%(ssl_state)s/L=%(ssl_city)s'
           '/O=%(ssl_organization)s/CN=%(ssl_domain)s" '
           '-keyout %(ssl_base_dst)s.key -out %(ssl_base_dst)s.crt') % env
    print(cmd)
    local_or_dryrun(cmd)
def delete(name=None, group=None, release=None, except_release=None, dryrun=1, verbose=1):
    """
    Permanently erase one or more VM instances from existence.
    Only the EC2 backend is implemented.
    """
    verbose = int(verbose)
    if env.vm_type != EC2:
        raise NotImplementedError
    conn = get_ec2_connection()
    targets = list_instances(
        name=name,
        group=group,
        release=release,
        except_release=except_release,
    )
    for instance_name, instance_data in targets.items():
        public_dns_name = instance_data['public_dns_name']
        print('\nDeleting %s (%s)...' % (instance_name, instance_data['id']))
        if get_dryrun():
            continue
        conn.terminate_instances(instance_ids=[instance_data['id']])
        # Clear host key on localhost so a future instance reusing this DNS
        # name doesn't trigger a host-key-mismatch error.
        known_hosts = os.path.expanduser('~/.ssh/known_hosts')
        local_or_dryrun('ssh-keygen -f "%s" -R %s' % (known_hosts, public_dns_name))
def generate_csr(domain='', r=None):
    """
    Creates a certificate signing request to be submitted to a formal
    certificate authority to generate a certificate.

    Note, the provider may say the CSR must be created on the target server,
    but this is not necessary.
    """
    from apache import set_apache_specifics, set_apache_site_specifics
    env.ssl_domain = domain or env.ssl_domain
    role = r or env.ROLE or ALL
    ssl_dst = 'roles/%s/ssl' % (role,)
    print('ssl_dst:', ssl_dst)
    if not os.path.isdir(ssl_dst):
        os.makedirs(ssl_dst)
    #apache_specifics = set_apache_specifics()
    for site, site_data in common.iter_sites(setter=set_apache_site_specifics):
        print('site:', site, file=sys.stderr)
        # assert env.ssl_domain, 'No SSL domain defined.'
        # Strip any wildcard prefix so the output filename stays friendly.
        env.ssl_base_dst = '%s/%s' % (ssl_dst, env.ssl_domain.replace('*.', ''))
        env.ssl_csr_year = date.today().year
        csr_cmd = 'openssl req -nodes -newkey rsa:%(ssl_length)s -subj "/C=%(ssl_country)s/ST=%(ssl_state)s/L=%(ssl_city)s/O=%(ssl_organization)s/CN=%(ssl_domain)s" -keyout %(ssl_base_dst)s.%(ssl_csr_year)i.key -out %(ssl_base_dst)s.%(ssl_csr_year)i.csr' % env
        local_or_dryrun(csr_cmd)
def update(package='', ignore_errors=0, no_deps=0, all=0, mirrors=1):
    """
    Updates the local cache of pip packages.

    If all=1, skips check of host and simply updates everything.

    package: a single package to download; otherwise the requirements are used.
    ignore_errors: if true, pip failures only warn instead of aborting.
    no_deps: if true, passes --no-deps to pip.
    mirrors: if false, strips --use-mirrors from the pip command.
    """
    assert env[ROLE]
    ignore_errors = int(ignore_errors)
    env.pip_path_versioned = env.pip_path % env
    env.pip_local_cache_dir = env.pip_local_cache_dir_template % env
    env.pip_cache_dir = env.pip_local_cache_dir
    if not os.path.isdir(env.pip_cache_dir):
        os.makedirs(env.pip_cache_dir)
    env.pip_package = (package or '').strip()
    env.pip_no_deps = '--no-deps' if int(no_deps) else ''
    env.pip_build_dir = tempfile.mkdtemp()
    # Clear build directory in case it wasn't properly cleaned up previously.
    # BUGFIX: the template previously referenced the undefined key
    # 'pip_build_directory'; the key assigned above is 'pip_build_dir'.
    cmd = 'rm -Rf %(pip_build_dir)s' % env
    if env.is_local:
        run_or_dryrun(cmd)
    else:
        sudo_or_dryrun(cmd)
    with settings(warn_only=ignore_errors):
        if package:
            # Download a single specific package.
            cmd = env.pip_update_command % env
            if not int(mirrors):
                cmd = cmd.replace('--use-mirrors', '')
            local_or_dryrun(cmd)
        else:
            # Download each package in a requirements file.
            # Note, specifying the requirements file in the command isn't properly
            # supported by pip, thus we have to parse the file itself and send each
            # to pip separately.
            if int(all):
                packages = list(iter_pip_requirements())
            else:
                packages = [k for k, v in check()]
            for package in packages:
                env.pip_package = package.strip()
                cmd = env.pip_update_command % env
                if not int(mirrors):
                    cmd = cmd.replace('--use-mirrors', '')
                local_or_dryrun(cmd)
def reset():
    """
    Deletes all recorded plan executions.
    This will cause the planner to think everything needs to be re-deployed.
    """
    plan_dir = os.path.join(init_plan_data_dir(), env.ROLE)
    # Pick the command runner matching where plan data is stored.
    if env.plan_storage == STORAGE_REMOTE:
        runner = sudo_or_dryrun
    elif env.plan_storage == STORAGE_LOCAL:
        runner = local_or_dryrun
    else:
        raise NotImplementedError
    runner('rm -Rf "%s"' % plan_dir)
    runner('mkdir -p "%s"' % plan_dir)
def database_files_dump(site=None):
    """
    Runs the Django management command to export files stored in the database
    to the filesystem. Assumes the app django_database_files is installed.
    """
    from burlap.dj import render_remote_paths
    set_site(site or env.SITE)
    render_remote_paths()
    cmd = ('export SITE=%(SITE)s; export ROLE=%(ROLE)s; '
           'cd %(remote_manage_dir)s; '
           '%(django_manage)s database_files_dump') % env
    runner = local_or_dryrun if env.is_local else run_or_dryrun
    runner(cmd)
def shell(gui=0, command=''):
    """
    Opens an SSH connection to the current host.

    gui: if true, enables X11 forwarding (-X).
    command: optional command to run remotely instead of an interactive shell.
    """
    from burlap.common import get_hosts_for_site
    try:
        from dj import render_remote_paths
        render_remote_paths()
    except Exception:
        pass
    # Work on a shallow copy of env so host/site tweaks don't leak out.
    _env = type(env)(env)
    if _env.SITE != _env.default_site:
        shell_hosts = get_hosts_for_site()
        if shell_hosts:
            _env.host_string = shell_hosts[0]
    _env.SITE = _env.SITE or _env.default_site
    _env.shell_x_opt = '-X' if int(gui) else ''
    if '@' in _env.host_string:
        _env.shell_host_string = _env.host_string
    else:
        _env.shell_host_string = '%(user)s@%(host_string)s' % _env
    _env.shell_check_host_key_str = '-o StrictHostKeyChecking=no'
    _env.shell_default_dir = _env.shell_default_dir_template % _env
    if command:
        _env.shell_interactive_shell_str = command
    else:
        _env.shell_interactive_shell_str = _env.shell_interactive_shell % _env
    if _env.is_local:
        cmd = '%(shell_interactive_shell_str)s' % _env
    elif _env.key_filename:
        # If host_string contains the port, then strip it off and pass separately.
        port = _env.shell_host_string.split(':')[-1]
        if port.isdigit():
            _env.shell_host_string = _env.shell_host_string.split(':')[0] + (' -p %s' % port)
        cmd = 'ssh -t %(shell_x_opt)s %(shell_check_host_key_str)s -i %(key_filename)s %(shell_host_string)s "%(shell_interactive_shell_str)s"' % _env
    elif _env.password:
        cmd = 'ssh -t %(shell_x_opt)s %(shell_check_host_key_str)s %(shell_host_string)s "%(shell_interactive_shell_str)s"' % _env
    else:
        # BUGFIX: previously `cmd` was left unbound when the host was remote
        # and neither key_filename nor password was set, raising
        # UnboundLocalError. Fall back to plain ssh, relying on the agent or
        # default identity files for authentication.
        cmd = 'ssh -t %(shell_x_opt)s %(shell_check_host_key_str)s %(shell_host_string)s "%(shell_interactive_shell_str)s"' % _env
    local_or_dryrun(cmd)
def deploy_cura():
    """
    Updates files for the Printrbot manager.

    e.g. fab printer deploy_cura
    """
    # Ensure our 3d configuration options are up-to-date.
    profiles_cmd = 'mkdir -p ~/git; cd ~/git; git clone https://github.com/chrisspen/3d-printer-profiles.git; cd 3d-printer-profiles; git pull'
    run_or_dryrun(profiles_cmd)
    # Ensure our 3d models are up-to-date.
    for tmpl in (
            'mkdir -p %(project_home)s/models/printable',
            'chown -R %(user)s:%(user)s %(project_home)s'):
        sudo_or_dryrun(tmpl % env)
    sync_cmd = (
        'rsync -avz --delete --rsh "ssh -t -o StrictHostKeyChecking=no -i %(key_filename)s" '
        'models/printable %(user)s@%(host_string)s:%(project_home)s/models/') % env
    local_or_dryrun(sync_cmd)
def version():
    """
    Get the Vagrant version.

    Returns a tuple of ints parsed from `vagrant --version`, or None when
    the command fails.
    """
    with settings(hide('running', 'warnings'), warn_only=True):
        res = local_or_dryrun('vagrant --version', capture=True)
        if res.failed:
            return None
        last_line = res.splitlines()[-1]
        ver_str = re.match(r'Vagrant (?:v(?:ersion )?)?(.*)', last_line).group(1)
        return tuple(_to_int(piece) for piece in ver_str.split('.'))
def get_expiration_date(fn):
    """
    Reads the expiration date of a local crt file.

    Returns a datetime parsed from openssl's notAfter field, or None when
    no expiration line is found.
    """
    env.ssl_crt_fn = fn
    with hide('running'):
        output = local_or_dryrun('openssl x509 -noout -in %(ssl_crt_fn)s -dates' % env, capture=True)
        found = re.findall('notAfter=(.*?)$', output, flags=re.IGNORECASE)
        if found:
            return dateutil.parser.parse(found[0])
def dump(dest_dir=None, to_local=None, from_local=0, archive=0, dump_fn=None):
    """
    Exports the target database to a single transportable file on the localhost,
    appropriate for loading using load().

    dest_dir: overrides env.db_dump_dest_dir.
    to_local: if true, downloads the remote dump to the localhost;
        defaults to 1 when the target host is remote.
    from_local: if true, dumps from the local database instead of remote.
    archive: if true (together with to_local), moves the dump into the
        archive directory.
    dump_fn: overrides the dump filename template.

    Returns the (unrendered) dump filename template.
    """
    from burlap.dj import set_db
    from_local = int(from_local)
    set_db()
    if dest_dir:
        env.db_dump_dest_dir = dest_dir
    env.db_date = datetime.date.today().strftime('%Y%m%d')
    env.db_dump_fn = get_default_db_fn(dump_fn or env.db_dump_fn_template).strip()
    if to_local is None and not env.is_local:
        to_local = 1
    if env.db_dump_command:
        run_or_dryrun(env.db_dump_command % env)
    elif 'postgres' in env.db_engine or 'postgis' in env.db_engine:
        assert env.db_schemas, \
            'Please specify the list of schemas to dump in db_schemas.'
        env.db_schemas_str = ' '.join('-n %s' % _ for _ in env.db_schemas)
        cmd = env.db_postgresql_dump_command % env
        if env.is_local or from_local:
            local_or_dryrun(cmd)
        else:
            sudo_or_dryrun(cmd)
    elif 'mysql' in env.db_engine:
        cmd = env.db_mysql_dump_command % env
        if env.is_local:
            local_or_dryrun(cmd)
        else:
            sudo_or_dryrun(cmd)
    else:
        # BUGFIX: previously raised `NotImplemented` (a value, not an
        # exception class), which itself raises TypeError under Python 3.
        raise NotImplementedError
    # Download the database dump file on the remote host to localhost.
    if not from_local and (0 if to_local is None else int(to_local)) and not env.is_local:
        cmd = ('rsync -rvz --progress --recursive --no-p --no-g --rsh "ssh -o StrictHostKeyChecking=no -i %(key_filename)s" %(user)s@%(host_string)s:%(db_dump_fn)s %(db_dump_fn)s') % env
        local_or_dryrun(cmd)
    if to_local and int(archive):
        db_fn = render_fn(env.db_dump_fn)
        env.db_archive_fn = '%s/%s' % (env.db_dump_archive_dir, os.path.split(db_fn)[-1])
        local_or_dryrun('mv %s %s' % (db_fn, env.db_archive_fn))
    return env.db_dump_fn
def verify_certificate_chain(base=None, crt=None, csr=None, key=None):
    """
    Confirms the key, CSR, and certificate files all match.

    Either pass base (a filename prefix expanded to .crt/.csr/.key) or all
    three explicit filenames. Raises Exception when the moduli differ.
    """
    from burlap.common import get_verbose, print_fail, print_success
    verbose = get_verbose()
    if base:
        crt = base + '.crt'
        csr = base + '.csr'
        key = base + '.key'
    else:
        assert crt and csr and key, 'If base not provided, crt and csr and key must be given.'
    assert os.path.isfile(crt)
    assert os.path.isfile(csr)
    assert os.path.isfile(key)
    # Compare the md5 of each file's RSA modulus; all three must agree for
    # the key, request, and certificate to belong together.
    csr_md5 = local_or_dryrun(
        'openssl req -noout -modulus -in %s | openssl md5' % csr, capture=True)
    key_md5 = local_or_dryrun(
        'openssl rsa -noout -modulus -in %s | openssl md5' % key, capture=True)
    crt_md5 = local_or_dryrun(
        'openssl x509 -noout -modulus -in %s | openssl md5' % crt, capture=True)
    match = crt_md5 == csr_md5 == key_md5
    if verbose or not match:
        print('crt:', crt_md5)
        print('csr:', csr_md5)
        print('key:', key_md5)
    if match:
        print_success('Files look good!')
    else:
        # BUGFIX: corrected garbled user-facing message "Files no not match!".
        print_fail('Files do not match!')
        raise Exception('Files do not match!')
def sync(sync_set, force=0):
    """
    Uploads media to an Amazon S3 bucket using s3cmd.

    sync_set: key into env.s3_sync_sets naming the path pairs to sync.
    force: if true, unconditionally re-uploads (s3cmd put --force) instead
        of syncing only changed files.
    """
    from burlap.dj import get_settings, render_remote_paths
    force = int(force)
    env.s3_sync_force_flag = ' --force ' if force else ''
    _settings = get_settings(verbose=1)
    assert _settings, 'Unable to import settings.'
    # Copy the AWS_* values from the Django settings module into env so the
    # command templates below can reference them.
    # BUGFIX: dict.iterkeys() does not exist under Python 3; iterate directly.
    for k in _settings.__dict__:
        if k.startswith('AWS_'):
            env[k] = _settings.__dict__[k]
    render_remote_paths()
    site_data = env.sites[env.SITE]
    env.update(site_data)
    rets = []
    for paths in env.s3_sync_sets[sync_set]:
        is_local = paths.get('is_local', True)
        local_path = paths['local_path'] % env
        remote_path = paths['remote_path']
        remote_path = remote_path.replace(':/', '/')
        if not remote_path.startswith('s3://'):
            remote_path = 's3://' + remote_path
        local_path = local_path % env
        if is_local:
            env.s3_local_path = os.path.abspath(local_path)
        else:
            env.s3_local_path = local_path
        # Preserve a trailing slash so rsync-like semantics are kept.
        if local_path.endswith('/') and not env.s3_local_path.endswith('/'):
            env.s3_local_path = env.s3_local_path + '/'
        env.s3_remote_path = remote_path % env
        print('Syncing %s to %s...' % (env.s3_local_path, env.s3_remote_path))
        # 'put' forces upload of everything; 'sync' only uploads changes.
        if force:
            env.s3_sync_cmd = 'put'
        else:
            env.s3_sync_cmd = 'sync'
        cmd = (
            'export AWS_ACCESS_KEY_ID=%(AWS_ACCESS_KEY_ID)s; '
            'export AWS_SECRET_ACCESS_KEY=%(AWS_SECRET_ACCESS_KEY)s; '
            's3cmd %(s3_sync_cmd)s --progress --acl-public --guess-mime-type --no-mime-magic '
            '--delete-removed --cf-invalidate --recursive %(s3_sync_force_flag)s '
            '%(s3_local_path)s %(s3_remote_path)s') % env
        if is_local:
            local_or_dryrun(cmd)
        else:
            run_or_dryrun(cmd)
def shell():
    """
    Opens an interactive shell inside the Vagrant box.
    """
    set()
    cmd = env.vagrant_shell_command
    local_or_dryrun(cmd)
def destroy():
    """
    Destroys the Vagrant-managed VM and all of its resources.
    """
    # The command contains no format placeholders, so the previous
    # `% env` interpolation was a no-op and has been removed.
    local_or_dryrun('vagrant destroy')
def init():
    """
    Initializes a Vagrantfile for the configured base box.
    """
    cmd = 'vagrant init %(vagrant_box)s' % env
    local_or_dryrun(cmd)
def up():
    """
    Boots the Vagrant box using the configured provider.
    """
    cmd = 'vagrant up --provider=%(vagrant_provider)s' % env
    local_or_dryrun(cmd)
def ssh():
    """
    Opens an SSH session into the Vagrant box.
    """
    set()
    # host_string is expected as [user@]host:port; split out host and port.
    host_part = env.host_string.split('@')[-1]
    hostname, port = host_part.split(':')
    cmd = (
        'ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no '
        '-i %s %s@%s -p %s' % (env.key_filename, env.user, hostname, port))
    local_or_dryrun(cmd)
def ssh():
    """
    Opens an SSH session into the Vagrant box.
    """
    set()
    # host_string is expected as [user@]host:port; split out host and port.
    addr, ssh_port = env.host_string.split('@')[-1].split(':')
    ssh_args = (env.key_filename, env.user, addr, ssh_port)
    local_or_dryrun(
        'ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -i %s %s@%s -p %s' % ssh_args)
def update_dependency_cache(name=None, output=None):
    """
    Reads all pip package dependencies and saves them to a file for later use
    with organizing pip-requirements.txt.

    Outputs CSV to stdout.

    name: if given, only requirement lines containing this substring are
        processed.
    output: path of the CSV file to write (one row per parent->dependency
        edge).
    """
    common.set_show(0)
    # Remove any stale virtualenv build directory; it's fine if it's absent.
    try:
        shutil.rmtree('./.env/build')
    except OSError:
        pass
    env.pip_path_versioned = env.pip_path % env
    #depends_fn = get_dependencies_fn(output)
    fout = open(output, 'w')
    #fout = sys.stdout
    writer = csv.DictWriter(fout, PIP_DEPENDS_HEADERS)
    # Write the header row manually (DictWriter.writeheader equivalent).
    writer.writerow(dict(zip(PIP_DEPENDS_HEADERS, PIP_DEPENDS_HEADERS)))
    # Map of package name -> fully-qualified version string; also guards
    # against a package being listed twice in the requirements.
    package_to_fqv = {}
    for dep in pip_to_deps():
        #print dep
        assert dep.name not in package_to_fqv, 'Package %s specified multiple times!' % dep.name
        package_to_fqv[dep.name] = str(dep)
    #dep_tree = defaultdict(set) # {package:set([deps])}
    reqs = list(iter_pip_requirements())
    total = len(reqs)
    i = 0
    for line in reqs:
        i += 1
        if name and name not in line:
            continue
        # Progress indicator on stderr so stdout stays clean for CSV.
        print('line %s: %i %i %.02f%%' % (line, i, total, i / float(total) * 100), file=sys.stderr)
        env.pip_package = line
        env.pip_download_dir = tempfile.mkdtemp()
        # Run pip to discover this requirement's dependency edges.
        cmd = env.pip_depend_command % env
        #with hide('output', 'running', 'warnings'):
        ret = local_or_dryrun(cmd, capture=True)
        print('ret:', ret)
        matches = PIP_DEP_PATTERN.findall(ret) # [(child,parent)]
        print('matches:', matches)
        for child, parent in matches:
            try:
                child_line = child.strip()
                #print 'child_line:',child_line
                #child = Requirement(child_line)
                child_name = PIP_REQ_NAME_PATTERN.findall(child_line)[0]
                child_specs = PIP_REQ_SPEC_PATTERN.findall(child_line)
                #print 'child:',child_name,child_specs
                # The parent string may carry a "->" chain; keep the root.
                parent = Requirement.parse_line(parent.strip().split('->')[0])
                #print 'parent:',parent.__dict__
                # print('parent.specs:',parent.specs,bool(parent.specs)
                assert not parent.specs \
                    or (parent.specs and parent.specs[0][0] in ('==', '>=', '<=', '!=', '<', '>')), \
                    'Invalid parent: %s (%s)' % (parent, parent.specs)
                # if parent.specs and parent.specs[0][0] == '==':
                #     parent.specs[0] = list(parent.specs[0])
                #     parent.specs[0][0] = '>='
                parent_version = ''
                if parent.specs:
                    parent_version = parent.specs[0][1]
                writer.writerow(
                    dict(
                        package_name=parent.name,
                        package_version=parent_version,
                        dependency_name=child_name,
                        dependency_specs=';'.join(
                            [''.join(_) for _ in child_specs]),
                    ))
                # Flush per row so partial results survive a crash.
                fout.flush()
            except Exception as e:
                print('Error: %s' % e, file=sys.stderr)
                print(e, file=sys.stderr)
                traceback.print_exc(file=sys.stderr)
                raise
def install(package='', clean=0, no_deps=1, all=0, upgrade=1):
    """
    Installs the local cache of pip packages.

    package: a single package to install; ignored when all=1.
    clean: if true, deletes any pre-existing virtualenv first.
    no_deps: if true, passes --no-deps to pip.
    all: if true, installs every entry in the requirements file.
    upgrade: if true, passes -U to pip.
    """
    from burlap.dj import render_remote_paths
    print('Installing pip requirements...')
    assert env[ROLE]
    require('is_local')
    # Delete any pre-existing environment.
    if int(clean):
        clean_virtualenv()
    render_remote_paths()
    if env.pip_virtual_env_dir_template:
        env.pip_virtual_env_dir = env.pip_virtual_env_dir_template % env
    env.pip_local_cache_dir = env.pip_local_cache_dir_template % env
    env.pip_path_versioned = env.pip_path % env
    if env.is_local:
        env.pip_cache_dir = os.path.abspath(env.pip_local_cache_dir % env)
    else:
        env.pip_cache_dir = env.pip_remote_cache_dir % env
        print('env.host_string:', env.host_string)
        print('env.key_filename:', env.key_filename)
        run_or_dryrun('mkdir -p %(pip_cache_dir)s' % env)
        # rsync needs a trailing slash to copy directory contents.
        if not env.pip_cache_dir.endswith('/'):
            env.pip_cache_dir = env.pip_cache_dir + '/'
        env.pip_key_filename = os.path.abspath(env.key_filename)
        # Push the locally-cached packages up to the remote cache directory.
        local_or_dryrun(
            'rsync -avz --progress --rsh "ssh -o StrictHostKeyChecking=no -i %(pip_key_filename)s" %(pip_local_cache_dir)s/* %(user)s@%(host_string)s:%(pip_cache_dir)s' % env)
    env.pip_upgrade_flag = ''
    if int(upgrade):
        env.pip_upgrade_flag = ' -U '
    env.pip_no_deps = ''
    if int(no_deps):
        env.pip_no_deps = '--no-deps'
    # Decide which packages to install: everything, one, or the check() diff.
    if int(all):
        packages = list(iter_pip_requirements())
    elif package:
        packages = [package]
    else:
        packages = [k for k, v in check()]
    env.pip_build_dir = tempfile.mkdtemp()
    for package in packages:
        env.pip_package = package
        if env.is_local:
            run_or_dryrun(env.pip_install_command % env)
        else:
            sudo_or_dryrun(env.pip_install_command % env)
    # Restore ownership/permissions on the remote app dir after sudo installs.
    if not env.is_local:
        sudo_or_dryrun(
            'chown -R %(pip_user)s:%(pip_group)s %(remote_app_dir)s' % env)
        sudo_or_dryrun('chmod -R %(pip_chmod)s %(remote_app_dir)s' % env)
def shell(self, name='default', user=None, password=None, root=0, verbose=1, write_password=1, no_db=0, no_pw=0):
    """
    Opens a SQL shell to the given database, assuming the configured database
    and user supports this feature.

    name: database connection name to load credentials for.
    user/password: override the configured credentials.
    root: if true, connect as the root database user.
    write_password: if true (postgres only), write a ~/.pgpass entry first.
    no_db: if true, connect without selecting a database.
    no_pw: if true, blank out the password.
    """
    from burlap.dj import set_db
    # NOTE(review): the `verbose` parameter is immediately overwritten by
    # self.verbose here, so callers cannot actually control it — confirm
    # whether this is intentional.
    verbose = self.verbose
    root = int(root)
    write_password = int(write_password)
    no_db = int(no_db)
    no_pw = int(no_pw)
    # Load database credentials.
    set_db(name=name, verbose=verbose)
    load_db_set(name=name, verbose=verbose)
    set_root_login()
    if root:
        env.db_user = env.db_root_user
        env.db_password = env.db_root_password
    else:
        if user is not None:
            env.db_user = user
        if password is not None:
            env.db_password = password
    # Switch relative to absolute host name.
    env.db_shell_host = env.db_host
    # if env.db_shell_host in ('localhost', '127.0.0.1'):
    #     env.db_shell_host = env.host_string
    if no_pw:
        env.db_password = ''
    cmds = []
    env.db_name_str = ''
    if 'postgres' in env.db_engine or 'postgis' in env.db_engine:
        # Note, psql does not support specifying password at the command line.
        # If you don't want to manually type it at the command line, you must
        # add the password to your local ~/.pgpass file.
        # Each line in that file should be formatted as:
        # host:port:username:password
        # Set pgpass file.
        if write_password and env.db_password:
            cmds.extend(write_postgres_pgpass(verbose=0, commands_only=1, name=name))
        if not no_db:
            env.db_name_str = ' --dbname=%(db_name)s' % env
        cmds.append(('/bin/bash -i -c \"psql --username=%(db_user)s '\
            '--host=%(db_shell_host)s%(db_name_str)s\"') % env)
    elif 'mysql' in env.db_engine:
        if not no_db:
            env.db_name_str = ' %(db_name)s' % env
        if env.db_password:
            cmds.append(('/bin/bash -i -c \"mysql -u %(db_user)s '\
                '-p\'%(db_password)s\' -h %(db_shell_host)s%(db_name_str)s\"') % env)
        else:
            cmds.append(('/bin/bash -i -c \"mysql -u %(db_user)s '\
                '-h %(db_shell_host)s%(db_name_str)s\"') % env)
    else:
        raise NotImplementedError
    if cmds:
        for cmd in cmds:
            if verbose:
                print(cmd)
            if env.is_local:
                local_or_dryrun(cmd)
            else:
                run_or_dryrun(cmd)