def _find_start_docker_container(container_id):
    """Inspect a Docker container and start it if it is stopped."""
    inspect_output = local.shell(
        'docker container inspect {0}'.format(container_id),
    )
    # `docker container inspect` returns a JSON list with one entry
    container_info = json.loads(inspect_output)[0]

    if container_info['State']['Running'] is False:
        logger.info('Starting stopped container: {0}'.format(container_id))
        local.shell('docker container start {0}'.format(container_id))
def connect(state, host, for_fact=None):
    """Verify the chroot is usable (by listing it), then remember the
    directory in the host's data so later commands can use it.
    """
    directory = host.data.chroot_directory
    check_command = 'chroot {0} ls'.format(directory)

    try:
        with progress_spinner({'chroot run'}):
            local.shell(check_command, splitlines=True)
    except PyinfraError as e:
        raise ConnectError(e.args[0])

    host.host_data['chroot_directory'] = directory
    return True
def connect(state, host):
    """Sanity-check the chroot by listing its root, then store the
    directory on the connector data for later use.
    """
    directory = host.data.chroot_directory
    check_command = "chroot {0} ls".format(directory)

    try:
        with progress_spinner({"chroot run"}):
            local.shell(check_command, splitlines=True)
    except PyinfraError as e:
        raise ConnectError(e.args[0])

    host.connector_data["chroot_directory"] = directory
    return True
def setup(app):
    """Sphinx extension entry point: ensure the auto-doc output directories
    exist, then run the documentation generator scripts.
    """
    this_dir = path.dirname(path.realpath(__file__))
    scripts_dir = path.abspath(path.join(this_dir, '..', 'scripts'))

    for docs_name in ('modules', 'facts'):
        docs_path = path.join(this_dir, docs_name)
        if not path.exists(docs_path):
            mkdir(docs_path)

    generator_scripts = (
        'generate_global_kwargs_doc',
        'generate_facts_docs',
        'generate_modules_docs',
    )
    local.shell(tuple(
        'python {0}/{1}.py'.format(scripts_dir, script)
        for script in generator_scripts
    ))
def generate_api_docs():
    """Run sphinx-apidoc over the pyinfra package (excluding facts and
    operations) and delete the stub files we don't want published.
    """
    this_dir = path.dirname(path.realpath(__file__))
    docs_dir = path.abspath(path.join(this_dir, '..', 'docs'))
    pyinfra_dir = path.abspath(path.join(this_dir, '..', 'pyinfra'))

    command = (
        'sphinx-apidoc -e -M -f -o {0}/apidoc/ {1} {1}/facts {1}/operations'
    ).format(docs_dir, pyinfra_dir)
    local.shell((command, ), print_input=True)

    # Remove the generated stubs we never link to
    for unwanted in ('modules.rst', 'pyinfra.rst', 'pyinfra.api.rst'):
        remove('{0}/apidoc/{1}'.format(docs_dir, unwanted))
def setup(app):
    """Sphinx extension hook: recreate the auto-doc directories from
    scratch and run every documentation generator script.
    """
    this_dir = path.dirname(path.realpath(__file__))
    scripts_dir = path.abspath(path.join(this_dir, '..', 'scripts'))

    for docs_name in ('operations', 'facts', 'apidoc'):
        docs_path = path.join(this_dir, docs_name)
        if path.exists(docs_path):
            rmtree(docs_path)  # wipe stale output from a previous build
        mkdir(docs_path)

    generator_scripts = (
        'generate_api_docs',
        'generate_global_kwargs_doc',
        'generate_facts_docs',
        'generate_operations_docs',
    )
    local.shell(
        tuple(
            'python {0}/{1}.py'.format(scripts_dir, script)
            for script in generator_scripts
        ),
        print_input=True,
    )
def disconnect(state, host):
    """Commit the build container to an image, remove the container, and
    log the resulting (short) image ID.
    """
    container_id = host.host_data['docker_container_id'][:12]

    with progress_spinner({'docker commit'}):
        commit_output = local.shell(
            'docker commit {0}'.format(container_id),
            splitlines=True,
        )
        # Last line is the image ID, "sha256:XXXXXXXXXXXX..." - slice out
        # a short 12 character ID after the "sha256:" prefix
        image_id = commit_output[-1][7:19]

    with progress_spinner({'docker rm'}):
        local.shell('docker rm -f {0}'.format(container_id), )

    logger.info('{0}docker build complete, image ID: {1}'.format(
        host.print_prefix, click.style(image_id, bold=True),
    ))
def _get_vagrant_ssh_config(queue, target):
    """Thread worker: fetch `vagrant ssh-config` output for one target and
    push the lines onto the shared queue.
    """
    logger.debug('Loading SSH config for {0}'.format(target))

    config_lines = local.shell(
        'vagrant ssh-config {0}'.format(target),
        splitlines=True,
    )
    queue.put(config_lines)
def _start_docker_image(image_name):
    """Start a detached container from the given image and return the new
    container's ID; raise ConnectError if docker fails.
    """
    try:
        output = local.shell(
            'docker run -d {0} tail -f /dev/null'.format(image_name),
            splitlines=True,
        )
        return output[-1]  # last line of output is the container ID
    except PyinfraError as e:
        raise ConnectError(e.args[0])
def get_mech_config(limit=None):
    """Collect `mech ssh-config` output for reachable Mech VMs.

    Only VMs listed in ``limit`` (when given) and reporting an IP address
    are queried; each VM's SSH config is fetched in its own thread.
    """
    logger.info('Getting Mech config...')

    if limit and not isinstance(limit, (list, tuple)):
        limit = [limit]

    # Note: There is no "--machine-readable" option to 'mech status'
    with progress_spinner({'mech ls'}) as progress:
        status_lines = local.shell('mech ls', splitlines=True)
        progress('mech ls')

    targets = []
    for line in status_lines:
        columns = line.split()
        target = columns[0]
        # Rows with an address have exactly five columns
        address = columns[1] if len(columns) == 5 else ''

        # Skip anything not in the limit
        if limit is not None and target not in limit:
            continue

        # Only VMs with a (numeric) address are up and reachable; fetch each
        # one's SSH config in a thread
        if address != '' and address[0].isdigit():
            targets.append(target)

    config_queue = Queue()
    threads = []

    with progress_spinner(targets) as progress:
        for target in targets:
            worker = Thread(
                target=_get_mech_ssh_config,
                args=(config_queue, progress, target),
            )
            worker.start()
            threads.append(worker)

        for worker in threads:
            worker.join()

    lines = []
    for chunk in list(config_queue.queue):
        lines.extend(chunk)
    return lines
def _get_vagrant_ssh_config(queue, progress, target):
    """Thread worker: queue the SSH config lines for one Vagrant target,
    then tick the progress spinner.
    """
    logger.debug("Loading SSH config for %s", target)

    config_lines = local.shell(
        "vagrant ssh-config {0}".format(target),
        splitlines=True,
    )
    queue.put(config_lines)
    progress(target)
def ensure_branch(data, state):
    """Deploy hook: abort when the local git branch differs from the
    branch named in ``data.app_branch``.
    """
    # Check something local is correct, etc
    current_branch = local.shell('git rev-parse --abbrev-ref HEAD')
    wanted_branch = data.app_branch

    if current_branch != wanted_branch:
        # Raise hook.Error for pyinfra to handle
        message = 'We\'re on the wrong branch (want {0}, got {1})!'.format(
            wanted_branch, current_branch,
        )
        raise hook.Error(message)
def generate_api_docs():
    """Regenerate the sphinx-apidoc stubs for pyinfra (excluding facts and
    operations) and remove the stub files that aren't published.
    """
    this_dir = path.dirname(path.realpath(__file__))
    docs_dir = path.abspath(path.join(this_dir, "..", "docs"))
    pyinfra_dir = path.abspath(path.join(this_dir, "..", "pyinfra"))

    command = (
        "sphinx-apidoc -e -M -f -o {0}/apidoc/ {1} {1}/facts {1}/operations"
    ).format(docs_dir, pyinfra_dir)

    local.shell((command, ), print_input=True)

    unwanted_stubs = ("modules.rst", "pyinfra.rst", "pyinfra.api.rst")
    for filename in unwanted_stubs:
        remove("{0}/apidoc/{1}".format(docs_dir, filename))
def setup(app):
    """Sphinx extension hook: recreate each auto-doc directory from scratch
    and run every documentation generator script.
    """
    this_dir = path.dirname(path.realpath(__file__))
    scripts_dir = path.abspath(path.join(this_dir, "..", "scripts"))

    for docs_name in ("operations", "facts", "apidoc"):
        docs_path = path.join(this_dir, docs_name)
        if path.exists(docs_path):
            rmtree(docs_path)  # wipe stale output from a previous build
        mkdir(docs_path)

    generator_scripts = (
        "generate_api_docs",
        "generate_arguments_doc",
        "generate_connectors_docs",
        "generate_facts_docs",
        "generate_operations_docs",
    )
    local.shell(
        tuple(
            "python {0}/{1}.py".format(scripts_dir, script)
            for script in generator_scripts
        ),
        print_input=True,
    )
def connect(state, host, for_fact=None):
    """Start a throwaway container for this host unless one is already
    recorded, storing its ID in the host data.
    """
    if 'docker_container_id' in host.host_data:  # user can provide a docker_container_id
        return True

    with progress_spinner({'docker run'}):
        run_output = local.shell(
            'docker run -d {0} sleep 10000'.format(host.name),
            splitlines=True,
        )

    # Last line of the docker run output is the new container's ID
    host.host_data['docker_container_id'] = run_output[-1]
    return True
def get_vagrant_config(limit=None):
    """Collect stripped `vagrant ssh-config` lines for all running targets.

    Only targets named in ``limit`` (when given) are considered. Configs
    are fetched in one thread per target because Vagrant is very slow to
    run each command.
    """
    logger.info("Getting Vagrant config...")

    if limit and not isinstance(limit, (list, tuple)):
        limit = [limit]

    with progress_spinner({"vagrant status"}) as progress:
        status_lines = local.shell(
            "vagrant status --machine-readable",
            splitlines=True,
        )
        progress("vagrant status")

    targets = []
    for raw_line in status_lines:
        _, target, type_, data = raw_line.strip().split(",", 3)

        # Skip anything not in the limit
        if limit is not None and target not in limit:
            continue

        # Only "state,running" rows correspond to live machines worth querying
        if type_ == "state" and data == "running":
            targets.append(target)

    config_queue = Queue()
    threads = []

    with progress_spinner(targets) as progress:
        for target in targets:
            worker = Thread(
                target=_get_vagrant_ssh_config,
                args=(config_queue, progress, target),
            )
            worker.start()
            threads.append(worker)

        for worker in threads:
            worker.join()

    lines = []
    for chunk in list(config_queue.queue):
        lines.extend(entry.strip() for entry in chunk)
    return lines
def connect(state, host, for_fact=None):
    """Start a long-lived container from ``host.data.docker_image`` unless
    a container ID is already recorded for this host.
    """
    if 'docker_container_id' in host.host_data:  # user can provide a docker_container_id
        return True

    try:
        with progress_spinner({'docker run'}):
            run_output = local.shell(
                'docker run -d {0} tail -f /dev/null'.format(host.data.docker_image),
                splitlines=True,
            )
    except PyinfraError as e:
        raise ConnectError(e.args[0])

    # Last line of the docker run output is the new container's ID
    host.host_data['docker_container_id'] = run_output[-1]
    return True
def _get_vagrant_config(limit=None):
    """Return combined `vagrant ssh-config` lines for running targets.

    Args:
        limit: optional target name, or list/tuple of target names, to
            restrict the lookup to; ``None`` means all running targets.
    """
    # Accept tuples as well as lists, matching the other config-gathering
    # helpers (previously only a list bypassed the wrapping, so a tuple
    # limit was wrapped into a single bogus element).
    if limit and not isinstance(limit, (list, tuple)):
        limit = [limit]

    with progress_spinner({'vagrant status'}) as progress:
        output = local.shell(
            'vagrant status --machine-readable',
            splitlines=True,
        )
        progress('vagrant status')

    targets = []

    for line in output:
        _, target, type_, data = line.split(',', 3)

        # Skip anything not in the limit
        if limit is not None and target not in limit:
            continue

        # For each running container - fetch its SSH config in a thread - this
        # is because Vagrant is *really* slow to run each command.
        if type_ == 'state' and data == 'running':
            targets.append(target)

    threads = []
    config_queue = Queue()

    with progress_spinner(targets) as progress:
        for target in targets:
            thread = Thread(
                target=_get_vagrant_ssh_config,
                args=(config_queue, progress, target),
            )
            threads.append(thread)
            thread.start()

        for thread in threads:
            thread.join()

    lines = []
    for config_lines in list(config_queue.queue):
        lines.extend(config_lines)

    return lines
def _get_mech_ssh_config(queue, progress, target):
    """Thread worker: fetch `mech ssh-config` for one target, fix its Host
    line, queue the result and tick the progress spinner.
    """
    logger.debug('Loading SSH config for {0}'.format(target))

    # Note: We have to work-around the fact that "mech ssh-config somehost"
    # does not return the correct "Host" value. When "mech" fixes this
    # issue we can simplify this code.
    raw_lines = local.shell(
        'mech ssh-config {0}'.format(target),
        splitlines=True,
    )
    fixed_lines = [
        'Host ' + target if line.startswith('Host ') else line
        for line in raw_lines
    ]

    queue.put(fixed_lines)
    progress(target)
def make_names_data(output_key=None):
    """Yield (name, data, groups) inventory tuples built from a Terraform
    output list named ``output_key``.
    """
    show_warning()

    if not output_key:
        raise InventoryError("No Terraform output key!")

    with progress_spinner({"fetch terraform output"}):
        raw_output = local.shell("terraform output -json")

    flattened_output = _flatten_dict(json.loads(raw_output))

    if output_key not in flattened_output:
        raise InventoryError(f"No Terraform output with key: `{output_key}`")

    tf_output_value = flattened_output[output_key]
    if not isinstance(tf_output_value, list):
        raise InventoryError(
            f"Invalid Terraform output type, should be list, got `{type(tf_output_value)}`",
        )

    for ssh_target in tf_output_value:
        host_data = {"ssh_hostname": ssh_target}
        yield "@terraform/{0}".format(ssh_target), host_data, ["@terraform"]
def some_python(state, host, *args, **kwargs):
    """Example callback: print the host's inventory and actual hostnames,
    then run a local shell command.
    """
    print('connecting host name: {0}, actual: {1}'.format(
        host.name, host.fact.hostname,
    ))
    local.shell('echo "local stuff!"')