Example #1
    def publish_artifacts(self, stack, artifacts=None):
        """Make and publish all the artifacts for a single stack"""
        stack.find_missing_build_env()

        if artifacts is None:
            artifacts = stack.artifacts.items()

        # Iterate over each artifact we need to build
        for key, artifact in artifacts:
            # Skip artifacts that are created elsewhere
            if artifact.not_created_here:
                continue

            # Gather our environment variables
            environment = dict(env.pair for env in stack.build_env)

            # Create a temporary file to tar to
            with hp.a_temp_file() as temp_tar_file:
                # Make the artifact
                hp.generate_tar_file(temp_tar_file,
                                     artifact.commands + artifact.paths +
                                     artifact.files,
                                     environment=environment,
                                     compression=artifact.compression_type)
                log.info("Finished generating artifact: {0}".format(key))

                # Upload the artifact
                s3_location = artifact.upload_to.format(**environment)
                if stack.bespin.dry_run:
                    log.info("DRYRUN: Would upload tar file to %s",
                             s3_location)
                else:
                    stack.s3.upload_file_to_s3(temp_tar_file.name, s3_location)
Example #2
File: builder.py Project: jonhiggs/bespin
    def publish_artifacts(self, stack, artifacts=None):
        """Make and publish all the artifacts for a single stack"""
        stack.find_missing_build_env()

        if artifacts is None:
            artifacts = stack.artifacts.items()

        # Iterate over each artifact we need to build
        for key, artifact in artifacts:
            # Skip artifacts that are created elsewhere
            if artifact.not_created_here:
                continue

            # Gather our environment variables
            environment = dict(env.pair for env in stack.build_env)

            # Create a temporary file to tar to
            with hp.a_temp_file() as temp_tar_file:
                # Make the artifact
                hp.generate_tar_file(temp_tar_file, artifact.commands + artifact.paths + artifact.files
                    , environment=environment
                    , compression=artifact.compression_type
                    )
                log.info("Finished generating artifact: {0}".format(key))

                # Upload the artifact
                s3_location = artifact.upload_to.format(**environment)
                if stack.bespin.dry_run:
                    log.info("DRYRUN: Would upload tar file to %s", s3_location)
                else:
                    stack.s3.upload_file_to_s3(temp_tar_file.name, s3_location)
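
Both versions of publish_artifacts above follow the same pattern: build the tar into a temporary file, then derive the S3 destination by formatting upload_to with the stack's build environment. A minimal sketch of that last step, with made-up environment keys and bucket name (none of these values come from bespin):

environment = {"ENVIRONMENT": "prod", "BUILD_NUMBER": "42"}   # illustrative values only
upload_to = "s3://my-artifacts/{ENVIRONMENT}/app-{BUILD_NUMBER}.tar.gz"
s3_location = upload_to.format(**environment)
print(s3_location)  # s3://my-artifacts/prod/app-42.tar.gz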
Example #3
    def sanity_check(self):
        self.find_missing_env()
        if all(isinstance(item, six.string_types) for item in (self.params_json, self.params_yaml)):
            raise BadStack("Need either params_json or params_yaml", looking_in=[self.params_json, self.params_yaml])
        if not any(isinstance(item, six.string_types) for item in (self.params_json, self.params_yaml)):
            raise BadStack("Please don't have both params_json and params_yaml")

        # Hack for sanity check
        for var in self.nested_vars():
            if (
                hasattr(var, "stack")
                and not isinstance(var.stack, six.string_types)
                and not var.stack.cloudformation.status.exists
            ):
                var._resolved = "YYY_RESOLVED_BY_MISSING_STACK_YYY"

        matches = re.findall("XXX_[A-Z_]+_XXX", json.dumps(self.params_json_obj))
        for var in self.nested_vars():
            if hasattr(var, "_resolved"):
                var._resolved = None

        if matches:
            raise BadStack("Found placeholders in the generated params file", stack=self.name, found=matches)
        if self.cloudformation.status.failed:
            raise BadStack(
                "Stack is in a failed state, this means it probably has to be deleted first....", stack=self.stack_name
            )

        with hp.a_temp_file() as fle:
            json.dump(self.stack_json_obj, open(fle.name, "w"))
            self.cloudformation.validate_template(fle.name)
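
The placeholder check in sanity_check relies on unresolved variables appearing as XXX_..._XXX tokens in the serialised params. Here is that detection step on its own, against a made-up params object (the keys and values are illustrative, not bespin's):

import json
import re

params = {"Subnet": "subnet-1234", "AmiId": "XXX_AMI_ID_XXX"}   # stand-in for self.params_json_obj

matches = re.findall("XXX_[A-Z_]+_XXX", json.dumps(params))
if matches:
    # sanity_check raises BadStack at this point; here we just report the leftovers
    print("Unresolved placeholders:", matches)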
Example #4
File: show_tasks.py Project: carukc/bespin
    def run(self):
        """For each file in noseOfYeti/specs, output nodes to represent each spec file"""
        with hp.a_temp_file() as fle:
            fle.write(dedent("""
                ---
                environments: { dev: {account_id: "123"} }
                stacks: { app: {} }
            """).encode('utf-8'))
            fle.seek(0)
            collector = Collector()
            collector.prepare(fle.name, {'bespin': {'extra': ""}, "command": None, "bash": None})

        section = nodes.section()
        section['ids'].append("available-tasks")

        title = nodes.title()
        title += nodes.Text("Default tasks")
        section += title

        for name, task in sorted(collector.configuration['task_finder'].tasks.items(), key=lambda x: len(x[0])):

            lines = [name] + ["  {0}".format(line.strip()) for line in task.description.split('\n')]
            viewlist = ViewList()
            for line in lines:
                viewlist.append(line, name)
            self.state.nested_parse(viewlist, self.content_offset, section)

        return [section]
Example #5
    def add_to_tar(self, tar, environment=None):
        """Add this file to the tar"""
        if environment is None:
            environment = {}

        with hp.a_temp_file() as f:
            if self.content is not NotSpecified:
                if getattr(self, "_no_more_formatting", False):
                    f.write(self.content.encode('utf-8'))
                else:
                    f.write(self.content.format(**environment).encode('utf-8'))
            else:
                self.task_runner(self.task, printer=f)

            f.close()
            self.stdout.write(self.path)
            self.stdout.write("\n")
            self.stdout.flush()
            tar.add(f.name, self.path)
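
Stripped of the bespin-specific options, add_to_tar is: write the (optionally formatted) content to a temporary file, close it, and hand it to tarfile under the archive path. A standard-library-only sketch of that pattern (the content, archive name and output path are assumptions):

import os
import tarfile
import tempfile

content = "hello from the artifact\n"     # stand-in for self.content
arcname = "app/config/hello.txt"          # stand-in for self.path

with tarfile.open("artifact.tar.gz", "w:gz") as tar:
    tmp = tempfile.NamedTemporaryFile(delete=False)
    try:
        tmp.write(content.encode("utf-8"))
        tmp.close()                       # close before adding, just as add_to_tar does
        tar.add(tmp.name, arcname)        # stored under the archive path, not the temp file name
    finally:
        os.remove(tmp.name)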
Example #6
    def add_to_tar(self, tar, environment=None):
        """Add this file to the tar"""
        if environment is None:
            environment = {}

        with hp.a_temp_file() as f:
            if self.content is not NotSpecified:
                if getattr(self, "_no_more_formatting", False):
                    f.write(self.content.encode("utf-8"))
                else:
                    f.write(self.content.format(**environment).encode("utf-8"))
            else:
                self.task_runner(self.task, printer=f)

            f.close()
            self.stdout.write(self.path)
            self.stdout.write("\n")
            self.stdout.flush()
            tar.add(f.name, self.path)
Example #7
    def run(self):
        """For each file in noseOfYeti/specs, output nodes to represent each spec file"""
        with hp.a_temp_file() as fle:
            fle.write(
                dedent("""
                ---
                environments: { dev: {account_id: "123"} }
                stacks: { app: {} }
            """).encode('utf-8'))
            fle.seek(0)
            collector = Collector()
            collector.prepare(fle.name, {
                'bespin': {
                    'extra': ""
                },
                "command": None,
                "bash": None
            })

        section = nodes.section()
        section['ids'].append("available-tasks")

        title = nodes.title()
        title += nodes.Text("Default tasks")
        section += title

        for name, task in sorted(
                collector.configuration['task_finder'].tasks.items(),
                key=lambda x: len(x[0])):

            lines = [name] + [
                "  {0}".format(line.strip())
                for line in task.description.split('\n')
            ]
            viewlist = ViewList()
            for line in lines:
                viewlist.append(line, name)
            self.state.nested_parse(viewlist, self.content_offset, section)

        return [section]
Example #8
File: ssh.py Project: danvela/bespin
    def run(self):
        jb = None
        defaults = config.load_default_settings()

        original_paramiko_agent = paramiko.Agent
        with hp.a_temp_file() as fle:
            if self.proxy and self.proxy_ssh_key:
                fle.write("keyfile|{0}|{1}\n".format(self.proxy, self.proxy_ssh_key).encode('utf-8'))
            if self.ssh_key:
                fle.write("keyfile|*|{0}\n".format(self.ssh_key).encode('utf-8'))
            fle.close()

            auth_file = fle.name if (self.ssh_key or self.proxy_ssh_key) else None

            if self.proxy:
                jb = plugins.load_plugin(jumpbox.__file__)
                jb.init(auth=AuthManager(self.proxy_ssh_user, auth_file=auth_file), defaults=defaults)

            login = AuthManager(self.ssh_user, auth_file=auth_file, include_agent=True)
            keys = {}
            for key in login.agent_connection.get_keys():
                ident = str(uuid.uuid1())
                identity = type("Identity", (object, ), {
                      "__str__": lambda s: ident
                    , "get_name": lambda s: key.get_name()
                    , "asbytes": lambda s: key.asbytes()
                    , "sign_ssh_data": lambda s, *args, **kwargs: key.sign_ssh_data(*args, **kwargs)
                    })()
                keys[identity] = key
                login.deferred_keys[identity] = key

        try:
            outputs = defaultdict(lambda: {"stdout": [], "stderr": []})
            class TwoQueue(object):
                def __init__(self):
                    self.q = queue.Queue(300)

                def put(self, thing):
                    (host, is_stderr), line = thing
                    outputs[host][["stdout", "stderr"][is_stderr]].append(line)
                    self.q.put(thing)

                def __getattr__(self, key):
                    if key in ("q", "put"):
                        return object.__getattribute__(self, key)
                    else:
                        return getattr(self.q, key)

            console = RadSSHConsole(q=TwoQueue())
            connections = [(ip, None) for ip in self.ips]
            if jb:
                jb.add_jumpbox(self.proxy)
                connections = list((ip, socket) for _, ip, socket in jb.do_jumpbox_connections(self.proxy, self.ips))

            cluster = None
            try:
                log.info("Connecting")
                authenticated = False
                for _ in hp.until(timeout=120):
                    if authenticated:
                        break

                    cluster = Cluster(connections, login, console=console, defaults=defaults)
                    for _ in hp.until(timeout=10, step=0.5):
                        if not any(cluster.pending):
                            break

                    if cluster.pending:
                        raise BespinError("Timedout waiting to connect to some hosts", waiting_for=cluster.pending.keys())

                    for _ in hp.until(timeout=10, step=0.5):
                        connections = list(cluster.connections.values())
                        if any(isinstance(connection, socket.gaierror) for connection in connections):
                            raise BespinError("Some connections failed!", failures=[conn for conn in connections if isinstance(conn, socket.gaierror)])

                        if all(conn.authenticated for conn in connections):
                            break

                    authenticated = all(conn.authenticated for conn in cluster.connections.values())
                    if not authenticated:
                        unauthenticated = [host for host, conn in cluster.connections.items() if not conn.authenticated]
                        log.info("Failed to authenticate will try to reconnect in 5 seconds\tunauthenticate=%s", unauthenticated)
                        time.sleep(5)

                # Try to reauth if not authenticated yet
                unauthenticated = [host for host, conn in cluster.connections.items() if not conn.authenticated]
                if unauthenticated:
                    for host in unauthenticated:
                        print('{0:14s} : {1}'.format(str(host), cluster.connections[host]))
                    raise BespinError("Timedout waiting to authenticate all the hosts, do you have an ssh-agent running?", unauthenticated=unauthenticated)

                failed = []
                for host, status in cluster.connections.items():
                    print('{0:14s} : {1}'.format(str(host), status))
                    if type(status) is socket.gaierror:
                        failed.append(host)

                if failed:
                    raise BespinError("Failed to connect to some hosts", failed=failed)

                cluster.run_command(self.command)

                error = False
                for host, job in cluster.last_result.items():
                    if not job.completed or job.result.return_code not in self.acceptable_return_codes:
                        log.error('%s -%s', host, cluster.connections[host])
                        log.error('%s, %s', job, job.result.status)
                        error = True

                if error:
                    raise BespinError("Failed to run the commands")

                return outputs
            finally:
                if cluster:
                    cluster.close_connections()
        finally:
            paramiko.Agent = original_paramiko_agent
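
The TwoQueue wrapper above captures every console line per host while still behaving like the queue.Queue that RadSSHConsole expects, by delegating unknown attributes to the wrapped queue. The delegation itself is plain Python; a small self-contained demonstration (the host and output line are made up):

import queue
from collections import defaultdict

outputs = defaultdict(lambda: {"stdout": [], "stderr": []})

class TwoQueue(object):
    def __init__(self):
        self.q = queue.Queue(300)

    def put(self, thing):
        (host, is_stderr), line = thing
        outputs[host][["stdout", "stderr"][is_stderr]].append(line)
        self.q.put(thing)

    def __getattr__(self, key):
        # only reached for attributes not found on TwoQueue itself
        return getattr(self.q, key)

tq = TwoQueue()
tq.put((("10.0.0.1", 0), "uptime: 3 days"))   # illustrative host and stdout line
print(outputs["10.0.0.1"]["stdout"])          # ['uptime: 3 days']
print(tq.qsize())                             # 1, via delegation to the wrapped queue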
Example #9
from bespin.option_spec.artifact_objs import ArtifactPath, ArtifactFile
# a_temp_file and a_temp_directory are exercised below; presumably they come from bespin's helpers module
from bespin.helpers import a_temp_file, a_temp_directory

from tests.helpers import BespinCase

from contextlib import contextmanager
import tarfile
import zipfile
import nose
import mock
import six
import sys
import os

describe BespinCase, "a_temp_file":
    it "yields the file object of a file that disappears after the context":
        with a_temp_file() as fle:
            assert os.path.exists(fle.name)
        assert not os.path.exists(fle.name)

    it "can write to the temporary file, close it and still read from it":
        with a_temp_file() as fle:
            fle.write("blah".encode("utf-8"))
            fle.close()
            with open(fle.name) as fread:
                self.assertEqual(fread.read(), "blah")
        assert not os.path.exists(fle.name)

describe BespinCase, "a_temp_directory":
    it "yields the name of a directory that disappears after the context":
        with a_temp_directory() as directory:
            assert os.path.exists(directory)
        assert not os.path.exists(directory)
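
The specs above pin down the helpers' contract: a_temp_file yields a file object whose backing file survives being closed inside the context but is removed on exit, and a_temp_directory yields a directory path that is removed on exit. A minimal implementation consistent with that contract, offered only as a sketch (not necessarily how bespin's helpers module actually does it):

import os
import shutil
import tempfile
from contextlib import contextmanager

@contextmanager
def a_temp_file():
    """Yield a named temporary file that is removed when the context ends."""
    fle = tempfile.NamedTemporaryFile(delete=False)
    try:
        yield fle
    finally:
        fle.close()
        if os.path.exists(fle.name):
            os.remove(fle.name)

@contextmanager
def a_temp_directory():
    """Yield the path of a temporary directory that is removed when the context ends."""
    directory = tempfile.mkdtemp()
    try:
        yield directory
    finally:
        if os.path.exists(directory):
            shutil.rmtree(directory)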
Example #10
    def validate_template(self):
        """Validate stack template against CloudFormation"""
        with hp.a_temp_file() as fle:
            with open(fle.name, "w") as template:
                template.write(self.dumped_stack_obj)
            return self.cloudformation.validate_template(fle.name)
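
For comparison, CloudFormation's ValidateTemplate call also accepts the template body inline, so the same check can be done without a temporary file at all. A hedged sketch using boto3 rather than bespin's own wrapper (the region and the shape of the stack object are assumptions):

import json

import boto3   # assumption: boto3 is available in the environment

def validate_template_body(stack_json_obj, region="ap-southeast-2"):
    """Validate a stack template by sending its body straight to CloudFormation."""
    client = boto3.client("cloudformation", region_name=region)
    # ValidateTemplate takes the body as a string, so no temp file is needed
    return client.validate_template(TemplateBody=json.dumps(stack_json_obj))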
Example #11
File: ssh.py Project: jonhiggs/bespin
    def run(self):
        jb = None
        defaults = config.load_default_settings()
        defaults['hostkey.verify'] = 'ignore'

        original_paramiko_agent = paramiko.Agent
        with hp.a_temp_file() as fle:
            if self.proxy and self.proxy_ssh_key:
                fle.write("keyfile|{0}|{1}\n".format(self.proxy, self.proxy_ssh_key).encode('utf-8'))
            if self.ssh_key:
                fle.write("keyfile|*|{0}\n".format(self.ssh_key).encode('utf-8'))
            fle.close()

            auth_file = fle.name if (self.ssh_key or self.proxy_ssh_key) else None

            if self.proxy:
                jb = plugins.load_plugin(jumpbox.__file__)
                jb.init(auth=AuthManager(self.proxy_ssh_user, auth_file=auth_file), defaults=defaults)

            login = AuthManager(self.ssh_user, auth_file=auth_file, include_agent=True)
            keys = {}
            for key in login.agent_connection.get_keys():
                ident = str(uuid.uuid1())
                identity = type("Identity", (object, ), {
                      "__str__": lambda s: ident
                    , "get_name": lambda s: key.get_name()
                    , "asbytes": lambda s: key.asbytes()
                    , "sign_ssh_data": lambda s, *args, **kwargs: key.sign_ssh_data(*args, **kwargs)
                    })()
                keys[identity] = key
                login.deferred_keys[identity] = key

            # Dirty, dirty hack
            # Waiting for https://github.com/radssh/radssh/pull/10
            paramiko.Agent = type("AgentConnection", (object, ), {"get_keys": lambda *args: keys.keys()})

        try:
            console = RadSSHConsole()
            connections = [(ip, None) for ip in self.ips]
            if jb:
                jb.add_jumpbox(self.proxy)
                connections = list((ip, socket) for _, ip, socket in jb.do_jumpbox_connections(self.proxy, self.ips))

            cluster = None
            try:
                log.info("Connecting")
                authenticated = False
                for _ in hp.until(timeout=120):
                    if authenticated:
                        break

                    cluster = Cluster(connections, login, console=console, defaults=defaults)
                    for _ in hp.until(timeout=10, step=0.5):
                        if not any(cluster.pending):
                            break

                    if cluster.pending:
                        raise BespinError("Timedout waiting to connect to some hosts", waiting_for=cluster.pending.keys())

                    for _ in hp.until(timeout=10, step=0.5):
                        if all(conn.authenticated for conn in cluster.connections.values()):
                            break

                    authenticated = all(conn.authenticated for conn in cluster.connections.values())
                    if not authenticated:
                        unauthenticated = [host for host, conn in cluster.connections.items() if not conn.authenticated]
                        log.info("Failed to authenticate will try to reconnect in 5 seconds\tunauthenticate=%s", unauthenticated)
                        time.sleep(5)

                # Try to reauth if not authenticated yet
                unauthenticated = [host for host, conn in cluster.connections.items() if not conn.authenticated]
                if unauthenticated:
                    for host in unauthenticated:
                        print('{0:14s} : {1}'.format(str(host), cluster.connections[host]))
                    raise BespinError("Timedout waiting to authenticate all the hosts, do you have an ssh-agent running?", unauthenticated=unauthenticated)

                failed = []
                for host, status in cluster.connections.items():
                    print('{0:14s} : {1}'.format(str(host), status))
                    if type(status) is socket.gaierror:
                        failed.append(host)

                if failed:
                    raise BespinError("Failed to connect to some hosts", failed=failed)

                cluster.run_command(self.command)

                error = False
                for host, job in cluster.last_result.items():
                    if not job.completed or job.result.return_code not in self.acceptable_return_codes:
                        log.error('%s -%s', host, cluster.connections[host])
                        log.error('%s, %s', job, job.result.status)
                        error = True

                if error:
                    raise BespinError("Failed to run the commands")
            finally:
                if cluster:
                    cluster.close_connections()
        finally:
            paramiko.Agent = original_paramiko_agent
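
All three ssh examples poll with hp.until(timeout=..., step=...), which reads as a generator that keeps yielding until a deadline passes, sleeping step seconds between iterations. A sketch of a generator with that shape (the exact semantics of bespin's helper are an assumption):

import time

def until(timeout=10, step=0.5):
    """Keep yielding until `timeout` seconds have elapsed, sleeping `step` between yields."""
    start = time.time()
    while True:
        yield
        if time.time() - start > timeout:
            return
        time.sleep(step)

# Usage mirroring the examples above: poll a condition against a hard deadline.
ready = lambda: True   # stand-in for a real readiness check
for _ in until(timeout=5, step=0.5):
    if ready():
        break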
Example #12
File: ssh.py Project: carukc/bespin
    def run(self):
        jb = None
        defaults = config.load_default_settings()
        defaults['hostkey.verify'] = 'ignore'

        original_paramiko_agent = paramiko.Agent
        with hp.a_temp_file() as fle:
            if self.proxy and self.proxy_ssh_key:
                fle.write("keyfile|{0}|{1}\n".format(self.proxy, self.proxy_ssh_key).encode('utf-8'))
            if self.ssh_key:
                fle.write("keyfile|*|{0}\n".format(self.ssh_key).encode('utf-8'))
            fle.close()

            auth_file = fle.name if (self.ssh_key or self.proxy_ssh_key) else None

            if self.proxy:
                jb = plugins.load_plugin(jumpbox.__file__)
                jb.init(auth=AuthManager(self.proxy_ssh_user, auth_file=auth_file), defaults=defaults)

            login = AuthManager(self.ssh_user, auth_file=auth_file, include_agent=True)
            keys = {}
            for key in login.agent_connection.get_keys():
                ident = str(uuid.uuid1())
                identity = type("Identity", (object, ), {
                      "__str__": lambda s: ident
                    , "get_name": lambda s: key.get_name()
                    , "asbytes": lambda s: key.asbytes()
                    , "sign_ssh_data": lambda s, *args, **kwargs: key.sign_ssh_data(*args, **kwargs)
                    })()
                keys[identity] = key
                login.deferred_keys[identity] = key

        try:
            outputs = defaultdict(lambda: {"stdout": [], "stderr": []})
            class TwoQueue(object):
                def __init__(self):
                    self.q = queue.Queue(300)

                def put(self, thing):
                    (host, is_stderr), line = thing
                    outputs[host][["stdout", "stderr"][is_stderr]].append(line)
                    self.q.put(thing)

                def __getattr__(self, key):
                    if key in ("q", "put"):
                        return object.__getattribute__(self, key)
                    else:
                        return getattr(self.q, key)

            console = RadSSHConsole(q=TwoQueue())
            connections = [(ip, None) for ip in self.ips]
            if jb:
                jb.add_jumpbox(self.proxy)
                connections = list((ip, socket) for _, ip, socket in jb.do_jumpbox_connections(self.proxy, self.ips))

            cluster = None
            try:
                log.info("Connecting")
                authenticated = False
                for _ in hp.until(timeout=120):
                    if authenticated:
                        break

                    cluster = Cluster(connections, login, console=console, defaults=defaults)
                    for _ in hp.until(timeout=10, step=0.5):
                        if not any(cluster.pending):
                            break

                    if cluster.pending:
                        raise BespinError("Timedout waiting to connect to some hosts", waiting_for=cluster.pending.keys())

                    for _ in hp.until(timeout=10, step=0.5):
                        connections = list(cluster.connections.values())
                        if any(isinstance(connection, socket.gaierror) for connection in connections):
                            raise BespinError("Some connections failed!", failures=[conn for conn in connections if isinstance(conn, socket.gaierror)])

                        if all(conn.authenticated for conn in connections):
                            break

                    authenticated = all(conn.authenticated for conn in cluster.connections.values())
                    if not authenticated:
                        unauthenticated = [host for host, conn in cluster.connections.items() if not conn.authenticated]
                        log.info("Failed to authenticate will try to reconnect in 5 seconds\tunauthenticate=%s", unauthenticated)
                        time.sleep(5)

                # Try to reauth if not authenticated yet
                unauthenticated = [host for host, conn in cluster.connections.items() if not conn.authenticated]
                if unauthenticated:
                    for host in unauthenticated:
                        print('{0:14s} : {1}'.format(str(host), cluster.connections[host]))
                    raise BespinError("Timedout waiting to authenticate all the hosts, do you have an ssh-agent running?", unauthenticated=unauthenticated)

                failed = []
                for host, status in cluster.connections.items():
                    print('{0:14s} : {1}'.format(str(host), status))
                    if type(status) is socket.gaierror:
                        failed.append(host)

                if failed:
                    raise BespinError("Failed to connect to some hosts", failed=failed)

                cluster.run_command(self.command)

                error = False
                for host, job in cluster.last_result.items():
                    if not job.completed or job.result.return_code not in self.acceptable_return_codes:
                        log.error('%s -%s', host, cluster.connections[host])
                        log.error('%s, %s', job, job.result.status)
                        error = True

                if error:
                    raise BespinError("Failed to run the commands")

                return outputs
            finally:
                if cluster:
                    cluster.close_connections()
        finally:
            paramiko.Agent = original_paramiko_agent