Beispiel #1
0
class TestContainerDefinition_load_yaml(unittest.TestCase):
    """
    Exercise Config interpolation of ``${terraform.*}`` and ``${env.*}``
    tokens against a fixture tfstate file and a fixture YAML config.
    """

    def setUp(self):
        here = os.path.dirname(os.path.abspath(__file__))
        # Load the terraform state fixture that the mocked S3 getter returns.
        with open(os.path.join(here, 'terraform.tfstate')) as fd:
            tfstate = json.load(fd)
        os.environ['FOOBAR_ENV'] = "hi_mom"
        with Replacer() as r:
            s3_mock = r('deployfish.terraform.Terraform._get_state_file_from_s3', Mock())
            s3_mock.return_value = tfstate
            self.config = Config(filename=os.path.join(here, 'interpolate.yml'))

    def tearDown(self):
        # Remove the variable set in setUp so other tests see a clean env.
        del os.environ['FOOBAR_ENV']

    def test_terraform_simple_interpolation(self):
        service = self.config.get_service('cit-auth-prod')
        self.assertEqual(service['cluster'], 'foobar-proxy-prod')

    def test_terraform_nested_dict_interpolation(self):
        service = self.config.get_service('cit-auth-prod')
        self.assertEqual(service['load_balancer']['load_balancer_name'], 'foobar-proxy-prod')

    def test_terraform_nested_list_interpolation(self):
        service = self.config.get_service('cit-auth-prod')
        self.assertEqual(
            service['containers'][0]['environment'][2],
            'SECRETS_BUCKET_NAME=ac-config-store'
        )

    def test_environment_simple_interpolation(self):
        service = self.config.get_service('cit-auth-prod')
        self.assertEqual(service['config'][0], 'FOOBAR=hi_mom')
class TestContainerDefinition_load_yaml_no_interpolate(unittest.TestCase):
    """
    With ``interpolate=False`` the Config must leave ``${terraform.*}`` and
    ``${env.*}`` tokens in the YAML untouched.
    """

    def setUp(self):
        here = os.path.dirname(os.path.abspath(__file__))
        # Load the terraform state fixture that the mocked S3 getter returns.
        with open(os.path.join(here, 'terraform.tfstate')) as fd:
            tfstate = json.load(fd)
        with Replacer() as r:
            s3_mock = r('deployfish.terraform.Terraform._get_state_file_from_s3', Mock())
            s3_mock.return_value = tfstate
            self.config = Config(
                filename=os.path.join(here, 'interpolate.yml'),
                env_file=os.path.join(here, 'env_file.env'),
                interpolate=False
            )

    def test_simple_interpolation(self):
        service = self.config.get_service('foobar-prod')
        self.assertEqual(service['cluster'], '${terraform.cluster_name}')

    def test_nested_dict_interpolation(self):
        service = self.config.get_service('foobar-prod')
        self.assertEqual(
            service['load_balancer']['load_balancer_name'],
            '${terraform.elb_id}'
        )

    def test_nested_list_interpolation(self):
        service = self.config.get_service('foobar-prod')
        self.assertEqual(
            service['containers'][0]['environment'][2],
            'SECRETS_BUCKET_NAME=${terraform.secrets_bucket_name}'
        )

    def test_environment_simple_interpolation(self):
        service = self.config.get_service('foobar-prod')
        self.assertEqual(service['config'][0], 'FOOBAR=${env.FOOBAR_ENV}')
        self.assertEqual(
            service['config'][2],
            'FOO_BAR_PREFIX=${env.FOO_BAR_PREFIX_ENV}/test'
        )
Beispiel #3
0
class TestContainerDefinition_load_yaml(unittest.TestCase):
    """
    Exercise Config interpolation (terraform scalars, lists, maps and env
    vars) for the ``foobar-prod`` and ``output-test`` fixture services.
    """

    def setUp(self):
        here = os.path.dirname(os.path.abspath(__file__))
        # Load the terraform state fixture that the mocked S3 getter returns.
        with open(os.path.join(here, 'terraform.tfstate')) as fd:
            tfstate = json.load(fd)
        with Replacer() as r:
            s3_mock = r(
                'deployfish.terraform.Terraform._get_state_file_from_s3',
                Mock())
            s3_mock.return_value = tfstate
            self.config = Config(
                filename=os.path.join(here, 'interpolate.yml'),
                env_file=os.path.join(here, 'env_file.env')
            )

    def tearDown(self):
        pass

    def test_terraform_simple_interpolation(self):
        service = self.config.get_service('foobar-prod')
        self.assertEqual(service['cluster'], 'foobar-cluster-prod')

    def test_terraform_nested_dict_interpolation(self):
        service = self.config.get_service('foobar-prod')
        self.assertEqual(
            service['load_balancer']['load_balancer_name'],
            'foobar-elb-prod')

    def test_terraform_nested_list_interpolation(self):
        service = self.config.get_service('foobar-prod')
        self.assertEqual(
            service['containers'][0]['environment'][2],
            'SECRETS_BUCKET_NAME=my-config-store')

    def test_terraform_list_output_interpolation(self):
        service = self.config.get_service('foobar-prod')
        self.assertListEqual(
            service['vpc_configuration']['security_groups'],
            ['sg-1234567', 'sg-2345678', 'sg-3456789'])

    def test_terraform_map_output_interpolation(self):
        vpc = self.config.get_service('output-test')['vpc_configuration']
        self.assertListEqual(vpc['subnets'], ['subnet-1234567'])
        self.assertListEqual(vpc['security_groups'], ['sg-1234567'])
        self.assertEqual(vpc['public_ip'], 'DISABLED')

    def test_environment_simple_interpolation(self):
        config_section = self.config.get_service('foobar-prod')['config']
        self.assertEqual(config_section[0], 'FOOBAR=hi_mom')
        self.assertEqual(config_section[2], 'FOO_BAR_PREFIX=oh_no/test')
class TestService_no_ec2_secrets_in_task_definition_if_no_execution_role(unittest.TestCase):
    """
    Verify that when a service's ``execution_role`` is removed from the
    config, the task definition created for it carries no container secrets.
    """

    def setUp(self):
        current_dir = os.path.dirname(os.path.abspath(__file__))
        filename = os.path.join(current_dir, 'simple.yml')
        self.config = Config(filename=filename, interpolate=False)
        # Strip the execution role so secrets must NOT be attached.
        del self.config.raw['services'][4]['execution_role']
        # Fake the SSM client: Parameter Store has no parameters.
        client_mock = Mock()
        client_mock.get_parameters.return_value = {'Parameters': []}
        client_mock.describe_parameters.return_value = {'Parameters': []}
        session_mock = Mock(client=Mock(return_value=client_mock))
        with Replacer() as r:
            r.replace('deployfish.aws.ecs.Service.from_aws', Mock())
            r.replace('deployfish.aws.ecs.TaskDefinition.create', Mock())
            r.replace('deployfish.aws.boto3_session', session_mock)
            self.service = Service('foobar-secrets-ec2', config=self.config)
            self.service.create()

    def test_sanity_check_name(self):
        self.assertEqual(self.service.serviceName, 'foobar-secrets-ec2')

    def test_sanity_check_config(self):
        self.assertEqual(len(self.config.get_service('foobar-secrets-ec2')['config']), 3)

    def test_config_without_execution_role(self):
        # Renamed from test_config_with_execution_role: setUp deletes the
        # execution_role, so this asserts the WITHOUT-role behavior.
        self.assertEqual(len(self.service.desired_task_definition.containers[0].secrets), 0)
Beispiel #5
0
def tunnel(ctx, tunnel_name):
    """
    Tunnel through an EC2 instance in the ECS cluster.

    The parameters for this command should be found in a tunnels: top-level section in the yaml file, in the format:

    \b
    tunnels:
      - name: my_tunnel
        service: my_service
        host: config.MY_TUNNEL_DESTINATION_HOST
        port: 3306
        local_port: 8888

    where config.MY_TUNNEL_DESTINATION_HOST is the value of MY_TUNNEL_DESTINATION_HOST
    for this service in the AWS Parameter Store. The host value could also just
    be a hostname.

    """
    config = Config(
        filename=ctx.obj['CONFIG_FILE'],
        env_file=ctx.obj['ENV_FILE'],
        import_env=ctx.obj['IMPORT_ENV'],
        tfe_token=ctx.obj['TFE_TOKEN'],
    )
    tunnel_yml = config.get_category_item('tunnels', tunnel_name)
    service = Service(yml=config.get_service(tunnel_yml['service']))

    # host/port values may be "config.NAME" references into Parameter Store.
    host = _interpolate_tunnel_info(tunnel_yml['host'], service)
    port = int(_interpolate_tunnel_info(tunnel_yml['port'], service))
    local_port = int(_interpolate_tunnel_info(tunnel_yml['local_port'], service))

    # Pick a random intermediate port for the hop through the EC2 instance.
    interim_port = random.randrange(10000, 64000, 1)

    service.tunnel(host, local_port, interim_port, port)
Beispiel #6
0
def entrypoint(ctx, command, dry_run):
    """
    Use this as the entrypoint for your containers.

    It will look in the shell environment for the environment variables
    DEPLOYFISH_SERVICE_NAME and DEPLOYFISH_CLUSTER_NAME.  If found, it will
    use them to:

    \b
    * download the parameters listed in "config:" section for service
      DEPLOYFISH_SERVICE_NAME from the AWS System Manager Parameter Store (which
      are prefixed by "${DEPLOYFISH_CLUSTER_NAME}.${DEPLOYFISH_SERVICE_NAME}.")
    * set those parameters and their values as environment variables
    * run COMMAND

    If either DEPLOYFISH_SERVICE_NAME or DEPLOYFISH_CLUSTER_NAME are not in
    the environment, just run COMMAND.
    """
    service_name = os.environ.get('DEPLOYFISH_SERVICE_NAME', None)
    cluster_name = os.environ.get('DEPLOYFISH_CLUSTER_NAME', None)
    if service_name and cluster_name:
        # We only need the names in the config: section; interpolation is
        # unnecessary because the values come from Parameter Store.
        config = Config(filename=ctx.obj['CONFIG_FILE'], interpolate=False)
        try:
            service_yml = config.get_service(service_name)
        except KeyError:
            click.echo(
                "Our container's deployfish config file '{}' does not have service '{}'"
                .format(ctx.obj['CONFIG_FILE'] or 'deployfish.yml',
                        service_name))
            sys.exit(1)
        parameter_store = []
        if 'config' in service_yml:
            parameter_store = ParameterStore(service_name,
                                             cluster_name,
                                             yml=service_yml['config'])
            # Fetch the current values from AWS Parameter Store.
            parameter_store.populate()
        if not dry_run:
            # Export every parameter that exists in AWS AND is declared in
            # deployfish.yml; log (and skip) everything else.
            for param in parameter_store:
                if not param.exists:
                    print(
                        "event='deploy.entrypoint.parameter.ignored.not_in_aws' service='{}' parameter='{}'"
                        .format(service_name, param.name))
                elif param.should_exist:
                    os.environ[param.key] = param.aws_value
                else:
                    print(
                        "event='deploy.entrypoint.parameter.ignored.not_in_deployfish_yml' service='{}' parameter='{}'"
                        .format(service_name, param.name))
        else:
            # Dry run: report what we WOULD export, without touching os.environ.
            exists = []
            notexists = []
            for param in parameter_store:
                if param.exists:
                    exists.append(param)
                else:
                    notexists.append(param)
            click.secho("Would have set these environment variables:",
                        fg="cyan")
            for param in exists:
                click.echo('  {}={}'.format(param.key, param.aws_value))
            click.secho("\nThese parameters are not in AWS:", fg="red")
            for param in notexists:
                click.echo('  {}'.format(param.key))
    if dry_run:
        click.secho('\n\nCOMMAND: {}'.format(command))
    else:
        subprocess.call(command)
Beispiel #7
0
def entrypoint(ctx, command, dry_run):
    """
    Use this as the entrypoint for your containers.

    It will look in the shell environment for the environment variables
    DEPLOYFISH_SERVICE_NAME and DEPLOYFISH_CLUSTER_NAME.  If found, it will
    use them to:

    \b
    * download the parameters listed in "config:" section for service
      DEPLOYFISH_SERVICE_NAME from the AWS System Manager Parameter Store (which
      are prefixed by "${DEPLOYFISH_CLUSTER_NAME}.${DEPLOYFISH_SERVICE_NAME}.")
    * set those parameters and their values as environment variables
    * run COMMAND

    If either DEPLOYFISH_SERVICE_NAME or DEPLOYFISH_CLUSTER_NAME are not in
    the environment, just run COMMAND.

    \b
    NOTE:

        "deploy entrypoint" IGNORES any "aws:" section in your config file.
        We're assuming that you're only ever running "deploy entrypoint" inside
        a container in your AWS service.  It should get its credentials
        from the container's IAM ECS Task Role.
    """
    service_name = os.environ.get('DEPLOYFISH_SERVICE_NAME', None)
    cluster_name = os.environ.get('DEPLOYFISH_CLUSTER_NAME', None)
    if service_name and cluster_name:
        # The only thing we need out of Config is the names of any config:
        # section variables we might have.  We don't need to do interpolation
        # in the config: section, because we retrieve the values from Parameter
        # Store, and we don't want to use any aws: section that might be in the
        # deployfish.yml to configure our boto3 session because we want to defer
        # to the IAM ECS Task Role.
        config = Config(filename=ctx.obj['CONFIG_FILE'],
                        interpolate=False,
                        use_aws_section=False)
        try:
            service_yml = config.get_service(service_name)
        except KeyError:
            click.echo(
                "Our container's deployfish config file '{}' does not have service '{}'"
                .format(ctx.obj['CONFIG_FILE'] or 'deployfish.yml',
                        service_name))
            sys.exit(1)
        parameter_store = []
        if 'config' in service_yml:
            parameter_store = ParameterStore(service_name,
                                             cluster_name,
                                             yml=service_yml['config'])
            # Fetch the current values from AWS Parameter Store.
            parameter_store.populate()
        if not dry_run:
            # Export every parameter that exists in AWS AND is declared in
            # deployfish.yml; log (and skip) everything else.
            for param in parameter_store:
                if not param.exists:
                    print(
                        "event='deploy.entrypoint.parameter.ignored.not_in_aws' service='{}' parameter='{}'"
                        .format(service_name, param.name))
                elif param.should_exist:
                    os.environ[param.key] = param.aws_value
                else:
                    print(
                        "event='deploy.entrypoint.parameter.ignored.not_in_deployfish_yml' service='{}' parameter='{}'"
                        .format(service_name, param.name))
        else:
            # Dry run: report what we WOULD export, without touching os.environ.
            exists = []
            notexists = []
            for param in parameter_store:
                if param.exists:
                    exists.append(param)
                else:
                    notexists.append(param)
            click.secho("Would have set these environment variables:",
                        fg="cyan")
            for param in exists:
                click.echo('  {}={}'.format(param.key, param.aws_value))
            click.secho("\nThese parameters are not in AWS:", fg="red")
            for param in notexists:
                click.echo('  {}'.format(param.key))
    if dry_run:
        click.secho('\n\nCOMMAND: {}'.format(command))
    else:
        subprocess.call(command)