def create_compute_images(config):
    """Creates new Compute Engine VM images if specified in config.

    Args:
        config (ProjectConfig): The config of a single project to setup.
    """
    gce_instances = config.project.get('gce_instances')
    if not gce_instances:
        # Fixed log message: these are GCE images, not GCS.
        logging.info('No GCE images required.')
        return
    project_id = config.project['project_id']

    for instance in gce_instances:
        custom_image = instance.get('custom_boot_image')
        if not custom_image:
            logging.info('Using existing compute image %s.',
                         instance['existing_boot_image'])
            continue
        # Check if custom image already exists.
        if runner.run_gcloud_command([
                'compute', 'images', 'list', '--no-standard-images',
                '--filter', 'name={}'.format(custom_image['image_name']),
                '--format', 'value(name)'
        ], project_id=project_id):
            logging.info('Image %s already exists, skipping image creation.',
                         custom_image['image_name'])
            continue

        logging.info('Creating VM Image %s.', custom_image['image_name'])

        # Create VM image using gcloud rather than deployment manager so that
        # the deployment manager service account doesn't need to be granted
        # access to the image GCS bucket.
        image_uri = 'gs://' + custom_image['gcs_path']
        runner.run_gcloud_command([
            'compute', 'images', 'create', custom_image['image_name'],
            '--source-uri', image_uri
        ], project_id=project_id)
def create_new_deployment(deployment_template, deployment_name, project_id):
    """Creates a new Deployment Manager deployment from a template.

    Args:
        deployment_template (dict): The dictionary representation of a
            deployment manager YAML template.
        deployment_name (string): The name for the deployment.
        project_id (string): The project under which to create the deployment.
    """
    # Write the template to a temporary YAML file (created in the same
    # directory as the deployment manager templates) so gcloud can read it.
    template_file = tempfile.NamedTemporaryFile(suffix='.yaml')
    write_yaml_file(deployment_template, template_file.name)

    # Create the deployment.
    create_cmd = [
        'deployment-manager', 'deployments', 'create', deployment_name,
        '--config', template_file.name, '--automatic-rollback-on-error'
    ]
    runner.run_gcloud_command(create_cmd, project_id=project_id)

    # Verify the deployment exists (i.e. it wasn't automatically rolled back).
    runner.run_gcloud_command(
        ['deployment-manager', 'deployments', 'describe', deployment_name],
        project_id=project_id)
def enable_deployment_manager(config):
    """Enables Deployment Manager, with admin roles for its service account.

    Args:
        config (ProjectConfig): The config of a single project to setup.
    """
    logging.info('Setting up Deployment Manager...')
    project_id = config.project['project_id']

    # Enable Deployment Manager and Cloud Resource Manager for this project.
    # Use the full service name ('deploymentmanager.googleapis.com') for
    # consistency with the API lists elsewhere in this file; newer gcloud
    # versions do not resolve the bare 'deploymentmanager' alias.
    runner.run_gcloud_command([
        'services', 'enable', 'deploymentmanager.googleapis.com',
        'cloudresourcemanager.googleapis.com'
    ], project_id=project_id)

    # Grant deployment manager service account (temporary) owners access.
    dm_service_account = utils.get_deployment_manager_service_account(
        project_id)
    for role in _DEPLOYMENT_MANAGER_ROLES:
        runner.run_gcloud_command([
            'projects', 'add-iam-policy-binding', project_id, '--member',
            dm_service_account, '--role', role
        ], project_id=None)
    logging.info('Sleeping for %d seconds to let IAM updates propagate.',
                 _IAM_PROPAGATAION_WAIT_TIME_SECS)
    time.sleep(_IAM_PROPAGATAION_WAIT_TIME_SECS)
def enable_services_apis(config):
    """Enables services for this project.

    Use this function instead of enabling private APIs in deployment manager
    because deployment-management does not have all the APIs' access, which
    might trigger PERMISSION_DENIED errors.

    Args:
        config (ProjectConfig): The config of a single project to setup.
    """
    project_id = config.project['project_id']

    want_apis = set(config.project.get('enabled_apis', []))
    want_apis.add('deploymentmanager.googleapis.com')
    # Needed for project level iam policy updates.
    want_apis.add('cloudresourcemanager.googleapis.com')

    # TODO long term solution for updating APIs.
    resources = config.project.get('resources', {})
    resource_to_api = {
        'iam_custom_roles': 'iam.googleapis.com',
        'chc_datasets': 'healthcare.googleapis.com',
        'gke_clusters': 'container.googleapis.com',
    }
    for resource_name, api in resource_to_api.items():
        if resource_name in resources:
            want_apis.add(api)

    # Enable in batches of 10 to avoid hitting quota limits.
    api_list = list(want_apis)
    for start in range(0, len(api_list), 10):
        runner.run_gcloud_command(
            ['services', 'enable'] + api_list[start:start + 10],
            project_id=project_id)
def deploy_project_resources(config):
    """Deploys resources into the new data project.

    Builds a deployment-manager template from the project config, deploys it,
    optionally creates a deletion lien, and finally revokes the DM service
    account's temporary owners access.

    Args:
        config (ProjectConfig): The config of a single project to setup.
    """
    logging.info('Deploying Project resources...')
    setup_account = utils.get_gcloud_user()
    has_organization = bool(config.overall.get('organization_id'))
    project_id = config.project['project_id']
    dm_service_account = utils.get_deployment_manager_service_account(
        project_id)

    # Build a deployment config for the data_project.py deployment manager
    # template. Deep-copy so mutations below don't alter the caller's config.
    properties = copy.deepcopy(config.project)
    # Remove the current user as an owner of the project if project is part of
    # an organization.
    properties['has_organization'] = has_organization
    if has_organization:
        properties['remove_owner_user'] = setup_account

    # Change audit_logs to either local_audit_logs or remote_audit_logs in the
    # deployment manager template properties.
    audit_logs = properties.pop('audit_logs')
    if config.audit_logs_project:
        properties['remote_audit_logs'] = {
            'audit_logs_project_id': config.audit_logs_project['project_id'],
            'logs_bigquery_dataset_id':
                audit_logs['logs_bigquery_dataset']['name'],
        }
        # Logs GCS bucket is not required for projects without data GCS
        # buckets.
        if 'logs_gcs_bucket' in audit_logs:
            properties['remote_audit_logs']['logs_gcs_bucket_name'] = (
                audit_logs['logs_gcs_bucket']['name'])
    else:
        properties['local_audit_logs'] = audit_logs
    # Template path is resolved relative to this module's directory.
    path = os.path.join(os.path.dirname(__file__), 'templates/data_project.py')
    dm_template_dict = {
        'imports': [{'path': path}],
        'resources': [{
            'type': path,
            'name': 'data_project_deployment',
            'properties': properties,
        }]
    }

    # Create the deployment.
    utils.create_new_deployment(dm_template_dict, 'data-project-deployment',
                                project_id)

    # Create project liens if requested.
    if config.project.get('create_deletion_lien'):
        runner.run_gcloud_command([
            'alpha', 'resource-manager', 'liens', 'create', '--restrictions',
            'resourcemanager.projects.delete', '--reason',
            'Automated project deletion lien deployment.'
        ], project_id=project_id)

    # Remove Owners role from the DM service account (it was only needed
    # during deployment).
    runner.run_gcloud_command(
        ['projects', 'remove-iam-policy-binding', project_id, '--member',
         dm_service_account, '--role', 'roles/owner'],
        project_id=None)
def create_compute_vms(config):
    """Creates new GCE VMs and firewall rules if specified in config.

    Args:
        config (ProjectConfig): The config of a single project to setup.
    """
    if 'gce_instances' not in config.project:
        # Fixed log messages: these are GCE VMs, not GCS.
        logging.info('No GCE VMs required.')
        return
    project_id = config.project['project_id']
    logging.info('Creating GCE VMs.')

    # Enable OS Login for VM SSH access.
    runner.run_gcloud_command([
        'compute', 'project-info', 'add-metadata', '--metadata',
        'enable-oslogin=TRUE'
    ], project_id=project_id)

    gce_instances = []
    for instance in config.project['gce_instances']:
        if 'existing_boot_image' in instance:
            image_name = instance['existing_boot_image']
        else:
            image_name = ('global/images/' +
                          instance['custom_boot_image']['image_name'])

        gce_template_dict = {
            'name': instance['name'],
            'zone': instance['zone'],
            'machine_type': instance['machine_type'],
            'boot_image_name': image_name,
            'start_vm': instance['start_vm']
        }
        startup_script_str = instance.get('startup_script')
        if startup_script_str:
            gce_template_dict['metadata'] = {
                'items': [{
                    'key': 'startup-script',
                    'value': startup_script_str
                }]
            }
        gce_instances.append(gce_template_dict)

    deployment_name = 'gce-vms'
    path = os.path.join(os.path.dirname(__file__), 'templates/gce_vms.py')
    dm_template_dict = {
        'imports': [{
            'path': path
        }],
        'resources': [{
            'type': path,
            'name': deployment_name,
            'properties': {
                'gce_instances': gce_instances,
                'firewall_rules': config.project.get('gce_firewall_rules', []),
            }
        }]
    }
    utils.run_deployment(dm_template_dict, deployment_name, project_id,
                         is_deployed(config.project))
def setup_billing(config):
    """Sets the billing account for this project."""
    logging.info('Setting up billing...')
    billing_acct = config.overall['billing_account']
    project_id = config.project['project_id']
    # Link this project to the configured billing account.
    link_cmd = [
        'beta', 'billing', 'projects', 'link', project_id,
        '--billing-account', billing_acct
    ]
    runner.run_gcloud_command(link_cmd, project_id=None)
def _add_binding(project_id, forseti_service_account, role):
    """Add an IAM Policy for the Forseti service account for the given role."""
    member = 'serviceAccount:{}'.format(forseti_service_account)
    runner.run_gcloud_command(
        ['projects', 'add-iam-policy-binding', project_id,
         '--member', member, '--role', role],
        project_id=None)
def setup_billing(config):
    """Sets the billing account for this project."""
    billing_acct = config.root['overall']['billing_account']
    project_id = config.project['project_id']
    # Link this project to the configured billing account.
    runner.run_gcloud_command(
        ['beta', 'billing', 'projects', 'link', project_id,
         '--billing-account', billing_acct],
        project_id=None)
def _stackdriver_account_exists(project_id):
    """Determine whether the stackdriver account exists."""
    try:
        # Any successful monitoring API call implies the account exists.
        runner.run_gcloud_command(
            ['alpha', 'monitoring', 'policies', 'list'],
            project_id=project_id)
    except subprocess.CalledProcessError as e:
        logging.warning(
            'Error reading Stackdriver account (likely does not exist): %s', e)
        return False
    return True
def _create_custom_role(custom_role, project_id):
    """Create a custom IAM role in the project."""
    permissions = ','.join(custom_role.permissions)
    runner.run_gcloud_command(
        ['iam', 'roles', 'create', custom_role.name,
         '--project', project_id,
         '--title', custom_role.title,
         '--description', custom_role.description,
         '--stage', 'ALPHA',
         '--permissions', permissions],
        project_id=None)
def create_notification_channel(alert_email, project_id):
    """Creates a new Stackdriver email notification channel.

    Args:
        alert_email (string): The email address to send alerts to.
        project_id (string): The project under which to create the channel.

    Returns:
        A string, the name of the notification channel

    Raises:
        GcloudRuntimeError: when the channel cannot be created.
    """
    # Write the Email channel definition to a temporary YAML config file.
    config_file = tempfile.NamedTemporaryFile(suffix='.yaml')
    write_yaml_file(
        {
            'type': 'email',
            'displayName': 'Email',
            'labels': {
                'email_address': alert_email
            }
        },
        config_file.name)

    # Create the new channel and return its name.
    output = runner.run_gcloud_command([
        'alpha', 'monitoring', 'channels', 'create',
        '--channel-content-from-file', config_file.name,
        '--format', 'value(name)'
    ], project_id=project_id)
    return output.strip()
def get_server_service_account(forseti_project_id):
    """Get the service account for the Forseti server instance.

    Assumes there is only one Forseti instance installed in the project.

    Args:
        forseti_project_id (str): id of the Forseti project.

    Returns:
        str: the forseti server service account.

    Raises:
        ValueError: if gcloud returns an unexpected number of service accounts.
    """
    output = runner.run_gcloud_command([
        'iam', 'service-accounts', 'list',
        '--format', 'value(email)',
        '--filter', _FORSETI_SERVER_SERVICE_ACCOUNT_FILTER,
    ], project_id=forseti_project_id)

    service_accounts = output.strip().split('\n')
    # Exactly one account must match; anything else means the project is in
    # an unexpected state.
    if len(service_accounts) != 1:
        raise ValueError(
            ('Unexpected number of Forseti server service accounts: '
             'got {}, want 1, {}'.format(len(service_accounts), output)))
    return service_accounts[0]
def enable_services_apis(config):
    """Enables services for this project.

    Use this function instead of enabling private APIs in deployment manager
    because deployment-management does not have all the APIs' access, which
    might trigger PERMISSION_DENIED errors.

    Args:
        config (ProjectConfig): The config of a single project to setup.
    """
    logging.info('Enabling APIs...')
    project_id = config.project['project_id']
    apis = config.project.get('enabled_apis', [])
    # Enable in batches of 10 to stay under quota limits.
    for start in range(0, len(apis), 10):
        batch = apis[start:start + 10]
        runner.run_gcloud_command(['services', 'enable'] + batch,
                                  project_id=project_id)
def create_new_project(config):
    """Creates the new GCP project."""
    logging.info('Creating a new GCP project...')
    project_id = config.project['project_id']
    org_id = config.overall.get('organization_id')
    folder_id = config.overall.get('folder_id')

    cmd = ['projects', 'create', project_id]
    # A folder parent takes precedence over an organization parent.
    if folder_id:
        cmd += ['--folder', folder_id]
    elif org_id:
        cmd += ['--organization', org_id]
    else:
        logging.info('Deploying without a parent organization or folder.')

    # Create the new project.
    runner.run_gcloud_command(cmd, project_id=None)
def enable_deployment_manager(config):
    """Enables Deployment manager, with role/owners for its service account."""
    logging.info('Setting up Deployment Manager...')
    project_id = config.project['project_id']

    # Enable Deployment Manager and Cloud Resource Manager for this project.
    runner.run_gcloud_command(
        ['services', 'enable', 'deploymentmanager',
         'cloudresourcemanager.googleapis.com'],
        project_id=project_id)

    # Grant deployment manager service account (temporary) owners access.
    dm_service_account = utils.get_deployment_manager_service_account(
        project_id)
    runner.run_gcloud_command(
        ['projects', 'add-iam-policy-binding', project_id,
         '--member', dm_service_account,
         '--role', 'roles/owner'],
        project_id=None)
def get_log_sink_service_account(log_sink_name, project_id):
    """Gets the service account name for the given log sink."""
    writer_identity = runner.run_gcloud_command(
        ['logging', 'sinks', 'describe', log_sink_name,
         '--format', 'value(writerIdentity)'],
        project_id).strip()
    # The name returned has a 'serviceAccount:' prefix, so remove this.
    return writer_identity.split(':')[1]
def create_stackdriver_account(config): """Prompts the user to create a new Stackdriver Account.""" # Creating a Stackdriver account cannot be done automatically, so ask the # user to create one. if 'stackdriver_alert_email' not in config.project: logging.warning( 'No Stackdriver alert email specified, skipping creation ' 'of Stackdriver account.') return logging.info('Creating Stackdriver account.') project_id = config.project['project_id'] message = """ ------------------------------------------------------------------------------ To create email alerts, this project needs a Stackdriver account. Create a new Stackdriver account for this project by visiting: https://console.cloud.google.com/monitoring?project={} Only add this project, and skip steps for adding additional GCP or AWS projects. You don't need to install Stackdriver Agents. IMPORTANT: Wait about 5 minutes for the account to be created. For more information, see: https://cloud.google.com/monitoring/accounts/ After the account is created, enter [Y] to continue, or enter [N] to skip the creation of Stackdriver alerts. ------------------------------------------------------------------------------ """.format(project_id) print(message) # Keep trying until Stackdriver account is ready, or user skips. while True: if not utils.wait_for_yes_no('Account created [y/N]?'): logging.warning('Skipping creation of Stackdriver Account.') return # Verify account was created. try: runner.run_gcloud_command( ['alpha', 'monitoring', 'policies', 'list'], project_id=project_id) return except subprocess.CalledProcessError as e: logging.error('Error reading Stackdriver account %s', e) print('Could not find Stackdriver account.')
def run_deployment(deployment_template, deployment_name, project_id,
                   is_update):
    """Creates a new Deployment Manager deployment from a template.

    Args:
        deployment_template (dict): The dictionary representation of a
            deployment manager YAML template.
        deployment_name (string): The name for the deployment.
        project_id (string): The project under which to create the deployment.
        is_update (bool): Whether this deployment is an update.
    """
    # Write the template to a temporary YAML file (in the same directory as
    # the deployment manager templates) so gcloud can read it.
    template_file = tempfile.NamedTemporaryFile(suffix='.yaml')
    write_yaml_file(deployment_template, template_file.name)

    if is_update:
        gcloud_cmd = [
            'deployment-manager', 'deployments', 'update', deployment_name,
            '--config', template_file.name,
            '--delete-policy', 'ABANDON',
        ]
    else:
        gcloud_cmd = [
            'deployment-manager', 'deployments', 'create', deployment_name,
            '--config', template_file.name,
            '--automatic-rollback-on-error',
        ]

    # Run the create/update.
    runner.run_gcloud_command(gcloud_cmd, project_id=project_id)

    # Verify the deployment exists (i.e. it wasn't automatically rolled back).
    runner.run_gcloud_command(
        ['deployment-manager', 'deployments', 'describe', deployment_name],
        project_id=project_id)
def enable_services_apis(config):
    """Enables services for this project.

    Use this function instead of enabling private APIs in deployment manager
    because deployment-management does not have all the APIs' access, which
    might trigger PERMISSION_DENIED errors.

    Args:
        config (ProjectConfig): The config of a single project to setup.

    Returns:
        List[string]: commands to remove APIs not found in the enabled set.
    """
    project_id = config.project['project_id']

    listing = runner.run_gcloud_command(
        ['services', 'list', '--format', 'value(name)'],
        project_id=project_id).split('\n')
    # The gcloud call returns service name as a full path (including project
    # id). We only need the base name which is the URL.
    existing_apis = {os.path.basename(svc) for svc in listing}

    want_apis = set(config.project.get('enabled_apis', []))
    want_apis.add('deploymentmanager.googleapis.com')
    # For project level iam policy updates.
    want_apis.add('cloudresourcemanager.googleapis.com')
    resources = config.project.get('resources', {})
    if 'iam_custom_roles' in resources:
        want_apis.add('iam.googleapis.com')

    unexpected_apis = existing_apis.difference(want_apis)
    unenabled_apis = list(want_apis.difference(existing_apis))

    # Send in batches to avoid hitting quota limits.
    for start in range(0, len(unenabled_apis), 10):
        runner.run_gcloud_command(
            ['services', 'enable'] + unenabled_apis[start:start + 10],
            project_id=project_id)

    cleanup_commands = [
        'gcloud services disable {} --project={}'.format(api, project_id)
        for api in unexpected_apis
    ]
    return Output(cleanup_commands=cleanup_commands)
def create_new_project(config):
    """Creates the new GCP project."""
    project_id = config.project['project_id']
    overall_config = config.root['overall']
    org_id = overall_config.get('organization_id')
    # A project-level folder_id overrides the overall one.
    folder_id = config.project.get('folder_id',
                                   overall_config.get('folder_id'))

    cmd = ['projects', 'create', project_id]
    if folder_id:
        cmd += ['--folder', folder_id]
    elif org_id:
        cmd += ['--organization', org_id]
    else:
        logging.info('Deploying without a parent organization or folder.')

    # Create the new project.
    runner.run_gcloud_command(cmd, project_id=None)

    # Record the project number in the generated fields.
    generated_fields = field_generation.get_generated_fields_ref(
        project_id, config.root)
    generated_fields['project_number'] = utils.get_project_number(project_id)
def get_gce_instance_info(project_id):
    """Gets a list of GCE instance info for each instance.

    Args:
        project_id (string): the project to list instances for.

    Returns:
        list[dict]: one {'name': ..., 'id': ...} dict per instance.
    """
    output = runner.run_gcloud_command(
        ['compute', 'instances', 'list', '--format', 'value(name,id)'],
        project_id=project_id)
    # An empty project yields an empty string; ''.split('\n') would produce
    # [''] and crash on the unpack below, so return early (consistent with
    # the other get_gce_instance_info variant in this file).
    if not output:
        return []
    instance_info = []
    for line in output.split('\n'):
        name, instance_id = line.split()
        instance_info.append({'name': name, 'id': instance_id})
    return instance_info
def grant_deployment_manager_access(config):
    """Grants Deployment manager service account administration roles."""
    if FLAGS.enable_new_style_resources:
        logging.info('DM service account will be granted access through CFT.')
        return
    project_id = config.project['project_id']

    # Grant deployment manager service account (temporary) owners access.
    dm_service_account = utils.get_deployment_manager_service_account(
        project_id)
    for role in _DEPLOYMENT_MANAGER_ROLES:
        binding_cmd = [
            'projects', 'add-iam-policy-binding', project_id,
            '--member', dm_service_account, '--role', role
        ]
        runner.run_gcloud_command(binding_cmd, project_id=None)
    logging.info('Sleeping for %d seconds to let IAM updates propagate.',
                 _IAM_PROPAGATAION_WAIT_TIME_SECS)
    # Sleep through the runner so dry runs can skip the wait.
    runner.run(time.sleep, _IAM_PROPAGATAION_WAIT_TIME_SECS)
def create_deletion_lien(config):
    """Create the project deletion lien, if specified."""
    # Nothing to do unless the config asks for a lien.
    if 'create_deletion_lien' not in config.project:
        return
    project_id = config.project['project_id']

    existing_restrictions = runner.run_gcloud_command(
        ['alpha', 'resource-manager', 'liens', 'list',
         '--format', 'value(restrictions)'],
        project_id=project_id).split('\n')

    # Only create the lien if one with our restriction isn't already there.
    if _LIEN_RESTRICTION in existing_restrictions:
        return
    runner.run_gcloud_command([
        'alpha', 'resource-manager', 'liens', 'create', '--restrictions',
        _LIEN_RESTRICTION, '--reason',
        'Automated project deletion lien deployment.'
    ], project_id=project_id)
def get_iam_policy_cleanup(config):
    """Get cleanup commands for unexpected IAM bindings.

    Compares the project's live IAM policy against the bindings this tool
    expects to have set, and returns gcloud commands that would remove the
    extras.

    Args:
        config (ProjectConfig): The config of a single project to setup.

    Returns:
        Output: with cleanup_commands listing one remove-iam-policy-binding
        command per unexpected (role, member) pair.
    """
    project_id = config.project['project_id']
    policy_str = runner.run_gcloud_command(
        ['projects', 'get-iam-policy', project_id], project_id=project_id)
    policy = yaml.YAML().load(policy_str)
    existing_role_to_members = _get_role_to_members(policy['bindings'])

    # TODO: avoid duplication with data_project.py and rule generator
    # project config once we switch to CFT
    # With an organization the owners group holds roles/owner; otherwise it
    # only gets project IAM admin.
    owners_group_role = ('roles/owner'
                         if 'organization_id' in config.root['overall'] else
                         'roles/resourcemanager.projectIamAdmin')
    initial_bindings = [
        {
            'role': owners_group_role,
            'members': ['group:{}'.format(config.project['owners_group'])]
        },
        {
            'role': 'roles/iam.securityReviewer',
            'members': ['group:{}'.format(config.project['auditors_group'])]
        },
    ]
    if 'editors_group' in config.project:
        initial_bindings.append({
            'role': 'roles/editor',
            'members': [
                'group:{}'.format(group)
                for group in config.project['editors_group']
            ]
        })
    want_role_to_members = _get_role_to_members(
        initial_bindings, config.project.get('additional_project_permissions',
                                             []))

    # Subtract wanted members from each existing role's member set, leaving
    # only the unexpected members.
    # NOTE(review): indexing want_role_to_members[role] for roles absent from
    # the wanted set presumably relies on _get_role_to_members returning a
    # defaultdict — confirm.
    for role, members in existing_role_to_members.items():
        existing_role_to_members[role].difference_update(
            want_role_to_members[role])

    cleanup_commands = []
    for role, members in existing_role_to_members.items():
        for member in members:
            cleanup_commands.append(
                'gcloud projects remove-iam-policy-binding {project_id} '
                '--member={member} --role={role} --project={project_id}'.
                format(project_id=project_id, member=member, role=role))

    return Output(cleanup_commands=cleanup_commands)
def get_or_create_new_project(config):
    """Creates the new GCP project if it does not exist.

    Args:
        config (ProjectConfig): The config of a single project to setup.
    """
    project_id = config.project['project_id']

    # Attempt to get the project number first and fall back to creating the
    # project.
    # Note that it is possible that the `gcloud projects describe` command
    # fails due to reasons other than project does not exist (e.g. caller does
    # not have sufficient permission). In that case, project could exist and
    # the code will still attempt to create the project and fail if the
    # project already exists.
    #
    # In the case where project exists, the organization_id / billing_account
    # / folder_id could be different from those specified in the config.
    # TODO: add check to enforce the metadata set in the config on the
    # existing project.
    try:
        config.generated_fields['projects'][project_id]['project_number'] = (
            utils.get_project_number(project_id))
        logging.info('Project %s exists, skip creating project.', project_id)
    except subprocess.CalledProcessError:
        # Describe failed: assume the project doesn't exist and create it.
        overall_config = config.root['overall']
        org_id = overall_config.get('organization_id')
        folder_id = config.project.get('folder_id',
                                       overall_config.get('folder_id'))

        create_project_command = ['projects', 'create', project_id]
        # A folder parent takes precedence over an organization parent.
        if folder_id:
            create_project_command.extend(['--folder', folder_id])
        elif org_id:
            create_project_command.extend(['--organization', org_id])
        else:
            logging.info('Deploying without a parent organization or folder.')
        # Create the new project.
        runner.run_gcloud_command(create_project_command, project_id=None)
        # Record the freshly created project's number.
        config.generated_fields['projects'][project_id]['project_number'] = (
            utils.get_project_number(project_id))
def get_gce_instance_info(project_id):
    """Gets a list of GCE instance info for each instance."""
    output = runner.run_gcloud_command(
        ['compute', 'instances', 'list', '--format', 'value(name,id)'],
        project_id=project_id)
    # No instances (or a dry-run's empty output) yields an empty string.
    if not output:
        return []
    # Dry runs have no real output; return a placeholder record.
    if FLAGS.dry_run:
        return [{'name': '__DRY_RUN_NAME__', 'id': '__DRY_RUN_ID__'}]
    info = []
    for row in output.split('\n'):
        name, instance_id = row.split()
        info.append({'name': name, 'id': instance_id})
    return info
def deployment_exists(deployment_name, project_id):
    """Determine whether the deployment exists.

    Args:
        deployment_name (string): name of deployment.
        project_id: ID of project.

    Returns:
        bool: True if deployment exists in the project.
    """
    listing = runner.run_gcloud_command(
        ['deployment-manager', 'deployments', 'list', '--format', 'json'],
        project_id=project_id)
    return any(info['name'] == deployment_name
               for info in json.loads(listing))
def _is_service_enabled(service_name, project_id):
    """Check if the service_name is already enabled."""
    listing = runner.run_gcloud_command(
        ['services', 'list', '--format', 'value(NAME)'],
        project_id=project_id)
    # One service name per line of output.
    return service_name in listing.strip().split('\n')
def get_project_number(project_id):
    """Returns the project number of the given project."""
    describe_cmd = [
        'projects', 'describe', project_id,
        '--format', 'value(projectNumber)'
    ]
    return runner.run_gcloud_command(describe_cmd, project_id=None).strip()