Example #1
0
def deploy_gcs_audit_logs(config):
    """Deploys the GCS logs bucket to the remote audit logs project, if used.

    No-op when the config uses local audit logs, or when the audit_logs
    section does not request a GCS logs bucket.

    Args:
      config: project config object exposing `audit_logs_project` (dict or
        None) and `project` (dict with 'project_id', 'auditors_group' and an
        'audit_logs' section).
    """
    # The GCS logs bucket must be created before the data buckets.
    if not config.audit_logs_project:
        logging.info('Using local GCS audit logs.')
        return
    logs_gcs_bucket = config.project['audit_logs'].get('logs_gcs_bucket')
    if not logs_gcs_bucket:
        logging.info('No remote GCS logs bucket required.')
        return

    logging.info('Creating remote GCS logs bucket.')
    data_project_id = config.project['project_id']
    logs_project = config.audit_logs_project
    audit_project_id = logs_project['project_id']

    # Deployment names must not contain underscores.
    deployment_name = 'audit-logs-{}-gcs'.format(
        data_project_id.replace('_', '-'))
    # Resolve the template relative to this file (consistent with
    # deploy_bigquery_audit_logs) rather than relying on the current
    # working directory.
    path = os.path.join(os.path.dirname(__file__),
                        'templates/remote_audit_logs.py')
    dm_template_dict = {
        'imports': [{
            'path': path
        }],
        'resources': [{
            'type': path,
            'name': deployment_name,
            'properties': {
                'owners_group': logs_project['owners_group'],
                'auditors_group': config.project['auditors_group'],
                'logs_gcs_bucket': logs_gcs_bucket,
            },
        }]
    }
    utils.create_new_deployment(dm_template_dict, deployment_name,
                                audit_project_id)
Example #2
0
def deploy_project_resources(config):
  """Deploys resources into the new data project."""
  logging.info('Deploying Project resources...')
  setup_account = utils.get_gcloud_user()
  project_id = config.project['project_id']
  dm_service_account = utils.get_deployment_manager_service_account(project_id)
  has_organization = bool(config.overall.get('organization_id'))

  # Assemble the properties for the data_project.py deployment manager
  # template, starting from a copy of the project config.
  properties = copy.deepcopy(config.project)
  properties['has_organization'] = has_organization
  # If the project belongs to an organization, the current user should be
  # dropped from the project owners.
  if has_organization:
    properties['remove_owner_user'] = setup_account

  # The template expects either local_audit_logs or remote_audit_logs in
  # place of the generic audit_logs section.
  audit_logs = properties.pop('audit_logs')
  if not config.audit_logs_project:
    properties['local_audit_logs'] = audit_logs
  else:
    remote_logs = {
        'audit_logs_project_id': config.audit_logs_project['project_id'],
        'logs_bigquery_dataset_id': audit_logs['logs_bigquery_dataset']['name'],
    }
    # The GCS logs bucket only exists for projects that have data GCS buckets.
    if 'logs_gcs_bucket' in audit_logs:
      remote_logs['logs_gcs_bucket_name'] = (
          audit_logs['logs_gcs_bucket']['name'])
    properties['remote_audit_logs'] = remote_logs

  path = os.path.join(os.path.dirname(__file__), 'templates/data_project.py')
  dm_template_dict = {
      'imports': [{'path': path}],
      'resources': [{
          'type': path,
          'name': 'data_project_deployment',
          'properties': properties,
      }]
  }

  # Create the deployment.
  utils.create_new_deployment(dm_template_dict, 'data-project-deployment',
                              project_id)

  # Optionally protect the project against deletion with a lien.
  if config.project.get('create_deletion_lien'):
    runner.run_gcloud_command([
        'alpha', 'resource-manager', 'liens', 'create',
        '--restrictions', 'resourcemanager.projects.delete',
        '--reason', 'Automated project deletion lien deployment.'
    ], project_id=project_id)

  # The deployment manager service account no longer needs owner access.
  runner.run_gcloud_command(['projects', 'remove-iam-policy-binding',
                             project_id,
                             '--member', dm_service_account,
                             '--role', 'roles/owner'],
                            project_id=None)
Example #3
0
def create_compute_vms(config):
    """Creates new GCE VMs and firewall rules if specified in config.

    No-op when the project config contains no 'gce_instances' section.

    Args:
      config: project config object exposing `project` (dict, may contain
        'gce_instances' and 'gce_firewall_rules').
    """
    if 'gce_instances' not in config.project:
        # Fixed: the original message said "GCS VMs"; these are GCE VMs.
        logging.info('No GCE VMs required.')
        return
    project_id = config.project['project_id']
    logging.info('Creating GCE VMs.')

    # Enable OS Login for VM SSH access.
    runner.run_gcloud_command([
        'compute', 'project-info', 'add-metadata', '--metadata',
        'enable-oslogin=TRUE'
    ],
                              project_id=project_id)

    gce_instances = []
    for instance in config.project['gce_instances']:
        # A VM either reuses an existing boot image or a project-local
        # custom one.
        if 'existing_boot_image' in instance:
            image_name = instance['existing_boot_image']
        else:
            image_name = ('global/images/' +
                          instance['custom_boot_image']['image_name'])

        gce_template_dict = {
            'name': instance['name'],
            'zone': instance['zone'],
            'machine_type': instance['machine_type'],
            'boot_image_name': image_name,
            'start_vm': instance['start_vm']
        }
        # Optional startup script is passed through VM metadata.
        startup_script_str = instance.get('startup_script')
        if startup_script_str:
            gce_template_dict['metadata'] = {
                'items': [{
                    'key': 'startup-script',
                    'value': startup_script_str
                }]
            }
        gce_instances.append(gce_template_dict)

    deployment_name = 'gce-vms'
    path = os.path.join(os.path.dirname(__file__), 'templates/gce_vms.py')
    dm_template_dict = {
        'imports': [{
            'path': path
        }],
        'resources': [{
            'type': path,
            'name': deployment_name,
            'properties': {
                'gce_instances': gce_instances,
                'firewall_rules': config.project.get('gce_firewall_rules', []),
            }
        }]
    }
    utils.create_new_deployment(dm_template_dict, deployment_name, project_id)
Example #4
0
def deploy_bigquery_audit_logs(config):
    """Deploys the BigQuery audit logs dataset, if used.

    The dataset is created in the remote audit logs project when one is
    configured; otherwise a local dataset named 'audit_logs' is created in
    the data project itself.
    """
    data_project_id = config.project['project_id']
    logs_dataset = copy.deepcopy(
        config.project['audit_logs']['logs_bigquery_dataset'])

    logs_project = config.audit_logs_project
    if logs_project:
        logging.info('Creating remote BigQuery logs dataset.')
        audit_project_id = logs_project['project_id']
        owners_group = logs_project['owners_group']
    else:
        logging.info('Creating local BigQuery logs dataset.')
        audit_project_id = data_project_id
        logs_dataset['name'] = 'audit_logs'
        owners_group = config.project['owners_group']

    # The newly-created log sink exports through a dedicated service
    # account, which the template grants write access to the dataset.
    sink_account = utils.get_log_sink_service_account(
        _LOG_SINK_NAME, data_project_id)
    logs_dataset['log_sink_service_account'] = sink_account

    # Deployment names must not contain underscores.
    deployment_name = 'audit-logs-{}-bq'.format(
        data_project_id.replace('_', '-'))
    template_path = os.path.join(os.path.dirname(__file__),
                                 'templates/remote_audit_logs.py')
    dm_template_dict = {
        'imports': [{'path': template_path}],
        'resources': [{
            'type': template_path,
            'name': deployment_name,
            'properties': {
                'owners_group': owners_group,
                'auditors_group': config.project['auditors_group'],
                'logs_bigquery_dataset': logs_dataset,
            },
        }]
    }
    utils.create_new_deployment(dm_template_dict, deployment_name,
                                audit_project_id)