Example No. 1
def DeployGcsAuditLogs(config):
    """Deploys the GCS logs bucket to the remote audit logs project, if used."""
    # The GCS logs bucket must be created before the data buckets.
    remote_logs = config.get('remote_audit_logs')
    if remote_logs is None:
        logging.info('Using local GCS audit logs.')
        return
    if 'logs_gcs_bucket' not in remote_logs:
        logging.info('No remote GCS logs bucket required.')
        return

    logging.info('Creating remote GCS logs bucket.')
    data_project_id = config['project_config']['project_id']
    audit_project_id = remote_logs['audit_logs_project_id']

    deployment_name = 'audit-logs-{}-gcs'.format(
        data_project_id.replace('_', '-'))
    dm_template_dict = {
        'imports': [{
            'path': 'remote_audit_logs.py'
        }],
        'resources': [{
            'type': 'remote_audit_logs.py',
            'name': deployment_name,
            'properties': {
                'owners_group': remote_logs['audit_logs_owners_group'],
                'auditors_group': config['project_config']['auditors_group'],
                'logs_gcs_bucket': remote_logs['logs_gcs_bucket'],
            },
        }]
    }
    utils.CreateNewDeployment(dm_template_dict, deployment_name,
                              audit_project_id)
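For context, a minimal sketch of the config dict this example reads: the key layout follows the lookups above, but every name and value is a hypothetical placeholder.

# Hypothetical config values; only the key structure is taken from the code above.
example_config = {
    'project_config': {
        'project_id': 'example-data-project',
        'auditors_group': 'example-auditors@example.com',
    },
    'remote_audit_logs': {
        'audit_logs_project_id': 'example-audit-project',
        'audit_logs_owners_group': 'example-logs-owners@example.com',
        # Passed through as-is to the template's 'logs_gcs_bucket' property.
        'logs_gcs_bucket': {'name': 'example-logs-bucket'},
    },
}
DeployGcsAuditLogs(example_config)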
Example No. 2
def DeployProjectResources(config):
    """Deploys resources into the new data project."""
    logging.info('Deploying Project resources...')
    setup_account = utils.GetGcloudUser()
    has_organization = bool(config.overall.get('organization_id'))
    project_id = config.project['project_id']
    dm_service_account = utils.GetDeploymentManagerServiceAccount(project_id)

    # Build a deployment config for the data_project.py deployment manager
    # template.
    # Shallow copy is sufficient for this script.
    properties = config.project.copy()
    # Remove the current user as an owner of the project if the project is
    # part of an organization.
    properties['has_organization'] = has_organization
    if has_organization:
        properties['remove_owner_user'] = setup_account

    # Change audit_logs to either local_audit_logs or remote_audit_logs in the
    # deployment manager template properties.
    audit_logs = properties.pop('audit_logs')
    if config.audit_logs_project:
        properties['remote_audit_logs'] = {
            'audit_logs_project_id':
            config.audit_logs_project['project_id'],
            'logs_bigquery_dataset_id':
            audit_logs['logs_bigquery_dataset']['name'],
        }
        # Logs GCS bucket is not required for projects without data GCS buckets.
        if 'logs_gcs_bucket' in audit_logs:
            properties['remote_audit_logs']['logs_gcs_bucket_name'] = (
                audit_logs['logs_gcs_bucket']['name'])
    else:
        properties['local_audit_logs'] = audit_logs
    dm_template_dict = {
        'imports': [{
            'path': 'data_project.py'
        }],
        'resources': [{
            'type': 'data_project.py',
            'name': 'data_project_deployment',
            'properties': properties,
        }]
    }

    # Create the deployment.
    utils.CreateNewDeployment(dm_template_dict, 'data-project-deployment',
                              project_id)

    # Remove Owners role from the DM service account.
    utils.RunGcloudCommand([
        'projects', 'remove-iam-policy-binding', project_id, '--member',
        dm_service_account, '--role', 'roles/owner'
    ],
                           project_id=None)
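Here config is an object exposing overall, project, and audit_logs_project attributes. The sketch below uses types.SimpleNamespace as an illustrative stand-in, with hypothetical values, to show only the fields the function touches; a falsy audit_logs_project would take the local-audit-logs branch instead.

from types import SimpleNamespace

# Illustrative stand-in for the real config object; all values are hypothetical.
example_config = SimpleNamespace(
    overall={'organization_id': '123456789012'},
    project={
        'project_id': 'example-data-project',
        'audit_logs': {
            'logs_bigquery_dataset': {'name': 'audit_logs'},
            'logs_gcs_bucket': {'name': 'example-logs-bucket'},  # optional
        },
    },
    # Set to None to fall back to local audit logs.
    audit_logs_project={'project_id': 'example-audit-project'},
)
DeployProjectResources(example_config)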
Example No. 3
def DeployProjectResources(config):
    """Deploys resources into the new data project."""
    logging.info('Deploying Data Project resources...')
    setup_account = utils.GetGcloudUser()
    has_organization = bool(config.get('organization_id'))
    project_id = config['project_config']['project_id']
    dm_service_account = utils.GetDeploymentManagerServiceAccount(project_id)

    # Build a deployment config for the data_project.py deployment manager
    # template.
    # Shallow copy is sufficient for this script.
    properties = config['project_config'].copy()
    # Remove the current user as an owner of the project if the project is
    # part of an organization.
    properties['has_organization'] = has_organization
    if has_organization:
        properties['remove_owner_user'] = setup_account
    # If using remote_audit_logs, set properties for the data project.
    remote_audit_logs = config.get('remote_audit_logs')
    if remote_audit_logs:
        properties['remote_audit_logs'] = {
            'audit_logs_project_id':
            remote_audit_logs['audit_logs_project_id'],
            'logs_gcs_bucket_name':
            remote_audit_logs['logs_gcs_bucket']['name'],
            'logs_bigquery_dataset_id':
            (remote_audit_logs['logs_bigquery_dataset']['name']),
        }
    dm_template_dict = {
        'imports': [{
            'path': 'data_project.py'
        }],
        'resources': [{
            'type': 'data_project.py',
            'name': 'data_project_deployment',
            'properties': properties,
        }]
    }

    # Create the deployment.
    utils.CreateNewDeployment(dm_template_dict, 'data-project-deployment',
                              project_id)

    # Remove Owners role from the DM service account.
    utils.RunGcloudCommand([
        'projects', 'remove-iam-policy-binding', project_id, '--member',
        dm_service_account, '--role', 'roles/owner'
    ],
                           project_id=None)
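This variant reads a plain dict rather than a config object; a sketch of that shape, again with purely hypothetical values:

# Hypothetical values; only the keys the function reads are shown.
example_config = {
    'organization_id': '123456789012',
    'project_config': {
        'project_id': 'example-data-project',
        # Remaining project_config keys are copied verbatim into the
        # data_project.py template properties.
    },
    'remote_audit_logs': {  # omit this key to skip the remote-logs properties
        'audit_logs_project_id': 'example-audit-project',
        'logs_gcs_bucket': {'name': 'example-logs-bucket'},
        'logs_bigquery_dataset': {'name': 'audit_logs'},
    },
}
DeployProjectResources(example_config)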
Example No. 4
def CreateComputeVms(config):
    """Creates new GCE VMs and firewall rules if specified in config."""
    if 'gce_instances' not in config.project:
        logging.info('No GCE VMs required.')
        return
    project_id = config.project['project_id']
    logging.info('Creating GCE VMs.')

    # Enable OS Login for VM SSH access.
    utils.RunGcloudCommand([
        'compute', 'project-info', 'add-metadata', '--metadata',
        'enable-oslogin=TRUE'
    ],
                           project_id=project_id)

    gce_instances = []
    for instance in config.project['gce_instances']:
        if 'existing_boot_image' in instance:
            image_name = instance['existing_boot_image']
        else:
            image_name = ('global/images/' +
                          instance['custom_boot_image']['image_name'])
        gce_instances.append({
            'name': instance['name'],
            'zone': instance['zone'],
            'machine_type': instance['machine_type'],
            'boot_image_name': image_name,
            'start_vm': instance['start_vm'],
        })
    deployment_name = 'gce-vms'
    dm_template_dict = {
        'imports': [{
            'path': 'gce_vms.py'
        }],
        'resources': [{
            'type': 'gce_vms.py',
            'name': deployment_name,
            'properties': {
                'gce_instances': gce_instances,
                'firewall_rules': config.project.get('gce_firewall_rules', []),
            }
        }]
    }
    utils.CreateNewDeployment(dm_template_dict, deployment_name, project_id)
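A sketch of the gce_instances entries this example iterates over: each needs name, zone, machine_type, and start_vm, plus either existing_boot_image or custom_boot_image. All values below are hypothetical placeholders.

from types import SimpleNamespace

# Hypothetical values; each instance uses either 'existing_boot_image' or
# 'custom_boot_image' to pick the boot disk image.
example_config = SimpleNamespace(project={
    'project_id': 'example-data-project',
    'gce_instances': [
        {
            'name': 'example-vm-1',
            'zone': 'us-central1-f',
            'machine_type': 'n1-standard-1',
            'existing_boot_image': 'global/images/family/example-image-family',
            'start_vm': True,
        },
        {
            'name': 'example-vm-2',
            'zone': 'us-central1-f',
            'machine_type': 'n1-standard-1',
            'custom_boot_image': {'image_name': 'example-custom-image'},
            'start_vm': False,
        },
    ],
    'gce_firewall_rules': [],  # optional; defaults to [] when absent
})
CreateComputeVms(example_config)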
Example No. 5
def DeployBigQueryAuditLogs(config):
    """Deploys the BigQuery audit logs dataset, if used."""
    data_project_id = config['project_config']['project_id']
    if 'remote_audit_logs' in config:
        logging.info('Creating remote BigQuery logs dataset.')
        remote_logs = config['remote_audit_logs']
        audit_project_id = remote_logs['audit_logs_project_id']
        logs_dataset = remote_logs['logs_bigquery_dataset'].copy()
        owners_group = remote_logs['audit_logs_owners_group']
    else:
        logging.info('Creating local BigQuery logs dataset.')
        local_logs = config['project_config']['local_audit_logs']
        audit_project_id = data_project_id
        logs_dataset = {
            'name': 'audit_logs',
            'location': local_logs['logs_bigquery_dataset']['location'],
        }
        owners_group = config['project_config']['owners_group']

    # Get the service account for the newly-created log sink.
    logs_dataset['log_sink_service_account'] = utils.GetLogSinkServiceAccount(
        _LOG_SINK_NAME, data_project_id)

    deployment_name = 'audit-logs-{}-bq'.format(
        data_project_id.replace('_', '-'))
    dm_template_dict = {
        'imports': [{
            'path': 'remote_audit_logs.py'
        }],
        'resources': [{
            'type': 'remote_audit_logs.py',
            'name': deployment_name,
            'properties': {
                'owners_group': owners_group,
                'auditors_group': config['project_config']['auditors_group'],
                'logs_bigquery_dataset': logs_dataset,
            },
        }]
    }
    utils.CreateNewDeployment(dm_template_dict, deployment_name,
                              audit_project_id)
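Finally, a hypothetical config for the remote branch of this example; note that the function also depends on the module-level _LOG_SINK_NAME constant and utils.GetLogSinkServiceAccount from the surrounding script. Dropping 'remote_audit_logs' and instead supplying project_config['local_audit_logs'] (with a logs_bigquery_dataset location) and project_config['owners_group'] exercises the local branch.

# Hypothetical remote-logs config; every name and value below is a placeholder.
example_config = {
    'project_config': {
        'project_id': 'example-data-project',
        'auditors_group': 'example-auditors@example.com',
    },
    'remote_audit_logs': {
        'audit_logs_project_id': 'example-audit-project',
        'audit_logs_owners_group': 'example-logs-owners@example.com',
        'logs_bigquery_dataset': {'name': 'audit_logs', 'location': 'US'},
    },
}
DeployBigQueryAuditLogs(example_config)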