class MetadataUpdate(ndb.Model): """A pending metadata update. Standalone instances should not be present in the datastore. """ # Checksum for this metadata. checksum = ndb.ComputedProperty( lambda self: utilities.compute_checksum(self.metadata)) # Metadata to modify. Keys present will overwrite existing metadata. # Use null values to delete keys. metadata = ndb.JsonProperty() # URL for the pending operation to apply this metadata update. url = ndb.StringProperty(indexed=False)
def update(key):
  """Updates instance metadata.

  Args:
    key: ndb.Key for a models.instance entity.
  """
  instance = key.get()
  if not instance:
    logging.warning('Instance does not exist: %s', key)
    return
  pending = instance.active_metadata_update
  if not pending:
    logging.warning('Instance active metadata update unspecified: %s', key)
    return
  if pending.url:
    # An operation for this metadata update is already in flight.
    return

  # Walk up the entity hierarchy: Instance -> InstanceGroupManager ->
  # InstanceTemplateRevision, which holds the GCE project to talk to.
  igm = key.parent().get()
  if not igm:
    logging.warning('InstanceGroupManager does not exist: %s', key.parent())
    return
  revision = igm.key.parent().get()
  if not revision:
    logging.warning('InstanceTemplateRevision does not exist: %s',
                    igm.key.parent())
    return
  if not revision.project:
    logging.warning('InstanceTemplateRevision project unspecified: %s',
                    revision.key)
    return

  # Fetch the live instance resource to get the current metadata fingerprint
  # and items, then schedule the set-metadata operation against GCE.
  result = net.json_request(instance.url, scopes=gce.AUTH_SCOPES)
  operation = gce.Project(revision.project).set_metadata(
      igm.key.id(),
      key.id(),
      result['metadata']['fingerprint'],
      apply_metadata_update(result['metadata']['items'], pending.metadata),
  )
  metrics.send_machine_event('METADATA_UPDATE_SCHEDULED', key.id())
  associate_metadata_operation(
      key,
      utilities.compute_checksum(pending.metadata),
      operation.url,
  )
def update(key):
  """Updates instance metadata.

  Args:
    key: ndb.Key for a models.instance entity.
  """
  inst = key.get()
  if not inst:
    logging.warning('Instance does not exist: %s', key)
    return
  if not inst.active_metadata_update:
    logging.warning('Instance active metadata update unspecified: %s', key)
    return
  if inst.active_metadata_update.url:
    # A URL on the active update means an operation is already pending.
    return

  manager = key.parent().get()
  if not manager:
    logging.warning('InstanceGroupManager does not exist: %s', key.parent())
    return
  template_revision = manager.key.parent().get()
  if not template_revision:
    logging.warning(
        'InstanceTemplateRevision does not exist: %s', manager.key.parent())
    return
  if not template_revision.project:
    logging.warning(
        'InstanceTemplateRevision project unspecified: %s',
        template_revision.key)
    return

  # Read the instance's current metadata (items + fingerprint) from GCE,
  # merge in the pending update, and issue the set-metadata call.
  result = net.json_request(inst.url, scopes=gce.AUTH_SCOPES)
  api = gce.Project(template_revision.project)
  merged = apply_metadata_update(
      result['metadata']['items'], inst.active_metadata_update.metadata)
  operation = api.set_metadata(
      manager.key.id(),
      key.id(),
      result['metadata']['fingerprint'],
      merged,
  )
  associate_metadata_operation(
      key,
      utilities.compute_checksum(inst.active_metadata_update.metadata),
      operation.url,
  )
def compute_template_checksum(template_cfg):
  """Computes a checksum from the given config.

  Args:
    template_cfg: proto.config_pb2.InstanceTemplateConfig.InstanceTemplate.

  Returns:
    The checksum string.
  """
  # NOTE(review): 'network_url' uses an underscore while every other key is
  # hyphenated; it must stay as-is, since renaming it would change every
  # existing checksum.
  props = {
      'auto-assign-external-ip': template_cfg.auto_assign_external_ip,
      'dimensions': _load_dict(template_cfg.dimensions),
      'disk-size-gb': template_cfg.disk_size_gb,
      'disk-type': template_cfg.disk_type,
      'image-name': template_cfg.image_name,
      'image-project': template_cfg.image_project,
      'machine-type': template_cfg.machine_type,
      'metadata': _load_dict(template_cfg.metadata),
      'min-cpu-platform': template_cfg.min_cpu_platform,
      'network_url': template_cfg.network_url,
      'project': template_cfg.project,
      'service-accounts': [],
      'tags': sorted(template_cfg.tags),
  }
  accounts = template_cfg.service_accounts
  if accounts:
    # Changing the first service account has special meaning because the
    # first service account is the one we communicate to Machine Provider.
    props['service-accounts'].append({
        'name': accounts[0].name,
        'scopes': sorted(accounts[0].scopes),
    })
    # The rest of the service accounts have no special meaning, so changing
    # their order shouldn't affect the checksum.
    for account in sorted(accounts[1:], key=lambda a: a.name):
      props['service-accounts'].append({
          'name': account.name,
          'scopes': sorted(account.scopes),
      })
  return utilities.compute_checksum(props)
def compute_template_checksum(template_cfg):
  """Computes a checksum from the given config.

  Args:
    template_cfg: proto.config_pb2.InstanceTemplateConfig.InstanceTemplate.

  Returns:
    The checksum string.
  """
  service_accounts = []
  if template_cfg.service_accounts:
    first = template_cfg.service_accounts[0]
    # Changing the first service account has special meaning because the
    # first service account is the one we communicate to Machine Provider.
    service_accounts.append({
        'name': first.name,
        'scopes': sorted(first.scopes),
    })
    # The rest of the service accounts have no special meaning, so changing
    # their order shouldn't affect the checksum.
    rest = sorted(template_cfg.service_accounts[1:], key=lambda a: a.name)
    service_accounts.extend(
        {'name': a.name, 'scopes': sorted(a.scopes)} for a in rest)

  identifying_properties = {
      'dimensions': _load_dict(template_cfg.dimensions),
      'disk-size-gb': template_cfg.disk_size_gb,
      'image-name': template_cfg.image_name,
      'machine-type': template_cfg.machine_type,
      'metadata': _load_dict(template_cfg.metadata),
      'project': template_cfg.project,
      'service-accounts': service_accounts,
      'tags': sorted(template_cfg.tags),
  }
  return utilities.compute_checksum(identifying_properties)