def _set_metrics_for_resource(session, r, metrics):
    """Attach the given metrics to resource *r*.

    Each entry in *metrics* maps a metric name to either an existing
    metric UUID (re-parented onto *r*) or a dict carrying an
    ``archive_policy_name`` (a brand new metric is created).
    """
    for metric_name, metric_value in six.iteritems(metrics):
        if isinstance(metric_value, uuid.UUID):
            # Existing metric: claim it for this resource, but only if
            # it was created by the same user/project as the resource.
            try:
                updated = session.query(Metric).filter(
                    Metric.id == metric_value,
                    (Metric.created_by_user_id == r.created_by_user_id),
                    (Metric.created_by_project_id
                     == r.created_by_project_id),
                ).update({"resource_id": r.id, "name": metric_name})
            except exception.DBDuplicateEntry:
                raise indexer.NamedMetricAlreadyExists(metric_name)
            if not updated:
                # Nothing matched: unknown id or wrong ownership.
                raise indexer.NoSuchMetric(metric_value)
        else:
            policy_name = metric_value['archive_policy_name']
            new_metric = Metric(
                id=uuid.uuid4(),
                created_by_user_id=r.created_by_user_id,
                created_by_project_id=r.created_by_project_id,
                archive_policy_name=policy_name,
                name=metric_name,
                resource_id=r.id)
            session.add(new_metric)
            try:
                session.flush()
            except exception.DBDuplicateEntry:
                raise indexer.NamedMetricAlreadyExists(metric_name)
            except exception.DBReferenceError as e:
                # The archive-policy FK is the only one that can fail
                # on this insert; map it to a domain error.
                if (e.constraint ==
                        'fk_metric_archive_policy_name_archive_policy_name'):
                    raise indexer.NoSuchArchivePolicy(policy_name)
                raise
    # Drop the cached relationship so the next access reloads it.
    session.expire(r, ['metrics'])
def create_archive_policy_rule(self, name, metric_pattern,
                               archive_policy_name):
    """Create a new archive policy rule and return it.

    :raises indexer.ArchivePolicyRuleAlreadyExists: if a rule with
        the same name already exists.
    """
    session = self.engine_facade.get_session()
    rule = ArchivePolicyRule(
        name=name,
        archive_policy_name=archive_policy_name,
        metric_pattern=metric_pattern,
    )
    session.add(rule)
    try:
        session.flush()
    except exception.DBDuplicateEntry:
        raise indexer.ArchivePolicyRuleAlreadyExists(name)
    return rule
def create_archive_policy(self, archive_policy):
    """Persist an archive policy and return the stored row.

    :raises indexer.ArchivePolicyAlreadyExists: if a policy with the
        same name already exists.
    """
    session = self.engine_facade.get_session()
    stored = ArchivePolicy(
        name=archive_policy.name,
        back_window=archive_policy.back_window,
        definition=archive_policy.definition,
        # Copy into a plain list so the ORM column gets a concrete type.
        aggregation_methods=list(archive_policy.aggregation_methods),
    )
    session.add(stored)
    try:
        session.flush()
    except exception.DBDuplicateEntry:
        raise indexer.ArchivePolicyAlreadyExists(archive_policy.name)
    return stored
def create_metric(self, id, created_by_user_id, created_by_project_id,
                  archive_policy_name, name=None, resource_id=None,
                  details=False):
    """Create a new metric and return it.

    :param id: UUID of the new metric.
    :param created_by_user_id: user owning the metric.
    :param created_by_project_id: project owning the metric.
    :param archive_policy_name: name of the archive policy to apply.
    :param name: optional metric name.
    :param resource_id: optional resource the metric belongs to.
    :param details: if True, eagerly load the archive policy
        relationship before returning.
    :raises indexer.NoSuchArchivePolicy: if *archive_policy_name*
        does not reference an existing archive policy.
    """
    m = Metric(id=id,
               created_by_user_id=created_by_user_id,
               created_by_project_id=created_by_project_id,
               archive_policy_name=archive_policy_name,
               name=name,
               resource_id=resource_id)
    session = self.engine_facade.get_session()
    session.add(m)
    try:
        session.flush()
    except exception.DBReferenceError as e:
        # Consistent with the other create_metric variant: a missing
        # archive policy must surface as a domain error, not a raw
        # DB foreign-key error.
        if (e.constraint ==
                'fk_metric_archive_policy_name_archive_policy_name'):
            raise indexer.NoSuchArchivePolicy(archive_policy_name)
        raise
    if details:
        # Touch the relationship so it is loaded while the session is
        # still usable.
        m.archive_policy
    return m
def save_blob_data_batch(context, blobs, session):
    """Perform batch uploading to database.

    :param blobs: list of ``(blob_data_id, data)`` tuples, where
        *data* is a readable file-like object.
    :returns: list of ``sql://`` location URIs, one per blob.
    """
    uris = []
    with session.begin():
        for identifier, stream in blobs:
            record = models.ArtifactBlobData()
            record.id = identifier
            record.data = stream.read()
            session.add(record)
            uris.append("sql://" + identifier)
        session.flush()
    return uris
def set_quotas(values, session):
    """Create new quota instances in database.

    :param values: mapping of project id to a dict of
        ``{quota_name: quota_value}``.
    """
    with session.begin():
        for project_id, project_quotas in values.items():
            # Wipe whatever quotas the project currently has; the new
            # set below fully replaces them.
            session.query(models.ArtifactQuota).filter(
                models.ArtifactQuota.project_id == project_id).delete()
            for quota_name, quota_value in project_quotas.items():
                session.add(models.ArtifactQuota(
                    project_id=project_id,
                    quota_name=quota_name,
                    quota_value=quota_value))
        # Persist everything in one go.
        session.flush()
def create_metric(self, id, created_by_user_id, created_by_project_id,
                  archive_policy_name, name=None, resource_id=None):
    """Register a new metric row and return it detached.

    :raises indexer.NoSuchArchivePolicy: if *archive_policy_name*
        does not reference an existing archive policy.
    """
    session = self.engine_facade.get_session()
    metric = Metric(id=id,
                    created_by_user_id=created_by_user_id,
                    created_by_project_id=created_by_project_id,
                    archive_policy_name=archive_policy_name,
                    name=name,
                    resource_id=resource_id)
    session.add(metric)
    try:
        session.flush()
    except exception.DBReferenceError as err:
        # The archive-policy FK is the only constraint we translate;
        # anything else propagates unchanged.
        if (err.constraint !=
                'fk_metric_archive_policy_name_archive_policy_name'):
            raise
        raise indexer.NoSuchArchivePolicy(archive_policy_name)
    # Detach so the caller gets a plain object, not a live session ref.
    session.expunge_all()
    return metric
def create_resource(self, resource_type, id,
                    created_by_user_id, created_by_project_id,
                    user_id=None, project_id=None,
                    started_at=None, ended_at=None, metrics=None,
                    **kwargs):
    """Create a resource of *resource_type*, optionally with metrics.

    :param metrics: optional mapping of metric name to either an
        existing metric UUID or a dict with an
        ``archive_policy_name`` (handled by
        ``_set_metrics_for_resource``).
    :param kwargs: extra resource-type-specific attributes passed
        straight to the resource class constructor.
    :returns: the created resource, detached from the session.
    :raises ValueError: if *started_at* is after *ended_at*.
    :raises indexer.ResourceAlreadyExists: on a duplicate *id*.
    :raises indexer.ResourceValueError: when a foreign-key constraint
        rejects one of the attributes.
    """
    resource_cls = self._resource_type_to_class(resource_type)
    if (started_at is not None
            and ended_at is not None
            and started_at > ended_at):
        raise ValueError("Start timestamp cannot be after end timestamp")
    r = resource_cls(
        id=id,
        type=resource_type,
        created_by_user_id=created_by_user_id,
        created_by_project_id=created_by_project_id,
        user_id=user_id,
        project_id=project_id,
        started_at=started_at,
        ended_at=ended_at,
        **kwargs)
    session = self.engine_facade.get_session()
    with session.begin():
        session.add(r)
        try:
            session.flush()
        except exception.DBDuplicateEntry:
            raise indexer.ResourceAlreadyExists(id)
        except exception.DBReferenceError as ex:
            # FK violation: report which attribute held the bad value.
            raise indexer.ResourceValueError(r.type,
                                             ex.key,
                                             getattr(r, ex.key))
        if metrics is not None:
            self._set_metrics_for_resource(session, r, metrics)
        # NOTE(jd) Force load of metrics :)
        r.metrics
    # Detach the resource so the caller gets a plain object.
    session.expunge_all()
    return r
def delete_metric(self, id):
    """Delete the metric with the given id.

    No error is raised if the metric does not exist.
    """
    session = self.engine_facade.get_session()
    session.query(Metric).filter_by(id=id).delete()
    session.flush()