def cluster_template_create(context, values):
    """Create a ClusterTemplate and its TemplatesRelation rows atomically.

    :param context: request context, passed through to cluster_template_get
    :param values: dict of ClusterTemplate columns; the optional
        "node_groups" entry is a list of dicts, one per TemplatesRelation
    :returns: the created template re-fetched via cluster_template_get
    :raises ex.DBDuplicateEntry: on a unique-constraint violation for the
        template or any of its node-group relations
    """
    values = values.copy()
    cluster_template = m.ClusterTemplate()
    # Tolerate both a missing "node_groups" key and an explicit None.
    # The previous bare values.pop("node_groups") raised KeyError when the
    # key was absent, unlike cluster_create which supplies a default.
    node_groups = values.pop("node_groups", None) or []
    cluster_template.update(values)
    session = get_session()
    with session.begin():
        try:
            cluster_template.save(session=session)
        except db_exc.DBDuplicateEntry as e:
            raise ex.DBDuplicateEntry(
                "Duplicate entry for ClusterTemplate: %s" % e.columns)
        try:
            for ng in node_groups:
                node_group = m.TemplatesRelation()
                node_group.update({"cluster_template_id": cluster_template.id})
                node_group.update(ng)
                node_group.save(session=session)
        except db_exc.DBDuplicateEntry as e:
            raise ex.DBDuplicateEntry("Duplicate entry for TemplatesRelation:"
                                      "%s" % e.columns)
    # Re-read so the returned object carries its related node groups.
    return cluster_template_get(context, cluster_template.id)
def cluster_create(context, values):
    """Create a Cluster and its NodeGroup rows in a single transaction.

    :param context: request context, passed through to cluster_get
    :param values: dict of Cluster columns; the optional "node_groups"
        entry is a list of dicts, one per NodeGroup
    :returns: the created cluster re-fetched via cluster_get
    :raises ex.DBDuplicateEntry: on a unique-constraint violation for the
        cluster or any of its node groups
    """
    values = values.copy()
    cluster = m.Cluster()
    # "or []" also guards against an explicit None value for the key,
    # matching cluster_template_create's handling.
    node_groups = values.pop("node_groups", None) or []
    cluster.update(values)
    session = get_session()
    with session.begin():
        try:
            cluster.save(session=session)
        except db_exc.DBDuplicateEntry as e:
            raise ex.DBDuplicateEntry("Duplicate entry for Cluster: %s"
                                      % e.columns)
        try:
            for ng in node_groups:
                node_group = m.NodeGroup()
                node_group.update({"cluster_id": cluster.id})
                node_group.update(ng)
                node_group.save(session=session)
        except db_exc.DBDuplicateEntry as e:
            raise ex.DBDuplicateEntry("Duplicate entry for NodeGroup: %s"
                                      % e.columns)
    # Re-read so the returned object carries its related node groups.
    return cluster_get(context, cluster.id)
def data_source_create(context, values):
    """Persist a new DataSource built from *values* and return it.

    :raises ex.DBDuplicateEntry: when a conflicting row already exists
    """
    source = m.DataSource()
    source.update(values)
    try:
        source.save()
    except db_exc.DBDuplicateEntry as e:
        message = "Duplicate entry for DataSource: %s" % e.columns
        raise ex.DBDuplicateEntry(message)
    return source
def node_group_template_create(context, values):
    """Persist a new NodeGroupTemplate built from *values* and return it.

    :raises ex.DBDuplicateEntry: when a conflicting row already exists
    """
    template = m.NodeGroupTemplate()
    template.update(values)
    try:
        template.save()
    except db_exc.DBDuplicateEntry as e:
        message = "Duplicate entry for NodeGroupTemplate: %s" % e.columns
        raise ex.DBDuplicateEntry(message)
    return template
def job_execution_create(context, values):
    """Create a JobExecution inside an explicit transaction.

    :param context: request context (unused here, kept for API symmetry)
    :param values: dict of JobExecution columns
    :returns: the saved JobExecution model
    :raises ex.DBDuplicateEntry: on a unique-constraint violation
    """
    session = get_session()
    with session.begin():
        job_ex = m.JobExecution()
        job_ex.update(values)
        try:
            # Save into the transaction's session; the previous bare
            # save() used a separate default session, so the write was
            # not covered by this session.begin() transaction.
            job_ex.save(session=session)
        except db_exc.DBDuplicateEntry as e:
            raise ex.DBDuplicateEntry("Duplicate entry for JobExecution: %s"
                                      % e.columns)
    return job_ex
def job_binary_create(context, values):
    """Store a new JobBinary row and return it.

    The returned object does not include its data column, which is
    configured for deferred loading.

    :raises ex.DBDuplicateEntry: when a conflicting row already exists
    """
    binary = m.JobBinary()
    binary.update(values)
    try:
        binary.save()
    except db_exc.DBDuplicateEntry as e:
        message = "Duplicate entry for JobBinary: %s" % e.columns
        raise ex.DBDuplicateEntry(message)
    return binary
def job_binary_internal_create(context, values):
    """Returns a JobBinaryInternal that does not contain a data field

    The data column uses deferred loading.

    :param context: request context, passed to job_binary_internal_get
    :param values: dict of JobBinaryInternal columns; must contain "data"
    :raises ex.DataTooBigException: if the payload exceeds
        CONF.job_binary_max_KB
    :raises ex.DBDuplicateEntry: on a unique-constraint violation
    """
    # Work on a copy so the caller's dict is not mutated by the
    # "datasize" insertion below (the other *_create helpers in this
    # module copy values as well).
    values = values.copy()
    values["datasize"] = len(values["data"])
    datasize_KB = values["datasize"] / 1024.0
    if datasize_KB > CONF.job_binary_max_KB:
        raise ex.DataTooBigException(
            round(datasize_KB, 1), CONF.job_binary_max_KB,
            "Size of internal binary (%sKB) is "
            "greater than the maximum (%sKB)")
    job_binary_int = m.JobBinaryInternal()
    job_binary_int.update(values)
    try:
        job_binary_int.save()
    except db_exc.DBDuplicateEntry as e:
        raise ex.DBDuplicateEntry("Duplicate entry for JobBinaryInternal: %s"
                                  % e.columns)
    # Re-fetch so the deferred data column stays unloaded on the result.
    return job_binary_internal_get(context, job_binary_int.id)
def job_create(context, values):
    """Create a Job together with its mains/libs binary associations.

    :raises ex.DBDuplicateEntry: on a unique-constraint violation
    """
    main_binaries = values.pop("mains", [])
    lib_binaries = values.pop("libs", [])
    session = get_session()
    with session.begin():
        job = m.Job()
        job.update(values)
        # mains and libs are lazy relationship attributes; assigning empty
        # lists here forces their initialization inside the session even
        # when no binaries were supplied.
        job.mains = []
        job.libs = []
        try:
            _append_job_binaries(context, session, main_binaries, job.mains)
            _append_job_binaries(context, session, lib_binaries, job.libs)
            job.save(session=session)
        except db_exc.DBDuplicateEntry as e:
            message = "Duplicate entry for Job: %s" % e.columns
            raise ex.DBDuplicateEntry(message)
    return job