def test_compute_node_get_all():
    """Create one service with two compute nodes and verify the FK link."""
    context = Context("project1", "user1")
    initial_service_count = Query(models.Service).count()
    service = models.Service()
    service.host = "host1"
    service.binary = "binary1"
    service.topic = "topic1"
    service.report_count = 0
    service.save()
    initial_node_count = Query(models.ComputeNode).count()
    created_nodes = []
    for _ in range(2):
        node = models.ComputeNode()
        node.vcpus = 12
        node.service = service
        node.save()
        created_nodes.append(node)
    # Exactly one service and two compute nodes were added.
    assert Query(models.Service).count() == initial_service_count + 1
    assert Query(models.ComputeNode).count() == \
        initial_node_count + len(created_nodes)
    # Every node filtered by service_id must resolve back to the service.
    linked_nodes = Query(models.ComputeNode).filter(
        models.ComputeNode.service_id == service.id).all()
    for node in linked_nodes:
        assert node.service.id == service.id
def test_relationships_list_int(save_fixed_ip=True):
    """Verify Network <-> FixedIp foreign-key wiring with integer keys."""
    print("Ensure that foreign keys are working test_relationships_list_int(save_fixed_ip=%s)" % (save_fixed_ip))
    network = models.Network()
    network.save()
    fixed_ips = []
    for _ in range(5):
        ip = models.FixedIp()
        ip.network_id = network.id
        fixed_ips.append(ip)
        if save_fixed_ip:
            ip.save()
        else:
            # Attach the relationship object and re-save the parent instead.
            ip.network = network
            network.save()
    network_from_db = Query(models.Network,
                            models.Network.id == network.id).first()
    for ip in fixed_ips:
        ip_from_db = Query(models.FixedIp,
                           models.FixedIp.network_id == network.id,
                           models.FixedIp.id == ip.id).first()
        assert network_from_db.id == network.id
        assert ip_from_db.id == ip.id
        # Relationship loading must surface every saved fixed ip.
        network_from_db.load_relationships()
        assert network_from_db.fixed_ips is not None
        assert len(network_from_db.fixed_ips) > 0
        assert ip_from_db.id in map(lambda x: x.id, network_from_db.fixed_ips)
        # And the reverse link must point back to the same network.
        assert ip_from_db.network is not None
        assert ip_from_db.network.id == network_from_db.id
        assert ip_from_db.network_id == network_from_db.id
def test_selection(self):
    """Select a dog by name and specy, re-save it, and print its name."""
    logging.getLogger().setLevel(logging.DEBUG)
    bobby_query = Query(Dog).filter(Dog.name == "Bobby")
    bobby_query = bobby_query.filter(Dog.specy == "Griffon")
    bobby = bobby_query.first()
    bobby.save()
    print("My dog's name is %s" % (bobby.name))
    self.assertEqual(True, True)
def setUp(self):
    """Reset Service/ComputeNode tables, then create one of each for tests."""
    # Wipe leftovers from earlier runs (Python 2: map evaluates eagerly).
    map(lambda x: x.delete(), Query(models.Service).all())
    map(lambda x: x.delete(), Query(models.ComputeNode).all())
    super(ComputeNodeTestCase, self).setUp()
    self.ctxt = context.get_admin_context()
    self.service_dict = {
        'host': 'host1',
        'binary': 'nova-compute',
        'topic': CONF.compute_topic,
        'report_count': 1,
        'disabled': False,
    }
    self.service = db.service_create(self.ctxt, self.service_dict)
    self.compute_node_dict = {
        'vcpus': 2,
        'memory_mb': 1024,
        'local_gb': 2048,
        'vcpus_used': 0,
        'memory_mb_used': 0,
        'local_gb_used': 0,
        'free_ram_mb': 1024,
        'free_disk_gb': 2048,
        'hypervisor_type': "xen",
        'hypervisor_version': 1,
        'cpu_info': "",
        'running_vms': 0,
        'current_workload': 0,
        'service_id': self.service['id'],
        'disk_available_least': 100,
        'hypervisor_hostname': 'abracadabra104',
        'host_ip': '127.0.0.1',
        'supported_instances': '',
        'pci_stats': '',
        'metrics': '',
        'extra_resources': '',
        'stats': '',
        'numa_topology': '',
    }
    # Attach some random stats as a JSON blob.
    self.stats = {
        'num_instances': 3,
        'num_proj_12345': 2,
        'num_proj_23456': 2,
        'num_vm_building': 3,
    }
    self.compute_node_dict['stats'] = jsonutils.dumps(self.stats)
    self.item = db.compute_node_create(self.ctxt, self.compute_node_dict)
class LazyRelationship():
    """Lazily-resolved relationship proxy.

    Wraps a relationship descriptor ``rel`` and defers loading the related
    object(s) from the database until an attribute is first read or written.
    """

    def __init__(self, rel, request_uuid=None):
        from lib.rome.core.orm.query import Query
        self.data = None
        self.rel = rel
        self.request_uuid = request_uuid
        self.is_loaded = False
        # A *-to-many relationship resolves to a list, otherwise to a
        # single object (or None).
        self.is_relationship_list = self.rel.to_many

    def reload(self):
        """Fetch the related row(s) once; subsequent calls are no-ops."""

        def match(x, rel):
            # True when x's remote FK field equals the local FK value.
            # NOTE(review): the getattr default is the *string* "None" —
            # preserved from the original; confirm it is intentional.
            field_name = rel.remote_object_field
            x_value = getattr(x, field_name, "None")
            return x_value == rel.local_fk_value

        if self.data is not None:
            return
        data = database_driver.get_driver().getall(
            self.rel.remote_object_tablename,
            [[self.rel.remote_object_field, self.rel.local_fk_value]])
        if len(data) == 0:
            # Fast indexed lookup found nothing: fall back to a full query.
            from lib.rome.core.orm.query import Query
            self.query = Query(self.rel.remote_class)
            self.query = self.query.filter(
                getattr(self.rel.remote_class,
                        self.rel.remote_object_field) ==
                self.rel.local_fk_value)
            if self.request_uuid:
                data = self.query.all(request_uuid=self.request_uuid)
            else:
                data = self.query.all()
        else:
            from lib.rome.core.lazy import LazyValue
            # list() keeps this correct on Python 3 (lazy map); identical
            # behavior on Python 2 where map already returns a list.
            data = list(map(lambda x: LazyValue(x, self.request_uuid), data))
        # Store via __dict__ to bypass our own __setattr__ bookkeeping.
        self.__dict__["data"] = data
        # list() for the same Python 3 compatibility reason as above.
        self.data = list(filter(lambda x: match(x, self.rel), self.data))
        if not self.rel.to_many:
            # Single-valued relationship: unwrap to one object or None.
            self.data = self.data[0] if len(self.data) > 0 else None
        self.is_loaded = True

    def __getattr__(self, item):
        # Internal bookkeeping attributes must never trigger a reload.
        if item not in ["data", "rel", "query", "is_relationship_list",
                        "is_loaded", "request_uuid"]:
            self.reload()
        if item == "iteritems":
            if self.is_relationship_list:
                return self.data.iteritems
            # NOTE(review): the original evaluated a bare ``None`` here (a
            # no-op) and fell through to the generic getattr below —
            # probably meant ``return None``; fall-through preserved
            # pending confirmation.
        if item == "__nonzero__" and self.is_relationship_list:
            # Truthiness of a list-valued relationship follows its length.
            return getattr(self.data, "__len__", None)
        return getattr(self.data, item, None)

    def __setattr__(self, name, value):
        if name in ["data", "rel", "query", "is_relationship_list",
                    "is_loaded", "request_uuid"]:
            self.__dict__[name] = value
        else:
            # Any other attribute is forwarded to the wrapped object.
            self.reload()
            setattr(self.data, name, value)
        # BUGFIX: the original ended with ``return self`` — Python ignores
        # __setattr__'s return value, so it was dead code and is removed.
def flavor_get_all(context, inactive=False, filters=None, sort_key='flavorid',
                   sort_dir='asc', limit=None, marker=None):
    """Returns all flavors."""
    filters = filters or {}

    # FIXME(sirp): now that we have the `disabled` field for flavors, we
    # should probably remove the use of `deleted` to mark inactive. `deleted`
    # should mean truly deleted, e.g. we can safely purge the record out of
    # the database.
    read_deleted = "yes" if inactive else "no"

    query = _flavor_get_query(context, read_deleted=read_deleted)

    if 'min_memory_mb' in filters:
        query = query.filter(
            models.InstanceTypes.memory_mb >= filters['min_memory_mb'])
    if 'min_root_gb' in filters:
        query = query.filter(
            models.InstanceTypes.root_gb >= filters['min_root_gb'])
    if 'disabled' in filters:
        query = query.filter(
            models.InstanceTypes.disabled == filters['disabled'])
    if 'is_public' in filters and filters['is_public'] is not None:
        # Project-scoped visibility filtering is disabled in this port, so
        # the list always holds a single predicate.
        the_filter = [models.InstanceTypes.is_public == filters['is_public']]
        if len(the_filter) > 1:
            query = query.filter(or_(*the_filter))
        else:
            query = query.filter(the_filter[0])

    # A marker is only *validated*; pagination itself is not applied below.
    if marker is not None:
        marker_row = _flavor_get_query(context, read_deleted=read_deleted).\
            filter_by(flavorid=marker).\
            first()
        if not marker_row:
            raise Exception("MarkerNotFound(%s)" % (marker))

    # NOTE(review): the filtered query built above is discarded — the result
    # always comes from an unfiltered RomeQuery, and sort_key/sort_dir/limit
    # are ignored. Preserved as-is (behavior-identical rewrite).
    query = RomeQuery(models.InstanceTypes)
    inst_types = query.all()
    return [_dict_with_extra_specs(i) for i in inst_types]
def test_concurrent_update(self):
    """Run two concurrent transfers and print the resulting balances.

    Two threads each move 100 units from bob to alice inside a session;
    the final balances are printed for manual inspection.
    """
    logging.getLogger().setLevel(logging.DEBUG)
    for i in range(1, 2):
        import threading
        import time

        # Remove accounts left over from previous runs.
        existing_accounts = Query(BankAccount).all()
        for each in existing_accounts:
            each.soft_delete()

        bob_account = BankAccount()
        bob_account.money = 1000
        bob_account.owner = "bob"
        bob_account.save()

        alice_account = BankAccount()
        alice_account.money = 1000
        alice_account.owner = "alice"
        alice_account.save()

        @_retry_on_deadlock
        def transfer():
            session = Session()
            with session.begin():
                accounts = Query(BankAccount, session=session).all()
                # BUGFIX: the original compared owners with ``is``, which
                # tests object *identity* and only works by CPython
                # string-interning accident; use ``==`` for equality.
                bob_account = accounts[0] \
                    if accounts[0].owner == "bob" else accounts[1]
                alice_account = accounts[0] \
                    if accounts[0].owner == "alice" else accounts[1]
                # Move 100 via update() so the session tracks the change.
                bob_account.update({"money": bob_account.money - 100})
                alice_account.update({"money": alice_account.money + 100})
                session.add(bob_account)
                session.flush()
                session.add(alice_account)

        a = threading.Thread(target=transfer)
        b = threading.Thread(target=transfer)
        a.start()
        b.start()
        time.sleep(1)

        existing_accounts = Query(BankAccount).all()
        for each in existing_accounts:
            print(each.money)
        print("____")
def test_relationships_single_str(save_instance=True, save_info_cache=True,
                                  use_update=False, use_session=False):
    """Verify Instance <-> InstanceInfoCache FK wiring via a string uuid."""
    print(
        "Ensure that foreign keys are working test_relationships_single_str(save_instance=%s, save_info_cache=%s, use_update=%s, use_session=%s)"
        % (save_instance, save_info_cache, use_update, use_session))
    session = Session() if use_session else None

    instance_count = Query(models.Instance).count()
    instance = models.Instance()
    instance.uuid = "uuid_%s" % (instance_count)
    if save_instance:
        if use_session:
            session.add(instance)
        else:
            instance.save()

    info_cache = models.InstanceInfoCache()
    # Link the cache to the instance either by assignment or via update().
    if use_update:
        info_cache.update({"instance_uuid": instance.uuid})
    else:
        info_cache.instance_uuid = instance.uuid

    if save_info_cache:
        if use_session:
            session.add(info_cache)
        else:
            info_cache.save()
    else:
        # Persist through the instance instead of the cache itself.
        if use_session:
            session.add(instance)
        else:
            instance.save()

    if use_session:
        session.flush()

    instance_from_db = Query(models.Instance,
                             models.Instance.id == instance.id).first()
    info_cache_from_db = Query(
        models.InstanceInfoCache,
        models.InstanceInfoCache.id == info_cache.id).first()

    assert instance_from_db.id == instance.id
    assert info_cache_from_db.id == info_cache.id
    # Both directions of the relationship must resolve.
    assert instance_from_db.info_cache is not None
    assert instance_from_db.info_cache.id == info_cache.id
    assert info_cache_from_db.instance is not None
    assert info_cache_from_db.instance.id == instance.id
    assert info_cache_from_db.instance_uuid == instance.uuid
def test_join(self):
    """Exercise both join syntaxes supported by Query."""
    # Variant 1: explicit join() with an ON clause plus an aggregate.
    rows = Query(Dog, func.sum(Dog.id)).join(
        Specy, Specy.name == Dog.specy).all()
    for row in rows:
        print("%s" % (row))
    # Variant 2: cross product narrowed by a filter.
    rows = Query(Dog, Specy).filter(Specy.name == Dog.specy).all()
    for row in rows:
        print("%s" % (row))
def flavor_get_all(context, inactive=False, filters=None, sort_key='flavorid',
                   sort_dir='asc', limit=None, marker=None):
    """Returns all flavors."""
    filters = filters or {}

    # FIXME(sirp): now that we have the `disabled` field for flavors, we
    # should probably remove the use of `deleted` to mark inactive. `deleted`
    # should mean truly deleted, e.g. we can safely purge the record out of
    # the database.
    read_deleted = "yes" if inactive else "no"

    query = _flavor_get_query(context, read_deleted=read_deleted)

    # Apply each supported filter when present.
    if 'min_memory_mb' in filters:
        query = query.filter(
            models.InstanceTypes.memory_mb >= filters['min_memory_mb'])
    if 'min_root_gb' in filters:
        query = query.filter(
            models.InstanceTypes.root_gb >= filters['min_root_gb'])
    if 'disabled' in filters:
        query = query.filter(
            models.InstanceTypes.disabled == filters['disabled'])
    if 'is_public' in filters and filters['is_public'] is not None:
        the_filter = [models.InstanceTypes.is_public == filters['is_public']]
        # (Project-scoped visibility filtering was removed in this port, so
        # the_filter always has exactly one element.)
        if len(the_filter) > 1:
            query = query.filter(or_(*the_filter))
        else:
            query = query.filter(the_filter[0])

    marker_row = None
    if marker is not None:
        # Validate the marker exists; pagination itself is not applied.
        marker_row = _flavor_get_query(context, read_deleted=read_deleted).\
            filter_by(flavorid=marker).\
            first()
        if not marker_row:
            raise Exception("MarkerNotFound(%s)" % (marker))

    # NOTE(review): the filtered query above is discarded and replaced with
    # an unfiltered RomeQuery; sort/limit parameters are ignored too.
    query = RomeQuery(models.InstanceTypes)
    inst_types = query.all()
    return [_dict_with_extra_specs(i) for i in inst_types]
def test_image_get_all_devstack(context):
    """Call the glance image_get_all API twice: without and with a marker."""
    from test.glance.api import image_get_all as image_get_all_api
    images = Query(models.Image).all()

    def fetch(marker):
        # Build fresh argument dicts on every call.
        return image_get_all_api(context,
                                 filters={'deleted': False},
                                 marker=marker,
                                 limit=25,
                                 sort_key=['created_at', 'id'],
                                 sort_dir=['desc', 'desc'],
                                 member_status="accepted",
                                 is_public=None,
                                 admin_as_user=False,
                                 return_tag=True)

    # First page: no marker.
    print(fetch(None))
    # Second page: start after the third image.
    print(fetch(images[2].id))
def reload(self):
    """Load the related object(s) once; later calls are no-ops.

    Tries the driver's indexed getall() first and falls back to a full
    ORM query when that returns nothing.
    """
    def match(x, rel):
        # True when x's remote FK field equals the local FK value.
        # NOTE(review): the getattr default is the *string* "None" —
        # confirm this is intentional.
        field_name = rel.remote_object_field
        x_value = getattr(x, field_name, "None")
        return x_value == rel.local_fk_value
    # Already loaded: nothing to do.
    if self.data is not None:
        return
    data = database_driver.get_driver().getall(self.rel.remote_object_tablename, [[self.rel.remote_object_field, self.rel.local_fk_value]])
    if len(data) == 0:
        # Fallback: query the remote class directly.
        from lib.rome.core.orm.query import Query
        self.query = Query(self.rel.remote_class)
        self.query = self.query.filter(getattr(self.rel.remote_class, self.rel.remote_object_field)==self.rel.local_fk_value)
        if self.request_uuid:
            data = self.query.all(request_uuid=self.request_uuid)
        else:
            data = self.query.all()
    else:
        # Wrap raw driver rows in lazy values (Python 2: map returns a list).
        from lib.rome.core.lazy import LazyValue
        data = map(lambda x: LazyValue(x, self.request_uuid), data)
    # Store via __dict__ to bypass __setattr__ bookkeeping.
    self.__dict__["data"] = data
    self.data = filter(lambda x: match(x, self.rel), self.data)
    if not self.rel.to_many:
        # Single-valued relationship: unwrap to one object or None.
        if len(self.data) > 0:
            self.data = self.data[0]
        else:
            self.data = None
    self.is_loaded = True
def network_get_associated_fixed_ips(context, network_id, host=None):
    """Return dicts describing each fixed ip on *network_id*, joined with
    virtual-interface and instance data; optionally restricted to *host*.
    """
    # FIXME(sirp): since this returns fixed_ips, this would be better named
    # fixed_ip_get_all_by_network.
    # NOTE(vish): The ugly joins here are to solve a performance issue and
    # should be removed once we can add and remove leases
    # without regenerating the whole list
    vif_and = and_(
        models.VirtualInterface.id == models.FixedIp.virtual_interface_id,
        models.VirtualInterface.deleted == 1)
    inst_and = and_(models.Instance.uuid == models.FixedIp.instance_uuid,
                    models.Instance.deleted == 1)
    session = get_session()
    # Exactly ten columns are selected; the loop below reads positionally.
    query = session.query(models.FixedIp.address,
                          models.FixedIp.instance_uuid,
                          models.FixedIp.network_id,
                          models.FixedIp.virtual_interface_id,
                          models.VirtualInterface.address,
                          models.Instance.hostname,
                          models.Instance.updated_at,
                          models.Instance.created_at,
                          models.FixedIp.allocated,
                          models.FixedIp.leased)
    query = query.join(models.VirtualInterface).join(models.Instance)
    query = query.filter(models.FixedIp.deleted == 0)
    query = query.filter(models.FixedIp.network_id == network_id)
    query = query.join((models.VirtualInterface, vif_and))
    query = query.filter(models.FixedIp.instance_uuid != None)
    query = query.filter(models.FixedIp.virtual_interface_id != None)
    # (inst_and is built above but no longer joined in; original
    # sqlalchemy form kept for reference below.)
    # query = query.filter(models.FixedIp.deleted == 0).\
    #     filter(models.FixedIp.network_id == network_id).\
    #     join((models.VirtualInterface, vif_and)).\
    #     join((models.Instance, inst_and)).\
    #     filter(models.FixedIp.instance_uuid != None).\
    #     filter(models.FixedIp.virtual_interface_id != None)
    if host:
        query = query.filter(models.Instance.host == host)
    result = query.all()
    # Debug probe comparing against a Rome query.
    plop1 = Query(models.FixedIp).join(models.Instance).filter(
        models.Instance.uuid == models.FixedIp.instance_uuid).all()
    print(plop1)
    data = []
    for datum in result:
        cleaned = {}
        cleaned['address'] = datum[0]
        cleaned['instance_uuid'] = datum[1]
        cleaned['network_id'] = datum[2]
        cleaned['vif_id'] = datum[3]
        cleaned['vif_address'] = datum[4]
        cleaned['instance_hostname'] = datum[5]
        cleaned['instance_updated'] = datum[6]
        cleaned['instance_created'] = datum[7]
        cleaned['allocated'] = datum[8]
        cleaned['leased'] = datum[9]
        # NOTE(review): only ten columns are selected (indexes 0-9), so
        # datum[10] will raise IndexError — a default_route column is
        # probably missing from the select list above; confirm and fix.
        cleaned['default_route'] = datum[10] is not None
        data.append(cleaned)
    return data
def network_get_all_by_host(context, host):
    """Return networks reachable from *host*: owned directly, via a fixed
    ip on that host, or via an instance on that host holding a fixed ip.
    """
    session = get_session()
    # Networks whose own host column matches.
    by_network_host = Query(
        models.Network, models.Network.id).filter(
        models.Network.host == host).all()
    # Networks with a fixed ip assigned to that host.
    by_fixed_ip_host = Query(models.Network, models.Network.id)\
        .join(models.FixedIp)\
        .filter(models.Network.id == models.FixedIp.network_id)\
        .filter(models.FixedIp.host == host)\
        .all()
    # Networks whose fixed ip belongs to an instance on that host.
    by_instance_host = Query(models.Network, models.Network.id)\
        .join(models.FixedIp).join(models.Instance)\
        .filter(models.Network.id == models.FixedIp.network_id)\
        .filter(models.Instance.uuid == models.FixedIp.instance_uuid)\
        .filter(models.Instance.host == host)\
        .all()
    # Deduplicate by network id, preserving first-seen order.
    seen_ids = []
    networks = []
    for pair in by_network_host + by_fixed_ip_host + by_instance_host:
        if pair[1] not in seen_ids:
            seen_ids.append(pair[1])
            networks.append(pair[0])
    return networks
def test_relationships_list_int(save_fixed_ip=True):
    """Check Network <-> FixedIp FK wiring for a batch of five fixed ips."""
    print(
        "Ensure that foreign keys are working test_relationships_list_int(save_fixed_ip=%s)"
        % (save_fixed_ip))
    network = models.Network()
    network.save()
    created_ips = []
    for _ in range(5):
        fixed_ip = models.FixedIp()
        fixed_ip.network_id = network.id
        created_ips.append(fixed_ip)
        if save_fixed_ip:
            fixed_ip.save()
        else:
            # Link the object directly and persist through the parent.
            fixed_ip.network = network
            network.save()
    network_from_db = Query(models.Network,
                            models.Network.id == network.id).first()
    for fixed_ip in created_ips:
        fixed_ip_from_db = Query(
            models.FixedIp,
            models.FixedIp.network_id == network.id,
            models.FixedIp.id == fixed_ip.id).first()
        assert network_from_db.id == network.id
        assert fixed_ip_from_db.id == fixed_ip.id
        # The loaded network must expose all its fixed ips...
        network_from_db.load_relationships()
        assert network_from_db.fixed_ips is not None
        assert len(network_from_db.fixed_ips) > 0
        assert fixed_ip_from_db.id in map(lambda x: x.id,
                                          network_from_db.fixed_ips)
        # ...and each fixed ip must point back at the network.
        assert fixed_ip_from_db.network is not None
        assert fixed_ip_from_db.network.id == network_from_db.id
        assert fixed_ip_from_db.network_id == network_from_db.id
def reload(self):
    """Resolve the relationship once; a second call returns immediately.

    The driver's indexed getall() is attempted first; an ORM query is the
    fallback when it yields nothing.
    """
    def match(x, rel):
        # x matches when its remote FK field equals the local FK value.
        # NOTE(review): the getattr default is the string "None" — confirm
        # this is deliberate.
        field_name = rel.remote_object_field
        x_value = getattr(x, field_name, "None")
        return x_value == rel.local_fk_value

    if self.data is not None:
        return
    data = database_driver.get_driver().getall(
        self.rel.remote_object_tablename,
        [[self.rel.remote_object_field, self.rel.local_fk_value]])
    if len(data) == 0:
        # Fallback path: full query against the remote class.
        from lib.rome.core.orm.query import Query
        self.query = Query(self.rel.remote_class)
        self.query = self.query.filter(
            getattr(self.rel.remote_class,
                    self.rel.remote_object_field) == self.rel.local_fk_value)
        if self.request_uuid:
            data = self.query.all(
                request_uuid=self.request_uuid
            )
        else:
            data = self.query.all(
            )
    else:
        # Wrap the raw rows lazily (Python 2: map yields a list).
        from lib.rome.core.lazy import LazyValue
        data = map(lambda x: LazyValue(x, self.request_uuid), data)
    # Direct __dict__ write bypasses __setattr__ bookkeeping.
    self.__dict__["data"] = data
    self.data = filter(lambda x: match(x, self.rel), self.data)
    if not self.rel.to_many:
        # To-one relationship: collapse the list to one object or None.
        if len(self.data) > 0:
            self.data = self.data[0]
        else:
            self.data = None
    self.is_loaded = True
def transfer():
    """Atomically move 100 units from bob's account to alice's.

    Runs inside a fresh session transaction; both accounts are re-read
    from the database before the balances are adjusted.
    """
    session = Session()
    with session.begin():
        accounts = Query(BankAccount, session=session).all()
        # BUGFIX: the original used ``is "bob"`` / ``is "alice"`` — an
        # identity check that only works by CPython string-interning
        # accident; compare with ``==`` instead.
        bob_account = accounts[0] \
            if accounts[0].owner == "bob" else accounts[1]
        alice_account = accounts[0] \
            if accounts[0].owner == "alice" else accounts[1]
        # Adjust balances via update() so the session tracks the change.
        bob_account.update({"money": bob_account.money - 100})
        alice_account.update({"money": alice_account.money + 100})
        session.add(bob_account)
        session.flush()
        session.add(alice_account)
def _normalize_tags(image):
    """Replace image['tags'] with the values of its non-deleted tags."""
    image['tags'] = [tag['value'] for tag in image['tags'] if not tag.deleted]
    return image


class Context(object):
    """Minimal request context carrying ids and visibility flags."""

    def __init__(self, project_id, user_id, can_see_deleted, is_admin):
        self.project_id = project_id
        self.user_id = user_id
        self.can_see_deleted = can_see_deleted
        self.is_admin = is_admin


if __name__ == "__main__":
    logging.getLogger().setLevel(logging.DEBUG)
    context = Context("project1", "user1", True, True)
    if Query(models.Image).count() == 0:
        # Seed one queued image when the table is empty.
        import uuid
        image_create(context, {"size": 256,
                               "status": "queued",
                               "id": str(uuid.uuid1())})
    image_id = Query(models.Image).first().id
    # Flip the first image to active and show the stored status.
    _image_update(context, {"size": 256, "status": "active"}, image_id)
    print(Query(models.Image).first().status)
# all() processed_pairs = [] result = [] for pair in network_ids1 + network_ids2 + network_ids3: if pair[1] not in processed_pairs: processed_pairs += [pair[1]] result += [pair[0]] return result class Context(object): def __init__(self, project_id, user_id): self.project_id = project_id self.user_id = user_id if __name__ == '__main__': logging.getLogger().setLevel(logging.DEBUG) context = Context("admin", "admin") host = "jonathan-VirtualBox" # host = "edel-17" print(network_get_all_by_host(context, host)) fixed_ips = Query( models.FixedIp).filter(models.FixedIp.updated_at != None).all() print(fixed_ips[0].updated_at)
def run(self):
    """Fetch the first info-cache row matching instance_uuid."""
    cache_query = Query(models.InstanceInfoCache).filter_by(
        instance_uuid=instance_uuid)
    cache_query.first()
# # instance.obj_reset_changes() # return instance class Context(object): def __init__(self, project_id, user_id): self.project_id = project_id self.user_id = user_id class ModelInstance(dict): def __init__(self): self.fields = [] self.deleted = None self.cleaned = None if __name__ == '__main__': logging.getLogger().setLevel(logging.DEBUG) context = Context("project1", "user1") one_instance = Query(models.Instance).first() # not one_instance.system_metadata coin = ModelInstance() _from_db_object( context, coin, one_instance, ['metadata', 'system_metadata', 'info_cache', 'security_groups'])
__author__ = 'jonathan'

import logging

import _fixtures as models
from lib.rome.core.orm.query import Query

if __name__ == '__main__':
    logging.getLogger().setLevel(logging.DEBUG)

    instance_uuid = "e114f2ae-007b-4c51-bda5-f120119ea732"
    host = "hercule-2"
    topic = "conductor"

    # result = Query(models.Service).filter(models.Service.host==host).filter(models.Service.topic==topic).all()
    # Repeat the identical lookup six times (crude latency probe); only the
    # last result is inspected.
    result = []
    for _ in range(6):
        result = Query(
            models.InstanceExtra).filter_by(instance_uuid=instance_uuid).all()
    print(len(result))
def test_instance_faults_get_by_instance_uuids():
    """Fetch faults for every known instance uuid."""
    all_instances = Query(models.Instance).all()
    uuids = [inst.uuid for inst in all_instances]
    get_by_instance_uuids(InstanceFault, context, uuids)
print(result)


def test(context):
    """Run glance's _select_images_query with empty filters and print rows."""
    from glance.db.discovery.api import _select_images_query as foo
    query = foo(context, [], False, "accepted", None)
    print(query.all())


if __name__ == "__main__":
    logging.getLogger().setLevel(logging.DEBUG)
    context = Context("admin", "admin", True, True)
    # Populate the image table once when empty.
    if Query(models.Image).count() == 0:
        create_mock_data()
    # Earlier join/ordering experiments kept for reference:
    # result = Query(models.Image.id, models.ImageMember.id, models.Image).join(models.ImageMember).all()
    # print(result)
    #
    # print(models.Image.name.desc())
    # v = models.Image.name.desc()
    #
    # query = Query(models.Image).order_by(models.Image.created_at.desc(), models.Image.id.desc())
    # query = Query(models.Image).order_by(models.Image.created_at.desc(), models.Image.id.desc())
    # # query = Query(models.Image).order_by(models.Image.created_at.desc())
    # result = query.all()
    #
    # for r in result:
    #     print(r.id)
def test_relationships_single_object(save_instance=True,
                                     save_info_cache=True,
                                     use_update=False,
                                     update_instance=False,
                                     use_session=False):
    """Verify Instance <-> InstanceInfoCache wiring when linking via objects.

    The flags choose which side carries the relationship, whether update()
    or attribute assignment is used, and whether a Session mediates writes.
    """
    print(
        "Ensure that foreign keys are working test_relationships_single_object(save_instance=%s, save_info_cache=%s, use_update=%s, update_instance=%s, use_session=%s)"
        % (save_instance, save_info_cache, use_update, update_instance,
           use_session))
    session = None
    if use_session:
        session = Session()
    instance_count = Query(models.Instance).count()
    instance = models.Instance()
    instance_uuid = "uuid_%s" % (instance_count)
    if save_instance:
        if use_session:
            session.add(instance)
        else:
            instance.save()
    instance_info_cache = models.InstanceInfoCache()
    if update_instance:
        # Link the cache through the *instance* side.
        if not use_update:
            instance.info_cache = instance_info_cache
            instance.uuid = instance_uuid
        else:
            # CLASSIC
            # instance.update({"info_cache": instance_info_cache})
            # DEBUG
            values = {}
            values['uuid'] = instance_uuid
            # instance['info_cache'] = models.InstanceInfoCache()
            instance['info_cache'] = instance_info_cache
            # NOTE(review): values only ever holds 'uuid', so this pop
            # always returns None and the branch below never runs.
            info_cache = values.pop('info_cache', None)
            if info_cache is not None:
                instance['info_cache'].update(info_cache)
            instance.update(values, do_save=False)
        if not save_info_cache:
            if use_session:
                session.add(instance)
            else:
                instance.save()
        else:
            if use_session:
                session.add(instance_info_cache)
            else:
                instance_info_cache.save()
    else:
        # Link the instance through the *cache* side.
        instance.uuid = instance_uuid
        if not use_update:
            instance_info_cache.instance = instance
        else:
            instance_info_cache.update({"instance": instance})
        if not save_info_cache:
            instance.save()
        else:
            if use_session:
                session.add(instance_info_cache)
            else:
                instance_info_cache.save()
    if use_session:
        session.flush()
    instance_from_db = Query(models.Instance,
                             models.Instance.id == instance.id).first()
    instance_info_cache_from_db = Query(
        models.InstanceInfoCache,
        models.InstanceInfoCache.id == instance_info_cache.id).first()
    # Both directions of the relationship must resolve from the database.
    assert instance_from_db.id == instance.id
    assert instance_info_cache_from_db.id == instance_info_cache.id
    assert instance_from_db.info_cache is not None
    assert instance_from_db.info_cache.id == instance_info_cache.id
    assert instance_info_cache_from_db.instance is not None
    assert instance_info_cache_from_db.instance.id == instance.id
    assert instance_info_cache_from_db.instance_uuid == instance.uuid
def run(self):
    """Join FixedIp to Network on network_id and fetch every row."""
    join_query = Query(models.FixedIp).join(
        models.Network, models.Network.id == models.FixedIp.network_id)
    join_query.all()
# NOTE(review): this snippet starts mid-function — the lines below are the
# tail of extract_flavor(), rebuilding a flavor dict from system_metadata.
extra_specs = [(k, v) for k, v in sys_meta.items()
               if k.startswith('%sinstance_type_extra_' % prefix)]
if extra_specs:
    instance_type['extra_specs'] = {}
    for key, value in extra_specs:
        # Strip the metadata prefix to recover the original spec key.
        extra_key = key[len('%sinstance_type_extra_' % prefix):]
        instance_type['extra_specs'][extra_key] = value
return instance_type


class Context(object):
    # Minimal request context: project and user ids only.
    def __init__(self, project_id, user_id):
        self.project_id = project_id
        self.user_id = user_id


class ModelInstance(dict):
    # Dict-backed fake model with the attributes extract_flavor expects.
    def __init__(self):
        self.fields = []
        self.deleted = None
        self.cleaned = None


if __name__ == '__main__':
    logging.getLogger().setLevel(logging.DEBUG)
    context = Context("project1", "user1")
    # Run flavor extraction over every stored instance.
    for instance in Query(models.Instance).all():
        # one_instance = Query(models.Instance).filter(models.Instance.id==1).first()
        extract_flavor(instance)
def run(self):
    """Look up the first InstanceInfoCache row for instance_uuid."""
    lookup = Query(models.InstanceInfoCache).filter_by(
        instance_uuid=instance_uuid)
    lookup.first()
class LazyRelationship():
    """Proxy that defers loading a relationship until first access."""

    def __init__(self, rel, request_uuid=None):
        from lib.rome.core.orm.query import Query
        self.data = None
        self.rel = rel
        self.request_uuid = request_uuid
        self.is_loaded = False
        # True when the relationship is *-to-many (resolves to a list).
        self.is_relationship_list = self.rel.to_many
        # print(self.request_uuid)
        # self.query = Query(rel.remote_class)
        # self.query = self.query.filter(getattr(rel.remote_class, rel.remote_object_field)==rel.local_fk_value)

    def reload(self):
        """Fetch the related row(s) once; subsequent calls are no-ops."""

        def match(x, rel):
            # True when x's remote FK field equals the local FK value.
            # NOTE(review): getattr default is the string "None" — confirm
            # this is deliberate.
            field_name = rel.remote_object_field
            x_value = getattr(x, field_name, "None")
            return x_value == rel.local_fk_value

        if self.data is not None:
            return
        data = database_driver.get_driver().getall(
            self.rel.remote_object_tablename,
            [[self.rel.remote_object_field, self.rel.local_fk_value]])
        if len(data) == 0:
            # Indexed lookup found nothing: fall back to a full ORM query.
            from lib.rome.core.orm.query import Query
            self.query = Query(self.rel.remote_class)
            self.query = self.query.filter(
                getattr(self.rel.remote_class,
                        self.rel.remote_object_field) ==
                self.rel.local_fk_value)
            if self.request_uuid:
                data = self.query.all(
                    request_uuid=self.request_uuid
                )
            else:
                data = self.query.all(
                )
        else:
            # Wrap raw rows lazily (Python 2: map returns a list).
            from lib.rome.core.lazy import LazyValue
            data = map(lambda x: LazyValue(x, self.request_uuid), data)
        # __dict__ write bypasses __setattr__ bookkeeping.
        self.__dict__["data"] = data
        self.data = filter(lambda x: match(x, self.rel), self.data)
        if not self.rel.to_many:
            # To-one relationship: unwrap to one object or None.
            if len(self.data) > 0:
                self.data = self.data[0]
            else:
                self.data = None
        self.is_loaded = True

    def __getattr__(self, item):
        # Bookkeeping attributes never trigger a load.
        if item not in [
                "data", "rel", "query", "is_relationship_list", "is_loaded",
                "request_uuid"
        ]:
            self.reload()
        if item == "iteritems":
            if self.is_relationship_list:
                return self.data.iteritems
            else:
                # NOTE(review): bare ``None`` is a no-op; control falls
                # through to the generic getattr below — probably intended
                # ``return None``.
                None
        if item == "__nonzero__" and self.is_relationship_list:
            # Truthiness of a list relationship follows its length.
            return getattr(self.data, "__len__", None)
        return getattr(self.data, item, None)

    def __setattr__(self, name, value):
        if name in [
                "data", "rel", "query", "is_relationship_list", "is_loaded",
                "request_uuid"
        ]:
            self.__dict__[name] = value
        else:
            # Forward to the wrapped object once it is loaded.
            self.reload()
            setattr(self.data, name, value)
        # NOTE(review): Python ignores __setattr__'s return value, so this
        # ``return self`` is dead code.
        return self
def query(self, *entities, **kwargs):
    """Build a Query over *entities* bound to this session."""
    from lib.rome.core.orm.query import Query
    bound_kwargs = merge_dicts(kwargs, {"session": self})
    return Query(*entities, **bound_kwargs)
def run(self):
    """Run a FixedIp-to-Network join keyed on network_id."""
    joined = Query(models.FixedIp).join(
        models.Network,
        models.Network.id == models.FixedIp.network_id)
    joined.all()
# NOTE(review): this snippet starts mid-function — the lines below are the
# tail of a get_by_network-style builder wrapping query rows into FixedIP
# objects.
fip = FixedIP(context=context,
              address=info['address'],
              instance_uuid=info['instance_uuid'],
              network_id=info['network_id'],
              virtual_interface_id=info['vif_id'],
              allocated=info['allocated'],
              leased=info['leased'],
              default_route=info['default_route'],
              instance=inst,
              virtual_interface=vif)
fips.objects.append(fip)
fips.obj_reset_changes()
return fips


class Context(object):
    # Minimal request context: project and user ids only.
    def __init__(self, project_id, user_id):
        self.project_id = project_id
        self.user_id = user_id


if __name__ == '__main__':
    logging.getLogger().setLevel(logging.DEBUG)
    context = Context("admin", "admin")
    # Fetch network id 1 and list its fixed ips.
    network = Query(models.Network).filter(models.Network.id == 1).all()[0]
    result = get_by_network(FixedIP, context, network)
    print(result)
def compute_node_get_all(context, no_date_fields):
    """Return every ComputeNode via a Rome query.

    The historical lower-level ``select`` implementation (manual
    ComputeNode/Service join chosen by msdubov for a ~3x speed-up and much
    lower network/memory load) has been replaced by a plain RomeQuery;
    ``no_date_fields`` is currently ignored.
    """
    from lib.rome.core.dataformat.json import Encoder
    from lib.rome.core.dataformat.json import Decoder

    nodes_query = RomeQuery(models.ComputeNode)
    compute_nodes = nodes_query.all()

    def novabase_to_dict(ref):
        # Round-trip through the JSON codec to detach the object from the
        # novabase machinery.
        request_uuid = uuid.uuid1()
        encoder = Encoder(request_uuid=request_uuid)
        decoder = Decoder(request_uuid=request_uuid)
        json_object = encoder.simplify(ref)
        json_object.pop("_metadata_novabase_classname")
        return decoder.desimplify(json_object)

    # novabase_to_dict is kept (currently unused) for parity with the
    # original, which also defined but skipped the dict-conversion loop.
    return compute_nodes
# NOTE(review): this snippet starts mid-function — "first()" below is the
# tail of a chained query expression whose beginning is not visible here.
first()
if not result:
    # NOTE(review): this instantiates an Exception without raising it —
    # almost certainly should be ``raise Exception("toto")``.
    Exception("toto")
return result


class Context(object):
    # Minimal request context: project and user ids only.
    def __init__(self, project_id, user_id):
        self.project_id = project_id
        self.user_id = user_id


class ModelInstance(dict):
    # Dict-backed fake model with the attributes the db api expects.
    def __init__(self):
        self.fields = []
        self.deleted = None
        self.cleaned = None


if __name__ == '__main__':
    logging.getLogger().setLevel(logging.DEBUG)
    context = Context("project1", "user1")
    host = "jonathan-VirtualBox"
    # host = "edel-17"
    # Exercise fixed_ip_get_by_network_host on the first network only.
    for network in Query(models.Network):
        fixed_ip_get_by_network_host(context, network.id, host)
        break
def compute_node_get_all(context, no_date_fields):
    """List all ComputeNode records through a RomeQuery.

    NOTE(msdubov, historical): a lower-level ``select`` with a manual
    ComputeNode/Service join used to run here for performance; it has been
    swapped for a straight RomeQuery and ``no_date_fields`` is ignored.
    """
    from lib.rome.core.dataformat.json import Encoder
    from lib.rome.core.dataformat.json import Decoder

    all_nodes = RomeQuery(models.ComputeNode).all()

    def novabase_to_dict(ref):
        # Detach a novabase object by simplifying to JSON and back.
        request_uuid = uuid.uuid1()
        encoder = Encoder(request_uuid=request_uuid)
        decoder = Decoder(request_uuid=request_uuid)
        json_object = encoder.simplify(ref)
        json_object.pop("_metadata_novabase_classname")
        return decoder.desimplify(json_object)

    # The dict-conversion loop from the original remains disabled; the raw
    # node objects are returned directly. novabase_to_dict is kept for
    # parity with the original code.
    return all_nodes
# NOTE(review): this snippet starts inside the Dog model's class body.
id = Column(Integer, primary_key=True)
name = Column(String(255))
# Integer FK column; the relationship below joins it to Specy.id.
specy_id = Column(Integer)
specy = orm.relationship(Specy, backref="dogs", foreign_keys=specy_id,
                         primaryjoin='Dog.specy_id == Specy.id')


if __name__ == '__main__':
    # Idempotent bootstrap: seed species and dogs only when tables are empty.
    dogs_names = ["rintintin", "rantanplan", "bobby"]
    species_names = ["griffon", "beaggle", "labrador", "cocker"]
    if Query(Specy).count() == 0:
        for specy_name in species_names:
            specy = Specy()
            specy.name = specy_name
            specy.save()
    if Query(Dog).count() == 0:
        # Every specy gets one dog of each name.
        for specy in Query(Specy).all():
            for dog_name in dogs_names:
                dog = Dog()
                dog.name = dog_name
                dog.specy_id = specy.id
                dog.save()
    # query = Query(Dog).join(Specy, Dog.specy_id==Specy.id)
    # for row in query.all():
def query(self, *entities, **kwargs):
    """Create a Query over the given entities, forwarding all kwargs."""
    built = Query(*entities, **kwargs)
    return built