def extract_complex_object(self, obj):
    """Extract an object where each attribute has been simplified.

    :param obj: a SQLAlchemy-mapped object, a plain object with a
        ``__dict__``, or a dict
    :return: a dict mapping each field name to its simplified value
    """
    # Pick the collection of field names to iterate, depending on the kind
    # of object received.
    fields_iterator = None
    if hasattr(obj, "_sa_class_manager"):
        fields_iterator = obj._sa_class_manager
    elif hasattr(obj, "__dict__"):
        fields_iterator = obj.__dict__
    elif obj.__class__.__name__ == "dict":
        # NOTE(review): iterating a dict yields its keys, yet the loop below
        # reads values with getattr(obj, field) — confirm plain dicts really
        # reach this code path.
        fields_iterator = obj
    complex_object = {}
    # Materialize the relationship field names as a set: the previous code
    # kept the bare map() iterator, which is exhausted after the first
    # membership test under Python 3 and costs O(n) per lookup otherwise.
    relationships_fields = set(
        x.local_object_field for x in obj.get_relationships())
    if fields_iterator is not None:
        for field in fields_iterator:
            field_value = getattr(obj, field)
            if field in relationships_fields:
                # Relationship fields are processed only for their side
                # effects (cache population); they are not stored here.
                self.process_object(field_value)
                continue
            if utils.is_novabase(field_value):
                complex_object[field] = self.process_field(field_value)
            elif isinstance(field_value, list):
                complex_object[field] = [
                    self.process_field(item) for item in field_value
                ]
            else:
                complex_object[field] = self.process_field(field_value)
    return complex_object
def process_field(self, field_value):
    """Simplify a single field value.

    NovaBase values are processed once (populating the caches) and then
    served from ``simple_cache``; any other value is delegated to the
    generic :meth:`process_object` strategy.
    """
    if not utils.is_novabase(field_value):
        # Non-NovaBase values go straight through the generic strategy.
        return self.process_object(field_value, False)
    # Ensure the NovaBase object has been processed at least once, then
    # return its cached simplified representation.
    if not self.already_processed(field_value):
        self.process_object(field_value, False)
    return self.simple_cache[self.get_cache_key(field_value)]
def extract_adress(obj):
    """Extract an identifier for the given object.

    If the object is a NovaBase, return the address embedded in its repr
    (the part between "at " and ">"); otherwise return the memory address
    of the given object.

    :param obj: any object
    :return: a string identifier (hex address)
    """
    result = hex(id(obj))
    try:
        if utils.is_novabase(obj):
            result = str(obj).split("at ")[1].split(">")[0]
    except Exception:
        # Best effort: fall back to the plain memory address when the repr
        # does not have the expected "... at 0x...>" shape (or when the
        # novabase check itself fails). Narrowed from a bare `except:` so
        # that KeyboardInterrupt/SystemExit are no longer swallowed.
        pass
    return result
def object_simplify(self, obj):
    """Convert this object to a dictionary that contains simplified values:
    every value is simplified according to the appropriate strategy.

    :param obj: the object to simplify
    :return: either `obj` itself (basic types) or a dict of simplified
        fields, enriched with NovaBase metadata when applicable
    """
    result = obj
    do_deep_simplification = False
    is_basic_type = False
    try:
        # Only objects that expose fields (an attribute dict or a plain
        # dict) are deeply simplified; everything else is returned as-is.
        if hasattr(obj, "__dict__") or obj.__class__.__name__ == "dict":
            do_deep_simplification = True
    except:
        is_basic_type = True
    if do_deep_simplification and not is_basic_type:
        novabase_classname = str(obj.__class__.__name__)
        # Lazy wrappers advertise the model class they stand for.
        if novabase_classname == "LazyReference" or novabase_classname == "LazyValue":
            novabase_classname = obj.resolve_model_name()
        # Already-simplified dicts may carry their original class name.
        if isinstance(obj, dict) and "novabase_classname" in obj:
            novabase_classname = obj["novabase_classname"]
        # Initialize fields to iterate
        if hasattr(obj, "reload_default_values"):
            obj.reload_default_values()
        result = self.extract_complex_object(obj)
        if utils.is_novabase(obj):
            key = self.get_cache_key(obj)
            if not key in self.complex_cache:
                # First time this object is seen for this request: fill the
                # complex/simple/target caches, then attach metadata (class
                # name, object address, request id) to both cached views.
                self.complex_cache[key] = result
                self.simple_cache[key] = self.novabase_simplify(obj, True)
                self.target_cache[key] = obj
                metadata_class_name = novabase_classname
                metadata_dict = {
                    "_metadata_novabase_classname": metadata_class_name,
                    "_pid": extract_adress(obj),
                    "_rid": str(self.request_uuid)
                }
                self.complex_cache[key] = utils.merge_dicts(
                    self.complex_cache[key], metadata_dict)
                self.simple_cache[key] = utils.merge_dicts(
                    self.simple_cache[key], metadata_dict)
            # NovaBase objects always resolve to the cached (metadata-
            # enriched) representation.
            result = self.complex_cache[key]
    return result
def process_object(self, obj, skip_reccursive_call=True):
    """Apply the best simplification strategy to the given object.

    :param obj: the value to simplify
    :param skip_reccursive_call: when True (the default) — or when the
        object was already processed — NovaBase objects get a shallow
        simplification instead of the deep, cached one
    :return: the simplified representation of ``obj``; datetime and
        IPNetwork values get a dedicated encoding, any other non-NovaBase
        value is returned unchanged
    """
    # NOTE(review): a dead `from lib.rome.core.lazy import LazyValue` was
    # removed here — the name was never used in this function.
    should_skip = self.already_processed(obj) or skip_reccursive_call
    if utils.is_novabase(obj):
        if should_skip:
            result = self.novabase_simplify(obj)
        else:
            # Simplifying populates self.complex_cache under the object's
            # cache key; return the cached deep representation.
            key = self.get_cache_key(obj)
            self.novabase_simplify(obj)
            result = self.complex_cache[key]
    elif obj.__class__.__name__ == "datetime":
        result = self.datetime_simplify(obj)
    elif obj.__class__.__name__ == "IPNetwork":
        result = self.ipnetwork_simplify(obj)
    else:
        result = obj
    return result
def construct_rows(models, criterions, hints, session=None,
                   request_uuid=None, order_by=None):
    """This function constructs the rows that correspond to the current orm.

    :param models: the selectables of the query
    :param criterions: the filtering criterions of the query
    :param hints: indexing hints used to speed up the loading of objects
    :param session: unused here; kept for interface compatibility
    :param request_uuid: identifier of the request (generated when None)
    :param order_by: ordering specification forwarded to the tuple builder
    :return: a list of rows, according to sqlalchemy expectation
    """
    current_milli_time = lambda: int(round(time.time() * 1000))

    metadata = {}
    part1_starttime = current_milli_time()

    if request_uuid is None:
        request_uuid = uuid.uuid1()

    labels = []
    columns = set([])
    rows = []

    model_set = extract_models(models)

    # Get the fields of the join result
    for selectable in model_set:
        labels += [find_table_name(selectable._model)]
        if selectable._attributes == "*":
            try:
                selected_attributes = selectable._model._sa_class_manager
            except Exception:
                # Fall back on the mapped class when the selectable wraps a
                # mapper rather than the model class itself.
                traceback.print_exc()
                selected_attributes = \
                    selectable._model.class_._sa_class_manager
        else:
            selected_attributes = [selectable._attributes]
        for field in selected_attributes:
            attribute = None
            if has_attribute(models, "class_"):
                attribute = selectable._model.class_._sa_class_manager[
                    field].__str__()
            elif has_attribute(models, "_sa_class_manager"):
                attribute = selectable._model._sa_class_manager[
                    field].__str__()
            if attribute is not None:
                columns.add(attribute)
    part2_starttime = current_milli_time()

    # Loading objects (from database)
    list_results = []
    for selectable in model_set:
        tablename = find_table_name(selectable._model)
        authorized_secondary_indexes = get_attribute(
            selectable._model, "_secondary_indexes", [])
        # Keep only the hints that target this table on an indexed
        # attribute. Materialized as lists so the code behaves the same
        # under Python 2 and Python 3 (filter/map are lazy in Python 3).
        selected_hints = [
            x for x in hints
            if x.table_name == tablename and (
                x.attribute == "id"
                or x.attribute in authorized_secondary_indexes)
        ]
        reduced_hints = [(x.attribute, x.value) for x in selected_hints]
        objects = get_objects(tablename, request_uuid=request_uuid,
                              skip_loading=False, hints=reduced_hints)
        list_results += [objects]
    part3_starttime = current_milli_time()

    # Building tuples
    building_tuples = join_building_tuples
    tuples = building_tuples(list_results, labels, criterions, hints,
                             metadata=metadata, order_by=order_by)
    part4_starttime = current_milli_time()

    # Filtering tuples (cartesian product)
    for product in tuples:
        if len(product) > 0:
            row = KeyedTuple(product, labels=labels)
            rows += [extract_sub_row(row, model_set, labels)]
    part5_starttime = current_milli_time()

    deconverter = get_decoder(request_uuid=request_uuid)

    # Reordering tuples (+ selecting attributes)
    final_rows = []
    showable_selection = [
        x for x in models if (not x.is_hidden) or x._is_function
    ]
    part6_starttime = current_milli_time()

    # Selecting attributes
    if any_selectable_is_function(models):
        final_row = []
        for selection in showable_selection:
            if selection._is_function:
                value = selection._function._function(rows)
                final_row += [value]
            else:
                final_row += [None]
        # Built as a real list (not a lazy map object) so that callers can
        # index the returned row under Python 3.
        final_row = [deconverter.desimplify(x) for x in final_row]
        return [final_row]
    else:
        for row in rows:
            final_row = []
            for selection in showable_selection:
                if selection._is_function:
                    value = selection._function._function(rows)
                    final_row += [value]
                else:
                    current_table_name = find_table_name(selection._model)
                    key = current_table_name
                    if not is_novabase(row) and has_attribute(row, key):
                        value = get_attribute(row, key)
                    else:
                        value = row
                    if value is not None:
                        if selection._attributes != "*":
                            final_row += [
                                get_attribute(value, selection._attributes)
                            ]
                        else:
                            final_row += [value]
            # Real list for the same Python 3 reason as above.
            final_row = [
                wrap_with_lazy_value(x, request_uuid=request_uuid)
                for x in final_row
            ]
            if len(showable_selection) == 1:
                final_rows += final_row
            else:
                final_rows += [final_row]
    part7_starttime = current_milli_time()

    # Timing report (JSON) for each query phase; preserved byte-for-byte.
    query_information = """{"building_query": %s, "loading_objects": %s, "building_tuples": %s, "filtering_tuples": %s, "reordering_columns": %s, "selecting_attributes": %s, "description": "%s", "timestamp": %i}""" % (
        part2_starttime - part1_starttime, part3_starttime - part2_starttime,
        part4_starttime - part3_starttime, part5_starttime - part4_starttime,
        part6_starttime - part5_starttime, part7_starttime - part6_starttime,
        metadata["sql"] if "sql" in metadata else
        """{\\"models\\": \\"%s\\", \\"criterions\\": \\"%s\\"}""" % (
            models, criterions),
        current_milli_time())
    logging.info(query_information)
    if file_logger_enabled:
        file_logger.info(query_information)

    return final_rows
def construct_rows(models, criterions, hints, session=None):
    """This function constructs the rows that correspond to the current orm.

    NOTE(review): this is an older 4-argument variant of construct_rows; if
    it lives in the same module as the 6-argument variant, the later
    definition shadows the earlier one — confirm which one is intended.

    :param models: the selectables of the query
    :param criterions: the filtering criterions of the query
    :param hints: indexing hints used to speed up the loading of objects
    :param session: unused here; kept for interface compatibility
    :return: a list of rows, according to sqlalchemy expectation
    """
    current_milli_time = lambda: int(round(time.time() * 1000))

    part1_starttime = current_milli_time()
    request_uuid = uuid.uuid1()

    labels = []
    columns = set([])
    rows = []

    model_set = extract_models(models)

    # get the fields of the join result
    for selectable in model_set:
        labels += [find_table_name(selectable._model)]
        if selectable._attributes == "*":
            try:
                selected_attributes = selectable._model._sa_class_manager
            except Exception:
                # Fall back on the mapped class when the selectable wraps a
                # mapper rather than the model class itself.
                selected_attributes = \
                    selectable._model.class_._sa_class_manager
        else:
            selected_attributes = [selectable._attributes]
        for field in selected_attributes:
            attribute = None
            if has_attribute(models, "class_"):
                attribute = selectable._model.class_._sa_class_manager[
                    field].__str__()
            elif has_attribute(models, "_sa_class_manager"):
                attribute = selectable._model._sa_class_manager[
                    field].__str__()
            if attribute is not None:
                columns.add(attribute)
    part2_starttime = current_milli_time()

    # loading objects (from database)
    list_results = []
    for selectable in model_set:
        tablename = find_table_name(selectable._model)
        authorized_secondary_indexes = get_attribute(
            selectable._model, "_secondary_indexes", [])
        # Materialized as lists so the code behaves the same under Python 2
        # and Python 3 (filter/map are lazy in Python 3).
        selected_hints = [
            x for x in hints
            if x.table_name == tablename and (
                x.attribute == "id"
                or x.attribute in authorized_secondary_indexes)
        ]
        reduced_hints = [(x.attribute, x.value) for x in selected_hints]
        objects = get_objects(tablename, request_uuid=request_uuid,
                              skip_loading=False, hints=reduced_hints)
        list_results += [objects]
    part3_starttime = current_milli_time()

    # construct the cartesian product
    tuples = building_tuples_experimental(list_results, labels, criterions,
                                          hints)
    part4_starttime = current_milli_time()

    # filtering tuples (cartesian product): keep each distinct row that
    # satisfies every criterion, deduplicated on the row's string form
    indexed_rows = {}
    for product in tuples:
        if len(product) > 0:
            row = KeyedTuple(product, labels=labels)
            row_index_key = "%s" % (str(row))
            if row_index_key in indexed_rows:
                continue
            all_criterions_satisfied = True
            for criterion in criterions:
                if not criterion.evaluate(row):
                    all_criterions_satisfied = False
                    break  # one failing criterion is enough to reject
            if all_criterions_satisfied:
                indexed_rows[row_index_key] = True
                rows += [extract_sub_row(row, model_set)]
    part5_starttime = current_milli_time()

    deconverter = get_decoder(request_uuid=request_uuid)

    # reordering tuples (+ selecting attributes)
    final_rows = []
    showable_selection = [
        x for x in models if (not x.is_hidden) or x._is_function
    ]
    part6_starttime = current_milli_time()

    # selecting attributes
    if all_selectable_are_functions(models):
        final_row = []
        for selection in showable_selection:
            value = selection._function._function(rows)
            final_row += [value]
        # Built as a real list (not a lazy map object) so that callers can
        # index the returned row under Python 3.
        final_row = [deconverter.desimplify(x) for x in final_row]
        return [final_row]
    else:
        for row in rows:
            final_row = []
            for selection in showable_selection:
                if selection._is_function:
                    value = selection._function._function(rows)
                    final_row += [value]
                else:
                    current_table_name = find_table_name(selection._model)
                    key = current_table_name
                    if not is_novabase(row) and has_attribute(row, key):
                        value = get_attribute(row, key)
                    else:
                        value = row
                    if value is not None:
                        if selection._attributes != "*":
                            final_row += [
                                get_attribute(value, selection._attributes)
                            ]
                        else:
                            final_row += [value]
            # Real list for the same Python 3 reason as above.
            final_row = [LazyValue(x, request_uuid) for x in final_row]
            if len(showable_selection) == 1:
                final_rows += final_row
            else:
                final_rows += [final_row]
    part7_starttime = current_milli_time()

    # Timing report (JSON) for each query phase; preserved byte-for-byte.
    query_information = """{"building_query": %s, "loading_objects": %s, "building_tuples": %s, "filtering_tuples": %s, "reordering_columns": %s, "selecting_attributes": %s, "description": "%s", "timestamp": %i}""" % (
        part2_starttime - part1_starttime,
        part3_starttime - part2_starttime,
        part4_starttime - part3_starttime,
        part5_starttime - part4_starttime,
        part6_starttime - part5_starttime,
        part7_starttime - part6_starttime,
        """{\\"models\\": \\"%s\\", \\"criterions\\": \\"%s\\"}""" % (
            models, criterions),
        current_milli_time())
    logging.info(query_information)
    if file_logger_enabled:
        file_logger.info(query_information)

    return final_rows