def merge(self,
          original_id,
          duplicate_id,
          replace=None,
          update=None,
          main=True):
    """
        Merge a duplicate record into its original and remove the
        duplicate, updating all references in the database.

        @param original_id: the ID of the original record
        @param duplicate_id: the ID of the duplicate record
        @param replace: list fields names for which to replace the
                        values in the original record with the values
                        of the duplicate
        @param update: dict of {field:value} to update the final record
        @param main: internal indicator for recursive calls

        @status: work in progress
        @todo: de-duplicate components and link table entries

        @note: virtual references (i.e. non-SQL, without foreign key
               constraints) must be declared in the table configuration
               of the referenced table like:

               s3db.configure(tablename,
                              referenced_by=[(tablename, fieldname)])

               This does not apply for list:references which will be
               found automatically.

        @note: this method can only be run from master resources (in
               order to find all components). To merge component
               records, you have to re-define the component as a
               master resource.

        @note: CLI calls must db.commit()
    """

    self.main = main

    db = current.db

    resource = self.resource
    table = resource.table
    tablename = resource.tablename

    # Check for master resource
    if resource.parent:
        self.raise_error("Must not merge from component", SyntaxError)

    # Check permissions: must be allowed to update the original
    # and to delete the duplicate
    auth = current.auth
    has_permission = auth.s3_has_permission
    permitted = has_permission("update", table,
                               record_id = original_id) and \
                has_permission("delete", table,
                               record_id = duplicate_id)
    if not permitted:
        self.raise_error("Operation not permitted", auth.permission.error)

    # Load all models (only in the main call, not in recursive calls)
    s3db = current.s3db
    if main:
        s3db.load_all_models()
    if db._lazy_tables:
        # Must roll out all lazy tables to detect dependencies
        for tn in db._LAZY_TABLES.keys():
            db[tn]

    # Get the records
    original = None
    duplicate = None
    query = table._id.belongs([original_id, duplicate_id])
    if "deleted" in table.fields:
        # Exclude archived records
        query &= (table.deleted != True)
    rows = db(query).select(table.ALL, limitby=(0, 2))
    for row in rows:
        # Compare as strings since IDs may be passed as str or int
        record_id = row[table._id]
        if str(record_id) == str(original_id):
            original = row
            original_id = row[table._id]
        elif str(record_id) == str(duplicate_id):
            duplicate = row
            duplicate_id = row[table._id]
    msg = "Record not found: %s.%s"
    if original is None:
        self.raise_error(msg % (tablename, original_id), KeyError)
    if duplicate is None:
        self.raise_error(msg % (tablename, duplicate_id), KeyError)

    # Find all single-components (these must be merged rather than
    # just re-linked), indexed by their table name
    single = Storage()
    for alias in resource.components:
        component = resource.components[alias]
        if not component.multiple:
            single[component.tablename] = component

    # Is this a super-entity?
    is_super_entity = table._id.name != "id" and \
                      "instance_type" in table.fields

    # Find all references
    referenced_by = list(table._referenced_by)

    # Append virtual references (declared via "referenced_by" config)
    virtual_references = s3db.get_config(tablename, "referenced_by")
    if virtual_references:
        referenced_by.extend(virtual_references)

    # Find and append list:references
    for t in db:
        for f in t:
            ftype = str(f.type)
            if ftype[:14] == "list:reference" and \
               ftype[15:15+len(tablename)] == tablename:
                referenced_by.append((t._tablename, f.name))

    update_record = self.update_record
    delete_record = self.delete_record
    fieldname = self.fieldname

    # Update all references
    define_resource = s3db.resource
    for referee in referenced_by:

        if isinstance(referee, Field):
            tn, fn = referee.tablename, referee.name
        else:
            tn, fn = referee

        se = s3db.get_config(tn, "super_entity")
        if is_super_entity and \
           (isinstance(se, (list, tuple)) and tablename in se or \
            se == tablename):
            # Skip instance types of this super-entity
            continue

        # Reference field must exist
        if tn not in db or fn not in db[tn].fields:
            continue

        rtable = db[tn]
        if tn in single:
            component = single[tn]
            if component.link is not None:
                component = component.link
            if fn == component.fkey:
                # Single component => must reduce to one record
                join = component.get_join()
                pkey = component.pkey
                lkey = component.lkey or component.fkey

                # Get the component records
                query = (table[pkey] == original[pkey]) & join
                osub = db(query).select(limitby=(0, 1)).first()
                query = (table[pkey] == duplicate[pkey]) & join
                dsub = db(query).select(limitby=(0, 1)).first()

                ctable = component.table

                if dsub is None:
                    # No duplicate => skip this step
                    continue
                elif not osub:
                    # No original => re-link the duplicate
                    dsub_id = dsub[ctable._id]
                    data = {lkey: original[pkey]}
                    success = update_record(ctable, dsub_id, dsub, data)
                elif component.linked is not None:
                    # Duplicate link => remove it
                    dsub_id = dsub[component.table._id]
                    delete_record(ctable, dsub_id)
                else:
                    # Two records => merge them (recursive call)
                    osub_id = osub[component.table._id]
                    dsub_id = dsub[component.table._id]
                    cresource = define_resource(component.tablename)
                    cresource.merge(osub_id, dsub_id,
                                    replace=replace,
                                    update=update,
                                    main=False)
                # Single component handled => skip the generic
                # foreign-key update for this referee
                continue

        # Find the foreign key
        rfield = rtable[fn]
        ktablename, key, multiple = s3_get_foreign_key(rfield)
        if not ktablename:
            if str(rfield.type) == "integer":
                # Virtual reference
                key = table._id.name
            else:
                continue

        # Find the referencing records
        if multiple:
            query = rtable[fn].contains(duplicate[key])
        else:
            query = rtable[fn] == duplicate[key]
        rows = db(query).select(rtable._id, rtable[fn])

        # Update the referencing records
        for row in rows:
            if not multiple:
                data = {fn: original[key]}
            else:
                # Replace the duplicate's key with the original's
                # in the list, avoiding duplicate entries
                keys = [k for k in row[fn] if k != duplicate[key]]
                if original[key] not in keys:
                    keys.append(original[key])
                data = {fn: keys}
            update_record(rtable, row[rtable._id], row, data)

    # Merge super-entity records
    super_entities = resource.get_config("super_entity")
    if super_entities is not None:

        # Ensure list
        if not isinstance(super_entities, (list, tuple)):
            super_entities = [super_entities]

        for super_entity in super_entities:

            super_table = s3db.table(super_entity)
            if not super_table:
                continue
            superkey = super_table._id.name

            skey_o = original[superkey]
            if not skey_o:
                msg = "No %s found in %s.%s" % (superkey,
                                                tablename,
                                                original_id)
                current.log.warning(msg)
                # Try to create the missing super-record
                s3db.update_super(table, original)
                skey_o = original[superkey]
                if not skey_o:
                    continue
            skey_d = duplicate[superkey]
            if not skey_d:
                msg = "No %s found in %s.%s" % (superkey,
                                                tablename,
                                                duplicate_id)
                current.log.warning(msg)
                continue

            # Merge the super-records (recursive call)
            sresource = define_resource(super_entity)
            sresource.merge(skey_o, skey_d,
                            replace=replace,
                            update=update,
                            main=False)

    # Merge and update original data
    data = Storage()
    if replace:
        for k in replace:
            fn = fieldname(k)
            if fn and fn in duplicate:
                data[fn] = duplicate[fn]
    if update:
        for k, v in update.items():
            fn = fieldname(k)
            if fn in table.fields:
                data[fn] = v
    if len(data):
        r = None
        # For unique string fields whose value is copied over from
        # the duplicate: park a placeholder in the duplicate first
        # to avoid a uniqueness collision...
        p = Storage([(fn, "__deduplicate_%s__" % fn)
                     for fn in data
                     if table[fn].unique and \
                        table[fn].type == "string" and \
                        data[fn] == duplicate[fn]])
        if p:
            # ...then give the duplicate the original's previous
            # value after the original has been updated (r)
            r = Storage([(fn, original[fn]) for fn in p])
            update_record(table, duplicate_id, duplicate, p)
        update_record(table, original_id, original, data)
        if r:
            update_record(table, duplicate_id, duplicate, r)

    # Delete the duplicate
    if not is_super_entity:
        self.merge_realms(table, original, duplicate)
        delete_record(table, duplicate_id, replaced_by=original_id)

    # Success
    return True
def describe(self, field):
    """
        Construct a field description for the schema

        @param field: a Field instance
        @return: the field description as JSON-serializable dict,
                 or None if the field type is not supported or the
                 field shall be skipped (e.g. the super-id of the
                 instance table itself)
    """

    fieldtype = str(field.type)
    SUPPORTED_FIELD_TYPES = set(self.SUPPORTED_FIELD_TYPES)

    # Check if foreign key
    superkey = False
    reftype = None
    if fieldtype[:9] == "reference":

        s3db = current.s3db
        is_foreign_key = True

        # Get referenced table/field name
        ktablename, key = s3_get_foreign_key(field)[:2]

        # Get the referenced table (use the already-bound s3db
        # rather than looking up current.s3db again)
        ktable = s3db.table(ktablename)
        if not ktable:
            return None

        if "instance_type" in ktable.fields:
            # Referenced table is a super-entity
            tablename = str(field).split(".", 1)[0]
            supertables = s3db.get_config(tablename, "super_entity")
            if not supertables:
                supertables = set()
            elif not isinstance(supertables, (list, tuple)):
                supertables = [supertables]
            if ktablename in supertables and key == ktable._id.name:
                # This is the super-id of the instance table => skip
                return None
            else:
                # This is a super-entity reference
                fieldtype = "objectkey"
                # @todo: add instance types if limited in validator
                superkey = True
                reftype = (ktablename,)
        else:
            # Regular foreign key
            # Store schema reference
            references = self._references
            if ktablename not in references:
                references[ktablename] = set()
    else:
        is_foreign_key = False
        ktablename = None

    # Check that the field type is supported
    if fieldtype not in SUPPORTED_FIELD_TYPES and not is_foreign_key:
        return None

    # Create a field description
    description = {"type": fieldtype,
                   "label": str(field.label),
                   }

    # Add type for super-entity references (=object keys)
    if reftype:
        description["reftype"] = reftype

    # Add field options to description
    options = self.get_options(field, lookup=ktablename)
    if options:
        description["options"] = options

    # Add default value to description
    default = self.get_default(field, lookup=ktablename, superkey=superkey)
    if default:
        description["default"] = default

    # Add readable/writable settings if False (True is assumed)
    if not field.readable:
        description["readable"] = False
    if not field.writable:
        description["writable"] = False

    # Add the widget's mobile settings if the widget declares any
    if hasattr(field.widget, "mobile"):
        description["widget"] = field.widget.mobile

    # Add required flag if True (False is assumed)
    if self.is_required(field):
        description["required"] = True

    # @todo: add tooltip to description

    # @todo: if field.represent is a base-class S3Represent
    #        (i.e. no custom lookup, no custom represent),
    #        and its field list is not just "name" => pass
    #        that field list as description["represent"]

    # Add field's mobile settings to description (Dynamic Fields)
    msettings = hasattr(field, "s3_settings") and \
                field.s3_settings and \
                field.s3_settings.get("mobile")
    if msettings:
        description["settings"] = msettings

    return description
def describe(self, field):
    """
        Construct a field description for the schema

        @param field: a Field instance
        @return: the field description as JSON-serializable dict
    """

    ftype = str(field.type)
    supported_types = set(self.SUPPORTED_FIELD_TYPES)

    # Foreign key?
    is_foreign_key = ftype[:9] == "reference"
    if is_foreign_key:
        # Skip super-entity references until supported by mobile client
        key = s3_get_foreign_key(field)[1]
        if key and key != "id":
            return None
        # Register the referenced table in the schema references
        lookup = ftype[10:].split(".")[0]
        self._references.setdefault(lookup, set())
    else:
        lookup = None

    # Unsupported field type => no description
    if not is_foreign_key and ftype not in supported_types:
        return None

    # Basic field description
    description = {"type": ftype,
                   "label": str(field.label),
                   }

    # Add field settings, if any
    settings = self.settings(field)
    if settings:
        description["settings"] = settings

    # Add field options, if any
    options = self.get_options(field, lookup=lookup)
    if options:
        description["options"] = options

    # Add default value, if any
    default = self.get_default(field, lookup=lookup)
    if default:
        description["default"] = default

    # @todo: add tooltip to description

    return description
def merge(self,
          original_id,
          duplicate_id,
          replace=None,
          update=None,
          main=True):
    """
        Merge a duplicate record into its original and remove the
        duplicate, updating all references in the database.

        @param original_id: the ID of the original record
        @param duplicate_id: the ID of the duplicate record
        @param replace: list fields names for which to replace the
                        values in the original record with the values
                        of the duplicate
        @param update: dict of {field:value} to update the final record
        @param main: internal indicator for recursive calls

        @status: work in progress
        @todo: de-duplicate components and link table entries

        @note: virtual references (i.e. non-SQL, without foreign key
               constraints) must be declared in the table configuration
               of the referenced table like:

               s3db.configure(tablename,
                              referenced_by=[(tablename, fieldname)])

               This does not apply for list:references which will be
               found automatically.

        @note: this method can only be run from master resources (in
               order to find all components). To merge component
               records, you have to re-define the component as a
               master resource.

        @note: CLI calls must db.commit()
    """

    self.main = main

    db = current.db

    resource = self.resource
    table = resource.table
    tablename = resource.tablename

    # Check for master resource
    if resource.parent:
        self.raise_error("Must not merge from component", SyntaxError)

    # Check permissions: must be allowed to update the original
    # and to delete the duplicate
    auth = current.auth
    has_permission = auth.s3_has_permission
    permitted = has_permission("update", table,
                               record_id = original_id) and \
                has_permission("delete", table,
                               record_id = duplicate_id)
    if not permitted:
        self.raise_error("Operation not permitted", auth.permission.error)

    # Load all models (only in the main call, not in recursive calls)
    s3db = current.s3db
    if main:
        s3db.load_all_models()
    if db._lazy_tables:
        # Must roll out all lazy tables to detect dependencies
        for tn in db._LAZY_TABLES.keys():
            db[tn]

    # Get the records
    original = None
    duplicate = None
    query = table._id.belongs([original_id, duplicate_id])
    if "deleted" in table.fields:
        # Exclude archived records
        query &= (table.deleted != True)
    rows = db(query).select(table.ALL, limitby=(0, 2))
    for row in rows:
        # Compare as strings since IDs may be passed as str or int
        record_id = row[table._id]
        if str(record_id) == str(original_id):
            original = row
            original_id = row[table._id]
        elif str(record_id) == str(duplicate_id):
            duplicate = row
            duplicate_id = row[table._id]
    msg = "Record not found: %s.%s"
    if original is None:
        self.raise_error(msg % (tablename, original_id), KeyError)
    if duplicate is None:
        self.raise_error(msg % (tablename, duplicate_id), KeyError)

    # Find all single-components of this resource
    # (so that their records can be merged rather than just re-linked)
    # NB this is only reliable as far as the relevant component
    #    declarations have actually happened before calling merge:
    #    Where that happens in another controller (or customise_*)
    #    than the one merge is being run from, those components may
    #    be treated as multiple instead!
    single = {}
    hooks = s3db.get_hooks(table)[1]
    if hooks:
        for alias, hook in hooks.items():
            if hook.multiple:
                continue
            component = resource.components.get(alias)
            if not component:
                # E.g. module disabled
                continue
            ctablename = component.tablename
            if ctablename in single:
                single[ctablename].append(component)
            else:
                single[ctablename] = [component]

    # Is this a super-entity?
    is_super_entity = table._id.name != "id" and \
                      "instance_type" in table.fields

    # Find all references
    referenced_by = list(table._referenced_by)

    # Append virtual references (declared via "referenced_by" config)
    virtual_references = s3db.get_config(tablename, "referenced_by")
    if virtual_references:
        referenced_by.extend(virtual_references)

    # Find and append list:references
    for t in db:
        for f in t:
            ftype = str(f.type)
            if ftype[:14] == "list:reference" and \
               ftype[15:15+len(tablename)] == tablename:
                referenced_by.append((t._tablename, f.name))

    update_record = self.update_record
    delete_record = self.delete_record
    fieldname = self.fieldname

    # Update all references
    define_resource = s3db.resource
    for referee in referenced_by:

        if isinstance(referee, Field):
            tn, fn = referee.tablename, referee.name
        else:
            tn, fn = referee

        se = s3db.get_config(tn, "super_entity")
        if is_super_entity and \
           (isinstance(se, (list, tuple)) and tablename in se or \
            se == tablename):
            # Skip instance types of this super-entity
            continue

        # Reference field must exist
        if tn not in db or fn not in db[tn].fields:
            continue

        rtable = db[tn]
        if tn in single:
            for component in single[tn]:

                if component.link is not None:
                    component = component.link

                if fn == component.fkey:
                    # Single component => must reduce to one record
                    join = component.get_join()
                    pkey = component.pkey
                    lkey = component.lkey or component.fkey

                    # Get the component records
                    query = (table[pkey] == original[pkey]) & join
                    osub = db(query).select(limitby=(0, 1)).first()
                    query = (table[pkey] == duplicate[pkey]) & join
                    dsub = db(query).select(limitby=(0, 1)).first()

                    ctable = component.table
                    ctable_id = ctable._id

                    if dsub is None:
                        # No duplicate => skip this step
                        continue
                    elif not osub:
                        # No original => re-link the duplicate
                        dsub_id = dsub[ctable_id]
                        data = {lkey: original[pkey]}
                        update_record(ctable, dsub_id, dsub, data)
                    elif component.linked is not None:
                        # Duplicate link => remove it
                        dsub_id = dsub[ctable_id]
                        delete_record(ctable, dsub_id)
                    else:
                        # Two records => merge them (recursive call)
                        osub_id = osub[ctable_id]
                        dsub_id = dsub[ctable_id]
                        cresource = define_resource(component.tablename)
                        cresource.merge(osub_id,
                                        dsub_id,
                                        replace = replace,
                                        update = update,
                                        main = False,
                                        )

        # Find the foreign key
        rfield = rtable[fn]
        ktablename, key, multiple = s3_get_foreign_key(rfield)
        if not ktablename:
            if str(rfield.type) == "integer":
                # Virtual reference
                key = table._id.name
            else:
                continue

        # Find the referencing records
        if multiple:
            query = rtable[fn].contains(duplicate[key])
        else:
            query = rtable[fn] == duplicate[key]
        rows = db(query).select(rtable._id, rtable[fn])

        # Update the referencing records
        for row in rows:
            if not multiple:
                data = {fn:original[key]}
            else:
                # Replace the duplicate's key with the original's
                # in the list, avoiding duplicate entries
                keys = [k for k in row[fn] if k != duplicate[key]]
                if original[key] not in keys:
                    keys.append(original[key])
                data = {fn:keys}
            update_record(rtable, row[rtable._id], row, data)

    # Merge super-entity records
    super_entities = resource.get_config("super_entity")
    if super_entities is not None:

        # Ensure list
        if not isinstance(super_entities, (list, tuple)):
            super_entities = [super_entities]

        for super_entity in super_entities:

            super_table = s3db.table(super_entity)
            if not super_table:
                continue
            superkey = super_table._id.name

            skey_o = original[superkey]
            if not skey_o:
                msg = "No %s found in %s.%s" % (superkey,
                                                tablename,
                                                original_id)
                current.log.warning(msg)
                # Try to create the missing super-record
                s3db.update_super(table, original)
                skey_o = original[superkey]
                if not skey_o:
                    continue
            skey_d = duplicate[superkey]
            if not skey_d:
                msg = "No %s found in %s.%s" % (superkey,
                                                tablename,
                                                duplicate_id)
                current.log.warning(msg)
                continue

            # Merge the super-records (recursive call)
            sresource = define_resource(super_entity)
            sresource.merge(skey_o, skey_d,
                            replace=replace,
                            update=update,
                            main=False)

    # Merge and update original data
    data = Storage()
    if replace:
        for k in replace:
            fn = fieldname(k)
            if fn and fn in duplicate:
                data[fn] = duplicate[fn]
    if update:
        for k, v in update.items():
            fn = fieldname(k)
            if fn in table.fields:
                data[fn] = v
    if len(data):
        r = None
        # For unique string fields whose value is copied over from
        # the duplicate: park a placeholder in the duplicate first
        # to avoid a uniqueness collision...
        p = Storage([(fn, "__deduplicate_%s__" % fn)
                     for fn in data
                     if table[fn].unique and \
                        table[fn].type == "string" and \
                        data[fn] == duplicate[fn]])
        if p:
            # ...then give the duplicate the original's previous
            # value after the original has been updated (r)
            r = Storage([(fn, original[fn]) for fn in p])
            update_record(table, duplicate_id, duplicate, p)
        update_record(table, original_id, original, data)
        if r:
            update_record(table, duplicate_id, duplicate, r)

    # Delete the duplicate
    if not is_super_entity:
        self.merge_realms(table, original, duplicate)
        delete_record(table, duplicate_id, replaced_by=original_id)

    # Success
    return True
def describe(self, field):
    """
        Build the schema description for a single field

        @param field: a Field instance
        @return: JSON-serializable dict describing the field,
                 or None if the field cannot be represented
    """

    field_type = str(field.type)

    if field_type.startswith("reference"):
        foreign_key = True
        # Skip super-entity references until supported by mobile client
        key = s3_get_foreign_key(field)[1]
        if key and key != "id":
            return None
        # Store schema reference
        lookup = field_type[10:].split(".")[0]
        references = self._references
        if lookup not in references:
            references[lookup] = set()
    else:
        foreign_key = False
        lookup = None

    # Only foreign keys and explicitly supported types are described
    if not foreign_key and \
       field_type not in set(self.SUPPORTED_FIELD_TYPES):
        return None

    description = {"type": field_type,
                   "label": str(field.label),
                   }

    # Attach optional properties (settings/options/default) when present
    for prop, value in (("settings", self.settings(field)),
                        ("options", self.get_options(field, lookup=lookup)),
                        ("default", self.get_default(field, lookup=lookup)),
                        ):
        if value:
            description[prop] = value

    # @todo: add tooltip to description

    return description