def put(self):
    if len(self.tag) > 0:
        tag = OtherTag.all().filter("tag =", self.tag).filter("owned_by =", self.contact_ref.owned_by).get()
        # if the tag name is not yet in the DB, add it
        if not tag:
            tag = OtherTag(tag=self.tag, owned_by=self.contact_ref.owned_by)
            tag.put()
    else:
        tag = None
    try:
        self.entity.tag = tag
        self.entity.text = self.text
        self.entity.put()
    except AttributeError:
        # no entity exists yet: prepare a new Other object for the datastore
        self.entity = Other(contact_ref=self.contact_ref, text=self.text, tag=tag)
        self.entity.put()
def prepare_list_of_other_tags():
    """Prepares a list of previously used tags in a data structure ready for template use."""
    taglist = []
    for tag in OtherTag.all():
        taglist.append(tag.tag)
    return taglist
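
# A minimal usage sketch for prepare_list_of_other_tags(), assuming a
# webapp-style handler and template rendering; the template file name and the
# 'taglist' variable name are illustrative assumptions, not part of this module:
#
#   template_values = {'taglist': prepare_list_of_other_tags()}
#   self.response.out.write(template.render('take2form.html', template_values))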
def post(self):
    """Called asynchronously to import datasets into the DB and delete existing data."""
    login_user = LoginUser.get(self.request.get("login_user", None))

    status = memcache.get('import_status')
    if not status:
        logging.critical("Failed to retrieve import status from memcache.")
        self.error(500)
        return

    data = memcache.get('import_data')
    if not data:
        logging.critical("Failed to retrieve import data from memcache.")
        self.error(500)
        return

    logging.info("Retrieved %d bytes for processing. user=%s"
                 % (len(data), login_user.me.name))
    memcache.set('import_status', "Parsing import data.", time=10)

    format = self.request.get("format", None)
    if format == 'JSON':
        dbdump = json.loads(data)
    else:
        dbdump = yaml.load(data)

    # purge the DB
    logging.info("Import task starts deleting data...")
    contact_entries = db.Query(Contact, keys_only=True)
    contact_entries.filter("owned_by =", login_user)
    count = 0
    delete_contacts = []
    for c in contact_entries:
        # delete all dependent data
        q_t = db.Query(Take2, keys_only=True)
        q_t.filter("contact_ref =", c)
        db.delete(q_t)
        q_i = db.Query(Search, keys_only=True)
        q_i.filter("contact_ref =", c)
        db.delete(q_i)
        count = count + 1
        memcache.set('import_status', "Deleting data: %d deleted." % (count), time=3)
        # remember for bulk delete, except the entry which is the login_user's own Person
        if c != login_user.me:
            delete_contacts.append(c)
    db.delete(delete_contacts)
    logging.info("Import task deleted %d contact datasets" % (count))

    # Maps the former keys (as stored in the dbdump) to the keys of the freshly
    # created entities. Needed later for resolving the link_to back references.
    old_key_to_new_key = {}
    link_to_references = []
    take2_entries = []
    count = 0.0
    for contact in dbdump:
        memcache.set('import_status', "Importing data: %3.0f%% done."
                     % ((count / len(dbdump)) * 100.0), time=3)
        logging.debug("Import type: %s name: %s id: %s attic: %s"
                      % (contact['type'],
                         contact['name'] if 'name' in contact else '<no name>',
                         contact['id'] if 'id' in contact else '<no id>',
                         contact['attic'] if 'attic' in contact else '<no attic flag>'))
        if contact['type'] == "person":
            entry = Person(name=contact['name'])
            if 'lastname' in contact:
                entry.lastname = contact['lastname']
            if 'birthday' in contact:
                year, month, day = contact['birthday'].split('-')
                entry.birthday = FuzzyDate(day=int(day), month=int(month), year=int(year))
            if 'nickname' in contact:
                entry.nickname = contact['nickname']
        if contact['type'] == "company":
            entry = Company(name=contact['name'])
        # the importer owns all the data
        entry.owned_by = login_user
        if 'attic' in contact:
            entry.attic = contact['attic']
        if 'timestamp' in contact:
            dt, us = contact['timestamp'].split(".")
            entry.timestamp = datetime.datetime.strptime(dt, "%Y-%m-%dT%H:%M:%S")
        entry.put()
        # remember the key from the imported file for later dependency resolution
        if 'key' in contact:
            old_key_to_new_key[contact['key']] = entry.key()
        count = count + 1
        # check for all take2 objects
        for classname in ['email', 'link', 'web', 'address', 'mobile', 'other']:
            if classname in contact:
                for m in contact[classname]:
                    obj = None
                    if classname == 'mobile':
                        obj = Mobile(mobile=m['mobile'], contact_ref=entry)
                    if classname == 'email':
                        obj = Email(email=m['email'], contact_ref=entry)
                    if classname == 'web':
                        if not m['web'].startswith("http://"):
                            m['web'] = 'http://' + m['web']
                        obj = Web(web=m['web'], contact_ref=entry)
                    if classname == 'other':
                        # look for an existing tag in the DB
                        tag = OtherTag.all().filter("tag =", m['what']).get()
                        if not tag:
                            tag = OtherTag(tag=m['what'])
                            tag.put()
                        obj = Other(tag=tag, text=m['text'], contact_ref=entry)
                    if classname == 'link':
                        # save the link_to key from the imported data for later resolution
                        link_to_references.append((entry.key(), m['link_to']))
                    if classname == 'address':
                        obj = Address(adr=m['adr'], contact_ref=entry)
                        if 'location_lat' in m and 'location_lon' in m:
                            obj.location = db.GeoPt(lat=float(m['location_lat']),
                                                    lon=float(m['location_lon']))
                        if 'landline_phone' in m:
                            obj.landline_phone = m['landline_phone']
                        if 'country' in m and m['country'] != "":
                            country = Country.all().filter("country =", m['country']).get()
                            # if the country name is not in the DB, add it
                            if not country:
                                country = Country(country=m['country'])
                                country.put()
                            obj.country = country.key()
                    if obj:
                        # common fields
                        if 'timestamp' in m:
                            dt, us = m['timestamp'].split(".")
                            obj.timestamp = datetime.datetime.strptime(dt, "%Y-%m-%dT%H:%M:%S")
                        if 'attic' in m:
                            obj.attic = m['attic']
                        take2_entries.append(obj)

    memcache.set('import_status', "Store dependent entries.", time=30)

    #
    # Resolve (if possible) the reference of the LoginUser to his/her own Person entry
    #
    for t2 in take2_entries:
        if t2.class_name() == "Email":
            if t2.email == login_user.user.email():
                # throw away the existing login_user Person
                login_user.me.delete()
                login_user.me = t2.contact_ref
                login_user.put()
                logging.info("Resolved LoginUser's Person: %s using email: %s"
                             % (t2.contact_ref.name, t2.email))

    #
    # Back references to people
    #
    for parent, child_old_key in link_to_references:
        # find the child's new key
        key = old_key_to_new_key[child_old_key]
        # update the child with a back reference
        child = Contact.get(key)
        child.middleman_ref = parent
        child.put()

    #
    # Bulk store new entries
    #
    logging.info("Import task added %d contacts. Now store their %d dependent datasets"
                 % (count, len(take2_entries)))
    db.put(take2_entries)
    logging.info("Import task done.")
    # make sure that all indices will be re-built
    memcache.flush_all()
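
# The post() handler above runs as a background task: it expects the raw import
# payload under the memcache key 'import_data' and a status string under
# 'import_status', and receives the user key and format as POST parameters.
# A minimal sketch of the enqueueing side, assuming the handler is mapped to the
# hypothetical URL '/import/task'; the URL, function name and cache lifetimes
# below are assumptions, not confirmed by this module:

from google.appengine.api import memcache, taskqueue

def start_import(login_user, data, fmt):
    # stage the payload and an initial status where the task expects them
    memcache.set('import_data', data, time=600)
    memcache.set('import_status', "Queued for import.", time=600)
    # hand the actual work off to the task queue; the task re-reads the
    # payload from memcache and fetches the LoginUser from its key string
    taskqueue.add(url='/import/task',
                  params={'login_user': str(login_user.key()),
                          'format': fmt})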