def get_login_user():
    """Return the LoginUser entity for the currently authenticated user.

    Looks the account up by its stored google user object first; if no
    account matches, falls back to matching the user's email address
    against the user_id property. Returns None when nobody is logged in,
    when no account can be found, or when the account's Person reference
    is broken.
    """
    auth_user = users.get_current_user()
    if auth_user is None:
        return None

    # Primary lookup by google account. Fetch a few rows so duplicate
    # accounts can be detected and reported, not just the first hit.
    matches = LoginUser.all().filter('user =', auth_user).fetch(3)
    if matches:
        if len(matches) > 1:
            logging.critical("more than one person with google account: %s [%s]" % (auth_user.nickname(), auth_user.user_id()))
        login_user = matches[0]
    else:
        # Fallback: some accounts were registered with the email address
        # stored as user_id.
        logging.warning("No Person registered for login_user %s" % auth_user)
        logging.warning("Trying email as user_id instead...")
        login_user = LoginUser.all().filter("user_id =", auth_user.email()).get()
        if login_user is None:
            logging.warning("Email not recognized either.")
            return None
        logging.warning("Email worked.")

    # Make sure the account's Person reference can actually be resolved;
    # a dangling reference makes the account unusable.
    try:
        if login_user.me:
            logging.debug("Found login user person: %s" % (login_user.me.name))
    except db.ReferencePropertyResolveError:
        logging.critical("Login user: %s has an invalid reference to Person" % (str(login_user.key())))
        return None

    return login_user
def get(self):
    """Rebuild the contact index; called by cron.

    When the request carries a "key" parameter, only the Contact or
    Take2 entity behind that key (plus its dependants) is re-indexed;
    otherwise every index-contributing table is processed in bulk.
    Admin-only.
    """
    if not users.is_current_user_admin():
        logging.critical("UpdateIndex called by non-admin")
        self.error(500)
        return

    key = self.request.get("key", None)
    logging.info("Update index tables.")

    if key:
        # Single-entity update: try Contact first, then Take2.
        contact = Contact.get(Key(key))
        if contact is not None:
            update_index(contact)
            # dependant take2 entries
            for dependant in Take2.all().filter("contact_ref =", contact):
                update_index(dependant)
            # parent login_user, if one points at this contact
            owner = LoginUser.all().filter("me =", contact).get()
            if owner is not None:
                update_index(owner)
            return
        dependant = Take2.get(Key(key))
        if dependant is not None:
            update_index(dependant)
            return
        logging.info("Could not find key: %s" % (key))
        return

    # Full rebuild: walk every table which contributes to the index and
    # flush the accumulated index entries with one bulk write per table.
    for table in [LoginUser, Contact, Address]:
        batch = []
        for obj in table.all():
            res = update_index(obj, batch=True)
            if res:
                batch.extend(res)
        db.put(batch)
        logging.info("%d updates." % (len(batch)))

    self.response.headers['Content-Type'] = "text/plain"
    self.response.out.write("/index done.")
def initial_user_setup(auth_user, person):
    """Runs in a single transaction and sets up the LoginUser and its
    representation as a Person

    NOTE(review): no db.run_in_transaction wrapper is visible here; the
    "single transaction" claim presumably relies on the caller -- confirm.
    """
    # Create the account record; the email address doubles as user_id and
    # the location starts at the (0,0) default.
    login_user = LoginUser(user=auth_user, user_id=auth_user.email(), location=db.GeoPt(0,0))
    login_user.put()
    # Link the Person back to its owning account.
    person.owned_by = login_user
    # NOTE(review): in the GAE db API an entity's parent can only be set
    # via the constructor's parent= argument; assigning the attribute
    # here likely does not establish an entity group -- confirm intent.
    person.parent = login_user
    person.put()
    # save the reference to person in login_user
    # NOTE(review): assumes `person.entity` resolves to the storable
    # Person entity; verify against the Person class definition.
    login_user.me = person.entity
    login_user.put()
def post(self):
    """Function is called asynchronously to import data sets to the DB
    and delete existing data.

    Reads the raw upload and a status slot from memcache, parses the
    data as JSON or YAML, purges the importing user's existing contacts
    (and their Take2/Search dependants), then re-creates Person/Company
    entities and their dependent Take2 records from the dump.
    """
    # The importing account; all re-created data will be owned by it.
    login_user = LoginUser.get(self.request.get("login_user", None))
    status = memcache.get('import_status')
    if not status:
        logging.critical("Failed to retrieve import status from memcache.")
        self.error(500)
        return
    data = memcache.get('import_data')
    if not data:
        logging.critical("Failed to retrieve import data from memcache.")
        self.error(500)
        return
    logging.info("Retrieved %d bytes for processing. user=%s" % (len(data),login_user.me.name) )
    memcache.set('import_status', "Parsing import data.", time=10)
    # NOTE(review): `format` shadows the builtin; left unchanged here.
    format=self.request.get("format", None)
    if format == 'JSON':
        dbdump = json.loads(data)
    else:
        # NOTE(review): yaml.load on uploaded data can run arbitrary
        # constructors -- consider yaml.safe_load if the dump allows it.
        dbdump = yaml.load(data)
    # purge DB: every contact owned by the importing user is removed
    logging.info("Import task starts deleting data...")
    contact_entries = db.Query(Contact,keys_only=True)
    contact_entries.filter("owned_by =", login_user)
    count = 0
    delete_contacts = []
    for c in contact_entries:
        # delete all dependent data
        q_t = db.Query(Take2,keys_only=True)
        q_t.filter("contact_ref =", c)
        db.delete(q_t)
        q_i = db.Query(Search,keys_only=True)
        q_i.filter("contact_ref =", c)
        db.delete(q_i)
        count = count +1
        memcache.set('import_status', "Deleting data: %d deleted." % (count), time=3)
        # remember for bulk delete except the one which is the login_user's Person
        if c != login_user.me:
            delete_contacts.append(c)
    db.delete(delete_contacts)
    logging.info("Import task deleted %d contact datasets" % (count))
    # dictionary will be filled with a reference to the freshly created person
    # key using the former key as stored in the dbdump. Needed later for resolving
    # the owned by references.
    old_key_to_new_key = {}
    link_to_references = []  # (parent key, old child key) pairs, resolved below
    take2_entries = []       # dependent datasets, bulk-stored at the end
    count = 0.0              # float so the progress percentage divides cleanly
    for contact in dbdump:
        memcache.set('import_status', "Importing data: %3.0f%% done." % ((count/len(dbdump))*100.0), time=3)
        logging.debug("Import type: %s name: %s id: %s attic: %s" % (contact['type'], contact['name'] if 'name' in contact else '<no name>', contact['id'] if 'id' in contact else '<no id>', contact['attic'] if 'attic' in contact else '<no attic flag>'))
        # NOTE(review): if contact['type'] is neither "person" nor
        # "company", `entry` keeps its value from the previous iteration
        # (or is undefined on the first one) -- confirm the dump can only
        # contain these two types.
        if contact['type'] == "person":
            entry = Person(name=contact['name'])
            if 'lastname' in contact:
                # chained assignment also leaves an unused local `lastname`
                entry.lastname = lastname=contact['lastname']
            if 'birthday' in contact:
                # birthday is serialized as YYYY-MM-DD
                year,month,day = contact['birthday'].split('-')
                entry.birthday = FuzzyDate(day=int(day),month=int(month),year=int(year))
            if 'nickname' in contact:
                entry.nickname = contact['nickname']
        if contact['type'] == "company":
            entry = Company(name=contact['name'])
        # importer owns all the data
        entry.owned_by = login_user
        if 'attic' in contact:
            entry.attic = contact['attic']
        if 'timestamp' in contact:
            # strip the sub-second part before parsing the ISO timestamp
            dt,us= contact['timestamp'].split(".")
            entry.timestamp = datetime.datetime.strptime(dt, "%Y-%m-%dT%H:%M:%S")
        entry.put()
        # remember the key from the imported file for later dependency resolve
        if 'key' in contact:
            old_key_to_new_key[contact['key']] = entry.key()
        count = count+1
        # check for all take2 objects
        for classname in ['email','link','web','address','mobile','other']:
            if classname in contact:
                for m in contact[classname]:
                    obj = None
                    if classname == 'mobile':
                        obj = Mobile(mobile=m['mobile'], contact_ref=entry)
                    if classname == 'email':
                        obj = Email(email=m['email'], contact_ref=entry)
                    if classname == 'web':
                        if not m['web'].startswith("http://"):
                            m['web'] = 'http://'+m['web']
                        obj = Web(web=m['web'], contact_ref=entry)
                    if classname == 'other':
                        # look for existing tag in DB
                        tag = OtherTag.all().filter("tag =", m['what']).get()
                        if not tag:
                            tag = OtherTag(tag=m['what'])
                            tag.put()
                        obj = Other(tag=tag, text=m['text'], contact_ref=entry)
                    if classname == 'link':
                        # save the link_to key from the imported data in the link_to
                        # property for later resolve
                        link_to_references.append((entry.key(),m['link_to']))
                    if classname == 'address':
                        obj = Address(adr=m['adr'], contact_ref=entry)
                        if 'location_lat' in m and 'location_lon' in m:
                            obj.location = db.GeoPt(lat=float(m['location_lat']),lon=float(m['location_lon']))
                        if 'landline_phone' in m:
                            obj.landline_phone = m['landline_phone']
                        if 'country' in m and m['country'] != "":
                            country = Country.all().filter("country =", m['country']).get()
                            # If country name is not in DB it is added
                            if not country:
                                country = Country(country=m['country'])
                                country.put()
                            obj.country = country.key()
                    if obj:
                        # common fields
                        if 'timestamp' in m:
                            dt,us= m['timestamp'].split(".")
                            obj.timestamp = datetime.datetime.strptime(dt, "%Y-%m-%dT%H:%M:%S")
                        if 'attic' in m:
                            obj.attic = m['attic']
                        take2_entries.append(obj)
    memcache.set('import_status', "Store dependent entries.", time=30)
    #
    # Resolve (if possible) the reference of the LoginUser to his/her own Person entry
    #
    for t2 in take2_entries:
        if t2.class_name() == "Email":
            if t2.email == login_user.user.email():
                # throw away existing login_user Person
                # NOTE(review): login_user is put once with the (now
                # deleted) old reference and again after reassignment;
                # the first put looks redundant -- confirm.
                login_user.me.delete()
                login_user.put()
                login_user.me = t2.contact_ref
                login_user.put()
                logging.info("Resolved LoginUsers Person: %s using email: %s" % (t2.contact_ref.name, t2.email))
    #
    # Back references to people
    #
    for parent,child_old_key in link_to_references:
        # find child's new key
        key = old_key_to_new_key[child_old_key]
        # update child with back reference
        child = Contact.get(key)
        child.middleman_ref = parent
        child.put()
    #
    # Bulk store new entries
    #
    logging.info("Import task added %d contacts. Now store their %d dependent datasets" % (count,len(take2_entries)))
    db.put(take2_entries)
    logging.info("Import task done.")
    # make sure that all indices have to be re-built
    memcache.flush_all()
def get(self):
    """Consistency check over all tables; admin-only.

    Walks LoginUser, Contact, Take2, SearchIndex and GeoIndex and logs
    every entity whose reference properties are missing or can no longer
    be resolved. With fix=True in the query string, broken datasets are
    deleted.
    """
    if not users.is_current_user_admin():
        logging.critical("UpdateIndex called by non-admin")
        self.error(500)
        return
    # fix=True in the query string enables destructive repairs
    fix = True if self.request.get("fix", "False") == "True" else False
    # look for LoginUser with invalid Person attached
    logging.info("Check LoginUser")
    err = False
    for obj in LoginUser.all():
        try:
            if not obj.me:
                logging.critical("LoginUser %d has no Person attached" % ((obj.key().id())))
                err = True
        except db.ReferencePropertyResolveError:
            # reference points to a Person that no longer exists
            logging.critical("LoginUser %d has invalid Person reference" % ((obj.key().id())))
            err = True
        if err:
            # check for dependent datasets
            count = Contact.all().filter("owned_by =", obj).count()
            logging.critical("LoginUser %d has %d dependant datasets" % (obj.key().id(),count))
            if fix:
                obj.delete()
                logging.info("%d deleted" % obj.key().id())
            err = False
    logging.info("Check Contact")
    err = False
    for obj in Contact.all():
        try:
            if not obj.owned_by:
                logging.critical("Contact '%s' %d has no reference to owner" % (obj.name,obj.key().id()))
                err = True
        except db.ReferencePropertyResolveError:
            logging.critical("Contact '%s' %d has invalid reference to owner" % (obj.name,obj.key().id()))
            # the owner may still point at this contact through `me`
            count = LoginUser.all().filter("me =", obj).count()
            if count:
                logging.critical("... but owner has reference!")
                err = True
            # NOTE(review): when no LoginUser references this contact,
            # err stays False and the broken contact is never deleted
            # even with fix=True -- confirm this is intended.
        if err:
            # check for dependent datasets
            count = Take2.all().filter("contact_ref =", obj).count()
            logging.critical("Contact '%s' has %d dependent datasets" % (obj.name, count))
            if fix:
                obj.delete()
                logging.info("%d deleted" % obj.key().id())
            err = False
    logging.info("Check Take2")
    err = False
    for obj in Take2.all():
        try:
            if not obj.contact_ref:
                logging.critical("Take2 has no reference to owner %s" % (obj.key().id()))
                err = True
        except db.ReferencePropertyResolveError:
            logging.critical("Take2 has invalid reference to owner %s" % (obj.key().id()))
            err = True
        if err:
            if fix:
                obj.delete()
                logging.info("%d deleted" % obj.key().id())
        # location in address shall be set to default
        # NOTE(review): this repair runs regardless of fix=True -- confirm.
        if obj.class_name() == 'Address' and not obj.location:
            logging.error("Address has null location %s. Fixed." % (obj.key().id()))
            obj.location=db.GeoPt(lon=0.0, lat=0.0)
            obj.put()
        err = False
    logging.info("Check SearchIndex")
    err = False
    for obj in SearchIndex.all():
        try:
            if not obj.contact_ref:
                logging.critical("SearchIndex %d has no reference to owner" % (obj.key().id()))
                err = True
        except db.ReferencePropertyResolveError:
            logging.critical("SearchIndex %d has invalid reference to owner" % (obj.key().id()))
            err = True
        if err:
            if fix:
                obj.delete()
                logging.info("%d deleted" % obj.key().id())
            err = False
    logging.info("Check GeoIndex")
    err = False
    for obj in GeoIndex.all():
        try:
            if not obj.contact_ref:
                logging.critical("GeoIndex %d has no reference to owner" % (obj.key().id()))
                err = True
        except db.ReferencePropertyResolveError:
            logging.critical("GeoIndex %d has invalid reference to owner" % (obj.key().id()))
            err = True
        if err:
            if fix:
                obj.delete()
                logging.info("%d deleted" % obj.key().id())
            err = False
    self.response.headers['Content-Type'] = "text/plain"
    self.response.out.write("/fix done.")