def create_props_list (self, ce, gcid_tag=None):
    """Build and return a list of (prop_tag, value) tuples for MAPI.

    ce has to be an object of type ContactEntry. The returned array can
    be passed to SetProps() of MAPI. gcid_tag is the named property tag
    used to store the gcid; when None it is looked up from the folder's
    property-tag cache.

    FIXME: This routine needs to be more data driven so that adding
    additional fields becomes a breeze, and editing one place will
    impact both the outlook side as well as the google side. Currently
    this routine is, in some sense, an inverse of
    gc_wrapper.py:create_contact_entry() routine...
    """

    # There are a few message properties that are sort of 'expected' to be
    # set. Most are set automatically by the store provider or the
    # transport provider. However some have to be set by the client; so,
    # let's do the honors. More on this here:
    # http://msdn.microsoft.com/en-us/library/cc839866(v=office.12).aspx
    # http://msdn.microsoft.com/en-us/library/cc839595(v=office.12).aspx

    props = [(mapitags.PR_MESSAGE_CLASS, "IPM.Contact")]

    if gcid_tag is None:
        gcid_tag = self.cf.prop_tags.valu('GOUT_PR_GCID')

    # NOTE: mapping of the name components (display name / File As /
    # surname / given name / prefix) and the Notes body used to be done
    # here; that dead commented-out code has been removed — consult
    # version control history if it needs to be revived.

    # A reference to the contact entry's ID in Google's database. Recall
    # that this ID is not constant. Everytime it is edited it changes -
    # this is Google's way of ensuring there is no crossfire across apps
    if ce.link and gcid_tag:
        gcid = utils.get_link_rel(ce.link, 'edit')
        props.append((gcid_tag, gcid))

    # The docstring promises an array of tuples; without this the caller
    # would silently get None.
    return props
def prep_sync_lists (self, destid, sl, updated_min=None, cnt=0):
    """See the documentation in folder.Folder"""

    pname = sl.get_pname()
    conf = self.get_config()
    pdb1id = conf.get_profile_db1(pname)
    oldi = conf.get_itemids(pname)
    newi = self.get_itemids(pname, destid)

    kss = newi.keys()
    # Anything remembered from the last sync that is no longer present
    # on either side of the id map is a deletion on the Google side.
    for x, y in oldi.iteritems():
        if not x in kss and not y in kss:
            logging.debug('Del Google Contact: %s:%s', x, y)
            # add_del() expects (this-db-id, other-db-id); pick the
            # order based on which profile db we are.
            if pdb1id == self.get_dbid():
                sl.add_del(x, y)
            else:
                sl.add_del(y, x)

    logging.info('Querying Google for status of Contact Entries...')
    stag = conf.make_sync_label(pname, destid)

    if not updated_min:
        updated_min = conf.get_last_sync_stop(pname)

    # FIXME: We are fetching the group feed a second time. Ideally we
    # should be able to do everything we want with the feed already
    # fetched above. This has a performance implication for groups with
    # a large number of items. Will fix this once functionality is
    # validated.
    feed = self._get_group_feed(updated_min=updated_min,
                                showdeleted='false')

    logging.info('Response received from Google. Processing...')

    if not feed.entry:
        logging.info('No entries in feed.')
        # Nothing changed on the Google side; everything we know about
        # is unmodified.
        for x in kss:
            sl.add_unmod(x)
        return

    skip = 0
    etag_cnt = 0

    for i, entry in enumerate(feed.entry):
        gcid = utils.get_link_rel(entry.link, 'edit')
        gcid = GCContact.normalize_gcid(gcid)
        olid = get_udp_by_key(entry.user_defined_field, stag)
        etag = entry.etag
        epd = entry.deleted

        # Pick the best available name purely for log readability.
        name = None
        if entry.name:
            if entry.name.full_name:
                name = entry.name.full_name.text
            elif entry.name.family_name:
                name = entry.name.family_name.text
            elif entry.name.given_name:
                name = entry.name.given_name.text

        if epd:
            if olid:
                # We will trust our own delete logic...
                pass
            else:
                # Deleted before it got synched. Get on with life
                skip += 1
                continue
        else:
            if olid:
                logging.debug('Modified Google Contact: %20s %s',
                              name, gcid)
                sl.add_mod(gcid, olid)
            else:
                logging.debug('New Google Contact: %20s %s',
                              name, gcid)
                sl.add_new(gcid)

        if etag:
            sl.add_etag(gcid, etag)
            etag_cnt += 1
        else:
            sl.add_entry(gcid)

    # Whatever was neither new nor modified is unmodified.
    for x in kss:
        if not x in sl.get_news() and not x in sl.get_mods():
            sl.add_unmod(x)

    logging.debug('Total Contacts : %5d', len(newi))
    logging.debug('num with etags : %5d', etag_cnt)
    logging.debug('num del bef sync : %5d', skip)
bid, err_str, entry.id) else: ## We could just print a more detailed error for all ## cases. Should do some time FIXME. logging.error('Sync failed for bid %s: %s: %s', bid, err_str, entry.id) else: if op == 'query': con = entry # We could build and return array for all cases, but # why waste memory... cons.append(con) elif op in ['insert', 'update']: con = self.get_con(bid) orig = self.get_orig(bid) gcid = utils.get_link_rel(entry.link, 'edit') gcid = GCContact.normalize_gcid(gcid) orig.update_sync_tags(self.sync_tag, gcid) cons.append(orig) t = None if op == 'insert': t = 'created' elif op == 'update': t = 'updated' if t: logging.info('Successfully %s gmail entry for %30s (%s)', t, con.get_name(), orig.get_itemid()) return success, cons
def process_batch_response (self, resp):
    """Walk a Google batch-response feed and record the outcome.

    resp is the response feed obtained from a batch operation to
    google. This routine will walk through the batch response entries,
    and make note in the outlook database for successful sync, or
    handle errors appropriately.

    Returns a tuple (success, cons) where success is a boolean to know
    if all the entries had successful operation, and an array of
    contact items from the batch operation."""

    op = self.get_operation()
    cons = []
    success = True

    for entry in resp.entry:
        bid = entry.batch_id.text if entry.batch_id else None
        if not entry.batch_status:
            # There is something seriously wrong with this request.
            self.handle_interrupted_feed(str(resp))
            success = False
            continue

        code = int(entry.batch_status.code)
        reason = entry.batch_status.reason

        if code != SYNC_OK and code != SYNC_CREATED:
            # FIXME this code path needs to be tested properly
            err = sync_status_str(code)
            err_str = '' if err is None else ('Code: %s' % err)
            err_str = 'Reason: %s. %s' % (reason, err_str)

            success = False
            if op == 'insert' or op == 'update':
                try:
                    name = self.get_con(bid).get_disp_name()
                except Exception:
                    name = "WTH!"
                logging.error('Upload to Google failed for: %s: %s',
                              name, err_str)
            elif op == 'Writeback olid':
                logging.error('Could not complete sync for: %s: %s: %s',
                              bid, err_str, entry.id)
            else:
                ## We could just print a more detailed error for all
                ## cases. Should do some time FIXME.
                logging.error('Sync failed for bid %s: %s: %s',
                              bid, err_str, entry.id)
        else:
            if op == 'query':
                con = entry
                # We could build and return array for all cases, but
                # why waste memory...
                cons.append(con)
            elif op in ['insert', 'update']:
                con = self.get_con(bid)
                orig = self.get_orig(bid)
                gcid = utils.get_link_rel(entry.link, 'edit')
                gcid = GCContact.normalize_gcid(gcid)
                # Remember the Google-side id on the original item so
                # future syncs can correlate the two records.
                orig.update_sync_tags(self.sync_tag, gcid)
                cons.append(orig)

                t = None
                if op == 'insert':
                    t = 'created'
                elif op == 'update':
                    t = 'updated'
                if t:
                    logging.info('Successfully %s gmail entry for %30s (%s)',
                                 t, con.get_disp_name(), orig.get_itemid())

    # The docstring promises a (success, cons) tuple; without this the
    # caller would silently get None.
    return success, cons
def prep_sync_lists (self, destid, sl, updated_min=None, cnt=0):
    """See the documentation in folder.Folder"""

    pname = sl.get_pname()
    conf = self.get_config()

    logging.info('Querying Google for status of Contact Entries...')
    stag = conf.make_sync_label(pname, destid)

    # NOTE(review): an unused "sort the DBIds so dest1 has the lower
    # ID" computation (db1/db2) was removed here; neither value was
    # read, and the sibling implementation carries the same block
    # commented out with a FIXME saying it should go.

    if not updated_min:
        updated_min = conf.get_last_sync_stop(pname)

    feed = self._get_group_feed(updated_min=updated_min,
                                showdeleted='false')

    logging.info('Response received from Google. Processing...')

    if not feed.entry:
        logging.info('No entries in feed.')
        return

    skip = 0
    etag_cnt = 0

    for i, entry in enumerate(feed.entry):
        gcid = utils.get_link_rel(entry.link, 'edit')
        gcid = GCContact.normalize_gcid(gcid)
        olid = get_udp_by_key(entry.user_defined_field, stag)
        etag = entry.etag
        epd = entry.deleted

        # Pick the best available name purely for log readability.
        # Guard entry.name itself: deleted/minimal entries can come
        # back without a name element at all.
        name = None
        if entry.name:
            if entry.name.full_name:
                name = entry.name.full_name.text
            elif entry.name.family_name:
                name = entry.name.family_name.text
            elif entry.name.given_name:
                name = entry.name.given_name.text

        if epd:
            if olid:
                sl.add_del(gcid, olid)
            else:
                # Deleted before it got synched. Get on with life
                skip += 1
                continue
        else:
            if olid:
                logging.debug('Modified Google Contact: %20s %s',
                              name, gcid)
                sl.add_mod(gcid, olid)
            else:
                logging.debug('New Google Contact: %20s %s',
                              name, gcid)
                sl.add_new(gcid)

        if etag:
            sl.add_etag(gcid, etag)
            etag_cnt += 1
        else:
            sl.add_entry(gcid)

    logging.debug('num with etags : %5d', etag_cnt)
    logging.debug('num del bef sync : %5d', skip)
    logging.info('Note: Stats for Google Contacts are only for the '
                 'changeset since the last sync. In particular the total '
                 'count is NOT the total number of contacts in your folder!')
def process_batch_response(self, resp):
    """Walk a Google batch-response feed and record the outcome.

    resp is the response feed obtained from a batch operation to
    google. This routine will walk through the batch response entries,
    and make note in the outlook database for successful sync, or
    handle errors appropriately.

    Returns a tuple (success, cons) where success is a boolean to know
    if all the entries had successful operation, and an array of
    contact items from the batch operation."""

    op = self.get_operation()
    cons = []
    success = True

    for entry in resp.entry:
        bid = entry.batch_id.text if entry.batch_id else None
        if not entry.batch_status:
            # There is something seriously wrong with this request.
            self.handle_interrupted_feed(str(resp))
            success = False
            continue

        code = int(entry.batch_status.code)
        reason = entry.batch_status.reason

        if code != SYNC_OK and code != SYNC_CREATED:
            # FIXME this code path needs to be tested properly
            err = sync_status_str(code)
            err_str = '' if err is None else ('Code: %s' % err)
            err_str = 'Reason: %s. %s' % (reason, err_str)

            success = False
            if op == 'insert' or op == 'update':
                try:
                    name = self.get_con(bid).get_disp_name()
                except Exception:
                    name = "WTH!"
                logging.error('Upload to Google failed for: %s: %s',
                              name, err_str)
            elif op == 'Writeback olid':
                logging.error('Could not complete sync for: %s: %s: %s',
                              bid, err_str, entry.id)
            else:
                ## We could just print a more detailed error for all
                ## cases. Should do some time FIXME.
                logging.error('Sync failed for bid %s: %s: %s',
                              bid, err_str, entry.id)
        else:
            if op == 'query':
                con = entry
                # We could build and return array for all cases, but
                # why waste memory...
                cons.append(con)
            elif op in ['insert', 'update']:
                con = self.get_con(bid)
                orig = self.get_orig(bid)
                gcid = utils.get_link_rel(entry.link, 'edit')
                gcid = GCContact.normalize_gcid(gcid)
                # Remember the Google-side id on the original item so
                # future syncs can correlate the two records.
                orig.update_sync_tags(self.sync_tag, gcid)
                cons.append(orig)

                t = None
                if op == 'insert':
                    t = 'created'
                elif op == 'update':
                    t = 'updated'
                if t:
                    logging.info(
                        'Successfully %s gmail entry for %30s (%s)',
                        t, con.get_disp_name(), orig.get_itemid())

    # The docstring promises a (success, cons) tuple; without this the
    # caller would silently get None.
    return success, cons