def _snarf_names_from_parse_res (self, pr):
    """Populate first name, last name and suffix ('affix') from the
    parse result pr.

    Any affixes beyond the first are preserved as a json-encoded
    string in the 'affix' custom field.
    """
    n = pr['firstname']
    if n and n != 'nil':
        self.set_firstname(chompq(n))

    n = pr['lastname']
    if n and n != 'nil':
        self.set_lastname(chompq(n))

    try:
        affix = pr['affix']
        if affix and affix != 'nil':
            str_re = self.get_store().get_str_re()
            affix = re.findall(str_re, affix)
            # First affix becomes the suffix; the remainder are stashed
            # json-encoded in a custom field.
            self.set_suffix(chompq(affix[0]))
            if len(affix) > 1:
                aff = demjson.encode([chompq(x) for x in affix[1:]])
                self.add_custom('affix', aff)
    except KeyError:
        ## FIXME: There should be a better way to handle the format
        ## differences.... for now we'll put up with the hacks
        # Older formats have no 'affix' key in the parse result; fall
        # back to whatever was previously saved in the custom field.
        affix = self.get_custom('affix')
        if affix:
            affix = demjson.decode(affix)
            if len(affix) > 0:
                self.set_suffix(affix[0])
                affix = affix[1:]
            if len(affix) > 0:
                aff = demjson.encode(affix)
                # str.replace instead of the long-deprecated
                # string.replace() (the latter was removed in Python 3).
                aff = aff.replace('"', r'\"')
                self.add_custom('affix', aff)
def _snarf_names_from_parse_res (self, pr):
    """Populate first name, last name and suffix ('affix') from the
    parse result pr, unescaping and unquoting each value.

    Any affixes beyond the first are preserved as a json-encoded
    string in the 'affix' custom field.
    """
    n = pr['firstname']
    if n and n != 'nil':
        self.set_firstname(unesc_str(chompq(n)))

    n = pr['lastname']
    if n and n != 'nil':
        self.set_lastname(unesc_str(chompq(n)))

    try:
        affix = pr['affix']
        if affix and affix != 'nil':
            str_re = self.get_store().get_str_re()
            affix = re.findall(str_re, affix)
            self.set_suffix(unesc_str(chompq(affix[0])))
            if len(affix) > 1:
                aff = demjson.encode([unesc_str(chompq(x))
                                      for x in affix[1:]])
                ## FIXME: Do we need to escape the quotes in json encoding
                ## as in the except clause?
                self.add_custom('affix', aff)
    except KeyError:
        ## FIXME: There should be a better way to handle the format
        ## differences.... for now we'll put up with the hacks
        # Older formats have no 'affix' key in the parse result; fall
        # back to whatever was previously saved in the custom field.
        affix = self.get_custom('affix')
        if affix:
            affix = demjson.decode(affix)
            if len(affix) > 0:
                self.set_suffix(affix[0])
                affix = affix[1:]
            if len(affix) > 0:
                aff = demjson.encode(affix)
                self.add_custom('affix', aff)
def _snarf_names_from_parse_res(self, pr):
    """Populate first name, last name and suffix ('affix') from the
    parse result pr, unescaping and unquoting each value.

    Any affixes beyond the first are preserved as a json-encoded
    string in the 'affix' custom field.
    """
    n = pr['firstname']
    if n and n != 'nil':
        self.set_firstname(unesc_str(chompq(n)))

    n = pr['lastname']
    if n and n != 'nil':
        self.set_lastname(unesc_str(chompq(n)))

    try:
        affix = pr['affix']
        if affix and affix != 'nil':
            str_re = self.get_store().get_str_re()
            affix = re.findall(str_re, affix)
            self.set_suffix(unesc_str(chompq(affix[0])))
            if len(affix) > 1:
                aff = demjson.encode(
                    [unesc_str(chompq(x)) for x in affix[1:]])
                ## FIXME: Do we need to escape the quotes in json encoding
                ## as in the except clause?
                self.add_custom('affix', aff)
    except KeyError:
        ## FIXME: There should be a better way to handle the format
        ## differences.... for now we'll put up with the hacks
        # Older formats have no 'affix' key; fall back to whatever was
        # previously saved in the custom field.
        affix = self.get_custom('affix')
        if affix:
            affix = demjson.decode(affix)
            if len(affix) > 0:
                self.set_suffix(affix[0])
                affix = affix[1:]
            if len(affix) > 0:
                aff = demjson.encode(affix)
                self.add_custom('affix', aff)
def _snarf_names_from_parse_res (self, pr):
    """Populate first name, last name and suffix from the parse
    result pr."""
    first = pr['firstname']
    if first and first != 'nil':
        self.set_firstname(chompq(first))

    last = pr['lastname']
    if last and last != 'nil':
        self.set_lastname(chompq(last))

    # FIXME: Just what the hell is an 'Affix'? Just use the first one and
    # ditch the rest.
    affix = pr['affix']
    if affix and affix != 'nil':
        self.set_suffix(chompq(affix[0]))
def _get_company_as_string (self):
    """Render the company field(s) in the on-disk BBDB representation
    for the store's file format version; returns 'nil' when no company
    is set."""
    primary = esc_str(self.get_company())
    if not primary:
        return 'nil'

    extras = copy.deepcopy(self.get_custom('company'))
    ver = self.get_store().get_file_format()

    ## FIXME: This is an egregious design violation, as noted earlier. We
    ## should move all such version specific conversions to pimdb_bb.el
    if ver == '6':
        # v6 keeps every company in a single quoted, '; '-separated
        # string.
        if extras and len(extras) > 0:
            names = [chompq(x) for x in demjson.decode(extras)]
        else:
            names = []
        names.insert(0, primary)
        return unchompq('; '.join(names))
    elif ver == '7':
        # v7 keeps a lisp list of individually quoted company names.
        if extras and len(extras) > 0:
            names = demjson.decode(extras)
            names.insert(0, unchompq(primary))
        else:
            names = [unchompq(primary)]
        return ('(' + ' '.join(names) + ')')
def _snarf_emails_from_parse_res(self, pr):
    """Read the email addresses out of pr and file each under the
    home/work/other bucket its domain suggests; the first address seen
    becomes the primary email."""
    raw = pr['emails']
    if not raw:
        return

    str_re = self.get_store().get_str_re()
    addrs = [chompq(s) for s in re.findall(str_re, raw)]
    domains = self.get_email_domains()

    for addr in addrs:
        if addr == 'nil':
            continue

        home, work, other = classify_email_addr(addr, domains)
        if home:
            self.add_email_home(addr)
        elif work:
            self.add_email_work(addr)
        elif other:
            self.add_email_other(addr)
        else:
            # Unclassified addresses default to the work bucket.
            self.add_email_work(addr)

        if not self.get_email_prim():
            self.set_email_prim(addr)
def _get_company_as_string(self):
    """Serialize the company (plus any overflow companies stashed in
    the 'company' custom field) into the BBDB file format; returns
    'nil' when no company is set."""
    main_co = esc_str(self.get_company())
    if not main_co:
        return 'nil'

    stash = copy.deepcopy(self.get_custom('company'))
    fmt = self.get_store().get_file_format()

    ## FIXME: This is an egregious design violation, as noted earlier. We
    ## should move all such version specific conversions to pimdb_bb.el
    if fmt == '6':
        # v6: one quoted string with '; ' between company names.
        rest = []
        if stash and len(stash) > 0:
            rest = demjson.decode(stash)
            rest = [chompq(x) for x in rest]
        rest.insert(0, main_co)
        return unchompq('; '.join(rest))
    elif fmt == '7':
        # v7: a lisp list of individually quoted names.
        if stash and len(stash) > 0:
            rest = demjson.decode(stash)
            rest.insert(0, unchompq(main_co))
        else:
            rest = [unchompq(main_co)]
        return ('(' + ' '.join(rest) + ')')
def _snarf_emails_from_parse_res (self, pr):
    """Pull email addresses from the parse result and classify each
    into home/work/other by its domain."""
    ems = pr['emails']
    if ems:
        str_re = self.get_store().get_str_re()
        quoted = re.findall(str_re, ems)
        domains = self.get_email_domains()
        for q in quoted:
            em = chompq(q)
            if em == 'nil':
                continue
            home, work, other = classify_email_addr(em, domains)
            if home:
                self.add_email_home(em)
            elif work:
                self.add_email_work(em)
            elif other:
                self.add_email_other(em)
            else:
                # Fall back to the work bucket for unmatched domains.
                self.add_email_work(em)
            # First address encountered becomes the primary one.
            if not self.get_email_prim():
                self.set_email_prim(em)
def _snarf_emails_from_parse_res (self, pr):
    """Extract email addresses from pr and add them to this contact,
    classified by domain into home/work/other buckets."""
    raw = pr['emails']
    if not raw:
        return

    str_re = self.get_store().get_str_re()
    addrs = [chompq(s) for s in re.findall(str_re, raw)]
    domains = self.get_email_domains()

    ## Note that the following implementation means if the same domain
    ## is specified in more than one category, it ends up being copied
    ## to every category. In effect this means when this is synched to
    ## google contacts, say, the GC entry will have the same email
    ## address twice for the record
    for em in addrs:
        if em == 'nil':
            continue
        home, work, other = self._classify_email_addr(em, domains)
        if home:
            self.add_email_home(em)
        elif work:
            self.add_email_work(em)
        elif other:
            self.add_email_other(em)
        else:
            self.add_email_work(em)
        if not self.get_email_prim():
            self.set_email_prim(em)
def _snarf_emails_from_parse_res(self, pr):
    """Parse email addresses out of pr, classify each one by its
    domain, and record the first one seen as the primary address."""
    ems = pr['emails']
    if ems:
        str_re = self.get_store().get_str_re()
        domains = self.get_email_domains()
        for quoted in re.findall(str_re, ems):
            em = chompq(quoted)
            if em == 'nil':
                continue
            home, work, other = self._classify_email_addr(em, domains)
            ## Note that the following implementation means if the same
            ## domain is specified in more than one category, it ends up
            ## being copied to every category. In effect this means when
            ## this is synched to google contacts, say, the GC entry
            ## will have the same email address twice for the record
            if home:
                self.add_email_home(em)
            elif work:
                self.add_email_work(em)
            elif other:
                self.add_email_other(em)
            else:
                self.add_email_work(em)
            if not self.get_email_prim():
                self.set_email_prim(em)
def _snarf_names_from_parse_res(self, pr):
    """Populate first name, last name and suffix from the parse
    result pr; when the format has no affix field, clear the suffix."""
    n = pr['firstname']
    if n and n != 'nil':
        self.set_firstname(chompq(n))

    n = pr['lastname']
    if n and n != 'nil':
        self.set_lastname(chompq(n))

    # FIXME: Just what the hell is an 'Affix'? Just use the first one and
    # ditch the rest.
    try:
        affix = pr['affix']
        if affix and affix != 'nil':
            self.set_suffix(chompq(affix[0]))
    except KeyError:
        ## FIXME: There should be a better way to handle the format
        ## differences.... for now we'll put up with the hacks
        self.set_suffix(None)
def _snarf_created_updated_from_parse_res(self, pr):
    """In file format ver 9 some fields were made first class citizens
    in the schema, which were parts of the notes field in earlier
    versions. Such fields need to be read and processed separately."""
    ## For earlier versions of the file format this would have
    ## been handled already as part of the notes section.
    if int(self.get_store().get_file_format()) < 9:
        return

    # FIXME: We may also want to read and use the native bbdbid...
    for field, setter in (('createdon', self.set_created),
                          ('lastupdated', self.set_updated)):
        val = pr[field]
        if val and val != 'nil':
            setter(unesc_str(chompq(val)))
def _snarf_names_from_parse_res (self, pr):
    """Populate first name, last name and suffix from the parse
    result pr; when the format has no affix field, clear the suffix."""
    n = pr['firstname']
    if n and n != 'nil':
        self.set_firstname(chompq(n))

    n = pr['lastname']
    if n and n != 'nil':
        self.set_lastname(chompq(n))

    # FIXME: Just what the hell is an 'Affix'? Just use the first one and
    # ditch the rest.
    try:
        affix = pr['affix']
        if affix and affix != 'nil':
            self.set_suffix(chompq(affix[0]))
    except KeyError:
        ## FIXME: There should be a better way to handle the format
        ## differences.... for now we'll put up with the hacks
        self.set_suffix(None)
def _snarf_created_updated_from_parse_res (self, pr):
    """In file format ver 9 some fields were made first class citizens
    in the schema, which were parts of the notes field in earlier
    versions. Such fields need to be read and processed separately."""
    ## For earlier versions of the file format this would have
    ## been handled already as part of the notes section.
    bbdb_ver = int(self.get_store().get_file_format())
    if bbdb_ver < 9:
        return

    # FIXME: We may also want to read and use the native bbdbid...
    created = pr['createdon']
    if created and created != 'nil':
        self.set_created(unesc_str(chompq(created)))

    updated = pr['lastupdated']
    if updated and updated != 'nil':
        self.set_updated(unesc_str(chompq(updated)))
def _snarf_phones_from_parse_res(self, pr):
    """Parse the phone entries in pr and add each one to the contact,
    classified by its label."""
    ph_re = self.get_store().get_ph_re()
    raw = pr["phones"]
    if not raw:
        return

    for ph in re.findall(ph_re, raw):
        m = re.search(ph_re, "[" + ph[0] + "]")
        if not m:
            logging.debug("Could not parse phone: %s", ph[0])
            continue
        groups = m.groupdict()
        if groups["structured"]:
            # Structured numbers carry no country code in the file.
            num = "+1 " + groups["structured"]
        else:
            num = chompq(groups["unstructured"])
        label = chompq(groups["phlabel"])
        self._classify_and_add_phone(label, (label, num))
def _snarf_phones_from_parse_res (self, pr):
    """Parse the phone entries in pr and add each one to the contact,
    classified by its label."""
    ph_re = self.get_store().get_ph_re()
    entries = re.findall(ph_re, pr['phones']) if pr['phones'] else None
    if not entries:
        return

    for entry in entries:
        match = re.search(ph_re, '[' + entry[0] + ']')
        if match:
            gd = match.groupdict()
            # Structured numbers carry no country code in the file.
            if gd['structured']:
                number = '+1 ' + gd['structured']
            else:
                number = chompq(gd['unstructured'])
            tag = chompq(gd['phlabel'])
            self._classify_and_add_phone(tag, (tag, number))
        else:
            logging.debug('Could not parse phone: %s', entry[0])
def _snarf_phones_from_parse_res(self, pr):
    """Extract phone numbers from the parse result and register each
    with its label."""
    ph_re = self.get_store().get_ph_re()
    raw = pr['phones']
    phs = re.findall(ph_re, raw) if raw else None
    if phs:
        for ph in phs:
            res = re.search(ph_re, '[' + ph[0] + ']')
            if not res:
                logging.debug('Could not parse phone: %s', ph[0])
                continue
            resg = res.groupdict()
            # Structured numbers carry no country code in the file.
            phnum = ('+1 ' + resg['structured'] if resg['structured']
                     else chompq(resg['unstructured']))
            label = chompq(resg['phlabel'])
            self._classify_and_add_phone(label, (label, phnum))
def _snarf_company_from_parse_res (self, pr):
    """Read the company entries out of pr; the first one goes into the
    Company field, the rest into the 'company' custom field as a
    json-encoded string."""
    cs = pr['company']
    if not cs or cs == 'nil':
        return

    str_re = self.get_store().get_str_re()
    names = re.findall(str_re, cs)
    self.set_company(chompq(names[0]))

    overflow = names[1:]
    if overflow and len(overflow) > 0:
        self.add_custom('company', demjson.encode(overflow))
def _snarf_company_from_parse_res (self, pr):
    """Read the company entries out of pr; the first one goes into the
    Company field, the rest into the 'company' custom field as a
    json-encoded string."""
    cs = pr['company']
    if not cs or cs == 'nil':
        return

    str_re = self.get_store().get_str_re()
    names = re.findall(str_re, cs)

    ## FIXME: This is an egregious hack. The right way to do this is
    ## to have field specific parsing routine in the BBPIMDB just like
    ## we have the regexes there. for now, let's move on with ugly
    ## hacks.
    if self.get_store().get_file_format() == '6':
        # v6 packs every company into one quoted '; '-separated string.
        names = chompq(names[0]).split('; ')

    self.set_company(unesc_str(chompq(names[0])))
    overflow = names[1:]
    if overflow and len(overflow) > 0:
        self.add_custom('company', demjson.encode(overflow))
def _snarf_company_from_parse_res(self, pr):
    """Parse the company entries from pr. The first becomes the
    Company field; any others are stashed json-encoded in the
    'company' custom field."""
    cs = pr['company']
    if cs and cs != 'nil':
        str_re = self.get_store().get_str_re()
        cs = re.findall(str_re, cs)

        ## FIXME: This is an egregious hack. The right way to do this is
        ## to have field specific parsing routine in the BBPIMDB just like
        ## we have the regexes there. for now, let's move on with ugly
        ## hacks.
        ver = self.get_store().get_file_format()
        if ver == '6':
            # v6 packs every company into one quoted '; '-separated
            # string.
            cs = chompq(cs[0]).split('; ')

        self.set_company(unesc_str(chompq(cs[0])))
        remainder = cs[1:]
        if remainder and len(remainder) > 0:
            self.add_custom('company', demjson.encode(remainder))
def _snarf_aka_from_parse_res (self, pr):
    """Read the 'aka' entries from pr; the first becomes the nickname,
    the rest go into the 'aka' custom field."""
    aka = pr['aka']
    if not aka or aka == 'nil':
        return

    str_re = self.get_store().get_str_re()
    names = re.findall(str_re, aka)

    if names[0]:
        self.set_nickname(unesc_str(chompq(names[0])))

    rest = names[1:]
    if rest and len(rest) > 0:
        ## 'rest' is an array, and it will not be possible to serialize
        ## it when sending to Google or saving to Outlook etc. So just
        ## encode it in json format - our goto solution for such
        ## problems.
        self.add_custom('aka', demjson.encode(rest))
def _snarf_aka_from_parse_res(self, pr):
    """Parse the aliases ('aka') in pr: nickname from the first entry,
    remaining entries json-encoded into the 'aka' custom field."""
    aka = pr['aka']
    if aka and aka != 'nil':
        str_re = self.get_store().get_str_re()
        aliases = re.findall(str_re, aka)
        nick, others = aliases[0], aliases[1:]
        if nick:
            self.set_nickname(unesc_str(chompq(nick)))
        if others and len(others) > 0:
            ## Note that 'others' is an array, and it will not be
            ## possible to serialize it when sending to Google or saving
            ## to Outlook etc. So let's just encode it in json format -
            ## our goto solution for such problems.
            self.add_custom('aka', demjson.encode(others))
def _snarf_postal_from_parse_res(self, pr):
    """Parse the postal addresses in pr and add each to the contact;
    the first address's label becomes the primary postal label."""
    adr_re = self.get_store().get_adr_re()
    str_re = self.get_store().get_str_re()

    for i, addr in enumerate(re.findall(adr_re, pr['addrs'])):
        label, val = addr[:2]
        add = '[' + label + ' ' + val + ']'
        res = re.search(adr_re, add)
        if not res:
            logging.error('bb:snarf_postal(): Huh? No match for add %s.',
                          add)
            continue

        fields = res.groupdict()
        addict = {'street': None, 'city': None, 'state': None,
                  'country': None, 'zip': None}

        # Street lines are individually quoted; join them with newlines.
        streets = [unesc_str(chompq(s))
                   for s in re.findall(str_re, fields['streets'])]
        if streets:
            addict['street'] = '\n'.join(streets)

        for part in ('city', 'state', 'country', 'zip'):
            v = fields[part]
            if v:
                addict[part] = unesc_str(chompq(v))

        self.add_postal(chompq(label), addict)
        if i == 0:
            self.set_postal_prim_label(label)
def _snarf_postal_from_parse_res (self, pr):
    """Extract the postal addresses from the parse result; the first
    address's label becomes the primary postal label."""
    adr_re = self.get_store().get_adr_re()
    str_re = self.get_store().get_str_re()
    addrs = re.findall(adr_re, pr['addrs'])

    for i, addr in enumerate(addrs):
        label, val = addr[:2]
        add = '[' + label + ' ' + val + ']'
        res = re.search(adr_re, add)
        if res:
            fields = res.groupdict()
            addict = {'street': None,
                      'city': None,
                      'state': None,
                      'country': None,
                      'zip': None}

            # Street lines are individually quoted; join with newlines.
            sts = [unesc_str(chompq(x))
                   for x in re.findall(str_re, fields['streets'])]
            if sts:
                addict['street'] = '\n'.join(sts)

            if fields['city']:
                addict['city'] = unesc_str(chompq(fields['city']))
            if fields['state']:
                addict['state'] = unesc_str(chompq(fields['state']))
            if fields['country']:
                addict['country'] = unesc_str(chompq(fields['country']))
            if fields['zip']:
                addict['zip'] = unesc_str(chompq(fields['zip']))

            self.add_postal(chompq(label), addict)
            if i == 0:
                self.set_postal_prim_label(label)
        else:
            logging.error('bb:snarf_postal(): Huh? No match for add %s.',
                          add)
def _snarf_postal_from_parse_res(self, pr):
    """Parse the postal addresses out of pr; the first one's label is
    recorded as the primary postal label."""
    adr_re = self.get_store().get_adr_re()
    str_re = self.get_store().get_str_re()

    for idx, entry in enumerate(re.findall(adr_re, pr["addrs"])):
        tag, body = entry[:2]
        candidate = "[" + tag + " " + body + "]"
        match = re.search(adr_re, candidate)
        if not match:
            logging.error("bb:snarf_postal(): Huh? No match for add %s.",
                          candidate)
            continue

        parts = match.groupdict()
        record = {"street": None, "city": None, "state": None,
                  "country": None, "zip": None}

        # Street lines are individually quoted; join with newlines.
        lines = [unesc_str(chompq(x))
                 for x in re.findall(str_re, parts["streets"])]
        if lines:
            record["street"] = "\n".join(lines)

        for key in ("city", "state", "country", "zip"):
            if parts[key]:
                record[key] = unesc_str(chompq(parts[key]))

        self.add_postal(chompq(tag), record)
        if idx == 0:
            self.set_postal_prim_label(tag)
def _snarf_notes_from_parse_res (self, pr):
    """Parse the BBDB Notes entry; this contains most of the good
    stuff... including sync tags and stuff.

    Dispatches each key/value pair in the notes section to the matching
    setter (created/updated/itemid/prefix/gender/title/dept/IM/notes/
    birthday/anniversary/sync-tag/web/middle-name/affix/folder); every
    key that matches nothing lands in the 'Custom' field.  Note the
    elif chain is order-sensitive: exact-key matches are tried before
    the regex-based ones.
    """
    # Config-driven mapping of logical field names to the note keys
    # used in this BBDB file.
    noted = self.get_notes_map()
    if not noted:
        logging.error('Error in Config file. No notes_map field for bb')
        return

    stag_re = self.get_store().get_sync_tag_re()
    note_re = self.get_store().get_note_re()
    notes = re.findall(note_re, pr['notes'])
    # Accumulates unrecognized keys; flushed into the Custom field at
    # the end.
    custom = {}
    # Reset so a record without a folder note does not keep a stale one.
    self.set_bbdb_folder(None)

    # logging.debug('bb:snfpr:stag_re: %s', stag_re)
    # keys = [note[0] for note in notes]
    # logging.debug('bb:snfpr:Keys: %s', keys)
    for note in notes:
        (key, val) = note[:2]
        key = key.rstrip()
        val = unesc_str(chompq(val))
        if key == noted['created']:
            self.set_created(val)
        elif key == noted['updated']:
            self.set_updated(val)
        elif key == noted['itemid']:
            self.set_itemid(val)
        elif key == noted['prefix']:
            self.set_prefix(val)
        elif key == noted['gender']:
            self.set_gender(val)
        elif key == noted['title']:
            self.set_title(val)
        elif key == noted['dept']:
            self.set_dept(val)
        elif re.search(noted['ims'], key):
            self._add_im(noted['ims'], key, val)
        elif key == noted['notes']:
            self.add_notes(val)
        elif key == noted['birthday']:
            # Dates are validated before being stored.
            if self._is_valid_date(val, noted['birthday']):
                self.set_birthday(val)
        elif key == noted['anniv']:
            if self._is_valid_date(val, noted['anniv']):
                self.set_anniv(val)
        elif re.search(stag_re, key):
            self.update_sync_tags(key.rstrip(), val)
        elif re.search(noted['web_home_re'], key):
            self.add_web_home(val)
        elif re.search(noted['web_work_re'], key):
            self.add_web_work(val)
        elif re.search(noted['middle_name'], key):
            self.set_middlename(val)
        elif re.search('affix', key):
            # Affix notes are json lists: first element is the suffix,
            # the rest stay json-encoded in the custom map.
            affix = demjson.decode(val)
            if len(affix) > 0:
                self.set_suffix(affix[0])
            if len(affix) > 1:
                custom.update({key : demjson.encode(affix[1:])})
        elif re.search(noted['folder'], key):
            self.set_bbdb_folder(val)
        else:
            ## The rest of the stuff go into the 'Custom' field...
            custom.update({key : val})

    if len(custom.keys()) > 0:
        self.update_custom(custom)
def _snarf_notes_from_parse_res(self, pr):
    """Parse the BBDB Notes entry; this contains most of the good
    stuff... including sync tags and stuff.

    Dispatches each key/value pair in the notes section to the matching
    setter; unrecognized keys are collected into the 'Custom' field.
    The elif chain is order-sensitive: exact-key matches are tried
    before the regex-based ones.
    """
    # Config-driven mapping of logical field names to the note keys
    # used in this BBDB file.
    noted = self.get_notes_map()
    if not noted:
        logging.error('Error in Config file. No notes_map field for bb')
        return

    stag_re = self.get_store().get_sync_tag_re()
    note_re = self.get_store().get_note_re()
    notes = re.findall(note_re, pr['notes'])
    # Accumulates unrecognized keys; flushed into the Custom field at
    # the end.
    custom = {}
    # Reset so a record without a folder note does not keep a stale one.
    self.set_bbdb_folder(None)

    # logging.debug('bb:snfpr:stag_re: %s', stag_re)
    # keys = [note[0] for note in notes]
    # logging.debug('bb:snfpr:Keys: %s', keys)
    for note in notes:
        (key, val) = note[:2]
        key = key.rstrip()
        val = unesc_str(chompq(val))
        if key == noted['created']:
            self.set_created(val)
        elif key == noted['updated']:
            self.set_updated(val)
        elif key == noted['itemid']:
            self.set_itemid(val)
        elif key == noted['prefix']:
            self.set_prefix(val)
        elif key == noted['gender']:
            self.set_gender(val)
        elif key == noted['title']:
            self.set_title(val)
        elif key == noted['dept']:
            self.set_dept(val)
        elif re.search(noted['ims'], key):
            self._add_im(noted['ims'], key, val)
        elif key == noted['notes']:
            self.add_notes(val)
        elif key == noted['birthday']:
            # Dates are validated before being stored.
            if self._is_valid_date(val, noted['birthday']):
                self.set_birthday(val)
        elif key == noted['anniv']:
            if self._is_valid_date(val, noted['anniv']):
                self.set_anniv(val)
        elif re.search(stag_re, key):
            self.update_sync_tags(key.rstrip(), val)
        elif re.search(noted['web_home_re'], key):
            self.add_web_home(val)
        elif re.search(noted['web_work_re'], key):
            self.add_web_work(val)
        elif re.search(noted['middle_name'], key):
            self.set_middlename(val)
        elif re.search('affix', key):
            # Affix notes are json lists: first element is the suffix,
            # the rest stay json-encoded in the custom map.
            affix = demjson.decode(val)
            if len(affix) > 0:
                self.set_suffix(affix[0])
            if len(affix) > 1:
                custom.update({key: demjson.encode(affix[1:])})
        elif re.search(noted['folder'], key):
            self.set_bbdb_folder(val)
        else:
            ## The rest of the stuff go into the 'Custom' field...
            custom.update({key: val})

    if len(custom.keys()) > 0:
        self.update_custom(custom)