def add_folder_to_provider(session, store_eid, folder_eid, folder_name):
    """Register a contact folder with the contact provider profile section.

    Appends the store entryid, folder entryid and folder name to the three
    parallel list properties in the global profile section (creating each
    list when the property does not exist yet, i.e. comes back as PT_ERROR).
    """
    section = session.OpenProfileSection(pbGlobalProfileSectionGuid, None, 0)
    proptags = (
        PR_ZC_CONTACT_STORE_ENTRYIDS,
        PR_ZC_CONTACT_FOLDER_ENTRYIDS,
        PR_ZC_CONTACT_FOLDER_NAMES_W,
    )
    new_values = (store_eid, folder_eid, folder_name)
    props = section.GetProps(list(proptags), 0)
    for sprop, proptag, new_value in zip(props, proptags, new_values):
        # PT_ERROR means the list property does not exist yet: start a new one
        if PROP_TYPE(sprop.ulPropTag) == PT_ERROR:
            sprop.Value = [new_value]
        else:
            sprop.Value.append(new_value)
        # overwrite any PT_ERROR tag with the real list proptag before saving
        sprop.ulPropTag = proptag
    section.SetProps(props)
def __init__(self, parent_mapiobj, mapiobj): # XXX rethink attributes, names.. add guidname..?
    """Wrap an SPropValue from *parent_mapiobj* as a Property.

    Large values come back as PT_ERROR/MAPI_E_NOT_ENOUGH_MEMORY; for those
    the real proptag is (re)determined and the value is wrapped in
    SPropDelayedValue so it is only streamed when actually accessed.
    """
    self._parent_mapiobj = parent_mapiobj
    #: Property tag, e.g. 0x37001f for PR_SUBJECT
    self.proptag = mapiobj.ulPropTag

    # MAPI_E_NOT_ENOUGH_MEMORY: property exists but is too large for
    # GetProps and must be streamed; determine its real proptag first.
    if PROP_TYPE(mapiobj.ulPropTag) == PT_ERROR and mapiobj.Value == MAPI_E_NOT_ENOUGH_MEMORY:
        if PROP_ID(self.proptag) == PROP_ID(PR_BODY_W): # avoid slow guessing
            self.proptag = PR_BODY_W
            mapiobj = SPropDelayedValue(parent_mapiobj, self.proptag)
        elif PROP_ID(self.proptag) in (PROP_ID(PR_RTF_COMPRESSED), PROP_ID(PR_HTML)):
            # RTF/HTML bodies are streamed as binary
            self.proptag = PROP_TAG(PT_BINARY, PROP_ID(self.proptag))
            mapiobj = SPropDelayedValue(parent_mapiobj, self.proptag)
        else: # XXX possible to use above trick to infer all proptags?
            # probe candidate types until one exists (or is too large)
            for proptype in (PT_BINARY, PT_UNICODE): # XXX slow, incomplete?
                proptag = (mapiobj.ulPropTag & 0xffff0000) | proptype
                try:
                    HrGetOneProp(parent_mapiobj, proptag) # XXX: Unicode issue?? calls GetProps([proptag], 0)
                    self.proptag = proptag # XXX isn't it strange we can get here
                except MAPIErrorNotEnoughMemory:
                    # exists but too large: stream it lazily
                    mapiobj = SPropDelayedValue(parent_mapiobj, proptag)
                    self.proptag = proptag
                    break
                except MAPIErrorNotFound:
                    pass

    self.id_ = self.proptag >> 16
    self.mapiobj = mapiobj
    self._value = None
    self.idname = REV_TAG.get(self.proptag)
    self.type_ = PROP_TYPE(self.proptag)
    self.typename = REV_TYPE.get(self.type_)
    self.named = False
    self.kind = None
    self.kindname = None
    self.guid = None
    self.name = None
    self.namespace = None
    if self.id_ >= 0x8000: # possible named prop
        try:
            lpname = self._parent_mapiobj.GetNamesFromIDs([self.proptag], None, 0)[0]
            if lpname:
                self.guid = bin2hex(lpname.guid)
                self.namespace = GUID_NAMESPACE.get(lpname.guid)
                self.name = lpname.id
                self.kind = lpname.kind
                self.kindname = 'MNID_STRING' if lpname.kind == MNID_STRING else 'MNID_ID'
                self.named = True
        except MAPIErrorNoSupport: # XXX user.props()?
            pass
def assert_check_columns(columns, ulTableFlags, description):
    """Assert no column proptag has a string type conflicting with the flags.

    With MAPI_UNICODE set, PT_STRING8 columns are wrong; without it,
    PT_UNICODE columns are wrong. *description* is a format string taking
    the offending count.
    """
    wrong_type = PT_STRING8 if (ulTableFlags & MAPI_UNICODE) else PT_UNICODE
    nWrongType = sum(1 for ulPropTag in columns
                     if PROP_TYPE(ulPropTag) == wrong_type)
    assert nWrongType == 0, description % nWrongType
def __init__(self, parent_mapiobj, mapiobj): self._parent_mapiobj = parent_mapiobj #: MAPI proptag, for example 0x37001f for PR_SUBJECT_W self.proptag = mapiobj.ulPropTag if (PROP_TYPE(mapiobj.ulPropTag) == PT_ERROR and \ mapiobj.Value == MAPI_E_NOT_ENOUGH_MEMORY): # avoid slow guessing if PROP_ID(self.proptag) == PROP_ID(PR_BODY_W): self.proptag = PR_BODY_W mapiobj = SPropDelayedValue(parent_mapiobj, self.proptag) elif PROP_ID(self.proptag) in \ (PROP_ID(PR_RTF_COMPRESSED), PROP_ID(PR_HTML)): self.proptag = CHANGE_PROP_TYPE(self.proptag, PT_BINARY) mapiobj = SPropDelayedValue(parent_mapiobj, self.proptag) else: # TODO possible to use above trick to infer all proptags? # TODO slow, incomplete? for proptype in (PT_BINARY, PT_UNICODE): proptag = (mapiobj.ulPropTag & 0xffff0000) | proptype try: # TODO: Unicode issue?? calls GetProps([proptag], 0) HrGetOneProp(parent_mapiobj, proptag) # TODO how did we end up here, why is this possible self.proptag = proptag except MAPIErrorNotEnoughMemory: mapiobj = SPropDelayedValue(parent_mapiobj, proptag) self.proptag = proptag break except MAPIErrorNotFound: pass self.mapiobj = mapiobj self._value = None self.__lpname = None
def __init__(self, parent_mapiobj, mapiobj):
    """Wrap an SPropValue from *parent_mapiobj* as a Property.

    PT_ERROR/MAPI_E_NOT_ENOUGH_MEMORY values are too large for GetProps;
    their real proptag is determined and the value wrapped in
    SPropDelayedValue so it is only streamed on access.
    """
    self._parent_mapiobj = parent_mapiobj
    self.proptag = mapiobj.ulPropTag
    if PROP_TYPE(mapiobj.ulPropTag) == PT_ERROR and mapiobj.Value == MAPI_E_NOT_ENOUGH_MEMORY:
        if PROP_ID(self.proptag) == PROP_ID(PR_BODY_W): # avoid slow guessing
            self.proptag = PR_BODY_W
            mapiobj = SPropDelayedValue(parent_mapiobj, self.proptag)
        elif PROP_ID(self.proptag) in (PROP_ID(PR_RTF_COMPRESSED), PROP_ID(PR_HTML)):
            # RTF/HTML bodies are streamed as binary
            self.proptag = CHANGE_PROP_TYPE(self.proptag, PT_BINARY)
            mapiobj = SPropDelayedValue(parent_mapiobj, self.proptag)
        else: # XXX possible to use above trick to infer all proptags?
            # probe candidate types until one exists (or is too large)
            for proptype in (PT_BINARY, PT_UNICODE): # XXX slow, incomplete?
                proptag = (mapiobj.ulPropTag & 0xffff0000) | proptype
                try:
                    HrGetOneProp(parent_mapiobj, proptag) # XXX: Unicode issue?? calls GetProps([proptag], 0)
                    self.proptag = proptag # XXX isn't it strange we can get here
                except MAPIErrorNotEnoughMemory:
                    # exists but too large: stream it lazily
                    mapiobj = SPropDelayedValue(parent_mapiobj, proptag)
                    self.proptag = proptag
                    break
                except MAPIErrorNotFound:
                    pass
    self.mapiobj = mapiobj
    self._value = None       # cached value, resolved lazily
    self.__lpname = None     # cached MAPINAMEID, resolved lazily
def _get_fast(self, proptag, default=None, must_exist=False, capped=False):
    """Return a property value, preferring the table cache over a full lookup.

    :param proptag: property tag to look up
    :param default: value returned when the property does not exist
    :param must_exist: re-raise NotFoundError instead of returning *default*
    :param capped: accept possibly-truncated string/binary cache values
    """
    cached = self._cache.get(proptag)
    if cached is not None:
        ptype = PROP_TYPE(cached.proptag)
        value = cached.value
        # a cached PT_ERROR/MAPI_E_NOT_FOUND means the property doesn't exist
        if ptype == PT_ERROR and value == MAPI_E_NOT_FOUND:
            return default
        # mapi table cells are limited to 255 characters/bytes, so a
        # string/binary value of that length may be truncated
        # TODO check other types
        maybe_truncated = (ptype in (PT_UNICODE, PT_BINARY)
                           and len(value) >= 255)
        if capped or not maybe_truncated:
            return value
    # fallback to (slow) lookup
    try:
        return self.prop(proptag).value
    except NotFoundError:
        if must_exist:
            raise
        return default
def _proptag_to_name(proptag, store, proptype=False):
    """Map a named-property proptag to 'namespace:name' form.

    With *proptype* set, the textual property type is appended:
    'namespace:name:type'.
    """
    lpname = store.mapiobj.GetNamesFromIDs([proptag], None, 0)[0]
    namespace = GUID_NAMESPACE.get(lpname.guid)
    name = lpname.id
    if not proptype:
        return u'%s:%s' % (namespace, name)
    type_ = REV_TYPE.get(PROP_TYPE(proptag))
    return u'%s:%s:%s' % (namespace, name, type_)
def bestbody(mapiobj):
    """Return the proptag of the original ('best') body of a message.

    Returns PR_BODY_W, PR_HTML, PR_RTF_COMPRESSED, or PR_NULL when the
    original body type cannot be determined.
    """
    # apparently standardized method for determining original message type!
    tag = PR_NULL
    props = mapiobj.GetProps(
        [PR_BODY_W, PR_HTML, PR_RTF_COMPRESSED, PR_RTF_IN_SYNC], 0)

    # without PR_RTF_IN_SYNC the heuristic below cannot be applied
    if (props[3].ulPropTag != PR_RTF_IN_SYNC): # TODO why..
        return tag

    # MAPI_E_NOT_ENOUGH_MEMORY indicates the property exists,
    # but has to be streamed

    # plain body present (possibly streamed), html and rtf both absent
    if((props[0].ulPropTag == PR_BODY_W or
        (PROP_TYPE(props[0].ulPropTag) == PT_ERROR and
         props[0].Value == MAPI_E_NOT_ENOUGH_MEMORY)) and
       (PROP_TYPE(props[1].ulPropTag) == PT_ERROR and
        props[1].Value == MAPI_E_NOT_FOUND) and
       (PROP_TYPE(props[2].ulPropTag) == PT_ERROR and
        props[2].Value == MAPI_E_NOT_FOUND)):
        tag = PR_BODY_W

    # TODO why not just check MAPI_E_NOT_FOUND..?
    # html present, body/rtf only as streamed leftovers, rtf not in sync
    elif((props[1].ulPropTag == PR_HTML or
          (PROP_TYPE(props[1].ulPropTag) == PT_ERROR and
           props[1].Value == MAPI_E_NOT_ENOUGH_MEMORY)) and
         (PROP_TYPE(props[0].ulPropTag) == PT_ERROR and
          props[0].Value == MAPI_E_NOT_ENOUGH_MEMORY) and
         (PROP_TYPE(props[2].ulPropTag) == PT_ERROR and
          props[2].Value == MAPI_E_NOT_ENOUGH_MEMORY) and
         not props[3].Value):
        tag = PR_HTML

    # rtf present, html absent, rtf in sync
    elif((props[2].ulPropTag == PR_RTF_COMPRESSED or
          (PROP_TYPE(props[2].ulPropTag) == PT_ERROR and
           props[2].Value == MAPI_E_NOT_ENOUGH_MEMORY)) and
         (PROP_TYPE(props[0].ulPropTag) == PT_ERROR and
          props[0].Value == MAPI_E_NOT_ENOUGH_MEMORY) and
         (PROP_TYPE(props[1].ulPropTag) == PT_ERROR and
          props[1].Value == MAPI_E_NOT_FOUND) and
         props[3].Value):
        tag = PR_RTF_COMPRESSED

    return tag
def bestbody( mapiobj ): # XXX we may want to use the swigged version in libcommon, once available
    """Return the proptag of the original ('best') body of a message.

    Returns PR_BODY_W, PR_HTML, PR_RTF_COMPRESSED, or PR_NULL when the
    original body type cannot be determined.
    """
    # apparently standardized method for determining original message type!
    tag = PR_NULL
    props = mapiobj.GetProps(
        [PR_BODY_W, PR_HTML, PR_RTF_COMPRESSED, PR_RTF_IN_SYNC], 0)

    # without PR_RTF_IN_SYNC the heuristic below cannot be applied
    if (props[3].ulPropTag != PR_RTF_IN_SYNC): # XXX why..
        return tag

    # MAPI_E_NOT_ENOUGH_MEMORY indicates the property exists, but has to be streamed

    # plain body present (possibly streamed), html and rtf both absent
    if ((props[0].ulPropTag == PR_BODY_W or
         (PROP_TYPE(props[0].ulPropTag) == PT_ERROR and
          props[0].Value == MAPI_E_NOT_ENOUGH_MEMORY)) and
        (PROP_TYPE(props[1].ulPropTag) == PT_ERROR and
         props[1].Value == MAPI_E_NOT_FOUND) and
        (PROP_TYPE(props[2].ulPropTag) == PT_ERROR and
         props[2].Value == MAPI_E_NOT_FOUND)):
        tag = PR_BODY_W

    # XXX why not just check MAPI_E_NOT_FOUND..?
    # html present, body/rtf only as streamed leftovers, rtf not in sync
    elif ((props[1].ulPropTag == PR_HTML or
           (PROP_TYPE(props[1].ulPropTag) == PT_ERROR and
            props[1].Value == MAPI_E_NOT_ENOUGH_MEMORY)) and
          (PROP_TYPE(props[0].ulPropTag) == PT_ERROR and
           props[0].Value == MAPI_E_NOT_ENOUGH_MEMORY) and
          (PROP_TYPE(props[2].ulPropTag) == PT_ERROR and
           props[2].Value == MAPI_E_NOT_ENOUGH_MEMORY) and
          not props[3].Value):
        tag = PR_HTML

    # rtf present, html absent, rtf in sync
    elif ((props[2].ulPropTag == PR_RTF_COMPRESSED or
           (PROP_TYPE(props[2].ulPropTag) == PT_ERROR and
            props[2].Value == MAPI_E_NOT_ENOUGH_MEMORY)) and
          (PROP_TYPE(props[0].ulPropTag) == PT_ERROR and
           props[0].Value == MAPI_E_NOT_ENOUGH_MEMORY) and
          (PROP_TYPE(props[1].ulPropTag) == PT_ERROR and
           props[1].Value == MAPI_E_NOT_FOUND) and
          props[3].Value):
        tag = PR_RTF_COMPRESSED

    return tag
def props(mapiobj, namespace=None):
    """Yield Property instances for all properties on *mapiobj*.

    Non-existing (PT_ERROR/MAPI_E_NOT_FOUND) entries are skipped, and the
    result is sorted so the order is identical across servers; with
    *namespace* set, only properties in that namespace are yielded.
    """
    proptags = mapiobj.GetPropList(MAPI_UNICODE)
    sprops = mapiobj.GetProps(proptags, MAPI_UNICODE)

    def _sort_key(prop):
        # named props sort on (guid, kind, name), others on ('', proptag),
        # so ordering does not depend on server-local proptag assignment
        if prop.named:
            return (prop.guid, prop.kind, prop.name)
        return ('', prop.proptag)

    properties = [
        Property(mapiobj, sprop) for sprop in sprops
        if not (PROP_TYPE(sprop.ulPropTag) == PT_ERROR and
                sprop.Value == MAPI_E_NOT_FOUND)
    ]
    for prop in sorted(properties, key=_sort_key):
        if not namespace or prop.namespace == namespace:
            yield prop
def create_prop(self, mapiobj, proptag, value=None, proptype=None):
    """Create and save a property on *mapiobj*.

    :param mapiobj: MAPI object to set the property on
    :param proptag: proptag (int), proptag name (str, e.g. 'PR_SUBJECT_W'),
        or named property spec containing ':'
    :param value: initial value; when None a type-appropriate 'zero' is used
    :param proptype: explicit property type (required when creating a new
        named property)
    :raises Error: when a named property is created without a type, or when
        type and value do not match
    :return: the created property (via :func:`prop`)
    """
    if isinstance(proptag, int) or \
            (isinstance(proptag, str) and ':' not in proptag):
        if isinstance(proptag, str):
            proptag2 = getattr(MAPI.Tags, proptag)
        else:
            proptag2 = proptag
        # derive the type from the *resolved* tag: proptag itself may still
        # be a string name here, which PROP_TYPE cannot handle
        proptype2 = proptype or PROP_TYPE(proptag2)
    else: # named property
        proptag2, proptype2, _, _ = \
            _name_to_proptag(proptag, mapiobj, proptype)
        if proptype2 is None:
            # TODO exception too general?
            raise Error('Missing type to create named property')

    if value is None:
        # default to a type-appropriate 'zero' value
        if proptype2 in (PT_STRING8, PT_UNICODE):
            value = ''
        elif proptype2 == PT_BINARY:
            value = b''
        elif proptype2 == PT_SYSTIME:
            value = unixtime(0)
        elif proptype2 & MV_FLAG:
            value = []
        else:
            value = 0
    else:
        if proptype2 == PT_SYSTIME:
            # normalize naive datetimes to UTC before conversion
            if value.tzinfo is None:
                value = _timezone._to_utc(value, _timezone.LOCAL)
            else:
                value = value.astimezone(_timezone.UTC)
            value = unixtime(calendar.timegm(value.utctimetuple()))

    # handle invalid type versus value.
    # For example proptype=PT_UNICODE and value=True
    try:
        mapiobj.SetProps([SPropValue(proptag2, value)])
        _utils._save(mapiobj)
    except TypeError:
        raise Error('Could not create property, type and value did not match')

    return prop(self, mapiobj, proptag, proptype=proptype2)
def create_prop(self, mapiobj, proptag, value=None, proptype=None): # XXX selfie
    """Create and save a property on *mapiobj*.

    *proptag* may be a proptag (int), a proptag name (str), or a named
    property spec containing ':'. When *value* is None a type-appropriate
    'zero' value is used. Raises Error when a named property lacks a type
    or when type and value do not match.
    """
    if _is_int(proptag) or \
            (_is_str(proptag) and ':' not in proptag):
        if _is_str(proptag):
            proptag2 = getattr(MAPI.Tags, proptag)
        else:
            proptag2 = proptag
        # NOTE(review): PROP_TYPE(proptag) looks wrong when proptag is a
        # str name — the resolved proptag2 seems intended; confirm upstream.
        proptype2 = proptype or PROP_TYPE(proptag)
    else: # named property
        proptag2, proptype2, _, _ = _name_to_proptag(proptag, mapiobj, proptype)
        if proptype2 is None:
            raise Error('Missing type to create named property' ) # XXX exception too general?

    if value is None:
        # default to a type-appropriate 'zero' value
        if proptype2 in (PT_STRING8, PT_UNICODE):
            value = u''
        elif proptype2 == PT_BINARY:
            value = b''
        elif proptype2 == PT_SYSTIME:
            value = unixtime(0)
        elif proptype2 & MV_FLAG:
            value = []
        else:
            value = 0
    else:
        if proptype2 == PT_SYSTIME:
            # NOTE(review): mktime interprets the datetime as *local* time —
            # the newer variant converts via UTC; verify intended semantics.
            value = unixtime(time.mktime(value.timetuple()))

    # handle invalid type versus value. For example proptype=PT_UNICODE and value=True
    try:
        mapiobj.SetProps([SPropValue(proptag2, value)])
        _utils._save(mapiobj)
    except TypeError:
        raise Error('Could not create property, type and value did not match')

    return prop(self, mapiobj, proptag, proptype=proptype2)
def stream(mapiobj, proptag):
    """Read the full value of a (large) property via its IStream interface.

    PR_RTF_COMPRESSED is transparently decompressed; PT_UNICODE data is
    decoded from utf-32le. Returns bytes, or str for PT_UNICODE.
    """
    istream = mapiobj.OpenProperty(proptag, IID_IStream, 0, 0)
    if proptag == PR_RTF_COMPRESSED:
        istream = WrapCompressedRTFStream(istream, 0)

    CHUNK_SIZE = 0x100000 # read in 1MB chunks
    chunks = []
    while True:
        chunk = istream.Read(CHUNK_SIZE)
        chunks.append(chunk)
        if len(chunk) < CHUNK_SIZE: # short read signals end-of-stream
            break
    data = b''.join(chunks)

    if PROP_TYPE(proptag) == PT_UNICODE:
        data = data.decode('utf-32le') # under windows them be utf-16le?
    return data
def restore_folder(self, folder, path, data_path, store, subtree, stats, user, server):
    """ restore (partial) folder

    Restores items from the backup databases under *data_path* into
    *folder*, honouring --sourcekeys, --differential, --overwrite, period
    and deletes options; updates *stats* counters.
    """
    folderprops = pickle_loads(open('%s/folder' % data_path, 'rb').read())

    if not self.options.sourcekeys:
        self.log.debug('restoring folder %s', path)

        # restore name, container class
        for proptag in (PR_DISPLAY_NAME_W, PR_CONTAINER_CLASS_W):
            value = folderprops.get(proptag)
            if value is not None:
                folder[proptag] = value

    # load existing sourcekeys in folder, to check for duplicates
    # maps (original backup) sourcekey -> current entryid
    existing = {}
    table = folder.mapiobj.GetContentsTable(0)
    table.SetColumns([PR_SOURCE_KEY, PR_EC_BACKUP_SOURCE_KEY, PR_ENTRYID], 0)
    for row in table.QueryRows(-1, 0):
        if PROP_TYPE(row[1].ulPropTag) != PT_ERROR:
            # item was restored before: use its stored backup sourcekey
            existing[_hex(row[1].Value)] = _hex(row[2].Value)
        else:
            existing[_hex(row[0].Value)] = _hex(row[2].Value)

    # now dive into 'items', and restore desired items
    with closing(dbopen(data_path+'/items')) as db_items:
        with closing(dbopen(data_path+'/index')) as db_index:
            index = dict((a, pickle_loads(b)) for (a,b) in db_index.iteritems())

            # determine sourcekey(s) to restore
            sourcekeys = db_index.keys()
            if self.options.sourcekeys:
                sourcekeys = [sk for sk in self.options.sourcekeys if sk.encode('ascii') in sourcekeys]
            else:
                sourcekeys = [sk.decode('ascii') for sk in sourcekeys]

            # restore/delete each item
            for sourcekey2 in sourcekeys:
                sourcekey2a = sourcekey2.encode('ascii')
                with log_exc(self.log, stats):
                    # differential delete
                    if index[sourcekey2a].get(b'backup_deleted') and self.options.differential:
                        if self.options.deletes == 'no':
                            self.log.warning('skipping deleted item with sourcekey %s', sourcekey2)
                            continue
                        if sourcekey2a not in existing:
                            self.log.warning('item with sourcekey %s already deleted', sourcekey2)
                            continue
                        # delete item
                        self.log.debug('deleting item with sourcekey %s', sourcekey2)
                        item = folder.item(entryid=existing[sourcekey2a])
                        folder.delete(item)
                        continue

                    # regular delete: skip items without data or marked deleted
                    if(sourcekey2a not in db_items or \
                       (index[sourcekey2a].get(b'backup_deleted') and
                        self.options.deletes in (None, 'no'))):
                        continue

                    # date range check
                    last_modified = index[sourcekey2a].get(b'last_modified')
                    if(last_modified and \
                       ((self.options.period_begin and last_modified < self.options.period_begin) or \
                        (self.options.period_end and last_modified >= self.options.period_end))):
                        continue

                    # handle existing item: replace or skip duplicate
                    entryid = existing.get(sourcekey2a) or existing.get(index[sourcekey2a][b'orig_sourcekey'])
                    if entryid is not None:
                        if self.options.differential or self.options.overwrite:
                            folder.delete(folder.item(entryid=entryid))
                        else:
                            self.log.warning('skipping duplicate item with sourcekey %s', sourcekey2)
                            continue

                    # restore item
                    self.log.debug('restoring item with sourcekey %s', sourcekey2)
                    data = zlib.decompress(db_items[sourcekey2a])
                    read = index[sourcekey2a].get(b'read')
                    item = folder.create_item(
                        loads=data,
                        attachments=not self.options.skip_attachments,
                        read=read,
                    )

                    # store original item sourcekey or it is lost
                    try:
                        item.prop(PR_EC_BACKUP_SOURCE_KEY)
                    except (MAPIErrorNotFound, kopano.NotFoundError):
                        item.mapiobj.SetProps([SPropValue(PR_EC_BACKUP_SOURCE_KEY, _unhex(sourcekey2))])
                        item.mapiobj.SaveChanges(0)

                    if self.options.sourcekeys:
                        self.restored_sourcekeys.add(sourcekey2)

                    stats['changes'] += 1

    # store original folder sourcekey or it is lost
    folder_sk = folderprops[PR_SOURCE_KEY]
    try:
        folder.prop(PR_EC_BACKUP_SOURCE_KEY)
    except (MAPIErrorNotFound, kopano.NotFoundError):
        folder.mapiobj.SetProps([SPropValue(PR_EC_BACKUP_SOURCE_KEY, folder_sk)])
        folder.mapiobj.SaveChanges(0)
def restore(self, data_path):
    """ restore data from backup

    Restores folders, items and (optionally) folder/store metadata from
    the backup directory *data_path* into the target store, honouring the
    --users/--stores/--folders/--sourcekeys/--differential options.
    """
    self.restored_sourcekeys = set()
    self.options.sourcekeys = [sk.upper() for sk in self.options.sourcekeys]

    # determine store to restore to
    self.log.info('starting restore of %s', data_path)
    username = os.path.split(data_path)[1]
    try:
        if self.options.users:
            store = self._store(self.options.users[0])
        elif self.options.stores:
            store = self.server.store(self.options.stores[0])
        else:
            store = self._store(username)
    except kopano.NotFoundError:
        store = None
    if not store:
        fatal('unable to open store (username: %s)' % username)
    user = store.user

    # determine stored and specified folders
    path_folder = folder_struct(data_path, self.options)
    paths = self.options.folders or sorted(path_folder.keys())
    if self.options.recursive:
        # include all stored sub-paths of the requested paths
        paths = [path2 for path2 in path_folder for path in paths if (path2+'//').startswith(path+'/')]
    for path in paths:
        if path not in path_folder:
            fatal('no such folder: %s' % path)

    # start restore
    self.log.info('restoring to store %s', store.entryid)
    t0 = time.time()
    stats = {'changes': 0, 'errors': 0}

    # determine restore root
    if self.options.restore_root:
        restore_root = store.folder(self.options.restore_root, create=True)
    else:
        restore_root = store.subtree

    # check existing folders, mapping original backup sourcekey -> folder
    sk_folder = {}
    for folder in restore_root.folders():
        orig_sk = folder.get(PR_EC_BACKUP_SOURCE_KEY)
        if orig_sk:
            sk_folder[orig_sk] = folder

    # restore specified (parts of) folders
    meta_folders = []
    sks = set()
    for path in paths:
        fpath = path_folder[path]
        folderprops = pickle_loads(open('%s/folder' % fpath, 'rb').read())
        folder_sk = folderprops[PR_SOURCE_KEY]

        # determine folder to restore
        if self.options.sourcekeys:
            # only visit folders actually containing a requested sourcekey
            with closing(dbopen(fpath+'/items')) as db:
                if not [sk for sk in self.options.sourcekeys if sk.encode('ascii') in db]:
                    continue
        else:
            if self.options.deletes in (None, 'no') and folder_deleted(fpath):
                continue
            sks.add(folder_sk)

        folder = restore_root.get_folder(path)
        if (folder and not store.public and \
            ((self.options.skip_junk and folder == store.junk) or \
             (self.options.skip_deleted and folder == store.wastebasket))):
            continue

        # restore folder
        if self.options.only_meta: # TODO create empty folders with acls/rules, or skip non-existing folders?
            folder = restore_root.get_folder(path)
        else:
            # differential folder move
            folder = sk_folder.get(folder_sk)
            if folder and self.options.differential:
                restore_path = self.options.restore_root+'/'+path if self.options.restore_root else path
                restore_parent_path = '/'.join(UNESCAPED_SLASH_RE.split(restore_path)[:-1])
                if folder.parent.path != restore_parent_path:
                    newparent = store.get_folder(restore_parent_path)
                    if newparent:
                        self.log.info('moving folder %s to %s', folder.path, restore_path)
                        folder.parent.move(folder, newparent)
            else:
                folder = restore_root.folder(path, create=True)
                if self.options.clean_folders:
                    self.log.info('emptying folder %s', folder.path)
                    folder.empty()
            self.restore_folder(folder, path, fpath, store, store.subtree, stats, user, self.server)

        if folder:
            meta_folders.append((folder, fpath))

    # differential folder deletes: existing folders absent from the backup
    if self.options.differential:
        for sk in set(sk_folder)-sks:
            path = sk_folder[sk].path
            parent = store.get_folder('/'.join(UNESCAPED_SLASH_RE.split(path)[:-1]))
            if parent:
                self.log.info('deleting folder %s', path)
                parent.delete(sk_folder[sk])

    # restore folder-level metadata
    if not (self.options.sourcekeys or self.options.skip_meta):
        self.log.info('restoring metadata')
        for (folder, fpath) in meta_folders:
            folder.permissions_loads(open(fpath+'/acl', 'rb').read(), stats=stats)
            folder.rules_loads(open(fpath+'/rules', 'rb').read(), stats=stats)

    # restore store-level metadata (webapp/mapi settings)
    if user and not (self.options.folders or self.options.restore_root or self.options.skip_meta or self.options.sourcekeys):
        if os.path.exists('%s/store' % data_path):
            storeprops = pickle_loads(open('%s/store' % data_path, 'rb').read())
            for proptag in SETTINGS_PROPTAGS:
                if PROP_TYPE(proptag) == PT_TSTRING:
                    proptag = CHANGE_PROP_TYPE(proptag, PT_UNICODE)
                value = storeprops.get(proptag)
                if not value:
                    continue
                store.mapiobj.SetProps([SPropValue(proptag, value)])
                store.mapiobj.SaveChanges(KEEP_OPEN_READWRITE)
        if os.path.exists('%s/delegates' % data_path):
            store.delegations_loads(open('%s/delegates' % data_path, 'rb').read(), stats=stats)
        if os.path.exists('%s/acl' % data_path):
            store.permissions_loads(open('%s/acl' % data_path, 'rb').read(), stats=stats)

    # report requested sourcekeys that were not found anywhere
    for sourcekey in self.options.sourcekeys:
        if sourcekey not in self.restored_sourcekeys:
            self.log.error('could not restore sourcekey: %s', sourcekey)

    self.log.info('restore completed in %.2f seconds (%d changes, ~%.2f/sec, %d errors)',
        time.time()-t0, stats['changes'], stats['changes']/(time.time()-t0), stats['errors'])
def restriction(self, type_, store):
    """Convert this search term into a MAPI restriction.

    :param type_: item kind used to look up field->proptag mappings
    :param store: store used to resolve named properties
    """
    if self.field:
        # determine proptag for term, eg 'subject'
        proptag = TYPE_KEYWORD_PROPMAP[type_][self.field]
        flag = None
        subobj = None
        recipient_type = None

        # property in sub-object (attachments/recipient): use sub-restriction
        if isinstance(proptag, tuple) and len(proptag) == 2:
            if(proptag[0]) == PR_MESSAGE_ATTACHMENTS:
                subobj, proptag = proptag
            elif(proptag[0]) == PR_MESSAGE_RECIPIENTS:
                subobj, recipient_type = proptag
                proptag = PR_DISPLAY_NAME_W # TODO email
            else:
                # (proptag, bitmask flag) pair
                proptag, flag = proptag
        # named property: resolve local proptag
        elif isinstance(proptag, tuple) and len(proptag) == 4:
            proptag = store._name_id(proptag[:3]) | proptag[3]

        # comparison operator
        if self.op in ('<', '>', '>=', '<=', '<>'):
            if PROP_TYPE(proptag) == PT_SYSTIME:
                d = dateutil.parser.parse(self.value, dayfirst=True)
                d = datetime_to_filetime(d)
                restr = SPropertyRestriction(
                    OP_RELOP[self.op],
                    proptag,
                    SPropValue(proptag, d)
                )
            else:
                # numeric comparison, with optional KB/MB/GB size suffix
                value = self.value
                unit = ''
                if [x for x in ('KB', 'MB', 'GB') if value.endswith(x)]:
                    value, unit = value[:-2], value[-2:]
                if PROP_TYPE(proptag) in (PT_FLOAT, PT_DOUBLE):
                    value = float(value)
                else:
                    value = int(value)
                if unit == 'KB':
                    value *= 1024
                elif unit == 'MB':
                    value *= 1024**2
                elif unit == 'GB':
                    value *= 1024**3
                restr = SPropertyRestriction(
                    OP_RELOP[self.op],
                    proptag,
                    SPropValue(proptag, value)
                )

        # contains/equals operator
        elif self.op in (':', '='):
            if PROP_TYPE(proptag) == PT_UNICODE:
                # case-insensitive substring match
                restr = SContentRestriction(
                    FL_SUBSTRING | FL_IGNORECASE,
                    proptag,
                    SPropValue(proptag, self.value)
                )
            elif flag or PROP_TYPE(proptag) == PT_BOOLEAN:
                if flag:
                    # boolean expressed as a bit in a flags property
                    restr = SBitMaskRestriction(
                        BMR_NEZ if self.value in ('yes', 'true') else BMR_EQZ,
                        proptag,
                        flag
                    )
                else:
                    restr = SPropertyRestriction(
                        RELOP_EQ,
                        proptag,
                        SPropValue(proptag, self.value in ('yes', 'true'))
                    )
            elif PROP_TYPE(proptag) == PT_MV_UNICODE:
                # multi-valued match: restriction tag stays MV,
                # value tag is single-valued
                proptag2 = (proptag ^ PT_MV_UNICODE) | PT_UNICODE # funky!
                restr = SContentRestriction(
                    FL_SUBSTRING | FL_IGNORECASE,
                    proptag,
                    SPropValue(proptag2, self.value)
                )
            elif PROP_TYPE(proptag) in (PT_SHORT, PT_LONG, PT_LONGLONG, PT_FLOAT, PT_DOUBLE):
                conv = float if PROP_TYPE(proptag) in (PT_FLOAT, PT_DOUBLE) else int
                if '..' in self.value:
                    # numeric range: lower bound inclusive, upper exclusive
                    val1, val2 = self.value.split('..')
                    restr = SAndRestriction([
                        SPropertyRestriction(
                            RELOP_GE,
                            proptag,
                            SPropValue(proptag, conv(val1))
                        ),
                        SPropertyRestriction(
                            RELOP_LT,
                            proptag,
                            SPropValue(proptag, conv(val2))
                        )
                    ])
                else:
                    restr = SPropertyRestriction(
                        RELOP_EQ,
                        proptag,
                        SPropValue(proptag, conv(self.value))
                    )
            elif PROP_TYPE(proptag) == PT_SYSTIME:
                # symbolic and explicit date (range) values
                if self.value == 'today':
                    d = datetime.datetime.now().date()
                    d2 = d + datetime.timedelta(days=1)
                    restr = _interval_restriction(proptag, d, d2)
                elif self.value == 'yesterday':
                    d2 = datetime.datetime.now().date()
                    d = d2 - datetime.timedelta(days=1)
                    restr = _interval_restriction(proptag, d, d2)
                elif self.value == 'this week':
                    d2 = datetime.datetime.now()
                    d = d2.date() - datetime.timedelta(days=d2.weekday())
                    restr = _interval_restriction(proptag, d, d2)
                elif self.value == 'this month':
                    d2 = datetime.datetime.now()
                    d = d2.date() - datetime.timedelta(days=d2.day-1)
                    restr = _interval_restriction(proptag, d, d2)
                elif self.value == 'last month':
                    now = datetime.datetime.now()
                    d2 = now.date() - datetime.timedelta(days=now.day-1)
                    d = (d2 - datetime.timedelta(days=1)).replace(day=1)
                    restr = _interval_restriction(proptag, d, d2)
                elif self.value == 'this year':
                    d2 = datetime.datetime.now()
                    d = datetime.datetime(d2.year, 1, 1)
                    restr = _interval_restriction(proptag, d, d2)
                elif self.value == 'last year':
                    now = datetime.datetime.now()
                    d2 = datetime.datetime(now.year, 1, 1)
                    d = datetime.datetime(d2.year-1, 1, 1)
                    restr = _interval_restriction(proptag, d, d2)
                elif '..' in self.value:
                    date1, date2 = self.value.split('..') # TODO hours etc
                    d = dateutil.parser.parse(date1, dayfirst=True)
                    d2 = dateutil.parser.parse(date2, dayfirst=True)
                    restr = _interval_restriction(proptag, d, d2)
                else:
                    d = dateutil.parser.parse(self.value, dayfirst=True) # TODO hours etc
                    d2 = d + datetime.timedelta(days=1)
                    restr = _interval_restriction(proptag, d, d2)

        # turn restriction into sub-restriction
        if subobj:
            if recipient_type is not None:
                # additionally match the recipient type (to/cc/bcc)
                restr = SAndRestriction([
                    restr,
                    SPropertyRestriction(
                        RELOP_EQ,
                        PR_RECIPIENT_TYPE,
                        SPropValue(PR_RECIPIENT_TYPE, recipient_type)
                    )
                ])
            restr = SSubRestriction(subobj, restr)

    else:
        # no field given: substring-match against the default properties
        defaults = [(store._name_id(proptag[:3]) | proptag[3]) if isinstance(proptag, tuple) else proptag for proptag in DEFAULT_PROPTAGS[type_]]
        restr = SOrRestriction([
            SContentRestriction(
                FL_SUBSTRING | FL_IGNORECASE,
                p,
                SPropValue(p, self.value)
            ) for p in defaults
        ])

    if self.sign == '-':
        restr = SNotRestriction(restr)

    return restr
def type_(self):
    """Proptag type part, for example 0x1f for PR_SUBJECT_W."""
    return PROP_TYPE(self.proptag)
def restore_folder(self, folder, path, data_path, store, subtree, stats, user, server):
    """ restore single folder (or item in folder) """
    # check --sourcekey option (only restore specified item if it exists)
    if self.options.sourcekeys:
        with closing(dbopen(data_path + '/items')) as db:
            if not [
                    sk for sk in self.options.sourcekeys
                    if sk.encode('ascii') in db
            ]:
                return
    else:
        self.log.debug('restoring folder %s', path)

        # restore container class
        folderprops = pickle_loads(
            open('%s/folder' % data_path, 'rb').read())
        container_class = folderprops.get(PR_CONTAINER_CLASS_W)
        if container_class:
            folder.container_class = container_class

    # load existing sourcekeys in folder, to check for duplicates
    existing = set()
    table = folder.mapiobj.GetContentsTable(0)
    table.SetColumns([PR_SOURCE_KEY, PR_EC_BACKUP_SOURCE_KEY], 0)
    for row in table.QueryRows(-1, 0):
        if PROP_TYPE(row[1].ulPropTag) != PT_ERROR:
            # item was restored before: use its stored backup sourcekey
            existing.add(_hex(row[1].Value))
        else:
            existing.add(_hex(row[0].Value))

    # load entry from 'index', so we don't have to unpickle everything
    with closing(dbopen(data_path + '/index')) as db:
        index = dict((a, pickle_loads(b)) for (a, b) in db.iteritems())

    # now dive into 'items', and restore desired items
    with closing(dbopen(data_path + '/items')) as db:
        # determine sourcekey(s) to restore
        sourcekeys = db.keys()
        if self.options.sourcekeys:
            sourcekeys = [
                sk for sk in self.options.sourcekeys
                if sk.encode('ascii') in sourcekeys
            ]
        elif sys.hexversion >= 0x03000000:
            # python3: database keys are bytes
            sourcekeys = [sk.decode('ascii') for sk in sourcekeys]

        for sourcekey2 in sourcekeys:
            sourcekey2a = sourcekey2.encode('ascii')
            with log_exc(self.log, stats):
                # date check against 'index'
                last_modified = index[sourcekey2a][b'last_modified']
                if ((self.options.period_begin and
                     last_modified < self.options.period_begin) or
                    (self.options.period_end and
                     last_modified >= self.options.period_end) or
                    (index[sourcekey2a].get(b'backup_deleted') and
                     self.options.deletes in (None, 'no'))):
                    continue

                # check for duplicates
                if sourcekey2a in existing or index[sourcekey2a][
                        b'orig_sourcekey'] in existing:
                    self.log.warning(
                        'skipping duplicate item with sourcekey %s',
                        sourcekey2)
                else:
                    # actually restore item
                    self.log.debug('restoring item with sourcekey %s',
                        sourcekey2)
                    item = folder.create_item(
                        loads=zlib.decompress(db[sourcekey2a]),
                        attachments=not self.options.skip_attachments)

                    # store original sourcekey or it is lost
                    try:
                        item.prop(PR_EC_BACKUP_SOURCE_KEY)
                    except (MAPIErrorNotFound, kopano.NotFoundError):
                        item.mapiobj.SetProps([
                            SPropValue(PR_EC_BACKUP_SOURCE_KEY,
                                       _unhex(sourcekey2))
                        ])
                        item.mapiobj.SaveChanges(0)

                    stats['changes'] += 1
def restore(self, data_path):
    """ restore data from backup

    Restores folders, items and (optionally) folder/store metadata from
    the backup directory *data_path* into the target store.
    """
    # determine store to restore to
    self.log.info('starting restore of %s', data_path)
    username = os.path.split(data_path)[1]
    try:
        if self.options.users:
            store = self._store(self.options.users[0])
        elif self.options.stores:
            store = self.server.store(self.options.stores[0])
        else:
            store = self._store(username)
    except kopano.NotFoundError:
        store = None
    if not store:
        fatal('unable to open store (username: %s)' % username)
    user = store.user

    # determine stored and specified folders
    path_folder = folder_struct(data_path, self.options)
    paths = self.options.folders or sorted(path_folder.keys())
    if self.options.recursive:
        # include all stored sub-paths of the requested paths
        paths = [path2 for path2 in path_folder for path in paths if (path2+'//').startswith(path+'/')]
    for path in paths:
        if path not in path_folder:
            fatal('no such folder: %s' % path)

    # start restore
    self.log.info('restoring to store %s', store.entryid)
    t0 = time.time()
    stats = {'changes': 0, 'errors': 0}

    # restore specified (parts of) folders
    restored = []
    for path in paths:
        fpath = path_folder[path]
        restore_path = _decode(self.options.restore_root)+'/'+path if self.options.restore_root else path

        if self.options.sourcekeys:
            # only visit folders actually containing a requested sourcekey
            with closing(dbopen(fpath+'/items')) as db:
                if not [sk for sk in self.options.sourcekeys if sk.encode('ascii') in db]:
                    continue
        else:
            if self.options.deletes in (None, 'no') and folder_deleted(fpath):
                continue

        folder = store.subtree.get_folder(restore_path)
        if (folder and not store.public and \
            ((self.options.skip_junk and folder == store.junk) or \
             (self.options.skip_deleted and folder == store.wastebasket))):
            continue

        if not self.options.only_meta:
            folder = store.subtree.folder(restore_path, create=True)
        self.restore_folder(folder, path, fpath, store, store.subtree, stats, user, self.server)
        restored.append((folder, fpath))

    # restore folder-level metadata
    if not (self.options.sourcekeys or self.options.skip_meta):
        self.log.info('restoring metadata')
        for (folder, fpath) in restored:
            load_acl(folder, user, self.server, open(fpath+'/acl', 'rb').read(), stats, self.log)
            load_rules(folder, user, self.server, open(fpath+'/rules', 'rb').read(), stats, self.log)

    # restore store-level metadata (webapp/mapi settings)
    if user and not self.options.folders and not self.options.restore_root and not self.options.skip_meta:
        if os.path.exists('%s/store' % data_path):
            storeprops = pickle_loads(open('%s/store' % data_path, 'rb').read())
            for proptag in WEBAPP_SETTINGS + (PR_EC_OUTOFOFFICE_SUBJECT, PR_EC_OUTOFOFFICE_MSG, PR_EC_OUTOFOFFICE, PR_EC_OUTOFOFFICE_FROM, PR_EC_OUTOFOFFICE_UNTIL):
                if PROP_TYPE(proptag) == PT_TSTRING:
                    proptag = CHANGE_PROP_TYPE(proptag, PT_UNICODE)
                value = storeprops.get(proptag)
                if not value:
                    continue
                store.mapiobj.SetProps([SPropValue(proptag, value)])
                store.mapiobj.SaveChanges(KEEP_OPEN_READWRITE)
        if os.path.exists('%s/delegates' % data_path):
            load_delegates(user, self.server, open('%s/delegates' % data_path, 'rb').read(), stats, self.log)
        if os.path.exists('%s/acl' % data_path):
            load_acl(store, user, self.server, open('%s/acl' % data_path, 'rb').read(), stats, self.log)

    self.log.info('restore completed in %.2f seconds (%d changes, ~%.2f/sec, %d errors)',
        time.time()-t0, stats['changes'], stats['changes']/(time.time()-t0), stats['errors'])
def type_(self):
    """Proptag type part, for example 0x1f for PR_SUBJECT_W."""
    return PROP_TYPE(self.proptag)
def prop_restriction(self, proptag, flag):
    """Build a MAPI restriction for this term against *proptag*.

    :param proptag: resolved property tag to match against
    :param flag: optional bitmask (boolean expressed as a bit in a flags
        property)
    """
    # comparison operator
    if self.op in ('<', '>', '>=', '<=', '<>'):
        if PROP_TYPE(proptag) == PT_SYSTIME:
            d = dateutil.parser.parse(self.value)
            d = datetime_to_filetime(d)
            restr = SPropertyRestriction(
                OP_RELOP[self.op],
                proptag,
                SPropValue(proptag, d)
            )
        else:
            # numeric comparison, with optional KB/MB/GB size suffix
            value = self.value
            unit = ''
            if [x for x in ('KB', 'MB', 'GB') if value.endswith(x)]:
                value, unit = value[:-2], value[-2:]
            value = int(value)
            if unit == 'KB':
                value *= 1024
            elif unit == 'MB':
                value *= 1024**2
            elif unit == 'GB':
                value *= 1024**3
            restr = SPropertyRestriction(
                OP_RELOP[self.op],
                proptag,
                SPropValue(proptag, value)
            )

    # contains/equals operator
    elif self.op in (':', '='):
        if PROP_TYPE(proptag) == PT_UNICODE:
            # case-insensitive substring match
            restr = SContentRestriction(
                FL_SUBSTRING | FL_IGNORECASE,
                proptag,
                SPropValue(proptag, self.value)
            )
        elif flag or PROP_TYPE(proptag) == PT_BOOLEAN:
            if flag:
                # boolean expressed as a bit in a flags property
                restr = SBitMaskRestriction(
                    BMR_NEZ if self.value in ('yes', 'true') else BMR_EQZ,
                    proptag,
                    flag
                )
            else:
                restr = SPropertyRestriction(
                    RELOP_EQ,
                    proptag,
                    SPropValue(proptag, self.value in ('yes', 'true'))
                )
        elif PROP_TYPE(proptag) == PT_MV_UNICODE:
            # multi-valued match: restriction tag stays MV,
            # value tag is single-valued
            proptag2 = (proptag ^ PT_MV_UNICODE) | PT_UNICODE # funky!
            restr = SContentRestriction(
                FL_SUBSTRING | FL_IGNORECASE,
                proptag,
                SPropValue(proptag2, self.value)
            )
        elif PROP_TYPE(proptag) in (PT_SHORT, PT_LONG, PT_LONGLONG, PT_FLOAT, PT_DOUBLE):
            conv = float if PROP_TYPE(proptag) in (PT_FLOAT, PT_DOUBLE) else int
            if '..' in self.value:
                # numeric range: lower bound inclusive, upper exclusive
                val1, val2 = self.value.split('..')
                restr = SAndRestriction([
                    SPropertyRestriction(
                        RELOP_GE,
                        proptag,
                        SPropValue(proptag, conv(val1))
                    ),
                    SPropertyRestriction(
                        RELOP_LT,
                        proptag,
                        SPropValue(proptag, conv(val2))
                    )
                ])
            else:
                restr = SPropertyRestriction(
                    RELOP_EQ,
                    proptag,
                    SPropValue(proptag, conv(self.value))
                )
        elif PROP_TYPE(proptag) == PT_SYSTIME:
            # symbolic and explicit date (range) values
            if self.value == 'today':
                d = datetime.datetime.now().date()
                d2 = d + datetime.timedelta(days=1)
                restr = _interval_restriction(proptag, d, d2)
            elif self.value == 'yesterday':
                d2 = datetime.datetime.now().date()
                d = d2 - datetime.timedelta(days=1)
                restr = _interval_restriction(proptag, d, d2)
            elif self.value == 'this week':
                d2 = datetime.datetime.now()
                d = d2.date() - datetime.timedelta(days=d2.weekday())
                restr = _interval_restriction(proptag, d, d2)
            elif self.value == 'this month':
                d2 = datetime.datetime.now()
                d = d2.date() - datetime.timedelta(days=d2.day-1)
                restr = _interval_restriction(proptag, d, d2)
            elif self.value == 'last month':
                now = datetime.datetime.now()
                d2 = now.date() - datetime.timedelta(days=now.day-1)
                d = (d2 - datetime.timedelta(days=1)).replace(day=1)
                restr = _interval_restriction(proptag, d, d2)
            elif self.value == 'this year':
                d2 = datetime.datetime.now()
                d = datetime.datetime(d2.year, 1, 1)
                restr = _interval_restriction(proptag, d, d2)
            elif self.value == 'last year':
                now = datetime.datetime.now()
                d2 = datetime.datetime(now.year, 1, 1)
                d = datetime.datetime(d2.year-1, 1, 1)
                restr = _interval_restriction(proptag, d, d2)
            elif '..' in self.value:
                date1, date2 = self.value.split('..') # TODO hours etc
                d = dateutil.parser.parse(date1)
                d2 = dateutil.parser.parse(date2)
                restr = _interval_restriction(proptag, d, d2)
            else:
                d = dateutil.parser.parse(self.value) # TODO hours etc
                d2 = d + datetime.timedelta(days=1)
                restr = _interval_restriction(proptag, d, d2)

    return restr