def storage_model_with_files():
    """Build a small storage model tree used as a test fixture.

    Tree layout::

        child_1
            child_1_1
                child_1_1_1
            child_1_2
        child_2
            child_2_1

    ``child_1`` and ``child_2`` are flagged as directories; every other
    node is flagged as a 1-MB file.
    """
    def make_props(size, is_dir):
        # Directories use the IS_DIR sentinel as their version id,
        # files start at version 1.
        return {
            MODIFIED_DATE: dt.datetime.now(),
            SIZE: size,
            IS_DIR: is_dir,
            VERSION_ID: IS_DIR if is_dir else 1,
        }

    root = Node(name=None)
    dir_1 = root.add_child(name='child_1', props=make_props(0, True))
    dir_2 = root.add_child(name='child_2', props=make_props(0, True))
    node_1_1 = dir_1.add_child(name='child_1_1', props=make_props(MBYTE, False))
    dir_1.add_child(name='child_1_2', props=make_props(MBYTE, False))
    dir_2.add_child(name='child_2_1', props=make_props(MBYTE, False))
    node_1_1.add_child(name='child_1_1_1', props=make_props(MBYTE, False))
    return root
def fromfile(cls, location):
    """Load synchronization state from a pickle dump file.

    Falls back to a fresh empty model if the file is missing or cannot be
    unpickled.

    :param location: path to the dump file.
    :type location: str
    :return: the unpickled state tree, or an empty root ``Node``.
    """
    logger.info("Trying to load synchronization state from '%s'...", location)
    try:
        # SECURITY NOTE: pickle.load executes arbitrary code if the dump
        # file is attacker-controlled; only load trusted local state files.
        with open(location, 'rb') as state_file:
            state = pickle.load(state_file)
        logger.info("Unpickled model from '%s'!", location)
    except FileNotFoundError:
        logger.warning("Can't find file '%s'! Using empty model instead!",
                       location, exc_info=True)
        state = Node(name=None)
    except Exception:
        # Was `except BaseException`, which also swallowed SystemExit and
        # KeyboardInterrupt; narrowed to Exception so those propagate.
        logger.warning(
            "Can't load model from '%s'! Using empty model instead!",
            location, exc_info=True)
        state = Node(name=None)
    version = state.props.get('model_version', 0)
    logger.debug('Loaded model with version %d.', version)
    return state
def test_state_empty_storage_provider_empty_local(sync_engine_tester):
    """Scenario: empty storage provider AND empty local file system.

    That should result in no sync action at all.

    Also tests the internal state flags of the syncengine
    (remote_tree_fetched, local_tree_fetched).
    """
    sync_engine_tester.sync_engine.init()
    # first, only the remote tree gets fetched, otherwise the
    # encryptionwrapper doesn't know share ids
    sync_engine_tester.assert_expected_tasks(
        [FetchFileTreeTask(CSP_1.storage_id)])
    sync_engine_tester.ack_fetch_tree_task(CSP_1.storage_id, Node(name=None))
    assert sync_engine_tester.sync_engine.remote_tree_fetched
    assert not sync_engine_tester.sync_engine.local_tree_fetched
    sync_engine_tester.assert_expected_tasks(
        [FetchFileTreeTask(FILESYSTEM_ID)])
    sync_engine_tester.ack_fetch_tree_task(FILESYSTEM_ID, Node(name=None))
    # NOTE(review): both flags appear to be reset once both trees were
    # acked and the state sync ran — confirm against the engine impl.
    assert not sync_engine_tester.sync_engine.local_tree_fetched
    assert not sync_engine_tester.sync_engine.remote_tree_fetched
    # no tasks expected and the engine transitions to RUNNING
    sync_engine_tester.assert_expected_tasks([])
    assert sync_engine_tester.sync_engine.state == SyncEngineState.RUNNING
def test_parent_rename_node():
    """Rename a node and change its parent to exercise path computation."""
    node_a = Node(name="aaaa", props={"a": "b", "c": "d"})
    node_b = Node(name="bbbb", props={"a": "b", "c": "d"})
    node_c = Node(name="cccc", props={"a": "b", "c": "d"})

    # each node starts out as its own root
    assert node_a.path == ['aaaa']
    assert node_b.path == ['bbbb']
    assert node_c.path == ['cccc']

    # chain them: a <- b <- c
    node_b.parent = node_a
    node_c.parent = node_b
    assert node_b.path == ['aaaa', 'bbbb']
    assert node_c.path == ['aaaa', 'bbbb', 'cccc']

    # renaming b must be reflected in both b's and c's paths
    node_b.name = 'dddd'
    assert node_b.path == ['aaaa', 'dddd']
    assert node_c.path == ['aaaa', 'dddd', 'cccc']
    assert node_a.get_node(['dddd']) == node_b
def test_delete_delete():
    """A second delete() on an already deleted child must not raise."""
    root = Node(name=None)
    node = root.add_child('test')
    # deleting twice in a row has to be tolerated silently
    node.delete()
    node.delete()
def config_to_tree(filter_config):
    """Build a helper tree used for filtering.

    :param filter_config: iterable of dicts carrying 'path' and
        'children' entries.
    :return: the root node of the assembled filter tree.
    """
    root = Node(name=None)
    for entry in filter_config:
        node = root.get_node_safe(entry['path'])
        node.props.update({'children': entry['children']})
    return root
def test_iter_up():
    """Walk iter_up from a node back to the root."""
    root = Node(name=None)
    # a lone root iterates over itself only
    assert list(root.iter_up) == [root]

    level1 = root.add_child('test')
    assert list(level1.iter_up) == [level1, root]

    level2 = level1.add_child('test')
    # the walk goes bottom-up, node first, root last
    assert list(level2.iter_up) == [level2, level1, root]
def test_node_props_cls():
    """Node must instantiate and keep a custom props type passed via props_cls."""
    class FancyProps:
        # minimal props implementation: only the update signal is required
        def __init__(self, *args, **kwargs):
            self.on_update = blinker.Signal()

    node = Node(None, props_cls=FancyProps)
    assert type(node.props) == FancyProps
    # assigning None is replaced with a fresh props_cls instance
    node.props = None
    assert type(node.props) == FancyProps
def test_node_equals_simple():
    """
    Checks the node equals method.

    :return:
    """
    node1 = Node(name="aaaa", props={"a": "b", "c": "d"})
    node2 = Node(name="aaaa", props={"a": "b", "c": "d"})
    node3 = Node(name="bbbb", props={"a": "b", "c": "d"})
    node4 = Node(name="aaaa", props={"a": "b", "c": "e"})
    assert node1 == node2
    assert node1 != node3
    # node2 and node4 differ only in their props, yet compare equal:
    # equality apparently considers only the node name/path, not the props
    # (consistent with the hash and set-operation tests in this file).
    assert node2 == node4
def test_tree_performance(flat, test_size): """ Tests the trees add performance. :param test_size How many node to create :param flat is set to False it is limited to the maximum recursion depth of python. """ # create a deep storage model storage_model = Node(name=None) parent = storage_model start = time.time() for cnt in range(test_size): name = 'child_{}'.format(cnt) node = parent.add_child(name=name) # node.props['version_id'] = 0 # node.props['is_dir'] = False if not flat: parent = node dur = time.time() - start size = asizeof(storage_model, limit=test_size + 1) print('creating {} nodes took {}, thats {}/per 100 nodes'.format( test_size, dur, dur / (test_size / 100))) print('creating {} nodes took {} Mbytes, thats {} bytes/per node'.format( test_size, size / 1024 / 1024, size / test_size)) start = time.time() set(storage_model) print(time.time() - start)
def test_state_empty_local(sync_engine_tester, storage_model_with_files):
    """Scenario: storage provider with files, empty local file system.

    That should result in download tasks for files and CreateDir tasks
    for directories.
    """
    sync_engine_tester.sync_engine.init()
    # first, only the remote tree gets fetched, otherwise the
    # encryptionwrapper doesn't know share ids
    sync_engine_tester.assert_expected_tasks(
        [FetchFileTreeTask(CSP_1.storage_id)])
    sync_engine_tester.ack_fetch_tree_task(CSP_1.storage_id,
                                           storage_model_with_files)
    sync_engine_tester.assert_expected_tasks(
        [FetchFileTreeTask(FILESYSTEM_ID)])
    sync_engine_tester.ack_fetch_tree_task(FILESYSTEM_ID, Node(name=None))
    # every non-root remote node must be mirrored locally: files via a
    # download, directories via a CreateDir task
    expected_tasks = []
    for node in storage_model_with_files:
        if node.parent is not None:
            if not node.props[IS_DIR]:
                sync_task = DownloadSyncTask(
                    path=node.path,
                    source_storage_id=CSP_1.storage_id,
                    source_version_id=1)
            else:
                sync_task = CreateDirSyncTask(
                    path=node.path,
                    target_storage_id=FILESYSTEM_ID,
                    source_storage_id=CSP_1.storage_id)
            expected_tasks.append(sync_task)
    assert sync_engine_tester.sync_engine.state == SyncEngineState.RUNNING
    sync_engine_tester.assert_expected_tasks(expected_tasks)
def root(request):
    """Create a default root node, parametrized by node flavour.

    :return: a plain Node for 'standard', otherwise an IndexingNode
        carrying an '_id' index.
    """
    wants_standard = request.param == 'standard'
    if wants_standard:
        return Node(name=None)
    return IndexingNode(name=None, indexes=['_id'])
def test_state_sync_deletes(sync_engine_tester, csp_dir_model, empty_storage_id, not_empty_storage_id): """State Sync after files have been removed from local or remote -> trigger delete """ # we put two times the same model into the syncengine sync_engine_tester.sync_engine.merge_storage_to_sync_model( storage_model=csp_dir_model, storage_id=FILESYSTEM_ID) sync_engine_tester.sync_engine.merge_storage_to_sync_model( storage_model=csp_dir_model, storage_id=CSP_1.storage_id) # we simulate that everything was in sync before for node in sync_engine_tester.sync_engine.root_node: if node.parent is not None: node.props['equivalents'] = \ {'new': {FILESYSTEM_ID: node.props[STORAGE][FILESYSTEM_ID][VERSION_ID], CSP_1.storage_id: node.props[STORAGE][CSP_1.storage_id][VERSION_ID]}} sync_engine_tester.sync_engine.init() # now we initalize the sync engine and ack either one with an empty tree # the local one need to be acked last if empty_storage_id == FILESYSTEM_ID: sync_engine_tester.ack_fetch_tree_task(not_empty_storage_id, csp_dir_model) sync_engine_tester.ack_fetch_tree_task(empty_storage_id, Node(name=None)) else: sync_engine_tester.ack_fetch_tree_task(empty_storage_id, Node(name=None)) sync_engine_tester.ack_fetch_tree_task(not_empty_storage_id, csp_dir_model) expected_tasks = [] for node in csp_dir_model: if node.parent is None: continue sync_task = DeleteSyncTask(node.path, not_empty_storage_id, node.props[VERSION_ID]) expected_tasks.append(sync_task) sync_engine_tester.assert_expected_tasks(expected_tasks)
def test_get_tree_one_node():
    """get_tree must build a tree containing a single child node."""
    storage = mock.Mock()
    # the storage reports exactly one file child at the root
    storage.get_tree_children = mock.Mock(
        return_value=[('hello', {'is_dir': False})])
    storage.filter_tree = Node(name=None)

    result = BasicStorage.get_tree(storage)
    assert result.get_node(['hello']).props == {'is_dir': False}
def __init__(self, storage_metrics, task_sink, model=None):
    """Initialize the sync engine.

    :param storage_metrics: an iterable of StorageMetrics.
    :param task_sink: sink which receives the generated sync tasks.
    :param model: optional pre-loaded root node; a fresh empty Node is
        created when omitted.
    """
    # NOTE(review): passing self to super().__init__ looks unusual —
    # confirm the base class expects an argument here.
    super().__init__(self)
    # use the passed-in model if available, otherwise start with an
    # empty tree
    if model is not None:
        self.root_node = model
    else:
        self.root_node = Node(name=None)
    self.state = SyncEngineState.STOPPED
    self.task_sink = task_sink
    #: a iterable of StorageMetrics
    self.storage_metrics = storage_metrics
    # this is used for the local state transition between STATE_SYNC->RUNNING
    self.local_tree_fetched = False
    self.remote_tree_fetched = False
    #: :class:`blinker.Signal` is called if the props of the node change
    #: IMPORTANT: the signal handlers run in the same context as the
    #: sync_engine. Be aware of blocking calls etc.
    self.on_node_props_change = Signal()
def test_get_shared_folders():
    """Test the return of the correct lists based on different internal
    model structures."""
    gdrive = MagicMock()
    gdrive.model = Node(name=None)
    # No shared folder
    folder_a = gdrive.model.add_child('folder_a', {
        'shared': False,
        '_id': 'id_of_folder_a',
        'other': 'stuff'
    })
    assert GoogleDrive.get_shared_folders(gdrive) == []
    # one shared folder
    folder_b = folder_a.add_child(
        'folder_b', {
            'shared': True,
            '_id': 'id_of_folder_b',
            'other': 'stuff of b',
            '_shared_with': {'*****@*****.**', '*****@*****.**'}
        })
    shared_folder_b = SharedFolder(
        path=['folder_a', 'folder_b'],
        share_id='id_of_folder_b',
        sp_user_ids={'*****@*****.**', '*****@*****.**'})
    assert GoogleDrive.get_shared_folders(gdrive) == [shared_folder_b]
    # two shared folders but one is the child of the other -> the nested
    # share is not reported separately
    folder_b.add_child(
        'folder_c', {
            'shared': True,
            '_id': 'id_of_folder_c',
            'other': 'stuff of c',
            '_shared_with': {'*****@*****.**', '*****@*****.**'}
        })
    assert GoogleDrive.get_shared_folders(gdrive) == [shared_folder_b]
    # added a third shared folder at the top level -> both independent
    # shares are reported
    folder_a.add_child(
        'folder_d', {
            'shared': True,
            '_id': 'id_of_folder_d',
            'other': 'stuff of d',
            '_shared_with': {'*****@*****.**', '*****@*****.**'}
        })
    shared_folder_d = SharedFolder(
        path=['folder_a', 'folder_d'],
        share_id='id_of_folder_d',
        sp_user_ids={'*****@*****.**', '*****@*****.**'})
    assert sorted(GoogleDrive.get_shared_folders(gdrive)) == sorted(
        [shared_folder_d, shared_folder_b])
def test_get_storage_path():
    """Build a two-level tree and verify per-storage display-name paths.

    get_storage_path must resolve each path element via the display name
    recorded for the respective storage.
    """
    root = Node(name=None)

    child1 = root.add_child('child1')
    child1.props.setdefault(STORAGE, {}).setdefault(
        FILESYSTEM_ID, {})[DISPLAY_NAME] = "CHILD1"
    child1.props.setdefault(STORAGE, {}).setdefault(
        CSP_1.storage_id, {})[DISPLAY_NAME] = "Child1"

    child2 = child1.add_child('child2')
    child2.props.setdefault(STORAGE, {}).setdefault(
        CSP_1.storage_id, {})[DISPLAY_NAME] = "Child2"

    # the first element comes from the filesystem storage, the second
    # from the cloud storage provider
    path = get_storage_path(child2, FILESYSTEM_ID, CSP_1.storage_id)
    assert path == ["CHILD1", "Child2"]
def test_has_child():
    """has_child must flip from False to True once the child is added."""
    root = Node(name=None)
    # no children yet
    assert not root.has_child('test')
    root.add_child('test')
    # now the lookup by name succeeds
    assert root.has_child('test')
def init_with_files(self, files, equivalent=True, file_size=MBYTE):
    """Init sync engine with a set of files.

    :param files: a list of paths (lists) to populate it with.
    :param equivalent: if True, mark every node as already in sync on
        both storages.
    :param file_size: SIZE prop assigned to every file node.
    :return: the populated tree.
    """
    tree = Node(name=None)
    for file in files:
        tree.get_node_safe(file).props.update({
            VERSION_ID: 1,
            SIZE: file_size
        })
    for node in tree:
        if node.parent is not None and VERSION_ID not in node.props:
            # all nodes without a version id are auto generated by get_node_safe and are dirs
            # (implicitly created directories)
            node.props[IS_DIR] = True
            node.props[VERSION_ID] = IS_DIR
        else:
            # NOTE(review): the root node (parent is None) also lands
            # here and gets IS_DIR = False — confirm this is intended.
            node.props[IS_DIR] = False
    # mirror the same tree into both storages
    self.sync_engine.merge_storage_to_sync_model(tree, FILESYSTEM_ID)
    self.sync_engine.merge_storage_to_sync_model(tree, CSP_1.storage_id)
    if equivalent:
        for node in tree:
            if node.parent is None:
                continue
            # record both storage versions as equivalent (= in sync)
            self.sync_engine.root_node.get_node(node.path).props['equivalents'] = \
                {'new': {FILESYSTEM_ID: node.props[VERSION_ID],
                         CSP_1.storage_id: node.props[VERSION_ID]}}
    self.sync_engine._sync_state()
    self.sync_engine.state = SyncEngineState.RUNNING
    return tree
def test_get_tree_two_children():
    """Build a binary tree of depth 4 through the recursive get_tree call."""
    storage = mock.Mock(spec=BasicStorage)
    # bind the real implementation so the recursion runs against the mock
    storage.get_tree = partial(BasicStorage.get_tree, storage)
    storage.filter_tree = Node(name=None)
    storage.get_tree_children = mock.Mock(side_effect=BinaryTreeGetter(2))

    tree = BasicStorage.get_tree(storage)

    # inner levels are directories, the deepest level holds files
    assert tree.get_node(['a']).props == {'is_dir': True}
    assert tree.get_node(['b']).props == {'is_dir': True}
    assert tree.get_node(['a', 'a']).props == {'is_dir': True}
    assert tree.get_node(['a', 'a', 'a']).props == {'is_dir': False}
def test_set_operations():
    """Set operations on nodes must depend on the path only."""
    tree_a = Node(name=None)
    tree_b = Node(name=None)
    tree_a.add_child("that child")
    shared_child = tree_b.add_child("that child")
    extra_child = shared_child.add_child("Hulla")

    # only the grandchild exists exclusively in tree_b
    assert set(tree_b) - set(tree_a) == {extra_child}
    # changing the props must not affect set membership
    shared_child.props = {'hola': 'drio'}
    assert set(tree_b) - set(tree_a) == {extra_child}
def test_big_balanced_tree(default_root):
    """Create a huge tree to check performance and test a small intersection."""
    create_equal_distributed_tree(default_root, 50)
    small_root = Node(None)
    create_equal_distributed_tree(small_root, 4)

    big_nodes = set(default_root)
    small_nodes = set(small_root)
    # the root plus 4 items overlap between the two trees
    assert len(big_nodes & small_nodes) == 5
    print(len(big_nodes))
def test_external_props():
    """An externally created props object is used even if it is falsy."""
    class CustomProps:  # pylint: disable=missing-docstring
        def __init__(self, node, *args, **kwargs):
            self.node = node

        def update(self, *args, **kwargs):
            pass

    external_props = CustomProps(node=None)
    node = Node(None, props=external_props, props_cls=CustomProps)
    # the node adopts the external props object and wires it back to itself
    assert node.props.node is node
    assert isinstance(node.props, CustomProps)
def test_iter_up_existing():
    """Tests the iter_up_existing generator."""
    root = Node(name=None)
    # non-existing path components are skipped; the walk starts at the
    # deepest existing node
    assert list(root.iter_up_existing(['node'])) == [root]
    assert list(root.iter_up_existing(['node', 'bla', 'foo'])) == [root]

    child = root.add_child('test')
    assert list(root.iter_up_existing(['test', 'node'])) == [child, root]
    assert list(child.iter_up_existing(['node'])) == [child]

    grandchild = child.add_child('test')
    assert list(root.iter_up_existing(['test', 'test', 'node'])) == \
        [grandchild, child, root]
    assert list(child.iter_up_existing(['test', 'node'])) == \
        [grandchild, child]
    assert list(grandchild.iter_up_existing(['node'])) == [grandchild]
def test_update_storage_props_delete():
    """Check that the special DELETE constant works to delete properties."""
    props = {
        'version_id': 17,
        'modified_date': datetime.datetime.now(),
        'size': 23,
        'display_name': 'TestNode',
        'is_dir': False,
        'shared': True,
        'share_id': 12345,
        'public_share': True
    }
    node = Node(name="testnode", parent=None,
                props={syncfsm.STORAGE: {CSP_1.storage_id: props}})
    assert node.props[STORAGE][CSP_1.storage_id] == props
    event_props = {
        'version_id': 17,
        'modified_date': datetime.datetime.now(),
        'size': 23,
        'display_name': 'TestNode',
        'is_dir': False,
        'shared': False,
        'share_id': DELETE,
        'public_share': DELETE
    }
    update_storage_props(CSP_1.storage_id, node, event_props)
    # the DELETE-marked keys must have been removed from the node props
    del event_props['share_id']
    del event_props['public_share']
    assert node.props[STORAGE][CSP_1.storage_id] == event_props
    # if a mandatory value is set to DELETE the props do not change
    with contextlib.suppress(KeyError):
        update_storage_props(CSP_1.storage_id, node, {'version_id': DELETE})
    assert node.props[STORAGE][CSP_1.storage_id] == event_props
def __init__(self, event_sink, storage_id, storage_cache_dir=None, storage_cache_version=None, default_model=None): """ :param event_sink: In productive use this is the SyncEngine. Might be any other class implementing the event handling methods. :param storage_id: The storage_id of this storage. :param storage_cache_dir the directory to store model caches in :param storage_cache_version the version of the storage cache to load (older versions will be ignored if present = new model with new version will be generated :raise bushn.AuthenticationError in case the authentication data is wrong :raise """ # settig storage id and events sink self.storage_id = storage_id self._event_sink = event_sink # The display name for the user, if available a username should be displayed, otherwise # default to email address. self._storage_user_name = None # setting cache dir (will be used to store model caches) self.cache_dir = storage_cache_dir # None permutable default model if default_model is None: default_model = Node(name=None) # read model cache if present and set it as internal model -> if no model is # present: generate new nodel using default node passed (e.g. Node, # or IndexedNode) dependent on what the specific service needs self.tree = load_model(cache_dir=self.cache_dir, default=default_model, version=storage_cache_version) # private member for offline handling # DO NOT ACCESS DIRECTLY - use self.offline instead self._offline = False # directories to sync self.filter_tree = bushn.Node(name=None)
def test_hash_function():
    """The node hash must depend on the path only.

    :return: None
    """
    left_root = Node(name=None)
    right_root = Node(name=None)
    # two empty roots hash identically
    assert hash(left_root) == hash(right_root)

    left_child = left_root.add_child("that child")
    assert hash(left_child) != hash(left_root)

    right_child = right_root.add_child("that child")
    # same path in different trees -> same hash
    assert hash(left_child) == hash(right_child)

    grandchild = right_child.add_child("Hulla")
    assert hash(grandchild) != hash(right_child)
def test_get_tree_filter():
    """Tests get_tree with a filter set on the selected sync dirs."""
    # spec + partial enable the recursive get_tree call on the mock
    storage = mock.Mock(spec=BasicStorage)
    storage.get_tree = partial(BasicStorage.get_tree, storage)
    storage.get_tree_children = mock.Mock(side_effect=BinaryTreeGetter(2))
    storage.filter_tree = Node(name=None)
    # only the path a/a/a is fully observed
    storage.filter_tree.add_child('a').add_child('a').add_child('a')

    tree = BasicStorage.get_tree(storage)

    # b should be filtered since it is a directory outside the filter
    assert not tree.has_child('b')
    assert not tree.get_node(['a']).has_child('b')
    # c should not be filtered since it is a file in a flat-observed
    # directory
    assert tree.has_child('c')
    assert tree.get_node(['a', 'c'])
    # everything below the fully observed path 'a/a' is kept
    assert tree.get_node(['a', 'a']).has_child('a')
    assert tree.get_node(['a', 'a']).has_child('b')
    assert tree.get_node(['a', 'a']).has_child('c')
def test_get_node_safe():
    """get_node_safe must create missing intermediate nodes on demand."""
    root = Node(None)

    # deep paths are materialized in one call
    for wanted in (['a', 'b', 'c', 'd'], ['a', 'b', 'f']):
        created = root.get_node_safe(wanted)
        assert created.path == wanted

    node = root.get_node_safe(['bb'])
    # the created node is reachable via a plain get_node lookup afterwards
    assert root.get_node(['bb']) is node
    assert node.path == ['bb']
def test_node_equals_recursive():
    """Compare two nodes including their children via subtree_equals."""
    left = Node(name="aaaa", props={"a": "b", "c": "d"})
    right = Node(name="aaaa", props={"a": "b", "c": "d"})
    left.add_child("bbbb", props={"a": "b", "c": "d"})
    right.add_child("aaaa", props={"a": "b", "c": "e"})
    # differing children -> subtrees are not equal
    assert left.subtree_equals(right) is False

    third = Node(name="aaaa", props={"a": "b", "c": "d"})
    fourth = Node(name="aaaa", props={"a": "b", "c": "d"})
    third.add_child("bbbb", props={"a": "b", "c": "d"})
    fourth_child = fourth.add_child("bbbb", props={"a": "b", "c": "d"})
    # identical name, props and children -> subtrees are equal
    assert third.subtree_equals(fourth)
    # a root never subtree-equals one of the children
    assert not third.subtree_equals(fourth_child)