Example #1
    def merge_storage_to_sync_model(self, storage_model, storage_id):
        """
        Merges storage model into existing sync model with necessary attributes.
        """
        # create and add every node to sync model
        for storage_node in storage_model:
            sync_node = self.root_node.get_node_safe(
                normalize_path(storage_node.path))
            sync_node.props.setdefault(STORAGE, {})[storage_id] = {}

            if storage_node.parent is None:
                # nothing below is needed for the root node; the STORAGE entry
                # set above is enough to mark that the tree has been retrieved
                continue

            if DISPLAY_NAME not in storage_node.props:
                storage_node.props[DISPLAY_NAME] = storage_node.name

            update_storage_props(storage_id=storage_id,
                                 node=sync_node,
                                 props=storage_node.props)

            # normalize the storage model node names so that the set difference
            # below (used to detect deletions) matches nodes correctly
            storage_node.name = normalize_path_element(storage_node.name)

        # remove this storage from nodes that no longer exist in the storage model
        for node in set(self.root_node) - set(storage_model):
            storages = node.props.get(STORAGE, {})
            if storage_id in storages:
                del storages[storage_id]
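After the merge, every sync node carries a STORAGE mapping keyed by storage id, and the per-storage entry is where update_storage_props and the display name end up (compare Example #3 below). A rough sketch of the resulting props shape, assuming STORAGE and DISPLAY_NAME are plain string constants (their real values are not shown in these examples):

# Hypothetical constant values; the real module defines its own.
STORAGE = 'storage'
DISPLAY_NAME = 'display_name'

# Approximate shape of node.props after merge_storage_to_sync_model
# has run for a storage with id 'csp1':
node_props = {
    STORAGE: {
        'csp1': {
            DISPLAY_NAME: 'My Photos',  # unnormalized name, kept per storage
            # ... plus whatever update_storage_props copied over
        },
    },
}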
Example #2
def normalize_path(path):
    """
    Normalizes the given path with Unicode NFC and lower case
    :param path: the path
    :return: the normalized path
    """
    return [normalize_path_element(elem) for elem in path]
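normalize_path_element itself is not shown in these examples; judging from the docstring above (Unicode NFC plus lower case), a minimal sketch of it, and of how normalize_path is used on a path given as a list of elements, could look like this (the exact implementation is an assumption):

import unicodedata

def normalize_path_element(elem):
    # Assumed behaviour: NFC-normalize the element and lower-case it,
    # matching the normalize_path docstring above.
    return unicodedata.normalize('NFC', elem).lower()

# With normalize_path defined as in Example #2:
print(normalize_path(['Documents', 'Cafe\u0301', 'Test_A.TXT']))
# -> ['documents', 'café', 'test_a.txt']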
Example #3
    def storage_create(self, storage_id, path, event_props):
        """
        Handler for a Create Event on a storage
        :param event_props: dictionary for the properties of an event with the
         keys [modified_date, is_dir]
        :param storage_id: the storage
        :param path: the path
        :return
        """
        name = path[-1]
        normed_path = normalize_path(path)
        logger.debug("Got create message for %s(%s)", path, normed_path)

        # Ensure the full path structure has entries for the current storage:
        # tree nodes are keyed by the normalized path, while the display name
        # keeps the unnormalized element.
        curr_path = []
        for elem in path[:-1]:
            curr_path.append(normalize_path_element(elem))
            node = self.root_node.get_node_safe(curr_path)
            storage = node.props.setdefault(STORAGE,
                                            {}).setdefault(storage_id, {})
            storage[DISPLAY_NAME] = elem

        event_props[DISPLAY_NAME] = name

        fsm = self.get_default_fsm(path=normed_path)

        node = self.root_node.get_node_safe(normed_path)

        # update storage props and send blinker signal
        old_props = copy.deepcopy(node.props)
        update_storage_props(storage_id, node, event_props)
        if old_props != node.props:
            # props changed -> send signal
            self.on_node_props_change.send(self,
                                           storage_id=storage_id,
                                           old_props=old_props,
                                           node=node)
        if self.state == SyncEngineState.RUNNING:
            node.props[STORAGE][storage_id][EVENT_RECEIVED] = True
            fsm.e_created(csps=[self.storage_metrics],
                          node=node,
                          task_sink=self.issue_sync_task,
                          event_props=event_props,
                          storage_id=storage_id)
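get_node_safe is used throughout these examples to fetch a node by path while creating any missing intermediate nodes. A minimal sketch of a tree node with that behaviour (class and attribute names here are assumptions, not the project's actual implementation):

class Node:
    def __init__(self, name=None, parent=None):
        self.name = name
        self.parent = parent
        self.children = {}
        self.props = {}

    def get_node_safe(self, path):
        # Walk the (already normalized) path, creating missing children on the way.
        node = self
        for elem in path:
            if elem not in node.children:
                node.children[elem] = Node(name=elem, parent=node)
            node = node.children[elem]
        return node

root = Node()
leaf = root.get_node_safe(['a', 'b', 'c'])
assert leaf.name == 'c' and leaf.parent.name == 'b'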
Example #4
    def name(self):
        return normalize_path_element(self.display_name)
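The node keeps the original display_name and derives its lookup name from it by normalization. A short illustration of that relationship (the surrounding class is a placeholder, and whether name is a plain method or a property in the real class is not shown here):

class DisplayNode:
    def __init__(self, display_name):
        self.display_name = display_name

    @property
    def name(self):
        return normalize_path_element(self.display_name)

node = DisplayNode('My Photos')
assert node.display_name == 'My Photos'   # original spelling is preserved
assert node.name == 'my photos'           # normalized form used for lookups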
Example #5
def sync_engine_with_files(request, storage_metrics):
    """
    creates a situation where file A/test_a.txt and B are in local
    file storage and on csp 1. Every future task should be executed on
    csp 1.
    """

    csps = storage_metrics
    tasklist = []

    actor = SyncEngine.start(csps=csps, task_sink=tasklist.append)
    actor._actor.initialized = True
    sync_engine = actor.proxy()

    sync_engine.storage_create(
        normalize_path_element(CSP_1_DISPLAY_NAME), [CSP_1_DISPLAY_NAME],
        dict(is_dir=True,
             modified_date=dt.datetime.now(),
             size=0,
             version_id='is_dir')).get()

    tasklist.clear()

    test_files = deepcopy(request.param)

    expected_task_list = []
    future = None
    # create the folder structure and build the expected tasks
    for test_file in test_files:

        current_path = []
        for path_elm in test_file.path[0:-1]:

            # assemble path while iterating it
            current_path.append(path_elm)
            # Create parent directory Event
            if len(current_path) > 1:
                sync_engine.storage_create(
                    FILESYSTEM_ID, current_path.copy(),
                    dict(is_dir=True,
                         modified_date=dt.datetime.now(),
                         storage_id=FILESYSTEM_ID,
                         size=0,
                         version_id='is_dir')).get()
                # Add Expected SyncTask
                expected_task_list.append(
                    CreateDirSyncTask(path=normalize_path(current_path.copy()),
                                      target_storage_id=csps[0].storage_id,
                                      source_storage_id=FILESYSTEM_ID))
        # create file
        future = sync_engine.storage_create(
            FILESYSTEM_ID, normalize_path(test_file.path),
            dict(modified_date=dt.datetime.now(),
                 is_dir=test_file.is_dir,
                 storage_id=FILESYSTEM_ID,
                 size=MBYTE,
                 version_id=test_file.version_id))

        # sync task depends on type of item
        if test_file.is_dir:
            expected_task_list.append(
                CreateDirSyncTask(path=normalize_path(test_file.path),
                                  target_storage_id=csps[0].storage_id,
                                  source_storage_id=FILESYSTEM_ID))
        else:
            expected_task_list.append(
                UploadSyncTask(path=normalize_path(test_file.path),
                               source_version_id=test_file.version_id,
                               target_storage_id=csps[0].storage_id))

    future.get()
    # check state of engine for each sub path
    # TODO:  assert sp folder does not have fsm
    for test_file in test_files:
        # sp folder does not have an fsm
        sub_paths = get_sub_paths(test_file.path)
        for sub_path in sub_paths[1:]:
            props = sync_engine.query(sub_path).get()
            assert props[SE_FSM].current == S_UPLOADING
    # assert that all expected tasks are in the tasklist

    assert set(expected_task_list) == set(tasklist)

    ack_all_tasks(tasklist, sync_engine.ack_task)
    # XXX: does the same thing as above. refactor?
    # # check state of engine for path
    # for test_file in test_files:
    #     sub_paths = get_sub_paths(test_file.path)
    #     for sub_path in sub_paths:
    #         props = sync_engine.query(sub_path).get()
    #         assert props[SE_FSM].current == S_UPLOADING

    csp_id = csps[0].storage_id
    for test_file in test_files:
        current_path = []
        for pathelm in test_file.path[:-1]:
            # assemble path while iterating it
            current_path.append(pathelm)
            # Create Directory Event
            sync_engine.storage_create(storage_id=csp_id,
                                       path=current_path,
                                       event_props=dict(
                                           modified_date=dt.datetime.now(),
                                           is_dir=True,
                                           storage_id=csp_id,
                                           version_id='is_dir',
                                           size=0)).get()
        if test_file.version_id == 'is_dir':
            vid = 'is_dir'
        else:
            vid = test_file.version_id * 2
        sync_engine.storage_create(
            storage_id=csp_id,
            path=test_file.path,
            event_props=dict(
                modified_date=dt.datetime.now(),
                is_dir=test_file.is_dir,
                storage_id=csp_id,
                version_id=vid,  # very secure hashing
                size=MBYTE)).get()

        # add the csp to the csp list
        test_file.csps.append(csps[0].storage_id)

    # check state of engine for each path
    for test_file in test_files:
        sub_paths = get_sub_paths(test_file.path)
        # sub_paths.reverse()
        for sub_path in sub_paths[1:]:
            props = sync_engine.query(sub_path).get()
            assert props[SE_FSM].current == S_SYNCED, \
                'path ' + str(sub_path) + ' not in sync state'
            assert MODIFIED_DATE in props[STORAGE][FILESYSTEM_ID]
            assert MODIFIED_DATE in props[STORAGE][csp_id]

    assert len(tasklist) == 0

    return_type = namedtuple(
        'StorageEngineWithFiles',
        ['test_files', 'sync_engine', 'csps', 'task_list'])

    yield return_type(test_files=test_files,
                      csps=csps,
                      task_list=tasklist,
                      sync_engine=sync_engine)

    sync_engine.stop()
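Example #5 is a pytest fixture: it is parametrized indirectly via request.param, yields a namedtuple, and stops the engine afterwards. A test consuming it might look roughly like this (TEST_FILES and the assertions are only an illustrative assumption mirroring the checks the fixture itself performs):

import pytest

@pytest.mark.parametrize('sync_engine_with_files', [TEST_FILES], indirect=True)
def test_everything_synced(sync_engine_with_files):
    env = sync_engine_with_files
    # all tasks were acked inside the fixture
    assert env.task_list == []
    for test_file in env.test_files:
        for sub_path in get_sub_paths(test_file.path)[1:]:
            props = env.sync_engine.query(sub_path).get()
            assert props[SE_FSM].current == S_SYNCED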