Example #1
0
def test_path_normalize():
    """Verify normalize_path lowercases every path element."""
    already_lower = ['a', 'b', 'c.txt']
    assert normalize_path(already_lower) == already_lower

    mixed_case = ['A', 'b', 'C.txt']
    assert normalize_path(mixed_case) == ['a', 'b', 'c.txt']
Example #2
0
    def get_context_menu(self, selected_paths):
        """Build the context menu entries for the selected shell path.

        Two groups of entries are produced:
            - Sharing
            - Displaying data (e.g. in browser)

        :param selected_paths: the paths selected in the shell
        """
        logger.debug('getting context menu for: %s', selected_paths)

        # each dict in this list describes one menu item
        menu_items = []

        # a context menu is offered only for a single selected item
        if len(selected_paths) > 1:
            return menu_items

        # translate the fs path into the normalized internal (cc) form
        cc_path = normalize_path(
            fs_to_cc_path(selected_paths[0], self.config.sync_root))

        # the storage folders themselves (path shorter than 2) get no menu
        if len(cc_path) < 2:
            return menu_items

        # the leading path element identifies the storage (=storage folder)
        storage_root = cc_path[:1]

        link = self.sync_graph.get_synclink_by_displayname(
            display_name=storage_root[0])
        if not link:
            return menu_items

        # fetch all cc-path properties for the storage root
        props = link.sync_engine.query(storage_root).get(PYKKA_CALL_TIMEOUT)

        # drop the local file system id; nothing remote -> no context menu
        storage_ids = props.get(STORAGE, {}).keys() - {FILESYSTEM_ID}
        if not storage_ids:
            return menu_items

        # exactly one remote storage id is expected at this point
        storage_id, = storage_ids
        storage = link.remote

        # sharing entries first, then the data-display entries
        menu_items.extend(
            create_sharing_menu_items(storage_id=storage_id, storage=storage))
        menu_items.extend(
            create_displaying_menu_items(storage_id=storage_id,
                                         storage=storage))

        return menu_items
Example #3
0
def test_storage_create(sync_engine_tester):
    """Test if a create event writes display names to the node and its parents.

    :param sync_engine_tester: tester fixture wrapping the sync engine under
        test (provides ``init_with_files`` and ``sync_engine``)
    :return: None
    """
    sync_engine_tester.init_with_files([])

    # deliberately mixed-case path: storage lookup is case-normalized, but
    # the stored display path must keep the original casing
    test_path = ['AaA', 'BbB', 'CCc.tXt']
    sync_engine_tester.sync_engine.storage_create(CSP_1.storage_id, test_path,
                                                  {
                                                      'is_dir': False,
                                                      'size': MBYTE,
                                                      'version_id': 66
                                                  })

    # the node is addressed by its normalized path ...
    node = sync_engine_tester.sync_engine.root_node.get_node(
        normalize_path(test_path))
    # ... yet the storage path read back must match the original casing
    result_path = syncfsm.get_storage_path(node, CSP_1.storage_id)
    assert test_path == result_path
Example #4
0
    def get_path_status(self, fs_path):
        """Return whether a path is syncing or not.

        :param fs_path: absolute path in the local file system
        :return: one of ITEM_STATUS_SYNCING, ITEM_STATUS_SYNCED or
            ITEM_STATUS_IGNORE
        """
        # convert string to internal path
        logger.debug('Trying to get status for: %s', fs_path)
        cc_path = fs_to_cc_path(fs_path, self.config.sync_root)
        # reuse cc_path instead of converting the fs path a second time
        cc_path_normalized = normalize_path(cc_path)
        display_name = cc_path[0]

        from cc.configuration.helpers import get_storage_by_displayname
        storage_config = get_storage_by_displayname(self.config, display_name)
        if not storage_config:
            logger.info("Encountered path with no link.")
            return ITEM_STATUS_IGNORE
        link_id = 'local::{}'.format(storage_config.get('id', 'unknown'))
        logger.debug("Searching for link named '%s'", link_id)

        link = self.sync_graph.links.get(link_id, None)
        # if link is None path doesn't belong to a synclink -> doesn't need syncing -> ignore
        if link is None:
            # NOTE(review): message says "sync error" but the status returned
            # is IGNORE — kept as-is, but the wording looks misleading
            logger.debug('Item %s sync error.', fs_path)
            return ITEM_STATUS_IGNORE

        logger.debug('Link found.')
        # checking if directory
        is_dir = os.path.isdir(fs_path)
        # hash the path without its leading storage-folder element
        hashed_path = path_hash(link_id=link.link_id,
                                path=cc_path_normalized[1:])
        # returning status based on current sync status: pending tasks for
        # the hashed path mean the item is still syncing
        tasks = self.sync_graph.bademeister.queue.path_has_tasks(
            hashed_path, is_dir)
        if tasks:
            logger.debug('Item %s syncing.', fs_path)
            return ITEM_STATUS_SYNCING
        logger.debug('Item %s synced.', fs_path)
        return ITEM_STATUS_SYNCED
def sync_engine_with_files(request, storage_metrics):
    """
    creates a situation where file A/test_a.txt and B are in local
    file storage and on csp 1. Every future task should be executed on
    csp 1.

    :param request: pytest request object; ``request.param`` supplies the
        list of test files to create (deep-copied before mutation)
    :param storage_metrics: list of csp metrics; the first entry is the
        storage every task is expected to target
    :yield: ``StorageEngineWithFiles`` namedtuple with the fields
        ``test_files``, ``sync_engine``, ``csps`` and ``task_list``
    """

    csps = storage_metrics
    tasklist = []  # collects every task the engine emits via task_sink

    actor = SyncEngine.start(csps=csps, task_sink=tasklist.append)
    # NOTE(review): flips the initialized flag on the raw actor directly,
    # presumably to skip the regular startup sequence — confirm
    actor._actor.initialized = True
    sync_engine = actor.proxy()

    # create the storage folder node for csp 1; .get() blocks until done
    sync_engine.storage_create(
        normalize_path_element(CSP_1_DISPLAY_NAME), [CSP_1_DISPLAY_NAME],
        dict(is_dir=True,
             modified_date=dt.datetime.now(),
             size=0,
             version_id='is_dir')).get()

    # discard whatever the storage-folder creation above queued
    tasklist.clear()

    test_files = deepcopy(request.param)

    expected_task_list = []
    future = None
    # creates the folder structure and the expected tasks
    for test_file in test_files:

        current_path = []
        for path_elm in test_file.path[0:-1]:

            # assemble path while iterating it
            current_path.append(path_elm)
            # Create parent directory Event (skip the storage-folder root)
            if len(current_path) > 1:
                sync_engine.storage_create(
                    FILESYSTEM_ID, current_path.copy(),
                    dict(is_dir=True,
                         modified_date=dt.datetime.now(),
                         storage_id=FILESYSTEM_ID,
                         size=0,
                         version_id='is_dir')).get()
                # Add Expected SyncTask
                expected_task_list.append(
                    CreateDirSyncTask(path=normalize_path(current_path.copy()),
                                      target_storage_id=csps[0].storage_id,
                                      source_storage_id=FILESYSTEM_ID))
        # create file
        future = sync_engine.storage_create(
            FILESYSTEM_ID, normalize_path(test_file.path),
            dict(modified_date=dt.datetime.now(),
                 is_dir=test_file.is_dir,
                 storage_id=FILESYSTEM_ID,
                 size=MBYTE,
                 version_id=test_file.version_id))

        # sync task depends on type of item
        if test_file.is_dir:
            expected_task_list.append(
                CreateDirSyncTask(path=normalize_path(test_file.path),
                                  target_storage_id=csps[0].storage_id,
                                  source_storage_id=FILESYSTEM_ID))
        else:
            expected_task_list.append(
                UploadSyncTask(path=normalize_path(test_file.path),
                               source_version_id=test_file.version_id,
                               target_storage_id=csps[0].storage_id))

    # wait for the last create event to be processed
    future.get()
    # check state of engine for each sub path
    # TODO:  assert sp folder does not have fsm
    for test_file in test_files:
        # sp folder does not have an fsm, hence sub_paths[1:]
        sub_paths = get_sub_paths(test_file.path)
        for sub_path in sub_paths[1:]:
            props = sync_engine.query(sub_path).get()
            assert props[SE_FSM].current == S_UPLOADING
    # assert if all expected tasks are in the tasklist

    assert set(expected_task_list) == set(tasklist)

    ack_all_tasks(tasklist, sync_engine.ack_task)
    # XXX: does the same thing as above. refactor?
    # # check state of engine for path
    # for test_file in test_files:
    #     sub_paths = get_sub_paths(test_file.path)
    #     for sub_path in sub_paths:
    #         props = sync_engine.query(sub_path).get()
    #         assert props[SE_FSM].current == S_UPLOADING

    # replay the same structure as remote (csp) create events
    csp_id = csps[0].storage_id
    for test_file in test_files:
        current_path = []
        for pathelm in test_file.path[:-1]:
            # assemble path while iterating it
            current_path.append(pathelm)
            # Create Directory Event
            sync_engine.storage_create(storage_id=csp_id,
                                       path=current_path,
                                       event_props=dict(
                                           modified_date=dt.datetime.now(),
                                           is_dir=True,
                                           storage_id=csp_id,
                                           version_id='is_dir',
                                           size=0)).get()
        if test_file.version_id == 'is_dir':
            vid = 'is_dir'
        else:
            vid = test_file.version_id * 2
        sync_engine.storage_create(
            storage_id=csp_id,
            path=test_file.path,
            event_props=dict(
                modified_date=dt.datetime.now(),
                is_dir=test_file.is_dir,
                storage_id=csp_id,
                version_id=vid,  # very secure hashing
                size=MBYTE)).get()

        # add the csp to the csp list
        test_file.csps.append(csps[0].storage_id)

    # check state of engine for each path
    for test_file in test_files:
        sub_paths = get_sub_paths(test_file.path)
        # sub_paths.reverse()
        for sub_path in sub_paths[1:]:
            props = sync_engine.query(sub_path).get()
            assert props[SE_FSM].current == S_SYNCED  # \
            # , \
            # 'path ' + str(sub_path) + ' not in sync state'
            assert MODIFIED_DATE in props[STORAGE][FILESYSTEM_ID]
            assert MODIFIED_DATE in props[STORAGE][csp_id]

    # everything acknowledged above; no new tasks may have been queued
    assert len(tasklist) == 0

    return_type = namedtuple(
        'StorageEngineWithFiles',
        ['test_files', 'sync_engine', 'csps', 'task_list'])

    yield return_type(test_files=test_files,
                      csps=csps,
                      task_list=tasklist,
                      sync_engine=sync_engine)

    # teardown: stop the sync engine actor once the test is finished
    sync_engine.stop()