Example 1
0
def test_validator(mocker, mock_check_index, missing_files, invalid_checksums):
    """validator should pass check_index's results through unchanged."""
    mock_check_index.return_value = (missing_files, invalid_checksums)

    result_missing, result_invalid = utils.validator('foo', 'bar', True)

    assert result_missing == missing_files
    assert result_invalid == invalid_checksums
    mock_check_index.assert_called_once_with('foo', 'bar', True)
Example 2
0
def test_validator(mocker, mock_check_index, missing_files, invalid_checksums):
    """validator should return exactly what check_index reports."""
    mock_check_index.return_value = (missing_files, invalid_checksums)

    result = utils.validator("foo", "bar", False)

    assert result == (missing_files, invalid_checksums)
    mock_check_index.assert_called_once_with("foo", "bar", False)
Example 3
0
def test_validator_already_validated(mocker, mock_validated, mock_check_index,
                                     mock_create_invalid,
                                     mock_create_validated):
    """If the data is already marked validated, validator short-circuits.

    It must return empty dicts and skip both the index check and any
    (in)validation-file writes.
    """
    mock_validated.return_value = True

    result = utils.validator('foo', 'bar', 'baz', True)

    assert result == ({}, {})
    mock_validated.assert_called_once_with('baz')
    for untouched in (mock_check_index, mock_create_invalid,
                      mock_create_validated):
        untouched.assert_not_called()
Example 4
0
    def validate(self, verbose=True):
        """Check that this dataset's local copy matches its index.

        Args:
            verbose (bool): If False, suppress printed output

        Returns:
            missing_files (list): File paths listed in the dataset index
                but not found locally
            invalid_checksums (list): File paths that exist locally but whose
                checksum differs from the reference checksum in the index

        """
        # utils.validator already returns the (missing, invalid) pair.
        return utils.validator(self._index, self.data_home, verbose=verbose)
Example 5
0
def test_validator(mocker, mock_validated, mock_check_index,
                   mock_create_invalid, mock_create_validated, missing_files,
                   invalid_checksums):
    """Full validator path: not yet validated, so the index is checked.

    Depending on whether problems were found, either the invalid-files
    record or the validated marker is written — never both.
    """
    mock_validated.return_value = False
    mock_check_index.return_value = (missing_files, invalid_checksums)

    result = utils.validator('foo', 'bar', 'baz', True)

    assert result == (missing_files, invalid_checksums)
    mock_validated.assert_called_once_with('baz')
    mock_check_index.assert_called_once_with('foo', 'bar', True)

    found_problems = bool(missing_files) or bool(invalid_checksums)
    if found_problems:
        mock_create_invalid.assert_called_once_with('baz', missing_files,
                                                    invalid_checksums)
    else:
        mock_create_validated.assert_called_once_with('baz')
Example 6
0
def validate(dataset_path, data_home=None):
    """Check that the stored MedleyDB melody dataset matches its index.

    Args:
        dataset_path (str): MedleyDB melody dataset local path
        data_home (str): Local path where the dataset is stored.
            If `None`, looks for the data in the default directory,
            `~/mir_datasets`

    Returns:
        missing_files (list): File paths present in the dataset index
            but missing locally
        invalid_checksums (list): File paths that exist locally but whose
            checksum differs from the reference checksum in the index

    """
    # Delegate entirely to the shared validator against this dataset's index.
    return utils.validator(INDEX, data_home, dataset_path)
Example 7
0
def validate(data_home=None, silence=False):
    """Check that the stored dataset is a valid version.

    Args:
        data_home (str): Local path where the dataset is stored.
            If `None`, looks for the data in the default directory,
            `~/mir_datasets`
        silence (bool): If True, suppress validator output

    Returns:
        missing_files (list): File paths present in the dataset index
            but missing locally
        invalid_checksums (list): File paths that exist locally but whose
            checksum differs from the reference checksum in the index
    """
    # Fall back to the package's default storage location when none is given.
    if data_home is None:
        data_home = utils.get_default_dataset_path(DATASET_DIR)
    return utils.validator(INDEX, data_home, silence=silence)
Example 8
0
def validate(data_home=None, silence=False):
    """Check that a local copy of this dataset is consistent with its index.

    Args:
        data_home (str): Local path where the dataset is stored.
            If `None`, looks for the data in the default directory,
            `~/mir_datasets`
        silence (bool): If True, suppress validator output

    Returns:
        missing_files (list): File paths present in the dataset index
            but missing locally
        invalid_checksums (list): File paths where the expected file exists
            locally but has a different checksum than the reference
    """
    # Resolve the storage root, preferring the caller-supplied path.
    home = (data_home if data_home is not None
            else utils.get_default_dataset_path(DATASET_DIR))
    return utils.validator(DATA.index, home, silence=silence)
Example 9
0
def validate(dataset_path, data_home=None):
    """Check that a local copy of the Beatles dataset is consistent.

    Args:
        dataset_path (str): the Beatles dataset local path
        data_home (str): Local path where the dataset is stored.
            If `None`, looks for the data in the default directory,
            `~/mir_datasets`

    Returns:
        missing_files (list): File paths present in the dataset index
            but missing locally
        invalid_checksums (list): File paths where the expected file exists
            locally but has a different checksum than the reference

    """
    # The shared validator does the index walk and checksum comparison.
    return utils.validator(INDEX, data_home, dataset_path)