Example #1
    def test_line_search_wolfe2_bounds(self):
        # See gh-7475

        # For this f and p, starting at a point on axis 0, the strong Wolfe
        # condition 2 is met if and only if the step length s satisfies
        # |x + s| <= c2 * |x|
        f = lambda x: np.dot(x, x)
        fp = lambda x: 2 * x
        p = np.array([1, 0])

        # Smallest s satisfying strong Wolfe conditions for these arguments is 30
        x = -60 * p
        c2 = 0.5

        s, _, _, _, _, _ = ls.line_search_wolfe2(f, fp, x, p, amax=30, c2=c2)
        assert_line_wolfe(x, p, s, f, fp)

        s, _, _, _, _, _ = assert_warns(LineSearchWarning,
                                        ls.line_search_wolfe2, f, fp, x, p,
                                        amax=29, c2=c2)
        assert_(s is None)

        # s=30 will only be tried on the 6th iteration, so this won't converge
        assert_warns(LineSearchWarning, ls.line_search_wolfe2, f, fp, x, p,
                     c2=c2, maxiter=5)
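
A note on the API exercised throughout these examples: numpy.testing.assert_warns accepts either a callable plus its arguments, in which case it returns whatever the callable returns, or (on NumPy 1.11+) no callable at all, in which case it acts as a context manager. A minimal self-contained sketch, with a throwaway function f defined here purely for illustration:

import warnings
from numpy.testing import assert_warns

def f():
    warnings.warn("about to change", FutureWarning)
    return 42

# callable form: the wrapped function's return value is passed through
assert assert_warns(FutureWarning, f) == 42

# context-manager form (NumPy >= 1.11)
with assert_warns(FutureWarning):
    f()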
Example #2
    def test_build_mapping_file(self):
        analysis = self._create_analyses_with_samples()
        samples = {4: ['1.SKB8.640193', '1.SKD8.640184', '1.SKB7.640196']}

        npt.assert_warns(qdb.exceptions.QiitaDBWarning,
                         analysis._build_mapping_file, samples)
        obs = qdb.util.get_filepath_information(
            analysis.mapping_file)['fullpath']

        exp = self.get_fp("%s_analysis_mapping.txt" % analysis.id)
        self.assertEqual(obs, exp)

        obs = qdb.metadata_template.util.load_template_to_dataframe(
            obs, index='#SampleID')
        exp = qdb.metadata_template.util.load_template_to_dataframe(
            self.map_exp_fp, index='#SampleID')

        # assert_frame_equal assumes same order on the rows, thus sorting
        # frames by index
        obs.sort_index(inplace=True)
        exp.sort_index(inplace=True)
        # then sorting columns
        obs = obs.reindex(sorted(obs.columns), axis=1)
        exp = exp.reindex(sorted(exp.columns), axis=1)

        assert_frame_equal(obs, exp, check_like=True)
Example #3
 def test_deprecated_io(self):
     fh = StringIO()
     npt.assert_warns(DeprecationWarning, self.ordination_results.to_file, fh)
     fh.seek(0)
     deserialized = npt.assert_warns(DeprecationWarning, OrdinationResults.from_file, fh)
     assert_ordination_results_equal(deserialized, self.ordination_results)
     self.assertTrue(type(deserialized) == OrdinationResults)
Example #4
def test_views_non_contiguous():
    A = np.arange(16).reshape((4, 4))
    A = A[::2, :]

    with all_warnings():
        assert_warns(RuntimeWarning, view_as_blocks, A, (2, 2))
        assert_warns(RuntimeWarning, view_as_windows, A, (2, 2))
Example #5
def test_cwt_parameters_in_names():

    for func in [pywt.ContinuousWavelet, pywt.DiscreteContinuousWavelet]:
        for name in ['fbsp', 'cmor', 'shan']:
            # additional parameters should be specified within the name
            assert_warns(FutureWarning, func, name)

        for name in ['cmor', 'shan']:
            # valid names
            func(name + '1.5-1.0')
            func(name + '1-4')

            # invalid names
            assert_raises(ValueError, func, name + '1.0')
            assert_raises(ValueError, func, name + 'B-C')
            assert_raises(ValueError, func, name + '1.0-1.0-1.0')

        # valid names
        func('fbsp1-1.5-1.0')
        func('fbsp1.0-1.5-1')
        func('fbsp2-5-1')

        # invalid name (non-integer order)
        assert_raises(ValueError, func, 'fbsp1.5-1-1')
        assert_raises(ValueError, func, 'fbspM-B-C')

        # invalid name (too few or too many params)
        assert_raises(ValueError, func, 'fbsp1.0')
        assert_raises(ValueError, func, 'fbsp1.0-0.4')
        assert_raises(ValueError, func, 'fbsp1-1-1-1')
Example #6
def test_fswavedecnresult():
    data = np.ones((32, 32))
    levels = (1, 2)
    result = pywt.fswavedecn(data, 'sym2', levels=levels)

    # can access the lowpass band via .approx or via __getitem__
    approx_key = (0, ) * data.ndim
    assert_array_equal(result[approx_key], result.approx)

    dkeys = result.detail_keys()
    # the approximation key shouldn't be present in the detail_keys
    assert_(approx_key not in dkeys)

    # can access all detail coefficients and they have matching ndim
    for k in dkeys:
        d = result[k]
        assert_equal(d.ndim, data.ndim)

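    # (note: `k` and `d` below reuse their final values from the loop above)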
    # can assign modified coefficients
    result[k] = np.zeros_like(d)

    # assigning a differently sized array raises a ValueError
    assert_raises(ValueError, result.__setitem__,
                  k, np.zeros(tuple([s + 1 for s in d.shape])))

    # warns on assigning with a non-matching dtype
    assert_warns(UserWarning, result.__setitem__,
                 k, np.zeros_like(d).astype(np.float32))

    # all coefficients are stacked into result.coeffs (same ndim)
    assert_equal(result.coeffs.ndim, data.ndim)
Example #7
    def test_artifact_post_req(self):
        # Create new prep template to attach artifact to
        pt = npt.assert_warns(
            QiitaDBWarning, PrepTemplate.create,
            pd.DataFrame({'new_col': {'1.SKD6.640190': 1}}), Study(1), '16S')
        self._files_to_remove.extend([fp for _, fp in pt.get_filepaths()])

        filepaths = {'raw_forward_seqs': 'uploaded_file.txt',
                     'raw_barcodes': 'update.txt'}
        obs = artifact_post_req(
            '*****@*****.**', filepaths, 'FASTQ', 'New Test Artifact', pt.id)
        exp = {'status': 'success',
               'message': ''}
        self.assertEqual(obs, exp)
        wait_for_prep_information_job(pt.id)

        # Test importing an artifact
        # Create new prep template to attach artifact to
        pt = npt.assert_warns(
            QiitaDBWarning, PrepTemplate.create,
            pd.DataFrame({'new_col': {'1.SKD6.640190': 1}}), Study(1), '16S')
        self._files_to_remove.extend([fp for _, fp in pt.get_filepaths()])

        obs = artifact_post_req(
            '*****@*****.**', {}, 'Demultiplexed', 'New Test Artifact 2',
            pt.id, 3)
        exp = {'status': 'success',
               'message': ''}
        self.assertEqual(obs, exp)

        wait_for_prep_information_job(pt.id)
        # Instantiate the artifact to make sure it was made and
        # to clean the environment
        a = Artifact(pt.artifact.id)
        self._files_to_remove.extend([fp for _, fp, _ in a.filepaths])
Example #8
    def test_categorical_warning(self):

        g = ag.FacetGrid(self.df, col="b")
        with warnings.catch_warnings():
            warnings.resetwarnings()
            warnings.simplefilter("always")
            npt.assert_warns(UserWarning, g.map, pointplot, "b", "x")
Example #9
def test_check_symmetric():
    arr_sym = np.array([[0, 1], [1, 2]])
    arr_bad = np.ones(2)
    arr_asym = np.array([[0, 2], [0, 2]])

    test_arrays = {
        "dense": arr_asym,
        "dok": sp.dok_matrix(arr_asym),
        "csr": sp.csr_matrix(arr_asym),
        "csc": sp.csc_matrix(arr_asym),
        "coo": sp.coo_matrix(arr_asym),
        "lil": sp.lil_matrix(arr_asym),
        "bsr": sp.bsr_matrix(arr_asym),
    }

    # check error for bad inputs
    assert_raises(ValueError, check_symmetric, arr_bad)

    # check that asymmetric arrays are properly symmetrized
    for arr_format, arr in test_arrays.items():
        # Check for warnings and errors
        assert_warns(UserWarning, check_symmetric, arr)
        assert_raises(ValueError, check_symmetric, arr, raise_exception=True)

        output = check_symmetric(arr, raise_warning=False)
        if sp.issparse(output):
            assert_equal(output.format, arr_format)
            assert_array_equal(output.toarray(), arr_sym)
        else:
            assert_array_equal(output, arr_sym)
Example #10
def test_power_solver_warn():
    # messing up the solver to trigger warning

    pow_ = 0.69219411243824214 # from previous function
    nip = smp.NormalIndPower()
    # using nobs, has one backup (fsolve)
    nip.start_bqexp['nobs1'] = {'upp': 50, 'low': -20}
    val = nip.solve_power(0.1, nobs1=None, alpha=0.01, power=pow_, ratio=1,
                          alternative='larger')
    assert_almost_equal(val, 1600, decimal=4)
    assert_equal(nip.cache_fit_res[0], 1)
    assert_equal(len(nip.cache_fit_res), 3)

    # case that has convergence failure, and should warn
    nip.start_ttp['nobs1'] = np.nan

    from statsmodels.tools.sm_exceptions import ConvergenceWarning
    assert_warns(ConvergenceWarning, nip.solve_power, 0.1, nobs1=None,
                  alpha=0.01, power=pow_, ratio=1, alternative='larger')

    import warnings
    with warnings.catch_warnings():  # python >= 2.6
        warnings.simplefilter("ignore")
        val = nip.solve_power(0.1, nobs1=None, alpha=0.01, power=pow_, ratio=1,
                              alternative='larger')
        assert_equal(nip.cache_fit_res[0], 0)
        assert_equal(len(nip.cache_fit_res), 3)
Example #11
 def test_generator(self):
     with assert_warns(FutureWarning):
         hstack((np.arange(3) for _ in range(2)))
     if sys.version_info.major > 2:
         # map returns a list on Python 2
         with assert_warns(FutureWarning):
             hstack(map(lambda x: x, np.ones((3, 2))))
Example #12
    def test_invalid_raise_with_usecols(self):
        "Test invalid_raise with usecols"
        data = ["1, 1, 1, 1, 1"] * 50
        for i in range(5):
            data[10 * i] = "2, 2, 2, 2 2"
        data.insert(0, "a, b, c, d, e")
        mdata = StringIO("\n".join(data))
        kwargs = dict(delimiter=",", dtype=None, names=True, invalid_raise=False)
        # XXX: is there a better way to get the return value of the callable in
        # assert_warns ?
        ret = {}

        def f(_ret={}):
            _ret["mtest"] = np.ndfromtxt(mdata, usecols=(0, 4), **kwargs)

        assert_warns(ConversionWarning, f, _ret=ret)
        mtest = ret["mtest"]
        assert_equal(len(mtest), 45)
        assert_equal(mtest, np.ones(45, dtype=[(_, int) for _ in "ae"]))
        #
        mdata.seek(0)
        mtest = np.ndfromtxt(mdata, usecols=(0, 1), **kwargs)
        assert_equal(len(mtest), 50)
        control = np.ones(50, dtype=[(_, int) for _ in "ab"])
        control[[10 * _ for _ in range(5)]] = (2, 2)
        assert_equal(mtest, control)
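
On the XXX comment above: current numpy.testing.assert_warns returns the wrapped callable's return value directly, so the _ret workaround should be unnecessary there; a hedged one-liner reusing mdata and kwargs from the test:

mtest = assert_warns(ConversionWarning, np.ndfromtxt, mdata,
                     usecols=(0, 4), **kwargs)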
Example #13
    def test_seq_repeat(self):
        # Test that basic sequences get repeated when multiplied with numpy
        # integers, and that errors are raised when multiplied with others.
        # Some of this behaviour may be controversial and could be open for
        # change.
        accepted_types = set(np.typecodes["AllInteger"])
        deprecated_types = set('?')
        forbidden_types = (
            set(np.typecodes["All"]) - accepted_types - deprecated_types)
        forbidden_types -= set('V')  # can't default-construct void scalars

        for seq_type in (list, tuple):
            seq = seq_type([1, 2, 3])
            for numpy_type in accepted_types:
                i = np.dtype(numpy_type).type(2)
                assert_equal(seq * i, seq * int(i))
                assert_equal(i * seq, int(i) * seq)

            for numpy_type in deprecated_types:
                i = np.dtype(numpy_type).type()
                assert_equal(
                    assert_warns(DeprecationWarning, operator.mul, seq, i),
                    seq * int(i))
                assert_equal(
                    assert_warns(DeprecationWarning, operator.mul, i, seq),
                    int(i) * seq)

            for numpy_type in forbidden_types:
                i = np.dtype(numpy_type).type()
                assert_raises(TypeError, operator.mul, seq, i)
                assert_raises(TypeError, operator.mul, i, seq)
Example #14
    def test_build_mapping_file(self):
        new_id = qdb.util.get_count('qiita.filepath') + 1
        samples = {4: ['1.SKB8.640193', '1.SKD8.640184', '1.SKB7.640196']}

        npt.assert_warns(qdb.exceptions.QiitaDBWarning,
                         self.analysis._build_mapping_file, samples)
        obs = self.analysis.mapping_file
        self.assertEqual(obs, self.map_fp)

        obs = qdb.metadata_template.util.load_template_to_dataframe(
            obs, index='#SampleID')
        exp = npt.assert_warns(
            qdb.exceptions.QiitaDBWarning,
            qdb.metadata_template.util.load_template_to_dataframe,
            self.map_exp_fp, index='#SampleID')
        assert_frame_equal(obs, exp)

        sql = """SELECT * FROM qiita.filepath
                 WHERE filepath=%s ORDER BY filepath_id"""
        obs = self.conn_handler.execute_fetchall(
            sql, ("%d_analysis_mapping.txt" % self.analysis.id,))

        exp = [[16, '1_analysis_mapping.txt', 9, '852952723', 1, 1],
               [new_id, '1_analysis_mapping.txt', 9, '1542374513', 1, 1]]
        self.assertItemsEqual(obs, exp)

        sql = """SELECT * FROM qiita.analysis_filepath
                 WHERE analysis_id=%s ORDER BY filepath_id"""
        obs = self.conn_handler.execute_fetchall(sql, (self.analysis.id,))
        exp = [[1L, 14L, 2L], [1L, 15L, None], [1L, new_id, None]]
        self.assertItemsEqual(obs, exp)
Example #15
    def test_0d_arrays(self):
        unicode = type(u'')
        assert_equal(unicode(np.array(u'café', np.unicode_)), u'café')

        if sys.version_info[0] >= 3:
            assert_equal(repr(np.array('café', np.unicode_)),
                         "array('café', dtype='<U4')")
        else:
            assert_equal(repr(np.array(u'café', np.unicode_)),
                         "array(u'caf\\xe9', dtype='<U4')")
        assert_equal(str(np.array('test', np.str_)), 'test')

        a = np.zeros(1, dtype=[('a', '<i4', (3,))])
        assert_equal(str(a[0]), '([0, 0, 0],)')

        assert_equal(repr(np.datetime64('2005-02-25')[...]),
                     "array('2005-02-25', dtype='datetime64[D]')")

        assert_equal(repr(np.timedelta64('10', 'Y')[...]),
                     "array(10, dtype='timedelta64[Y]')")

        # repr of 0d arrays is affected by printoptions
        x = np.array(1)
        np.set_printoptions(formatter={'all':lambda x: "test"})
        assert_equal(repr(x), "array(test)")
        # str is unaffected
        assert_equal(str(x), "1")

        # check `style` arg raises
        assert_warns(DeprecationWarning, np.array2string,
                                         np.array(1.), style=repr)
        # but not in legacy mode
        np.array2string(np.array(1.), style=repr, legacy='1.13')
Example #16
    def test_to_phylip_no_positions(self):
        d1 = DNASequence('', id="d1")
        d2 = DNASequence('', id="d2")
        a = Alignment([d1, d2])

        with self.assertRaises(SequenceCollectionError):
            npt.assert_warns(UserWarning, a.to_phylip)
Example #17
    def test_multivariate_normal(self):
        np.random.seed(self.seed)
        mean = (0.123456789, 10)
        # Hmm... not even symmetric.
        cov = [[1, 0], [1, 0]]
        size = (3, 2)
        actual = np.random.multivariate_normal(mean, cov, size)
        desired = np.array(
            [
                [[-1.47027513018564449, 10.0], [-1.65915081534845532, 10.0]],
                [[-2.29186329304599745, 10.0], [-1.77505606019580053, 10.0]],
                [[-0.54970369430044119, 10.0], [0.29768848031692957, 10.0]],
            ]
        )
        np.testing.assert_array_almost_equal(actual, desired, decimal=15)

        # Check for default size, was raising deprecation warning
        actual = np.random.multivariate_normal(mean, cov)
        desired = np.array([-0.79441224511977482, 10.0])
        np.testing.assert_array_almost_equal(actual, desired, decimal=15)

        # Check that a non-positive-semidefinite covariance raises a warning
        mean = [0, 0]
        cov = [[1, 1 + 1e-10], [1 + 1e-10, 1]]
        assert_warns(RuntimeWarning, np.random.multivariate_normal, mean, cov)
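
For reference, the behaviour checked above is controlled by the check_valid argument of np.random.multivariate_normal (default 'warn'); a small sketch of the three modes, independent of the test above:

import numpy as np
from numpy.testing import assert_raises, assert_warns

mean = [0, 0]
cov = [[1, 1 + 1e-10], [1 + 1e-10, 1]]  # not positive semidefinite

# the default check_valid='warn' emits a RuntimeWarning
assert_warns(RuntimeWarning, np.random.multivariate_normal, mean, cov)
# 'raise' escalates the check to an error; 'ignore' silences it
assert_raises(ValueError, np.random.multivariate_normal, mean, cov,
              check_valid='raise')
np.random.multivariate_normal(mean, cov, check_valid='ignore')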
Example #18
def test_load_power_if_no_power():
    """ Test if a warning if raise if there is no power data. """

    currdir = os.path.dirname(os.path.abspath(__file__))
    filename = os.path.join(currdir, 'data', '2014-05-17-10-44-53.fit')

    assert_warns(UserWarning, load_power_from_fit, filename)
Example #19
def test_power_solver_warn():
    # messing up the solver to trigger warning
    # I wrote this with scipy 0.9,
    # convergence behavior of scipy 0.11 is different,
    # fails at a different case, but is successful where it failed before

    pow_ = 0.69219411243824214 # from previous function
    nip = smp.NormalIndPower()
    # using nobs, has one backup (fsolve)
    nip.start_bqexp['nobs1'] = {'upp': 50, 'low': -20}
    val = nip.solve_power(0.1, nobs1=None, alpha=0.01, power=pow_, ratio=1,
                          alternative='larger')
    import scipy
    if scipy.__version__ < '0.10':
        assert_almost_equal(val, 1600, decimal=4)
        assert_equal(nip.cache_fit_res[0], 1)
        assert_equal(len(nip.cache_fit_res), 3)

        # case that has convergence failure, and should warn
        nip.start_ttp['nobs1'] = np.nan

        from statsmodels.tools.sm_exceptions import ConvergenceWarning
        assert_warns(ConvergenceWarning, nip.solve_power, 0.1, nobs1=None,
                      alpha=0.01, power=pow_, ratio=1, alternative='larger')
        # this converges with scipy 0.11  ???
        # nip.solve_power(0.1, nobs1=None, alpha=0.01, power=pow_, ratio=1, alternative='larger')

        import warnings
        with warnings.catch_warnings():  # python >= 2.6
            warnings.simplefilter("ignore")
            val = nip.solve_power(0.1, nobs1=None, alpha=0.01, power=pow_, ratio=1,
                                  alternative='larger')
            assert_equal(nip.cache_fit_res[0], 0)
            assert_equal(len(nip.cache_fit_res), 3)
Example #20
def test_continuous_error():
    """Test that a warning is raised when the targets are continuous."""

    # continuous case
    y = np.linspace(0, 1, 40)
    ann = AllKNN(random_state=RND_SEED)
    assert_warns(UserWarning, ann.fit, X, y)
Example #21
def test_continuous_error():
    """Test that a warning is raised when the targets are continuous."""

    # continuous case
    y = np.linspace(0, 1, 5000)
    cc = ClusterCentroids(random_state=RND_SEED)
    assert_warns(UserWarning, cc.fit, X, y)
Example #22
def test_continuous_error():
    """Test that a warning is raised when the targets are continuous."""

    # continuous case
    y = np.linspace(0, 1, 10)
    ros = RandomOverSampler(random_state=RND_SEED)
    assert_warns(UserWarning, ros.fit, X, y)
Example #23
 def test_deprecations(self):
     # 2017-05-17, 1.13.0
     s = (2, 3, 4, 5)
     a = np.empty(s)
     with warnings.catch_warnings():
         warnings.simplefilter("always")
         assert_warns(DeprecationWarning, expand_dims, a, -6)
         assert_warns(DeprecationWarning, expand_dims, a, 5)
Example #24
def test_continuous_error():
    """Test that a warning is raised when the targets are continuous."""

    # continuous case
    y = np.linspace(0, 1, 5000)
    ee = EasyEnsemble(random_state=RND_SEED)
    assert_warns(UserWarning, ee.fit, X, y)
Example #25
    def test_to_phylip_unequal_sequence_lengths(self):
        d1 = DNASequence('A-CT', id="d1")
        d2 = DNASequence('TTA', id="d2")
        d3 = DNASequence('.-AC', id="d3")
        a = Alignment([d1, d2, d3])

        with self.assertRaises(SequenceCollectionError):
            npt.assert_warns(UserWarning, a.to_phylip)
Example #26
    def test_grouped_distributions_insufficient_symbols(self):
        """grouped_distributions() should work even when there aren't
        enough symbols. We should capture a warning."""
        args = ('scatter', self.ValidTypicalData, [1, 4, 10, 11],
                ["T0", "T1", "T2", "T3"], ["Infants", "Children", "Teens"],
                ['^'], "x-axis label", "y-axis label", "Test")

        npt.assert_warns(RuntimeWarning, grouped_distributions, *args)
Example #27
def test_continuous_error():
    """Test that a warning is raised when the targets are continuous."""

    # continuous case
    y = np.linspace(0, 1, 15)
    ncr = NeighbourhoodCleaningRule(random_state=RND_SEED)
    assert_warns(UserWarning, ncr.fit, X, y)
Example #28
def test_continuous_error():
    """Test either if an error is raised when the target are continuous
    type"""

    # continuous case
    y = np.linspace(0, 1, 15)
    nm1 = NearMiss(random_state=RND_SEED, version=VERSION_NEARMISS)
    assert_warns(UserWarning, nm1.fit, X, y)
Example #29
 def test_deprecated_io(self):
     fh = StringIO()
     npt.assert_warns(UserWarning, self.dm_3x3.to_file, fh)
     fh.seek(0)
     deserialized = npt.assert_warns(UserWarning,
                                     DissimilarityMatrix.from_file, fh)
     self.assertEqual(deserialized, self.dm_3x3)
     self.assertTrue(type(deserialized) == DissimilarityMatrix)
Example #30
def test_continuous_error():
    """Test that a warning is raised when the targets are continuous."""

    # continuous case
    y = np.linspace(0, 1, 40)
    enn = RepeatedEditedNearestNeighbours(random_state=RND_SEED)
    assert_warns(UserWarning, enn.fit, X, y)
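
The same continuous-target check recurs above for several imbalanced-learn samplers. A hedged sketch of how the duplication could be collapsed with pytest.mark.parametrize, assuming the import paths and warning behaviour of the older imbalanced-learn releases these tests target:

import numpy as np
import pytest
from imblearn.over_sampling import RandomOverSampler
from imblearn.under_sampling import AllKNN, ClusterCentroids

@pytest.mark.parametrize("sampler", [AllKNN(), ClusterCentroids(),
                                     RandomOverSampler()])
def test_continuous_warns(sampler):
    X = np.random.uniform(size=(40, 2))  # placeholder features
    y = np.linspace(0, 1, 40)            # continuous targets
    with pytest.warns(UserWarning):
        sampler.fit(X, y)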
Example #31
 def test_exog_names_warning(self):
     mod = self.res.model
     mod1 = PoissonOffsetGMLE(mod.endog, mod.exog, offset=mod.offset)
     from numpy.testing import assert_warns
     mod1.data.xnames = mod1.data.xnames * 2
     assert_warns(ValueWarning, mod1.fit, disp=0)
Example #32
def check_warn_on_small_data():
    t, y, dy = _generate_data(20)
    model = LombScargleFast()
    assert_warns(UserWarning, model.score_frequency_grid, 0.8, 0.01, 40)
    model = LombScargleFast(silence_warnings=True)
    assert_no_warnings(model.score_frequency_grid, 0.8, 0.01, 40)
Example #33
 def test_full_default_dtype(self):
     assert_warns(FutureWarning, np.full, 1, 1)
     assert_warns(FutureWarning, np.full, 1, None)
     assert_no_warnings(np.full, 1, 1, float)
Example #34
def test_open_view_warning():
    # opening many views (without deleting the SurfaceView objects)
    # should raise a warning about memory usage
    assert_warns(UserWarning, _open_views)
    assert_no_warnings(_open_one_view)
Example #35
 def test_generator(self):
     with assert_warns(FutureWarning):
         dstack((np.arange(3) for _ in range(2)))
Example #36
def test_array_astype_warning(t):
    # test ComplexWarning when casting from complex to float or int
    a = np.array(10, dtype=np.complex_)
    assert_warns(np.ComplexWarning, a.astype, t)
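
The test above receives its target dtype t from a test fixture; a standalone sketch with concrete dtypes filled in:

import numpy as np
from numpy.testing import assert_warns

a = np.array(10, dtype=np.complex_)
for t in (np.float64, np.int64):
    # discarding the imaginary part triggers np.ComplexWarning
    assert_warns(np.ComplexWarning, a.astype, t)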
Example #37
    def test_nobonds_warns(self):
        self.u.bonds = TopologyGroup([])

        assert_warns(UserWarning, self.u.select_atoms,
                     'type 2 and bonded name N')
Example #38
 def validate_get_data_deprecated(self, imaker, params):
     # Check deprecated header API
     img = imaker()
     with assert_warns(DeprecationWarning):
         data = img.get_data()
     assert_array_equal(np.asanyarray(img.dataobj), data)
Example #39
def test_blended():
    fig, ax = plt.subplots()
    ax.axvline(0)
    assert_warns(UserWarning, fake_renderer_output, fig, FakeRenderer)
Example #40
    def test_recarray_views(self):
        a = np.array([(1, 'ABC'), (2, "DEF")],
                     dtype=[('foo', int), ('bar', 'S4')])
        b = np.array([1, 2, 3, 4, 5], dtype=np.int64)

        #check that np.rec.array gives right dtypes
        assert_equal(np.rec.array(a).dtype.type, np.record)
        assert_equal(type(np.rec.array(a)), np.recarray)
        assert_equal(np.rec.array(b).dtype.type, np.int64)
        assert_equal(type(np.rec.array(b)), np.recarray)

        #check that viewing as recarray does the same
        assert_equal(a.view(np.recarray).dtype.type, np.record)
        assert_equal(type(a.view(np.recarray)), np.recarray)
        assert_equal(b.view(np.recarray).dtype.type, np.int64)
        assert_equal(type(b.view(np.recarray)), np.recarray)

        #check that view to non-structured dtype preserves type=np.recarray
        r = np.rec.array(np.ones(4, dtype="f4,i4"))
        rv = r.view('f8').view('f4,i4')
        assert_equal(type(rv), np.recarray)
        assert_equal(rv.dtype.type, np.record)

        #check that getitem also preserves np.recarray and np.record
        r = np.rec.array(
            np.ones(4, dtype=[('a', 'i4'), ('b', 'i4'), ('c', 'i4,i4')]))
        assert_equal(r['c'].dtype.type, np.record)
        assert_equal(type(r['c']), np.recarray)

        # suppress deprecation warning in 1.12 (remove in 1.13)
        with assert_warns(FutureWarning):
            assert_equal(r[['a', 'b']].dtype.type, np.record)
            assert_equal(type(r[['a', 'b']]), np.recarray)

        #and that it preserves subclasses (gh-6949)
        class C(np.recarray):
            pass

        c = r.view(C)
        assert_equal(type(c['c']), C)

        # check that accessing nested structures keep record type, but
        # not for subarrays, non-void structures, non-structured voids
        test_dtype = [('a', 'f4,f4'), ('b', 'V8'), ('c', ('f4', 2)),
                      ('d', ('i8', 'i4,i4'))]
        r = np.rec.array([((1, 1), b'11111111', [1, 1], 1),
                          ((1, 1), b'11111111', [1, 1], 1)],
                         dtype=test_dtype)
        assert_equal(r.a.dtype.type, np.record)
        assert_equal(r.b.dtype.type, np.void)
        assert_equal(r.c.dtype.type, np.float32)
        assert_equal(r.d.dtype.type, np.int64)
        # check the same, but for views
        r = np.rec.array(np.ones(4, dtype='i4,i4'))
        assert_equal(r.view('f4,f4').dtype.type, np.record)
        assert_equal(r.view(('i4', 2)).dtype.type, np.int32)
        assert_equal(r.view('V8').dtype.type, np.void)
        assert_equal(r.view(('i8', 'i4,i4')).dtype.type, np.int64)

        #check that we can undo the view
        arrs = [np.ones(4, dtype='f4,i4'), np.ones(4, dtype='f8')]
        for arr in arrs:
            rec = np.rec.array(arr)
            # recommended way to view as an ndarray:
            arr2 = rec.view(rec.dtype.fields or rec.dtype, np.ndarray)
            assert_equal(arr2.dtype.type, arr.dtype.type)
            assert_equal(type(arr2), type(arr))
Example #41
def reconst_flow_core(flow):
    with TemporaryDirectory() as out_dir:
        data_path, bval_path, bvec_path = get_fnames('small_64D')
        volume, affine = load_nifti(data_path)
        mask = np.ones_like(volume[:, :, :, 0])
        mask_path = pjoin(out_dir, 'tmp_mask.nii.gz')
        save_nifti(mask_path, mask.astype(np.uint8), affine)

        reconst_flow = flow()
        for sh_order in [4, 6, 8]:
            if flow.get_short_name() == 'csd':

                reconst_flow.run(data_path,
                                 bval_path,
                                 bvec_path,
                                 mask_path,
                                 sh_order=sh_order,
                                 out_dir=out_dir,
                                 extract_pam_values=True)

            elif flow.get_short_name() == 'csa':

                reconst_flow.run(data_path,
                                 bval_path,
                                 bvec_path,
                                 mask_path,
                                 sh_order=sh_order,
                                 odf_to_sh_order=sh_order,
                                 out_dir=out_dir,
                                 extract_pam_values=True)

            gfa_path = reconst_flow.last_generated_outputs['out_gfa']
            gfa_data = load_nifti_data(gfa_path)
            npt.assert_equal(gfa_data.shape, volume.shape[:-1])

            peaks_dir_path =\
                reconst_flow.last_generated_outputs['out_peaks_dir']
            peaks_dir_data = load_nifti_data(peaks_dir_path)
            npt.assert_equal(peaks_dir_data.shape[-1], 15)
            npt.assert_equal(peaks_dir_data.shape[:-1], volume.shape[:-1])

            peaks_idx_path = \
                reconst_flow.last_generated_outputs['out_peaks_indices']
            peaks_idx_data = load_nifti_data(peaks_idx_path)
            npt.assert_equal(peaks_idx_data.shape[-1], 5)
            npt.assert_equal(peaks_idx_data.shape[:-1], volume.shape[:-1])

            peaks_vals_path = \
                reconst_flow.last_generated_outputs['out_peaks_values']
            peaks_vals_data = load_nifti_data(peaks_vals_path)
            npt.assert_equal(peaks_vals_data.shape[-1], 5)
            npt.assert_equal(peaks_vals_data.shape[:-1], volume.shape[:-1])

            shm_path = reconst_flow.last_generated_outputs['out_shm']
            shm_data = load_nifti_data(shm_path)
            # Test that the number of coefficients is what you would expect
            # given the order of the sh basis:
            npt.assert_equal(shm_data.shape[-1],
                             sph_harm_ind_list(sh_order)[0].shape[0])
            npt.assert_equal(shm_data.shape[:-1], volume.shape[:-1])

            pam = load_peaks(reconst_flow.last_generated_outputs['out_pam'])
            npt.assert_allclose(pam.peak_dirs.reshape(peaks_dir_data.shape),
                                peaks_dir_data)
            npt.assert_allclose(pam.peak_values, peaks_vals_data)
            npt.assert_allclose(pam.peak_indices, peaks_idx_data)
            npt.assert_allclose(pam.shm_coeff, shm_data)
            npt.assert_allclose(pam.gfa, gfa_data)

            bvals, bvecs = read_bvals_bvecs(bval_path, bvec_path)
            bvals[0] = 5.
            bvecs = generate_bvecs(len(bvals))

            tmp_bval_path = pjoin(out_dir, "tmp.bval")
            tmp_bvec_path = pjoin(out_dir, "tmp.bvec")
            np.savetxt(tmp_bval_path, bvals)
            np.savetxt(tmp_bvec_path, bvecs.T)
            reconst_flow._force_overwrite = True

            if flow.get_short_name() == 'csd':

                reconst_flow = flow()
                reconst_flow._force_overwrite = True
                reconst_flow.run(data_path,
                                 bval_path,
                                 bvec_path,
                                 mask_path,
                                 out_dir=out_dir,
                                 frf=[15, 5, 5])
                reconst_flow = flow()
                reconst_flow._force_overwrite = True
                reconst_flow.run(data_path,
                                 bval_path,
                                 bvec_path,
                                 mask_path,
                                 out_dir=out_dir,
                                 frf='15, 5, 5')
                reconst_flow = flow()
                reconst_flow._force_overwrite = True
                reconst_flow.run(data_path,
                                 bval_path,
                                 bvec_path,
                                 mask_path,
                                 out_dir=out_dir,
                                 frf=None)
                reconst_flow2 = flow()
                reconst_flow2._force_overwrite = True
                reconst_flow2.run(data_path,
                                  bval_path,
                                  bvec_path,
                                  mask_path,
                                  out_dir=out_dir,
                                  frf=None,
                                  roi_center=[5, 5, 5])
            else:
                with npt.assert_raises(BaseException):
                    npt.assert_warns(UserWarning,
                                     reconst_flow.run,
                                     data_path,
                                     tmp_bval_path,
                                     tmp_bvec_path,
                                     mask_path,
                                     out_dir=out_dir,
                                     extract_pam_values=True)

            # test parallel implementation
            reconst_flow = flow()
            reconst_flow._force_overwrite = True
            reconst_flow.run(data_path,
                             bval_path,
                             bvec_path,
                             mask_path,
                             out_dir=out_dir,
                             parallel=True,
                             nbr_processes=None)
            reconst_flow = flow()
            reconst_flow._force_overwrite = True
            reconst_flow.run(data_path,
                             bval_path,
                             bvec_path,
                             mask_path,
                             out_dir=out_dir,
                             parallel=True,
                             nbr_processes=2)
Example #42
    def test_grouped_distributions_insufficient_symbols(self):
        args = ('scatter', self.ValidTypicalData, [1, 4, 10, 11],
                ["T0", "T1", "T2", "T3"], ["Infants", "Children", "Teens"],
                ['^'], "x-axis label", "y-axis label", "Test")

        npt.assert_warns(RuntimeWarning, grouped_distributions, *args)
Example #43
    def test_validate_filepath_access_by_user(self):
        self._set_artifact_private()

        # shared has access to all study files and analysis files
        user = qdb.user.User('*****@*****.**')
        for i in [1, 2, 3, 4, 5, 9, 12, 15, 16, 17, 18, 19, 20, 21]:
            self.assertTrue(
                qdb.meta_util.validate_filepath_access_by_user(user, i))

        # Now shared should not have access to the study files
        qdb.study.Study(1).unshare(user)
        for i in [1, 2, 3, 4, 5, 9, 12, 17, 18, 19, 20, 21]:
            self.assertFalse(
                qdb.meta_util.validate_filepath_access_by_user(user, i))

        for i in [15, 16]:
            self.assertTrue(
                qdb.meta_util.validate_filepath_access_by_user(user, i))

        # Now shared should not have access to any files
        qdb.analysis.Analysis(1).unshare(user)
        for i in [1, 2, 3, 4, 5, 9, 12, 15, 16, 17, 18, 19, 20, 21]:
            self.assertFalse(
                qdb.meta_util.validate_filepath_access_by_user(user, i))

        # Now shared has access to public study files
        self._set_artifact_public()
        for i in [1, 2, 3, 4, 5, 9, 12, 17, 18, 19, 20, 21]:
            obs = qdb.meta_util.validate_filepath_access_by_user(user, i)
            if i < 3:
                self.assertFalse(obs)
            else:
                self.assertTrue(obs)

        # testing that if study.public_raw_download is true we get access
        qdb.study.Study(1).public_raw_download = True
        for i in [1, 2, 3]:
            obs = qdb.meta_util.validate_filepath_access_by_user(user, i)
            self.assertTrue(obs)
        qdb.study.Study(1).public_raw_download = False

        # Test that it doesn't break: if the SampleTemplate hasn't been added
        info = {
            "timeseries_type_id": 1,
            "metadata_complete": True,
            "mixs_compliant": True,
            "study_alias": "TestStudy",
            "study_description": "Description of a test study",
            "study_abstract": "No abstract right now...",
            "principal_investigator_id": 1,
            "lab_person_id": 1
        }
        study = qdb.study.Study.create(qdb.user.User('*****@*****.**'),
                                       "Test study", info)
        for i in [1, 2, 3, 4, 5, 9, 12, 17, 18, 19, 20, 21]:
            obs = qdb.meta_util.validate_filepath_access_by_user(user, i)
            if i < 3:
                self.assertFalse(obs)
            else:
                self.assertTrue(obs)

        # test in case there is a prep template that failed
        with qdb.sql_connection.TRN:
            qdb.sql_connection.TRN.add(
                "INSERT INTO qiita.prep_template (data_type_id) VALUES (2)")
            qdb.sql_connection.TRN.execute()
        for i in [1, 2, 3, 4, 5, 9, 12, 17, 18, 19, 20, 21]:
            obs = qdb.meta_util.validate_filepath_access_by_user(user, i)
            if i < 3:
                self.assertFalse(obs)
            else:
                self.assertTrue(obs)

        # admin should have access to everything
        admin = qdb.user.User('*****@*****.**')
        with qdb.sql_connection.TRN:
            qdb.sql_connection.TRN.add(
                "SELECT filepath_id FROM qiita.filepath")
            fids = qdb.sql_connection.TRN.execute_fetchflatten()
        for i in fids:
            self.assertTrue(
                qdb.meta_util.validate_filepath_access_by_user(admin, i))

        # testing access to a prep info file without artifacts
        # returning artifacts to private
        self._set_artifact_private()
        PT = qdb.metadata_template.prep_template.PrepTemplate
        md_dict = {
            'SKB8.640193': {
                'center_name': 'ANL',
                'center_project_name': 'Test Project',
                'ebi_submission_accession': None,
                'linkerprimersequence': 'GTGCCAGCMGCCGCGGTAA',
                'barcodesequence': 'GTCCGCAAGTTA',
                'run_prefix': "s_G1_L001_sequences",
                'platform': 'Illumina',
                'instrument_model': 'Illumina MiSeq',
                'library_construction_protocol': 'AAAA',
                'experiment_design_description': 'BBBB'
            }
        }
        md = pd.DataFrame.from_dict(md_dict, orient='index', dtype=str)
        # creating prep info on Study(1), which is our default Study
        pt = npt.assert_warns(qdb.exceptions.QiitaDBWarning, PT.create, md,
                              qdb.study.Study(1), "18S")
        for idx, _ in pt.get_filepaths():
            self.assertFalse(
                qdb.meta_util.validate_filepath_access_by_user(user, idx))

        # returning to original sharing
        PT.delete(pt.id)
        qdb.study.Study(1).share(user)
        qdb.analysis.Analysis(1).share(user)
        qdb.study.Study.delete(study.id)
Example #44
def test_misc():
    endog = lake.copy()
    exog = np.c_[np.ones_like(endog), np.arange(1, len(endog) + 1) * 1.0]

    # Test for warning if iterations fail to converge
    assert_warns(UserWarning, gls, endog, exog, order=(2, 0, 0), max_iter=0)
Example #45
 def test_tostring_matches_tobytes(self):
     arr = np.array(list(b"test\xFF"), dtype=np.uint8)
     b = arr.tobytes()
     with assert_warns(DeprecationWarning):
         s = arr.tostring()
     assert s == b
Example #46
 def test_keepdims(self):
     with warnings.catch_warnings():
         warnings.filterwarnings('always', '', FutureWarning)
         assert_warns(FutureWarning, np.add.accumulate, [1], keepdims=True)
Example #47
 def test(self):
     a = np.arange(10)
     assert_warns(np.VisibleDeprecationWarning, np.rank, a)
Example #48
    def test_SparseCCA():

        np.random.seed(0)

        scca = SparseCCA(n_components=1)
        assert_raises(ValueError, scca.fit,
                      np.arange(20).reshape(10, 2),
                      np.arange(30).reshape(10, 3))
        assert_raises(ValueError, scca.fit,
                      np.arange(30).reshape(10, 3),
                      np.arange(10).reshape(10, 1))

        assert_raises(ValueError, scca.fit,
                      np.arange(30).reshape(10, 3), np.arange(10))

        scca = SparseCCA(n_components=20)
        assert_raises(ValueError, scca.fit,
                      np.arange(30).reshape(10, 3),
                      np.arange(30).reshape(10, 3))

        scca = SparseCCA(n_components=2)
        assert_warns(UserWarning, scca.fit, np.random.uniform(size=(10, 3)),
                     np.random.uniform(size=(10, 2)))

        ###

        n = 10000
        corr_signal = np.cos(np.arange(n)).reshape(-1, 1)
        X = np.c_[corr_signal, corr_signal, corr_signal,
                  np.random.normal(size=(n, 3))]
        Y = np.c_[np.random.normal(size=(n, 1)), corr_signal, corr_signal,
                  corr_signal,
                  np.random.normal(size=(n, 1))]

        scca = SparseCCA(n_components=1,
                         scale=False,
                         optimize_penalties='NOT_AN_OPTION')
        assert_raises(ValueError, scca.fit, X, Y)

        # maximum penalty
        scca = SparseCCA(n_components=1,
                         scale=False,
                         optimize_penalties=False,
                         penaltyxs=0,
                         penaltyys=0)
        _test_SparseCCA_fit_results(scca, X, Y)
        assert_allclose(scca.x_rotations_[[3, 4, 5]], 0)
        assert_allclose(scca.y_rotations_[[0, 4]], 0)
        max_penalties_covs = scca.covs_

        # no penalty
        scca.set_params(penaltyxs=1, penaltyys=1)
        _test_SparseCCA_fit_results(scca, X, Y)
        assert np.all(scca.x_rotations_[[3, 4, 5]] != 0)
        assert np.all(scca.y_rotations_[[0, 4]] != 0)
        no_penalties_covs = scca.covs_
        assert max_penalties_covs[0] <= no_penalties_covs[0]

        # cv
        scca = SparseCCA(n_components=1,
                         scale=False,
                         optimize_penalties='cv',
                         penaltyxs=[0, 1],
                         penaltyys=[0, 1],
                         verbose=True)
        scca.fit(X, Y)
Example #49
    def test_polyint(self):
        # check exceptions
        assert_raises(TypeError, poly.polyint, [0], .5)
        assert_raises(ValueError, poly.polyint, [0], -1)
        assert_raises(ValueError, poly.polyint, [0], 1, [0, 0])
        assert_raises(ValueError, poly.polyint, [0], lbnd=[0])
        assert_raises(ValueError, poly.polyint, [0], scl=[0])
        assert_raises(TypeError, poly.polyint, [0], axis=.5)
        with assert_warns(DeprecationWarning):
            poly.polyint([1, 1], 1.)

        # test integration of zero polynomial
        for i in range(2, 5):
            k = [0] * (i - 2) + [1]
            res = poly.polyint([0], m=i, k=k)
            assert_almost_equal(res, [0, 1])

        # check single integration with integration constant
        for i in range(5):
            scl = i + 1
            pol = [0] * i + [1]
            tgt = [i] + [0] * i + [1 / scl]
            res = poly.polyint(pol, m=1, k=[i])
            assert_almost_equal(trim(res), trim(tgt))

        # check single integration with integration constant and lbnd
        for i in range(5):
            scl = i + 1
            pol = [0] * i + [1]
            res = poly.polyint(pol, m=1, k=[i], lbnd=-1)
            assert_almost_equal(poly.polyval(-1, res), i)

        # check single integration with integration constant and scaling
        for i in range(5):
            scl = i + 1
            pol = [0] * i + [1]
            tgt = [i] + [0] * i + [2 / scl]
            res = poly.polyint(pol, m=1, k=[i], scl=2)
            assert_almost_equal(trim(res), trim(tgt))

        # check multiple integrations with default k
        for i in range(5):
            for j in range(2, 5):
                pol = [0] * i + [1]
                tgt = pol[:]
                for k in range(j):
                    tgt = poly.polyint(tgt, m=1)
                res = poly.polyint(pol, m=j)
                assert_almost_equal(trim(res), trim(tgt))

        # check multiple integrations with defined k
        for i in range(5):
            for j in range(2, 5):
                pol = [0] * i + [1]
                tgt = pol[:]
                for k in range(j):
                    tgt = poly.polyint(tgt, m=1, k=[k])
                res = poly.polyint(pol, m=j, k=list(range(j)))
                assert_almost_equal(trim(res), trim(tgt))

        # check multiple integrations with lbnd
        for i in range(5):
            for j in range(2, 5):
                pol = [0] * i + [1]
                tgt = pol[:]
                for k in range(j):
                    tgt = poly.polyint(tgt, m=1, k=[k], lbnd=-1)
                res = poly.polyint(pol, m=j, k=list(range(j)), lbnd=-1)
                assert_almost_equal(trim(res), trim(tgt))

        # check multiple integrations with scaling
        for i in range(5):
            for j in range(2, 5):
                pol = [0] * i + [1]
                tgt = pol[:]
                for k in range(j):
                    tgt = poly.polyint(tgt, m=1, k=[k], scl=2)
                res = poly.polyint(pol, m=j, k=list(range(j)), scl=2)
                assert_almost_equal(trim(res), trim(tgt))
Example #50
 def assert_warns(self, *args, **kwargs):
     '''
     Fail unless the given callable throws the specified warning.
     '''
     return assert_warns(*args, **kwargs)
Example #51
 def test_warns_reps(self):
     # raises warning when reps is less than 1000
     x = np.arange(20)
     reps = 100
     assert_warns(RuntimeWarning, RV().test, x, x, reps=reps)
Example #52
 def test_maximum(self):
     assert_warns(DeprecationWarning, np.ma.maximum, np.ma.array([1, 2]))
Example #53
def test_multichannel_warnings():
    img = data.astronaut()
    assert_warns(UserWarning, restoration.denoise_bilateral, img)
    assert_warns(UserWarning, restoration.denoise_nl_means, img)
Example #54
 def test_generator(self):
     with assert_warns(FutureWarning):
         hstack((np.arange(3) for _ in range(2)))
     with assert_warns(FutureWarning):
         hstack(map(lambda x: x, np.ones((3, 2))))
Example #55
 def test_deriv_zero_warning(self):
     func = lambda x: x**2
     dfunc = lambda x: 2 * x
     assert_warns(RuntimeWarning, cc.newton, func, 0.0, dfunc)
Example #56
def test_unsupported_uncondensed_distance_matrix_linkage_warning():
    assert_warns(ClusterWarning, linkage, [[0, 1], [1, 0]])
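
For context, the ClusterWarning above fires because a square (uncondensed) distance matrix was passed; a short sketch of the condensed form that avoids it, using scipy.spatial.distance.squareform:

import numpy as np
from scipy.cluster.hierarchy import linkage
from scipy.spatial.distance import squareform

square = np.array([[0., 1.], [1., 0.]])
condensed = squareform(square)  # -> array([1.])
linkage(condensed)              # no ClusterWarning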
Example #57
 def test_call_with_cast_to_complex_with_umfpack(self):
     use_solver(useUmfpack=True)
     solve = factorized(self.A)
     b = random.rand(4)
     for t in [np.complex64, np.complex128]:
         assert_warns(np.ComplexWarning, solve, b.astype(t))
Example #58
 def test_singular_with_umfpack(self):
     use_solver(useUmfpack=True)
     with suppress_warnings() as sup:
         sup.filter(RuntimeWarning, "divide by zero encountered in double_scalars")
         assert_warns(umfpack.UmfpackWarning, self._check_singular)
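
numpy.testing.suppress_warnings, used above, is a more selective alternative to warnings.catch_warnings; a minimal sketch of the filtering pattern:

import numpy as np
from numpy.testing import suppress_warnings

with suppress_warnings() as sup:
    sup.filter(RuntimeWarning)  # swallow RuntimeWarnings inside the block
    np.log(0.0)                 # would otherwise warn: divide by zero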
Example #59
def test_reconst_dki():
    with TemporaryDirectory() as out_dir:
        data_path, bval_path, bvec_path = get_fnames('small_101D')
        vol_img = nib.load(data_path)
        volume = vol_img.get_data()
        mask = np.ones_like(volume[:, :, :, 0])
        mask_img = nib.Nifti1Image(mask.astype(np.uint8), vol_img.affine)
        mask_path = pjoin(out_dir, 'tmp_mask.nii.gz')
        nib.save(mask_img, mask_path)

        dki_flow = ReconstDkiFlow()

        args = [data_path, bval_path, bvec_path, mask_path]

        dki_flow.run(*args, out_dir=out_dir)

        fa_path = dki_flow.last_generated_outputs['out_fa']
        fa_data = nib.load(fa_path).get_data()
        assert_equal(fa_data.shape, volume.shape[:-1])

        tensor_path = dki_flow.last_generated_outputs['out_dt_tensor']
        tensor_data = nib.load(tensor_path)
        assert_equal(tensor_data.shape[-1], 6)
        assert_equal(tensor_data.shape[:-1], volume.shape[:-1])

        ga_path = dki_flow.last_generated_outputs['out_ga']
        ga_data = nib.load(ga_path).get_data()
        assert_equal(ga_data.shape, volume.shape[:-1])

        rgb_path = dki_flow.last_generated_outputs['out_rgb']
        rgb_data = nib.load(rgb_path)
        assert_equal(rgb_data.shape[-1], 3)
        assert_equal(rgb_data.shape[:-1], volume.shape[:-1])

        md_path = dki_flow.last_generated_outputs['out_md']
        md_data = nib.load(md_path).get_data()
        assert_equal(md_data.shape, volume.shape[:-1])

        ad_path = dki_flow.last_generated_outputs['out_ad']
        ad_data = nib.load(ad_path).get_data()
        assert_equal(ad_data.shape, volume.shape[:-1])

        rd_path = dki_flow.last_generated_outputs['out_rd']
        rd_data = nib.load(rd_path).get_data()
        assert_equal(rd_data.shape, volume.shape[:-1])

        mk_path = dki_flow.last_generated_outputs['out_mk']
        mk_data = nib.load(mk_path).get_data()
        assert_equal(mk_data.shape, volume.shape[:-1])

        ak_path = dki_flow.last_generated_outputs['out_ak']
        ak_data = nib.load(ak_path).get_data()
        assert_equal(ak_data.shape, volume.shape[:-1])

        rk_path = dki_flow.last_generated_outputs['out_rk']
        rk_data = nib.load(rk_path).get_data()
        assert_equal(rk_data.shape, volume.shape[:-1])

        kt_path = dki_flow.last_generated_outputs['out_dk_tensor']
        kt_data = nib.load(kt_path)
        assert_equal(kt_data.shape[-1], 15)
        assert_equal(kt_data.shape[:-1], volume.shape[:-1])

        mode_path = dki_flow.last_generated_outputs['out_mode']
        mode_data = nib.load(mode_path).get_data()
        assert_equal(mode_data.shape, volume.shape[:-1])

        evecs_path = dki_flow.last_generated_outputs['out_evec']
        evecs_data = nib.load(evecs_path).get_data()
        assert_equal(evecs_data.shape[-2:], tuple((3, 3)))
        assert_equal(evecs_data.shape[:-2], volume.shape[:-1])

        evals_path = dki_flow.last_generated_outputs['out_eval']
        evals_data = nib.load(evals_path).get_data()
        assert_equal(evals_data.shape[-1], 3)
        assert_equal(evals_data.shape[:-1], volume.shape[:-1])

        bvals, bvecs = read_bvals_bvecs(bval_path, bvec_path)
        bvals[0] = 5.
        bvecs = generate_bvecs(len(bvals))

        tmp_bval_path = pjoin(out_dir, "tmp.bval")
        tmp_bvec_path = pjoin(out_dir, "tmp.bvec")
        np.savetxt(tmp_bval_path, bvals)
        np.savetxt(tmp_bvec_path, bvecs.T)
        dki_flow._force_overwrite = True
        npt.assert_warns(UserWarning,
                         dki_flow.run,
                         data_path,
                         tmp_bval_path,
                         tmp_bvec_path,
                         mask_path,
                         out_dir=out_dir,
                         b0_threshold=0)
Example #60
def test_stack():
    # non-iterable input
    assert_raises(TypeError, stack, 1)

    # 0d input
    for input_ in [(1, 2, 3), [np.int32(1),
                               np.int32(2),
                               np.int32(3)],
                   [np.array(1), np.array(2),
                    np.array(3)]]:
        assert_array_equal(stack(input_), [1, 2, 3])
    # 1d input examples
    a = np.array([1, 2, 3])
    b = np.array([4, 5, 6])
    r1 = array([[1, 2, 3], [4, 5, 6]])
    assert_array_equal(np.stack((a, b)), r1)
    assert_array_equal(np.stack((a, b), axis=1), r1.T)
    # all input types
    assert_array_equal(np.stack(list([a, b])), r1)
    assert_array_equal(np.stack(array([a, b])), r1)
    # all shapes for 1d input
    arrays = [np.random.randn(3) for _ in range(10)]
    axes = [0, 1, -1, -2]
    expected_shapes = [(10, 3), (3, 10), (3, 10), (10, 3)]
    for axis, expected_shape in zip(axes, expected_shapes):
        assert_equal(np.stack(arrays, axis).shape, expected_shape)
    assert_raises_regex(np.AxisError, 'out of bounds', stack, arrays, axis=2)
    assert_raises_regex(np.AxisError, 'out of bounds', stack, arrays, axis=-3)
    # all shapes for 2d input
    arrays = [np.random.randn(3, 4) for _ in range(10)]
    axes = [0, 1, 2, -1, -2, -3]
    expected_shapes = [(10, 3, 4), (3, 10, 4), (3, 4, 10), (3, 4, 10),
                       (3, 10, 4), (10, 3, 4)]
    for axis, expected_shape in zip(axes, expected_shapes):
        assert_equal(np.stack(arrays, axis).shape, expected_shape)
    # empty arrays
    assert_(stack([[], [], []]).shape == (3, 0))
    assert_(stack([[], [], []], axis=1).shape == (0, 3))
    # out
    out = np.zeros_like(r1)
    np.stack((a, b), out=out)
    assert_array_equal(out, r1)
    # edge cases
    assert_raises_regex(ValueError, 'need at least one array', stack, [])
    assert_raises_regex(ValueError, 'must have the same shape', stack,
                        [1, np.arange(3)])
    assert_raises_regex(ValueError, 'must have the same shape', stack,
                        [np.arange(3), 1])
    assert_raises_regex(ValueError,
                        'must have the same shape',
                        stack, [np.arange(3), 1],
                        axis=1)
    assert_raises_regex(ValueError,
                        'must have the same shape',
                        stack, [np.zeros(
                            (3, 3)), np.zeros(3)],
                        axis=1)
    assert_raises_regex(ValueError, 'must have the same shape', stack,
                        [np.arange(2), np.arange(3)])
    # generator is deprecated
    with assert_warns(FutureWarning):
        result = stack((x for x in range(3)))
    assert_array_equal(result, np.array([0, 1, 2]))