Example #1
def combine_nulls(parcellation, scale, spatnull, alpha):
    """
    Combines outputs of all simulations into single files for provided inputs

    Parameters
    ----------
    parcellation : str
        Name of parcellation to be used
    scale : str
        Scale of `parcellation` to be used
    spatnull : str
        Name of spin method to be used
    alpha : float
        Spatial autocorrelation parameter to be used
    """

    print(f'{spatnull} {alpha} {parcellation} {scale}')

    nulldir = SIMDIR / alpha / parcellation / 'nulls' / spatnull
    pvals_fn = nulldir / f'{scale}_nulls.csv'
    perms_fn = nulldir / f'{scale}_perms.csv'

    # only some of the spatial null models were run in serial mode; these are
    # the ones that are missing the top-level file and whose outputs we need to
    # combine. do that here.
    if not pvals_fn.exists():
        pvals, perms = np.zeros(N_SIM), np.zeros((N_PERM, N_SIM))
        for sim in range(N_SIM):
            pvals[sim] = \
                np.loadtxt(nulldir / 'pvals' / f'{scale}_nulls_{sim:04d}.csv')
            perms[:, sim] = \
                np.loadtxt(nulldir / 'pvals' / f'{scale}_perms_{sim:04d}.csv')
        putils.save_dir(pvals_fn, pvals, overwrite=False)
        putils.save_dir(perms_fn, perms, overwrite=False)
    else:
        pvals = np.loadtxt(pvals_fn)

    # grab the empirical correlations for each simulation---good to have
    if parcellation == 'vertex':
        x, y = simnulls.load_vertex_data(SIMDIR / alpha, n_sim=N_SIM)
    else:
        x, y = simnulls.load_parc_data(SIMDIR / alpha,
                                       parcellation,
                                       scale,
                                       n_sim=N_SIM)
    corrs = nnstats.efficient_pearsonr(x, y, nan_policy='omit')[0]

    return pd.DataFrame(
        dict(parcellation=parcellation,
             scale=scale,
             spatnull=spatnull,
             alpha=alpha,
             corr=corrs,
             sim=range(len(pvals)),
             pval=pvals))
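A minimal driver sketch for `combine_nulls`, assuming the module-level constants (`SIMDIR`, `N_SIM`, `N_PERM`) and project imports are in scope; the parameter grids below are hypothetical placeholders. Note that `alpha` is joined directly into a path, so it is passed as a string here even though the docstring labels it a float:

import itertools

import pandas as pd

# hypothetical grids; the real lists live elsewhere in the project
SPATNULLS = ['vazquez-rodriguez', 'burt2020', 'moran']
ALPHAS = ['alpha-0.0', 'alpha-1.0', 'alpha-2.0']

df = pd.concat([
    combine_nulls('atl-cammoun2012', 'scale125', spatnull, alpha)
    for spatnull, alpha in itertools.product(SPATNULLS, ALPHAS)
], ignore_index=True)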
Example #2
def gen_permnets(data, networks, spins, fname):
    """
    Generates permuted network means of `data` within `networks` using `spins`

    Parameters
    ----------
    data : (R,) array_like
        Input data where `R` is regions
    networks : (R,) array_like
        Network labels for `R` regions
    spins : (R, P) array_like
        Spin resampling matrix where `R` is regions and `P` is the number of
        resamples
    fname : str or os.PathLike
        Filepath specifying where generated null distribution should be saved

    Returns
    -------
    permnets : (P, L) numpy.ndarray
        Permuted network means for `L` networks
    """

    data, networks = np.asarray(data), np.asarray(networks)

    # if the output file already exists just load that and return it
    fname = Path(fname)
    if fname.exists():
        return np.loadtxt(fname, delimiter=',')

    # if we were given a file for the resampling array, load it
    if isinstance(spins, (str, os.PathLike)):
        spins = simnulls.load_spins(spins, n_perm=10000)

    nets = np.trim_zeros(np.unique(networks))
    permnets = np.full((spins.shape[-1], len(nets)), np.nan)
    for n, spin in enumerate(spins.T):
        msg = f'{n:>5}/{spins.shape[-1]}'
        print(msg, end='\b' * len(msg), flush=True)

        spindata = data[spin]
        spindata[spin == -1] = np.nan

        # get the means of each network for each spin
        permnets[n] = _get_netmeans(spindata, networks, nets)

    print(' ' * len(msg) + '\b' * len(msg), end='', flush=True)
    putils.save_dir(fname, permnets)

    return permnets
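The `_get_netmeans` helper used above is not included in this excerpt. A plausible minimal sketch, given that spins can mark dropped regions with NaN, might look like:

import numpy as np

def _get_netmeans(data, networks, nets=None):
    """Returns the mean of `data` within each label of `networks` (sketch)."""
    data, networks = np.asarray(data), np.asarray(networks)
    if nets is None:
        nets = np.trim_zeros(np.unique(networks))
    # nanmean so that regions dropped by a spin (set to NaN) are ignored
    return np.array([np.nanmean(data[networks == net]) for net in nets])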
Example #3
def combine_shuffle(parcellation, scale, spatnull, alpha):
    """
    Combines outputs of all simulations into single files for provided inputs

    Parameters
    ----------
    parcellation : str
        Name of parcellation to be used
    scale : str
        Scale of `parcellation` to be used
    spatnull : str
        Name of spin method to be used
    alpha : float
        Spatial autocorrelation parameter to be used
    """

    nulldir = SIMDIR / alpha / parcellation / 'nulls' / spatnull
    pvals_fn = nulldir / f'{scale}_nulls_shuffle.csv'
    perms_fn = nulldir / f'{scale}_perms_shuffle.csv'

    # only some of the spatial null models were run in serial mode; these are
    # the ones that are missing the top-level file and whose outputs we need to
    # combine. do that here.
    if not pvals_fn.exists():
        pvals, perms = np.zeros(N_SIM), np.zeros((N_PERM, N_SIM))
        for sim in range(N_SIM):
            pvals[sim] = np.loadtxt(nulldir / 'pvals' /
                                    f'{scale}_nulls_shuffle_{sim:04d}.csv')
            perms[:, sim] = np.loadtxt(nulldir / 'pvals' /
                                       f'{scale}_perms_shuffle_{sim:04d}.csv')
        putils.save_dir(pvals_fn, pvals, overwrite=False)
        putils.save_dir(perms_fn, perms, overwrite=False)
    else:
        pvals = np.loadtxt(pvals_fn)

    # the "shuffled" p-values were generated so we could calculate the
    # Prob(p < 0.05) of each null / alpha combination (i.e., the FWER)
    prob = np.sum(pvals < 0.05) / len(pvals)

    return dict(parcellation=parcellation,
                scale=scale,
                spatnull=spatnull,
                alpha=alpha,
                prob=prob)
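As a sanity check on the `prob` calculation above: it is simply the empirical rejection rate at the 0.05 level, so p-values drawn from a well-calibrated null should yield a value near 0.05:

import numpy as np

rng = np.random.default_rng(1234)
pvals = rng.uniform(size=10000)           # p-values under a calibrated null
print(np.sum(pvals < 0.05) / len(pvals))  # ~0.05, the family-wise error rate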
Example #4
def gen_permcorrs(data, spins, fname):
    """
    Generates permuted correlations for `data` with `spins`

    Parameters
    ----------
    data : (R, T) array_like
        Input data where `R` is regions and `T` is neurosynth terms
    spins : (R, P) array_like
        Spin resampling matrix where `R` is regions and `P` is the number of
        resamples
    fname : str or os.PathLike
        Filepath specifying where generated null distribution should be saved

    Returns
    -------
    perms : (P, 1) numpy.ndarray
        Permuted correlations
    """

    data = np.asarray(data)

    fname = putils.pathify(fname)
    if fname.exists():
        return np.loadtxt(fname).reshape(-1, 1)

    if isinstance(spins, (str, os.PathLike)):
        spins = np.loadtxt(spins, delimiter=',', dtype='int32')

    permcorrs = np.zeros((spins.shape[-1], 1))
    for n, spin in enumerate(spins.T):
        msg = f'{n:>5}/{spins.shape[-1]}'
        print(msg, end='\b' * len(msg), flush=True)
        # this will only have False values when spintype == 'baum'
        mask = np.logical_and(spin != -1, np.all(~np.isnan(data), axis=1))
        # get the absolute max correlation from the null correlation matrix
        permcorrs[n] = _get_permcorr(data[mask], data[spin][mask])

    print(' ' * len(msg) + '\b' * len(msg), end='', flush=True)

    # save these to disk for later re-use
    putils.save_dir(fname, permcorrs)

    return permcorrs
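`_get_permcorr` is likewise not shown in this excerpt; going by the in-line comment, it should return the single largest absolute correlation between the real and permuted term maps. A minimal sketch under that assumption:

import numpy as np

def _get_permcorr(data, perm):
    """Returns max absolute correlation between columns of `data` / `perm`."""
    data, perm = np.asarray(data), np.asarray(perm)
    n = data.shape[-1]
    # correlate every real term map with every permuted term map and take
    # the single largest |r| as this permutation's test statistic
    corrs = np.corrcoef(data.T, perm.T)[:n, n:]
    return np.abs(corrs).max()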
Example #5
def calc_moran(parcellation, scale, alpha):
    """
    Calculates Moran's I of all simulations for provided inputs

    Parameters
    ----------
    parcellation : str
        Name of parcellation to be used
    scale : str
        Scale of `parcellation` to be used
    alpha : float
        Spatial autocorrelation parameter to be used

    Returns
    -------
    moran_fn : os.PathLike
        Path to generated file containing Moran's I for simulations
    """

    print(f'{time.ctime()}: {parcellation} {scale} {alpha}', flush=True)

    # filename for output
    moran_fn = (SIMDIR / alpha / parcellation / f'{scale}_moran.csv')

    if moran_fn.exists():
        return moran_fn

    # load simulated data
    alphadir = SIMDIR / alpha
    if parcellation == 'vertex':
        y = simnulls.load_vertex_data(alphadir, n_sim=N_SIM)[1]
    else:
        y = simnulls.load_parc_data(alphadir, parcellation, scale,
                                    n_sim=N_SIM)[1]

    dist = simnulls.load_full_distmat(y, DISTDIR, parcellation, scale)
    moran = simnulls.calc_moran(dist, np.asarray(y), n_jobs=N_PROC)
    putils.save_dir(moran_fn, np.atleast_1d(moran), overwrite=False)

    return moran_fn
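For reference, a toy version of the statistic that `simnulls.calc_moran` presumably computes per simulation, using simple inverse-distance weights; this is an illustrative stand-in, not the project's implementation:

import numpy as np

def morans_i(dist, y):
    """Moran's I of `y` under inverse-distance spatial weights (sketch)."""
    w = np.zeros_like(dist, dtype=float)
    mask = ~np.eye(len(dist), dtype=bool)
    w[mask] = 1.0 / dist[mask]  # assumes positive off-diagonal distances
    z = y - y.mean()
    # I = (N / sum(w)) * (z' W z) / (z' z)
    return (len(y) / w.sum()) * (z @ w @ z) / (z @ z)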
Example #6
def run_null(netclass, parc, scale, spintype):
    """
    Runs spatial permutation null model for given combination of inputs

    Parameters
    ----------
    netclass : {'vek', 'yeo'}
        Network partition to test
    parc : str
        Name of parcellation to be used
    scale : str
        Scale of `parcellation` to be used
    spintype : str
        Name of spin method to be used

    Returns
    -------
    stats : pd.DataFrame
        Generated statistics with columns ['parcellation', 'scale', 'spintype',
        'netclass', 'network', 'zscore', 'pval']
    """
    data = load_data(netclass, parc, scale)

    # run the damn thing
    print(f'Running {spintype:>9} spins for {scale}: ', end='', flush=True)
    out = HCPDIR / parc / 'nulls' / netclass / spintype / f'{scale}_nulls.csv'
    if out.exists():
        permnets = np.loadtxt(out, delimiter=',')
    elif spintype == 'cornblath':
        # even though we're working with parcellated data we need to project
        # that to the surface + spin the vertices, so let's load our
        # pre-generated vertex-level spins
        spins = SPDIR / 'vertex' / 'vazquez-rodriguez' / 'fsaverage5_spins.csv'

        # get annotation files (we need these to project parcels to surface)
        fetcher = getattr(nndata, f"fetch_{parc.replace('atl-', '')}")
        annotations = fetcher('fsaverage5', data_dir=ROIDIR)[scale]

        # pre-load the spins for this function (assumes `spins` is array)
        print('Pre-loading spins...', end='\b' * 20, flush=True)
        spins = np.loadtxt(spins, delimiter=',', dtype='int32')
        # generate "spun" data; permdata will be an (R, T, n_rotate) array
        # where `R` is regions and `T` is 1 (myelination)
        permdata = nnsurf.spin_data(np.asarray(data['myelin']),
                                    version='fsaverage5',
                                    lhannot=annotations.lh,
                                    rhannot=annotations.rh,
                                    spins=spins,
                                    n_rotate=spins.shape[-1],
                                    verbose=True)
        permnets = np.vstack([
            _get_netmeans(permdata[..., n], data['networks'])
            for n in range(spins.shape[-1])
        ])
        putils.save_dir(out, permnets)
    elif spintype in ['burt2018', 'burt2020']:
        surrdir = SURRDIR / parc / spintype / 'hcp'
        surrogates = get_surrogates(data['myelin'], surrdir, scale)
        permnets = np.vstack([
            _get_netmeans(surrogates[..., n], data['networks'])
            for n in range(surrogates.shape[-1])
        ])
        putils.save_dir(out, permnets)
    elif spintype == 'moran':
        surrogates = np.zeros((len(data['myelin']), 10000))
        for hemi, dist, idx in putils.yield_data_dist(DISTDIR, parc, scale,
                                                      data['myelin']):
            mrs = moran.MoranRandomization(joint=True,
                                           n_rep=10000,
                                           tol=1e-6,
                                           random_state=1234)
            mrs.fit(dist)
            surrogates[idx] = np.squeeze(mrs.randomize(hemi)).T

        permnets = np.vstack([
            _get_netmeans(surrogates[..., n], data['networks'])
            for n in range(surrogates.shape[-1])
        ])
        putils.save_dir(out, permnets)
    else:
        spins = SPDIR / parc / spintype / f'{scale}_spins.csv'
        permnets = gen_permnets(data['myelin'], data['networks'], spins, out)

    # now get the real network averages and compare to the permuted values
    real = _get_netmeans(data['myelin'], data['networks'])
    zscores, pvals = get_fwe(real, permnets)

    out = pd.DataFrame(
        dict(parcellation=parc,
             scale=scale,
             spintype=spintype,
             netclass=netclass,
             network=list(NET_CODES[netclass].keys()),
             zscore=zscores,
             pval=pvals))

    return out
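`get_fwe` is referenced above but not defined in this excerpt. A plausible sketch that converts the permuted network means into z-scores and max-statistic, family-wise-corrected p-values would be:

import numpy as np

def get_fwe(real, perm):
    """Returns (zscores, pvals) for `real` (L,) vs `perm` (P, L) (sketch)."""
    real, perm = np.asarray(real), np.asarray(perm)
    mu, sd = perm.mean(axis=0), perm.std(axis=0, ddof=1)
    zscores = (real - mu) / sd
    # family-wise correction: compare each network's deviation against the
    # null distribution of the maximum absolute deviation per permutation
    null_max = np.abs(perm - mu).max(axis=1)
    pvals = (np.sum(null_max[:, None] >= np.abs(real - mu), axis=0) + 1) \
        / (len(perm) + 1)
    return zscores, pvals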
Example #7

if __name__ == '__main__':
    # get cammoun + schaefer parcellations
    parcellations = putils.get_cammoun_schaefer(data_dir=ROIDIR)

    # generate the vertex-level spins
    coords, hemi = nnsurf._get_fsaverage_coords('fsaverage5', 'sphere')

    fname = SPINDIR / 'vertex' / 'vazquez-rodriguez' / 'fsaverage5_spins.csv'
    if not fname.exists():
        print('Generating spins for fsaverage5 surface')
        spins = nnsurf.gen_spinsamples(coords, hemi, exact=False,
                                       n_rotate=10000, verbose=True,
                                       seed=1234, check_duplicates=False)
        putils.save_dir(fname, spins)

    fname = SPINDIR / 'vertex' / 'naive-nonpara' / 'fsaverage5_spins.csv'
    if not fname.exists():
        print('Generating naive permutations for fsaverage5 surface')
        rs = np.random.default_rng(1234)
        spins = np.column_stack([
            rs.permutation(len(coords)) for f in range(10000)
        ])
        putils.save_dir(fname, spins)

    # now pre-generate the parcellation spins for five methods. we can't
    # pre-generate the project-reduce-average method because that relies on the
    # data itself, but we _can_ use the above vertex-level spins for that
    for name, annotations in parcellations.items():
        print(f'PARCELLATION: {name}')
        # NOTE: the surface + medial-wall setup for each parcellation (i.e.,
        # `surf` and `medial_labels`) was elided from this excerpt; the nested
        # loops below are reconstructed from the variables used inside them
        for scale, annot in annotations.items():
            for hemi in ['lh', 'rh']:
                for allow_med in [True, False]:
                    med = 'medial' if allow_med else 'nomedial'
                    out = DISTDIR / name / med / f'{scale}_{hemi}_dist.csv'
                    if out.exists():
                        continue
                    # when we want to disallow travel along the medial wall we
                    # can specify which labels in our parcellation belong to
                    # the medial wall and disallow travel along vertices
                    # belonging to those parcels
                    mlabels = None if allow_med else medial_labels
                    dist = surface.get_surface_distance(getattr(surf, hemi),
                                                        getattr(annot, hemi),
                                                        medial_labels=mlabels,
                                                        n_proc=N_PROC,
                                                        use_wb=False,
                                                        verbose=True)
                    putils.save_dir(out, dist)

    # get vertex distance matrix
    for hemi in ['lh', 'rh']:
        medial_path = medial / f'{hemi}.Medial_wall.label'
        for allow_med in [True, False]:
            med = 'medial' if allow_med else 'nomedial'
            out = DISTDIR / 'vertex' / med / f'fsaverage5_{hemi}_dist.csv'
            if out.exists():
                continue
            mpath = None if allow_med else medial_path
            # since we have no parcellation here we need to provide a file
            # that denotes which vertices belong to the medial wall (`mpath`)
            dist = surface.get_surface_distance(getattr(surf, hemi),
                                                medial=mpath,
                                                n_proc=N_PROC,
                                                use_wb=False,
                                                verbose=True)
            putils.save_dir(out, dist)
Example #8
def run_null(parcellation, scale, spintype):
    """
    Runs spatial permutation null model for given combination of inputs

    Parameters
    ----------
    parcellation : str
        Name of parcellation to be used
    scale : str
        Scale of `parcellation` to be used
    spintype : str
        Name of spin method to be used

    Returns
    -------
    stats : pd.DataFrame
        Generated statistics with columns ['parcellation', 'scale', 'spintype',
        'n_sig']
    """

    nsdata = load_data(parcellation, scale)

    # run the damn thing
    print(f'Running {spintype:>9} spins for {scale}: ', end='', flush=True)
    out = NSDIR / parcellation / 'nulls' / spintype / f'{scale}_nulls.csv'
    if out.exists():
        permcorrs = np.loadtxt(out).reshape(-1, 1)
    elif spintype == 'cornblath':
        # even though we're working with parcellated data we need to project
        # that to the surface + spin the vertices, so let's load our
        # pre-generated vertex-level spins
        spins = SPDIR / 'vertex' / 'vazquez-rodriguez' / 'fsaverage5_spins.csv'

        # get annotation files
        fetcher = getattr(nndata, f"fetch_{parcellation.replace('atl-', '')}")
        annotations = fetcher('fsaverage5', data_dir=ROIDIR)[scale]

        # pre-load the spins for this function (assumes `spins` is array)
        # permdata will be an (R, T, n_rotate) array
        print('Pre-loading spins...', end='\b' * 20, flush=True)
        spins = np.loadtxt(spins, delimiter=',', dtype='int32')
        permdata = nnsurf.spin_data(nsdata, version='fsaverage5',
                                    lhannot=annotations.lh,
                                    rhannot=annotations.rh,
                                    spins=spins, n_rotate=spins.shape[-1],
                                    verbose=True)
        permcorrs = np.vstack([
            _get_permcorr(nsdata, permdata[..., n])
            for n in range(permdata.shape[-1])
        ])
        putils.save_dir(out, permcorrs)
    elif spintype in ['burt2018', 'burt2020']:
        surrdir = SURRDIR / parcellation / spintype / 'neurosynth'
        # generate the permuted data from the surrogate resampling arrays
        print('Generating surrogates...', end='\b' * 24, flush=True)
        permdata = get_surrogates(nsdata, surrdir, scale)
        permcorrs = np.vstack([
            _get_permcorr(nsdata, permdata[..., n])
            for n in range(permdata.shape[-1])
        ])
        putils.save_dir(out, permcorrs)
    elif spintype == 'moran':
        surrogates = np.zeros((*nsdata.shape, 10000))
        for hemi, dist, idx in putils.yield_data_dist(DISTDIR, parcellation,
                                                      scale, nsdata):
            mrs = moran.MoranRandomization(joint=True, n_rep=10000,
                                           tol=1e-6, random_state=1234)
            mrs.fit(dist)
            surrogates[idx] = mrs.randomize(hemi).transpose(1, 2, 0)

        permcorrs = np.vstack([
            _get_permcorr(nsdata, surrogates[..., n])
            for n in range(surrogates.shape[-1])
        ])
        putils.save_dir(out, permcorrs)
    else:
        spins = SPDIR / parcellation / spintype / f'{scale}_spins.csv'
        permcorrs = gen_permcorrs(nsdata, spins, out)

    nsdata = nsdata.dropna(axis=0, how='all')
    pvals = get_fwe(np.corrcoef(nsdata.T), permcorrs)

    out = pd.DataFrame(dict(
        parcellation=parcellation,
        scale=scale,
        spintype=spintype,
        n_sig=np.sum(np.triu(pvals < ALPHA, k=1))
    ), index=[0])

    return out
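The `get_fwe` used in this script appears to differ from the HCP variant sketched earlier: it compares every empirical term-term correlation against the null distribution of per-permutation maximum absolute correlations and returns a matrix of FWE-corrected p-values. A sketch under that assumption:

import numpy as np

def get_fwe(corrs, permcorrs):
    """Returns FWE-corrected p-values for a correlation matrix (sketch)."""
    corrs, permcorrs = np.asarray(corrs), np.asarray(permcorrs).ravel()
    # each empirical |r| is compared against the null of max |r| values
    n_ge = np.sum(np.abs(corrs)[..., None] <= permcorrs, axis=-1)
    return (n_ge + 1) / (len(permcorrs) + 1)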
Example #9
def run_null(parcellation, scale, spatnull, alpha):
    """
    Runs spatial null models for given combination of inputs

    Parameters
    ----------
    parcellation : str
        Name of parcellation to be used
    scale : str
        Scale of `parcellation` to be used
    spatnull : str
        Name of spin method to be used
    alpha : float
        Spatial autocorrelation parameter to be used
    """

    print(f'{time.ctime()}: {parcellation} {scale} {spatnull} {alpha} ',
          flush=True)

    # filenames (for I/O)
    spins_fn = SPDIR / parcellation / spatnull / f'{scale}_spins.csv'
    pvals_fn = (SIMDIR / alpha / parcellation / 'nulls' / spatnull
                / f'{scale}_nulls.csv')
    perms_fn = pvals_fn.parent / f'{scale}_perms.csv'

    if SHUFFLE:
        pvals_fn = pvals_fn.parent / f'{scale}_nulls_shuffle.csv'
        perms_fn = perms_fn.parent / f'{scale}_perms_shuffle.csv'

    if pvals_fn.exists() and perms_fn.exists():
        return

    # load simulated data
    alphadir = SIMDIR / alpha
    if parcellation == 'vertex':
        x, y = simnulls.load_vertex_data(alphadir, n_sim=N_SIM)
    else:
        x, y = simnulls.load_parc_data(alphadir, parcellation, scale,
                                       n_sim=N_SIM)

    # if we're computing info on SHUFFLED data, get the appropriate random `y`
    if SHUFFLE:
        y = _get_ysim(y, np.random.default_rng(1).permutation(N_SIM))

    # calculate the null p-values
    if spatnull == 'naive-para':
        pvals = nnstats.efficient_pearsonr(x, y, nan_policy='omit')[1]
        perms = np.array([np.nan])
    elif spatnull == 'cornblath':
        fn = SPDIR / 'vertex' / 'vazquez-rodriguez' / 'fsaverage5_spins.csv'
        x, y = np.asarray(x), np.asarray(y)
        spins = simnulls.load_spins(fn, n_perm=N_PERM)
        fetcher = getattr(nndata, f"fetch_{parcellation.replace('atl-', '')}")
        annot = fetcher('fsaverage5', data_dir=ROIDIR)[scale]
        out = Parallel(n_jobs=N_PROC, max_nbytes=None)(
            delayed(_cornblath)(x[:, sim], y[:, sim], spins, annot)
            for sim in putils.trange(x.shape[-1], desc='Running simulations')
        )
        pvals, perms = zip(*out)
    elif spatnull == 'baum':
        x, y = np.asarray(x), np.asarray(y)
        spins = simnulls.load_spins(spins_fn, n_perm=N_PERM)
        out = Parallel(n_jobs=N_PROC, max_nbytes=None)(
            delayed(_baum)(x[:, sim], y[:, sim], spins)
            for sim in putils.trange(x.shape[-1], desc='Running simulations')
        )
        pvals, perms = zip(*out)
    elif spatnull in ('burt2018', 'burt2020', 'moran'):
        xarr = np.asarray(x)
        out = Parallel(n_jobs=N_PROC, max_nbytes=None)(
            delayed(_genmod)(xarr[:, sim], _get_ysim(y, sim),
                             parcellation, scale, spatnull)
            for sim in putils.trange(x.shape[-1], desc='Running simulations')
        )
        pvals, perms = zip(*out)
    else:  # vazquez-rodriguez, vasa, hungarian, naive-nonpara
        x, y = np.asarray(x), np.asarray(y)
        spins = simnulls.load_spins(spins_fn, n_perm=N_PERM)
        out = Parallel(n_jobs=N_PROC, max_nbytes=None)(
            delayed(simnulls.calc_pval)(x[:, sim], y[:, sim], y[spins, sim])
            for sim in putils.trange(x.shape[-1], desc='Running simulations')
        )
        pvals, perms = zip(*out)

    # save to disk
    putils.save_dir(perms_fn, np.atleast_1d(perms), overwrite=False)
    putils.save_dir(pvals_fn, np.atleast_1d(pvals), overwrite=False)
Example #10
def run_null(parcellation, scale, spatnull, alpha, sim):
    """
    Runs spatial null models for given combination of inputs

    Parameters
    ----------
    parcellation : str
        Name of parcellation to be used
    scale : str
        Scale of `parcellation` to be used
    spatnull : str
        Name of spin method to be used
    alpha : float
        Spatial autocorrelation parameter to be used
    sim : int
        Which simulation to run
    """

    print(
        f'{time.ctime()}: {parcellation} {scale} {spatnull} {alpha} '
        f'sim-{sim} ',
        flush=True)

    # filenames (for I/O)
    spins_fn = SPDIR / parcellation / spatnull / f'{scale}_spins.csv'
    pvals_fn = (SIMDIR / alpha / parcellation / 'nulls' / spatnull / 'pvals' /
                f'{scale}_nulls_{sim:04d}.csv')
    perms_fn = pvals_fn.parent / f'{scale}_perms_{sim:04d}.csv'
    moran_fn = pvals_fn.parent / f'{scale}_moran_{sim:04d}.csv'

    # load simulated data
    alphadir = SIMDIR / alpha
    if parcellation == 'vertex':
        loadfn = functools.partial(simnulls.load_vertex_data, alphadir)
    else:
        loadfn = functools.partial(simnulls.load_parc_data, alphadir,
                                   parcellation, scale)
    x, y = loadfn(sim=sim)

    # if we're computing info on SHUFFLED data, get the appropriate random `y`
    if SHUFFLE:
        _, y = loadfn(sim=np.random.default_rng(1).permutation(N_SIM)[sim])
        pvals_fn = pvals_fn.parent / f'{scale}_nulls_shuffle_{sim:04d}.csv'
        perms_fn = pvals_fn.parent / f'{scale}_perms_shuffle_{sim:04d}.csv'
        moran_fn = pvals_fn.parent / f'{scale}_moran_shuffle_{sim:04d}.csv'

    # if we're going to run moran for this simulation, pre-load distmat
    if RUN_MORAN and not moran_fn.exists():
        dist = simnulls.load_full_distmat(y, DISTDIR, parcellation, scale)

    # calculate the null p-values
    nulls = None
    if pvals_fn.exists() and perms_fn.exists():
        pvals, perms = np.loadtxt(pvals_fn), np.loadtxt(perms_fn)
    elif spatnull == 'naive-para':
        pvals = nnstats.efficient_pearsonr(x, y, nan_policy='omit')[1]
        perms = np.array([np.nan])
    elif spatnull == 'cornblath':
        fn = SPDIR / 'vertex' / 'vazquez-rodriguez' / 'fsaverage5_spins.csv'
        x, y = np.asarray(x), np.asarray(y)
        spins = simnulls.load_spins(fn, n_perm=N_PERM)
        fetcher = getattr(nndata, f"fetch_{parcellation.replace('atl-', '')}")
        annot = fetcher('fsaverage5', data_dir=ROIDIR)[scale]
        nulls = nnsurf.spin_data(y,
                                 version='fsaverage5',
                                 lhannot=annot.lh,
                                 rhannot=annot.rh,
                                 spins=spins,
                                 n_rotate=spins.shape[-1])
        pvals, perms = simnulls.calc_pval(x, y, nulls)
    elif spatnull == 'baum':
        x, y = np.asarray(x), np.asarray(y)
        spins = simnulls.load_spins(spins_fn, n_perm=N_PERM)
        nulls = y[spins]
        nulls[spins == -1] = np.nan
        pvals, perms = simnulls.calc_pval(x, y, nulls)
    elif spatnull in ('burt2018', 'burt2020', 'moran'):
        nulls = make_surrogates(y, parcellation, scale, spatnull)
        pvals, perms = simnulls.calc_pval(x, y, nulls)
    else:  # vazquez-rodriguez, vasa, hungarian, naive-nonpara
        x, y = np.asarray(x), np.asarray(y)
        spins = simnulls.load_spins(spins_fn, n_perm=N_PERM)
        nulls = y[spins]
        pvals, perms = simnulls.calc_pval(x, y, nulls)

    # save to disk
    putils.save_dir(perms_fn, np.atleast_1d(perms), overwrite=False)
    putils.save_dir(pvals_fn, np.atleast_1d(pvals), overwrite=False)

    # if we're running moran, do it now
    if RUN_MORAN and not moran_fn.exists() and nulls is not None:
        moran = simnulls.calc_moran(dist, nulls, n_jobs=N_PROC)
        putils.save_dir(moran_fn, np.atleast_1d(moran), overwrite=False)
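Because this last variant handles a single simulation at a time, it lends itself to an array-job style driver; a minimal serial sketch, assuming the same module-level constants as the other examples:

# hypothetical serial driver; on a cluster, each `sim` would be its own job
for sim in range(N_SIM):
    run_null('atl-cammoun2012', 'scale125', 'burt2020', 'alpha-2.0', sim)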