def _partial_mp_gammai_sparse(batch, matrix, idx, n, A, B, verbose):
    """Parallel helper function for MP gammai for sparse similarity matrices. 
    
    Please do not directly use this function, but invoke via 
    mutual_proximity_gammai()
    """
    log = ConsoleLogging()
    S_mp = lil_matrix((len(batch), n), dtype=np.float32)
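    # The result matrix holds only the rows of this batch (len(batch) x n);
    # row i corresponds to the global object index batch[i].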
    
    for i, b in enumerate(batch):
        if verbose and ((batch[i]+1)%1000 == 0 or batch[i]+1 == n 
                        or i == len(batch)-1 or i == 0):
            log.message("MP_gammai_sparse: {} of {}. On {}.".format(
                batch[i]+1, n, mp.current_process().name, flush=True))
        if b+1 >= n:
            continue  # nothing to do in the last row
        j_idx = slice(b+1, n)

        # Avoid fancy indexing for efficiency: extract the dense row segment once.
        S_ij = matrix[b, j_idx].toarray().ravel()
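        # MP combination under the independent Gamma hypothesis:
        # P(S_b <= s_bj) * P(S_j <= s_jb), with the per-object Gamma
        # parameters A and B estimated by the caller.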
        p1 = _local_gamcdf(S_ij, A[b], B[b])
        del S_ij
        
        S_ji = matrix[j_idx, b].toarray().ravel()  # column segment, for vectorization below
        p2 = _local_gamcdf(S_ji, A[j_idx], B[j_idx])
        del S_ji

        S_mp[i, j_idx] = (p1 * p2).ravel()
        # upper triangle only; mirrored by the caller
    
    return batch, S_mp


def _partial_mp_emp_sparse(batch, matrix, idx, n, verbose):
    """Parallel helper function for MP empiric for sparse similarity matrices. 
    
    Please do not directly use this function, but invoke via 
    mutual_proximity_empiric()
    """
    log = ConsoleLogging()
    S_mp = lil_matrix((len(batch), n), dtype=np.float32)
    
    # TODO implement faster version from serial MP emp sparse
    for i, b in enumerate(batch):
        if verbose and ((batch[i]+1)%1000 == 0 or batch[i] == n-1 
                        or i == len(batch)-1 or i == 0):
            log.message("MP_empiric_sparse: {} of {}. On {}.".format(
                batch[i]+1, n, mp.current_process().name), flush=True)
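        # For each j > b with non-zero similarity, the empirical MP is the
        # fraction of objects k (non-zero in both rows b and j) whose
        # similarities s_bk and s_jk do not exceed s_bj.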
        dI = matrix.getrow(b).toarray()  # dense row of object b, reused for all j
        for j in range(b+1, n):
            d = matrix[b, j]
            if d <= 0:
                continue  # skip zero entries
            dJ = matrix.getrow(j).toarray()
            # elements that are non-zero in both rows
            nz = (dI > 0) & (dJ > 0)
            S_mp[i, j] = (nz & (dI <= d) & (dJ <= d)).sum() / (nz.sum() - 1)
            # upper triangle only; mirrored by the caller
    
    return batch, S_mp


def _partial_mp_gaussi_sparse(batch, matrix, idx, n, mu, sd, verbose):
    """Parallel helper function for MP gaussi for sparse similarity matrices. 
    
    Please do not directly use this function, but invoke via 
    mutual_proximity_gaussi()
    """
    log = ConsoleLogging()
    Dmp = lil_matrix((len(batch), n), dtype=np.float32)
    
    #non-vectorized code
    for i, b in enumerate(batch):
        if verbose and ((batch[i]+1)%1000 == 0 or batch[i]+1 == n
                        or i == len(batch)-1 or i == 0):
            log.message("MP_gaussi_sparse: {} of {}. On {}.".format(
                batch[i]+1, n, mp.current_process().name), flush=True)
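        # MP under the independent normal hypothesis: each object's similarities
        # are modeled as a normal distribution with per-object mean mu and
        # standard deviation sd; the MP of s_bj is the product of both CDF values.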
        for j in range(b+1, n):
            if matrix[b, j] > 0:
                p1 = norm.cdf(matrix[b, j], mu[b], sd[b])
                p2 = norm.cdf(matrix[j, b], mu[j], sd[j])
                Dmp[i, j] = p1 * p2

    return batch, Dmp
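

    # Each test below exercises one ConsoleLogging level; there are no
    # assertions, so they only verify that logging does not raise.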
    def test_error(self):
        log = ConsoleLogging()
        log.error("Error")
        return self

    def test_warning(self):
        log = ConsoleLogging()
        log.warning("Warning")
        return self

    def test_message(self):
        log = ConsoleLogging()
        log.message("Message")
        return self