Example #1
File: p214.py Project: icot/euler
def phichain(n):
    buf = phi(n)
    nl = 1
    while buf > 1:
        buf = phi(buf)
        nl += 1
    return nl
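This and the other Project Euler snippets below import phi from a project-local utils module that is not shown; there it plays the role of Euler's totient function φ(n). Purely as a stand-in for the missing helper, a minimal totient via trial-division factorization might look like this (an illustrative sketch, not the actual utils.phi):

def phi(n):
    """Euler's totient via trial division (illustrative stand-in for utils.phi)."""
    result = n
    p = 2
    while p * p <= n:
        if n % p == 0:
            while n % p == 0:
                n //= p
            result -= result // p  # multiply result by (1 - 1/p)
        p += 1
    if n > 1:
        result -= result // n  # one remaining prime factor
    return result

With this stand-in, phichain(5) walks 5 -> 4 -> 2 -> 1 and returns 3, since the count starts at phi(n) rather than at n itself.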
Example #2
def updateVertex(current, node, grid, obs):
    if current.parent and lineOfSight(current.parent, node, obs) \
                    and current.lb <= phi(current, current.parent, node) <= current.ub \
                    and not pathTie(node, current):
        # Path 2
        # If in line of sight, we connect to the parent; this avoids unnecessary grid turns
        new_g = current.parent.G + dist(current.parent, node)
        showPath2 = True
        if new_g < node.G:
            node.G = new_g
            node.parent = current.parent
            node.local = current
            neighbors = list(
                map(lambda nb: phi(node, current.parent, nb),
                    children(node, grid, obs, crossbar=True)))
            l = min(neighbors)
            h = max(neighbors)
            delta = phi(current, current.parent, node)
            node.lb = max(l, current.lb - delta)
            node.ub = min(h, current.ub - delta)
    else:
        # Path 1
        showPath2 = False
        new_g = current.G + dist(current, node)
        if new_g < node.G:
            node.G = new_g
            node.parent = current
            node.local = current
            node.lb = -45
            node.ub = 45

    return showPath2
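The path-planning snippets (this one, Example #10, and Example #15) treat phi(a, b, c) as an angle in degrees: the lb/ub bounds default to -45 and 45, and Example #15 feeds the result straight into a matplotlib Wedge. The project's own phi is not shown; below is a minimal sketch of one implementation consistent with that usage, assuming each argument is either a node exposing a 2-D .pos or a plain [x, y] pair.

import math

def phi(a, b, c):
    """Illustrative sketch: signed angle in degrees at vertex b, measured from
    direction b->a to direction b->c. Not the project's actual implementation."""
    ax, ay = a.pos if hasattr(a, 'pos') else a
    bx, by = b.pos if hasattr(b, 'pos') else b
    cx, cy = c.pos if hasattr(c, 'pos') else c
    angle = math.degrees(math.atan2(cy - by, cx - bx) - math.atan2(ay - by, ax - bx))
    # wrap into (-180, 180]
    if angle > 180:
        angle -= 360
    elif angle <= -180:
        angle += 360
    return angle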
Example #3
import numpy
from scipy.spatial import cKDTree


def _calc_rho(lb, ub, prot_heavies, water_ow, cutoff, sigma, gridpts, npts, rho_prot_bulk, rho_water_bulk):
    cutoff_sq = cutoff**2
    sigma_sq = sigma**2
    block = ub - lb
    rho_prot_slice = numpy.zeros((block, npts), dtype=numpy.float32)
    rho_water_slice = numpy.zeros((block, npts), dtype=numpy.float32)
    rho_slice = numpy.zeros((block, npts), dtype=numpy.float32)

    # KD tree for nearest neighbor search
    tree = cKDTree(gridpts)

    # i is frame
    for i in xrange(block):

        # position of each atom at frame i
        for pos in prot_heavies[i]:

            #pos = atom.position
            # Indices of all gridpoints within cutoff of atom's position
            neighboridx = numpy.array(tree.query_ball_point(pos, cutoff))
            if neighboridx.size == 0:
                continue
            neighborpts = gridpts[neighboridx]

            dist_vectors = neighborpts[:, ...] - pos

            # Distance array between atom and neighbor grid points
            #distarr = scipy.spatial.distance.cdist(pos.reshape(1,3), neighborpts,
            #                                       'sqeuclidean').reshape(neighboridx.shape)

            phivals = phi(dist_vectors, sigma, sigma_sq, cutoff, cutoff_sq)

            rho_prot_slice[i, neighboridx] += phivals

        for pos in water_ow[i]:
            neighboridx = numpy.array(tree.query_ball_point(pos, cutoff))
            if neighboridx.size == 0:
                continue
            neighborpts = gridpts[neighboridx]

            dist_vectors = neighborpts[:, ...] - pos
            # Distance array between atom and neighbor grid points
            # distarr = scipy.spatial.distance.cdist(pos.reshape(1,3),
            #       neighborpts,'sqeuclidean').reshape(neighboridx.shape)

            phivals = phi(dist_vectors, sigma, sigma_sq, cutoff, cutoff_sq)

            rho_water_slice[i, neighboridx] += phivals

        # Can probably move this out of here and perform at end
        rho_slice[i, :] = rho_prot_slice[i, :]/rho_prot_bulk \
            + rho_water_slice[i, :]/rho_water_bulk

    return (rho_slice, lb, ub)
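Example #3 calls phi(dist_vectors, sigma, sigma_sq, cutoff, cutoff_sq) to turn atom-to-gridpoint displacement vectors into per-gridpoint density contributions, but that helper is not shown. The sketch below is one plausible form, a Gaussian of the distance truncated and shifted to zero at the cutoff; the shape and normalization are assumptions, not the project's actual function.

import numpy

def phi(dist_vectors, sigma, sigma_sq, cutoff, cutoff_sq):
    # Illustrative sketch only: Gaussian kernel of the displacement length,
    # shifted so it reaches zero exactly at the cutoff and is zero beyond it.
    r_sq = numpy.sum(dist_vectors * dist_vectors, axis=-1)
    vals = numpy.exp(-r_sq / (2.0 * sigma_sq)) - numpy.exp(-cutoff_sq / (2.0 * sigma_sq))
    vals[r_sq > cutoff_sq] = 0.0
    return vals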
Example #4
    def __init__(self, prm_add=[], prm_lin=[], prm_lik=[]):
        """ x : p[0]*x
        Constructor builds and stores the biasing functions from the input parameters
        """
        self.prm_add, self.prm_lin, self.prm_lik = prm_add, prm_lin, prm_lik
        self.bias = lambda w: lambda x: w*x
        self.lik = lambda l: lambda x: l/2 + (1-l)*phi(x)
        a = [self.bias(p) for p in prm_add]
        super(LinAdditiveModel, self).__init__(a, prm_lin, self.lik(prm_lik))
Example #5
    def bernD(self, df, d):
        """
        Response probability for a sequence implicitly described by df, d1, ..., dtau
        """
        assert isinstance(d, list)
        alpha = np.zeros(df.shape)
        for a_, d_ in zip(self.a, d):
            alpha += a_(d_)
        return phi(df/self.s - alpha)
Example #6
def cumsum_test(bsq, reverse=False):
  # Cumulative sums (cusum) test: map the bits to +/-1 steps, track the maximum
  # absolute partial sum z, and compute the p-value from the standard normal CDF phi.
  n = len(bsq)
  st = 0
  z = -1
  bsqc = [ 2*x-1 for x in bsq ]
  for i in range(n):
    if reverse:
      st += bsqc[n-i-1]
    else:
      st += bsqc[i]
    if abs(st) > z:
      z = abs(st)
  # floor division in the summation bounds so the range limits stay integral
  t1 = sum([ phi(((4*k+1)*z)/sqrt(n)) - phi(((4*k-1)*z)/sqrt(n)) for k in
           range((-n//z+1)//4, (n//z-1)//4+1)])
  t2 = sum([ phi(((4*k+3)*z)/sqrt(n)) - phi(((4*k+1)*z)/sqrt(n)) for k in
           range((-n//z-3)//4, (n//z-1)//4+1)])
  p_value = 1 - t1 + t2
  reason  = "A random walk significantly deviates from its origin."
  return p_value, reason
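Here phi plays the role of the standard normal CDF Φ; the probit helper in Example #14 writes out the same formula with erf, and the response-probability methods above and below appear to use the same convention. A minimal stand-in under that assumption:

from math import erf, sqrt

def phi(x):
    # Standard normal CDF: P(Z <= x) for Z ~ N(0, 1).
    return 0.5 * (1.0 + erf(x / sqrt(2.0)))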
Example #7
    def bern(self, f1, f2, n_samp=None):
        """
        Response probability for a sequence of pure tones
        """
        df = f2 - f1  # the tone interval in the current trial
        alpha = np.zeros(f1.shape)
        for i_a, a_ in enumerate(self.a):
            d = np.zeros(f1.shape)
            d[i_a+1:] = f1[i_a+1:] - 0.5*(f1+f2)[:-(i_a+1)]  # distance of f1 to previous trials
            alpha += a_(d)
        return phi(df/self.s - alpha)
Example #8
    def predict(self, X, bayes=False, logistic=True):
        betas = self.beta_samples
        if not bayes:
            betas = tf.reduce_mean(betas, axis=0, keep_dims=True)

        ## Linear regression
        if not logistic:
            # return mean and uncertainty
            return tf.reduce_mean(X @ tf.transpose(betas), axis=1)

        ## Logistic Regression
        # evaluate activations for each parameter
        a = tf.einsum('ND,SD->NS', X, betas)
        logistic_log_probs = utils.phi(tf.ones(a.shape, dtype=tf.float64), a)

        return tf.reduce_logsumexp(logistic_log_probs, axis=1) - tf.log(
            np.float64(int(betas.shape[0])))
Example #9
from utils import factorize, phi

limit = 10**6

for n in range(limit + 1, 2 * limit):
    if n % 2 == 0 or n % 5 == 0: continue
    a = phi(9 * n)
    if a <= limit: continue
    valid = True
    for i in range(2, int(a**0.5) + 1):
        if a % i: continue
        if pow(10, i, 9 * n) == 1 and i <= limit:
            valid = False
        f = a // i
        if pow(10, f, 9 * n) == 1 and f <= limit:
            valid = False

    if valid:
        print n
        break
Example #10
def pathTie(node, current):
    # Tie-breaking is disabled: the early return makes the angle check below unreachable.
    return False
    angle = abs(phi(current.parent, current, node))
    return math.isclose(angle, 180, abs_tol=1e-6)
Example #11
from utils import phi

limit = 10**6
res = 0
for n in range(2, limit+1):
    res += phi(n)
print(res)
Example #12
File: p69.py Project: icot/euler
def worker(n):
    return (n, n/phi(n))
Example #13
# Problem 69
from utils import phi

# real 0m46.118s
# user 0m45.483s
# sys  0m0.283s

max_val = 0
max_index = 0
for n in range(1,1000000):
    ratio = n/phi(n)
    if ratio > max_val:
        max_val = ratio
        max_index = n
    
print(max_index,max_val)
Example #14
#%matplotlib inline
from pymc3 import *
import numpy as np
import matplotlib.pyplot as plt
from utils import phi
import theano.tensor as tsr


# Generating synthetic data
N = 100
x = np.random.randn(N)
s_true = 0.1
a_true = 0.1
b = phi(x / s_true - a_true)
y = np.random.rand(N) < b


def probit_phi(x):
    """ Probit transform assuming 0 mean and 1 sd """
    mu, sd = 0, 1
    return 0.5 * (1 + tsr.erf((x - mu) / (sd * tsr.sqrt(2))))

with Model() as model: # model specifications in PyMC3 are wrapped in a with-statement
    # Define priors
    sigma = HalfCauchy('sigma', beta=10, testval=1.)
    intercept = Normal('Intercept', 0, sd=20)
    
    # Define likelihood
    likelihood = Bernoulli('y', p=probit_phi(x/sigma-intercept), observed=y)
    
    # Inference!
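The snippet stops at the inference step. One possible continuation (an assumption for illustration, not part of the original example) is to draw posterior samples inside the model context and plot them with PyMC3's built-in helpers:

with model:
    # hypothetical continuation: draw posterior samples with the default sampler
    trace = sample(2000)

traceplot(trace)
plt.show()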
Example #15
def display(start=None,
            goal=None,
            grid=[],
            grid_obs=[],
            path=[],
            nodes=[],
            point=None,
            point2=None,
            showPath2=True,
            hold=False):
    print('  Plotting...')
    ax.clear()
    if len(grid) != 0:
        ax.set_xlim(-0.5, grid.shape[0])
        ax.set_ylim(-0.5, grid.shape[1])
    elif len(grid_obs) != 0:
        ax.set_xlim(-0.5, grid_obs.shape[0])
        ax.set_ylim(-0.5, grid_obs.shape[1])

    if len(grid_obs) != 0:
        obs = []
        x, y = np.mgrid[0:grid_obs.shape[0], 0:grid_obs.shape[1]]
        np.vectorize(
            lambda node, x, y: obs.append(patches.Rectangle([x, y], 1, 1))
            if node == Node.OBSTACLE else None)(grid_obs, x, y)
        # obs = [patches.Rectangle([x, y], w, h) for x, y, w, h in extractRect(grid_obs)]
        ax.add_collection(collections.PatchCollection(obs))

    lines = []
    for node in nodes:
        pt_list = []
        while node.local:
            pt_list.append([node.pos[0], node.pos[1]])
            node = node.local
        pt_list.append([node.pos[0], node.pos[1]])
        lines.append(pt_list)
    ax.add_collection(
        collections.LineCollection(lines,
                                   colors='green',
                                   alpha=1 if len(path) == 0 else .5))

    lines = []
    for node in nodes:
        pt_list = []
        while node.parent:
            pt_list.append([node.pos[0], node.pos[1]])
            node = node.parent
        pt_list.append([node.pos[0], node.pos[1]])
        lines.append(pt_list)
    ax.add_collection(
        collections.LineCollection(lines,
                                   colors='red',
                                   alpha=1 if len(path) == 0 else .5))

    if start is not None:
        ax.add_patch(
            patches.Circle(start.pos if isinstance(start, Node) else start,
                           .3,
                           linewidth=1,
                           facecolor='green'))
    if goal is not None:
        ax.add_patch(
            patches.Circle(goal.pos if isinstance(goal, Node) else goal,
                           .3,
                           linewidth=1,
                           facecolor='blue'))
    if point:
        ax.add_patch(
            patches.Circle(point.pos, .3, linewidth=1, facecolor='red'))
    if point2:
        ax.add_patch(
            patches.Circle(point2.pos, .2, linewidth=1, facecolor='magenta'))

    if point and point2 and lineOfSightNeighbors(point.pos, point2.pos,
                                                 grid_obs):
        ax.add_patch(
            patches.Arrow(point.pos[0],
                          point.pos[1],
                          point2.pos[0] - point.pos[0],
                          point2.pos[1] - point.pos[1],
                          .4,
                          facecolor='red'))

    if point and point2 and point.parent and lineOfSight(
            point.parent, point2, grid_obs) and showPath2:
        ax.add_patch(
            patches.Arrow(point.parent.pos[0],
                          point.parent.pos[1],
                          point2.pos[0] - point.parent.pos[0],
                          point2.pos[1] - point.parent.pos[1],
                          .3,
                          facecolor='magenta'))

    if point and point2 and point.parent:
        rect = []
        for pt in supercover(point.parent, point2):
            rect.append(
                patches.Rectangle([pt[0], pt[1]],
                                  1,
                                  1,
                                  facecolor='black',
                                  alpha=.1))
        ax.add_collection(
            collections.PatchCollection(rect, match_original=True))

    if point and point.parent and showPath2:
        mid_angle = phi([point.parent.pos[0] + 1, point.parent.pos[1]],
                        point.parent, point)
        ax.add_patch(
            patches.Wedge(point.parent.pos,
                          5,
                          mid_angle + point.lb,
                          mid_angle + point.ub,
                          facecolor='cyan',
                          alpha=.3))

    if len(path) > 0 and isinstance(path[0], Node):
        local = []
        node = path[-1]
        while node.local:
            local.append(node.pos)
            node = node.local
        local.append(node.pos)
        local = np.array(local)
        path_ = np.array([p.pos for p in path])
        plt.plot(local[:, 0], local[:, 1], color='green', linewidth=3)
        plt.plot(path_[:, 0], path_[:, 1], color='red', linewidth=4)

        pts = []
        node = path[-1]
        while node.parent:
            pts += supercover(node, node.parent)
            node = node.parent
        ax.add_collection(
            collections.PatchCollection([
                patches.Rectangle([p[0], p[1]],
                                  1,
                                  1,
                                  linewidth=1,
                                  facecolor='orange',
                                  alpha=.5) for p in pts
            ],
                                        match_original=True))

    elif len(path) > 0:
        plt.plot(path[:, 0], path[:, 1], 'o-', color='red', linewidth=1)

    plt.title('Processing...')
    if hold and isinstance(hold, bool):
        plt.show()
    else:
        plt.pause(DISPLAY_DELAY if isinstance(hold, bool) else hold)
    print('End plot')
Example #16
from utils import primes, memoize, phi, arePermutations
from collections import defaultdict

print arePermutations("87109", "79180")
s = {}



nn = 100000000000
mm = 100000000000
for n in range(2, 10**7 + 1):
    s = phi(n)
    if n / float(s) < nn and arePermutations(str(int(s)), str(int(n))):
        nn = n / float(s)
        mm = n
        kk = s

print nn, mm, kk

print phi(99)
Example #17
from utils import phi

print max((n for n in xrange(1, 10**6 + 1)), key=lambda n: 1.0 * n / phi(n))
Example #18
# Problem 72
from utils import gcd
from utils import factor_count
from utils import phi

#real	0m46.211s
#user	0m44.369s
#sys	0m0.393s

# notes:
# primes introduce p-1 new fractions

# composites introduce phi(n) more, where phi(n) is the number
# of integers less than n that are relatively prime to n


print(sum(phi(n) for n in range(2,1000001)))
#total = 0
#for n in range(2,1000001):
#    total += phi(n)
#print(total)
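Both this example and Example #11 evaluate phi(n) independently for every n up to 10^6, which repeats the factorization work for each n. A sieve that fills in all totients in one pass is the usual speed-up; the sketch below is an illustration of that idea, not part of the original utils module.

def totient_sieve(limit):
    """Return a list t with t[n] = phi(n) for 0 <= n <= limit."""
    t = list(range(limit + 1))
    for p in range(2, limit + 1):
        if t[p] == p:  # p is prime: t[p] was never reduced by a smaller prime
            for multiple in range(p, limit + 1, p):
                t[multiple] -= t[multiple] // p  # multiply by (1 - 1/p)
    return t

# Usage: Problem 72's count is sum(totient_sieve(10**6)[2:])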
Example #19
import utils

utils.initialize_primes_cache(10**7 + 1)
print("Primes initialized", flush=True)

min_n = 10**7
min_ratio = 10**7
# note that n can't be prime: phi(p) = p-1 for prime p, which would minimize
# n/phi(n) but never satisfies the permutation condition (p and p-1 cannot be
# digit permutations of each other)
# skip even numbers: if n is even then phi(n) <= n/2, so n/phi(n) >= 2
for n in range(3, 10**7 + 1, 2):
    phi = utils.phi(n)
    if n / phi < min_ratio and utils.check_permutation(n, phi):
        min_ratio = n / phi
        min_n = n
        print((min_n, min_ratio), flush=True)

print(min_n)