import numpy as np
from math import log as mlog
from scipy.stats import rankdata


def KS_two_samp_test(ref_vals, sim_vals, alpha):
    """Two-sample Kolmogorov-Smirnov test; returns 'accept' or 'reject'."""
    assert not np.any(np.isnan(ref_vals))
    assert not np.any(np.isnan(sim_vals))
    assert not np.any(np.isnan([alpha]))
    assert ref_vals.ndim == 1
    assert sim_vals.ndim == 1
    assert isinstance(alpha, float)
    assert 0 < alpha < 1

    # Work on sorted copies so the inputs are left untouched.
    ref_vals = ref_vals.copy()
    sim_vals = sim_vals.copy()
    ref_vals.sort()
    sim_vals.sort()

    n_ref = ref_vals.shape[0]
    n_sim = sim_vals.shape[0]

    # Weibull plotting positions as empirical CDF estimates.
    ref_probs = rankdata(ref_vals) / (n_ref + 1)
    sim_probs = rankdata(sim_vals) / (n_sim + 1)

    # Critical KS distance; note that with (1 - alpha) here, `alpha`
    # behaves as a confidence level (e.g. 0.95) rather than a
    # significance level.
    c_alpha = (-0.5 * mlog((1 - alpha) * 0.5)) ** 0.5
    ks_lim = c_alpha * (((n_ref + n_sim) / (n_ref * n_sim)) ** 0.5)

    # Evaluate the reference ECDF at the simulated values and take the
    # maximum absolute difference between the two ECDFs.
    sim_probs_in_ref = np.interp(sim_vals, ref_vals, ref_probs)
    max_diff = np.abs(sim_probs_in_ref - sim_probs).max()

    return 'reject' if max_diff > ks_lim else 'accept'
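# Usage sketch for KS_two_samp_test, with synthetic data (an assumption
# for illustration; per the critical-value formula above, `alpha` is
# passed as a confidence level):
ref = np.random.normal(0.0, 1.0, size=500)
sim = np.random.normal(0.1, 1.0, size=400)
print(KS_two_samp_test(ref, sim, 0.95))  # 'accept' or 'reject'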
def _make_graph(self):
    phone_book = [[] for _ in range(self.world_size)]
    for rank in range(self.world_size):
        group = phone_book[rank]
        for i in range(0, int(mlog(self.world_size - 1, 2)) + 1):
            if i == 0:
                f_peer = self._rotate_forward(rank, 1)
                b_peer = self._rotate_backward(rank, 1)
            else:
                f_peer = self._rotate_forward(rank, 1 + 2 ** i)
                b_peer = self._rotate_backward(rank, 1 + 2 ** i)
            # create directory for non-passive peers
            if not self.is_passive(rank) and (
                    self.is_passive(f_peer) and self.is_passive(b_peer)):
                if f_peer not in group:
                    group.append(f_peer)  # forward peer...
                if b_peer not in group:
                    group.append(b_peer)  # then backward peer
            # create directory for passive peers
            elif self.is_passive(rank) and (
                    not (self.is_passive(f_peer) or self.is_passive(b_peer))):
                if b_peer not in group:
                    group.append(b_peer)  # backward peer...
                if f_peer not in group:
                    group.append(f_peer)  # then forward peer
    return phone_book
from math import log as mlog, sqrt as msqrt


def sqNear0(lf):
    """Approximate the integer square root of a (possibly huge) integer."""
    assert lf >= 0
    d = int(mlog(lf, 10))   # approximate number of decimal digits
    r0 = 10 ** (d // 2)     # first-guess scale for the root
    # Scale lf down by r0**2 (keeping 20 guard digits), take a float
    # sqrt, then scale back up by r0.
    fl = lf * 10 ** (2 * 10) // r0 ** 2
    fl = msqrt(fl)
    r1 = r0 * int(fl * 10 ** 10) // 10 ** 20
    return r1
def logLR(x, y):
    """Calculate the log10 likelihood ratio between H1 (enriched) and
    H0 (chromatin bias); the sign is flipped for depletion.
    """
    key_value = (x, y)
    if key_value in logLR_dict:  # dict.has_key() is Python 2 only
        return logLR_dict[key_value]
    else:
        if x > y:
            s = (x * (mlog(x + 1) - mlog(y + 1)) + y - x) * LOG10_E
        elif x < y:
            s = (-1 * x * (mlog(x + 1) - mlog(y + 1)) - y + x) * LOG10_E
        else:
            s = 0
        logLR_dict[key_value] = s
        return s
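# Usage sketch for logLR. `logLR_dict` and `LOG10_E` are module-level
# globals in the original code; the values below are assumptions made so
# the example is self-contained:
from math import log as mlog

logLR_dict = {}
LOG10_E = 0.43429448190325176  # log10(e)
print(logLR(10, 5))  # positive -> enrichment
print(logLR(5, 10))  # negative -> depletion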
def log_dd(r, e):
    """Compute the natural log of the double-double number (r, e).

    Starts from the double-precision estimate log(r) and applies one
    correction step: ln(x) ~= rr - 2*(exp(rr) - x)/(exp(rr) + x).
    """
    rr, re = mlog(r), 0.0
    ur, ue = exp_dd(rr, re)                       # u = exp(rr)
    tmpr, tmpe = add_dd(ur, ue, -r, -e)           # u - x
    denr, dene = add_dd(ur, ue, r, e)             # u + x
    tmpr, tmpe = div_dd(tmpr, tmpe, denr, dene)   # (u - x) / (u + x)
    tmpr, tmpe = mul_dd(2.0, 0.0, tmpr, tmpe)     # 2 * (u - x) / (u + x)
    return add_dd(rr, re, -tmpr, -tmpe)           # rr - correction
def _make_graph(self):
    for rank in range(self.world_size):
        for i in range(
                0, int(mlog(self.world_size - 1, self._peers_per_itr + 1)) + 1):
            for j in range(1, self._peers_per_itr + 1):
                distance_to_neighbor = j * ((self._peers_per_itr + 1) ** i)
                f_peer = self._rotate_forward(rank, distance_to_neighbor)
                self._add_peers(rank, [f_peer])
def _make_graph(self):
    phone_book = [[] for _ in range(self.world_size)]
    for rank in range(self.world_size):
        group = phone_book[rank]
        for i in range(0, int(mlog(self.world_size - 1, 2)) + 1):
            f_peer = self._rotate_forward(rank, 2 ** i)
            if f_peer not in group:
                group.append(f_peer)
            b_peer = self._rotate_backward(rank, 2 ** i)
            if b_peer not in group:
                group.append(b_peer)
    return phone_book
def entropy(self, Y):
    """Entropy of label vector Y, using log base len(labels) so the
    result is normalized to [0, 1]."""
    if Y.shape[0] == 0:
        return 0
    if all(Y == Y[0]):
        return 0
    labels = np.unique(Y)
    label_probs = []
    for l in labels:
        label_probs.append(np.mean(Y == l))
    entropy = 0
    for p in label_probs:
        entropy -= p * mlog(p, len(labels))
    return entropy
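# Worked check of the normalized entropy above (standalone sketch, not
# the original class): two equally frequent labels with log base 2 give
# entropy 1.0.
import numpy as np
from math import log as mlog

Y = np.array([0, 0, 1, 1])
probs = [np.mean(Y == l) for l in np.unique(Y)]
print(-sum(p * mlog(p, len(probs)) for p in probs))  # 1.0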
def _make_graph(self):
    for rank in range(self.world_size):
        for i in range(0, int(mlog(self.world_size - 1, 2)) + 1):
            if i == 0:
                f_peer = self._rotate_forward(rank, 1)
                b_peer = self._rotate_backward(rank, 1)
            else:
                f_peer = self._rotate_forward(rank, 1 + 2 ** i)
                b_peer = self._rotate_backward(rank, 1 + 2 ** i)
            # create directory for non-passive peers
            if not self.is_passive(rank) and (self.is_passive(f_peer)
                                              and self.is_passive(b_peer)):
                self._add_peers(rank, [f_peer, b_peer])
            # create directory for passive peers
            elif self.is_passive(rank) and (not (self.is_passive(f_peer)
                                                 or self.is_passive(b_peer))):
                self._add_peers(rank, [f_peer, b_peer])
def save_image(self, binary, extension, watermark):
    """Save an image with the given extension to the object's cache directory.

    Keyword arguments:
    binary: the binary image (raw from the GET request)
    extension: file extension to save under (e.g. "jpg")
    watermark: watermark to stamp onto the resized image
    """
    # Zero-pad filenames to the number of digits in cache_size.
    max_num_files = int(mlog(self.cache_size, 10) // 1 + 1)
    filename_mini = "image_%0*d" % (max_num_files, self.image_counter)
    filename_path = self.cache_dir + filename_mini
    filename = filename_path + "." + extension
    filename_orig = filename_path + "_orig." + extension
    self.logger.info("Saving to %s", filename)
    # Drop any stale files occupying this cache slot before writing.
    for to_remove in glob.glob(filename_path + "*.*"):
        os.remove(to_remove)
    image = Image.open(BytesIO(binary))
    image_resized = resize_image(image, self.resolution)
    self.logger.info("Resizing to resolution %s", self.resolution)
    image_watermarked = add_watermark(
        image_resized, watermark, self.resolution)
    try:
        image_watermarked.save(filename)
        image.save(filename_orig)
    except FileNotFoundError:
        self.logger.critical("Impossible to save at location %s, "
                             "ensure the folder exists.", filename)
        return False
    except PermissionError:
        self.logger.critical("Permission to write at location %s "
                             "was denied, ensure you have correct"
                             " rights.", filename)
        return False
    self.image_counter += 1
    if self.image_counter == self.cache_size:
        self.logger.info("Reached max cache size (%s), "
                         "starting to overwrite", self.cache_size)
        self.image_counter = 0
    return filename
def isFibonacci(num):
    from math import log as mlog
    # Approximate test via Binet's formula: 2.0768... is roughly 1/ln(phi)
    # and 2.237 is roughly sqrt(5), so `a` lands close to an integer
    # exactly when num is close to a Fibonacci number.
    a = 2.07684408521711 * mlog(2.237 * num)
    return (a % 1 * 100 > 90) or (a % 1 * 100 < 5)
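# Quick sanity check of the approximate test above (floating-point based,
# so it can misclassify large inputs):
for n in (5, 8, 13, 21, 22):
    print(n, isFibonacci(n))  # True for 5, 8, 13, 21; False for 22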
def ll_normal_es_py(o, m, e):
    """Normal log likelihood for scalar average standard deviation."""
    npt = o.size
    return -npt * mlog(e) - 0.5 * npt * LOG_TWO_PI \
        - 0.5 * square(o - m).sum() / e ** 2
def sqNear1(lf):
    # log10(lf) rounded down to an even integer.
    le = int(mlog(lf, 10) // 2) * 2
    if le < 100:
        # Small enough for an exact float sqrt.
        return int(msqrt(lf))
    # Shift down to roughly 100 digits, take the float sqrt, then shift
    # back up by half the removed power of ten.
    a = lf // 10 ** (le - 100)
    return int(msqrt(a)) * 10 ** ((le - 100) // 2)
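# Usage sketch for sqNear1 (and the closely related sqNear0 above): both
# return an integer near the square root of a large integer.
print(sqNear1(10 ** 6))                 # 1000 (exact for small inputs)
print(sqNear1(12345 ** 2 * 10 ** 120))  # ~ 12345 * 10**60 (approximate
                                        # for very large inputs)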
from math import ceil, log as mlog
from typing import Tuple


def get_k_c(kmer_max: int, count_max: int) -> Tuple[int, int]:
    # round_k: smallest power of two >= 2*x; round_c: bit width needed to
    # store counts up to x.
    round_k = lambda x: 2 ** (int(ceil(mlog(2 * x) / mlog(2))))
    round_c = lambda x: ceil(mlog(x + 1, 2))
    return round_k(kmer_max), round_k(round_c(count_max) / 2)
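# Usage sketch for get_k_c: with kmer_max=100 and count_max=1000,
# round_k(100) -> 256, round_c(1000) -> 10, and round_k(10 / 2) -> 16.
print(get_k_c(100, 1000))  # (256, 16)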
def _make_graph(self):
    for rank in range(self.world_size):
        for i in range(0, int(mlog(self.world_size - 1, 2)) + 1):
            f_peer = self._rotate_forward(rank, 2 ** i)
            b_peer = self._rotate_backward(rank, 2 ** i)
            self._add_peers(rank, [f_peer, b_peer])
def GSPTIME(gsum, rk):
    from math import log as mlog
    # Inverse-transform sampling: with u ~ Uniform(0, 1], the return
    # value t = ln(1/u) / gsum is exponentially distributed with rate gsum.
    lk = rk.uniform_pos()
    tnr = (1.0 / gsum) * mlog(1.0 / lk)
    return tnr
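# Usage sketch for GSPTIME (and mctime below, which is the same sampler
# under another name). `rk` is assumed to expose uniform_pos() returning
# a uniform draw in (0, 1]; the stub here is hypothetical:
import random

class _RngStub:
    def uniform_pos(self):
        return 1.0 - random.random()  # uniform in (0, 1]

print(GSPTIME(2.5, _RngStub()))  # one draw from Exponential(rate=2.5)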
def isFibonacci2(num):
    from math import log as mlog
    # Average the Binet-formula indices computed from the two
    # perfect-square tests 5*num**2 +/- 4 (1.618 is roughly phi); `a` is
    # near an integer only for Fibonacci numbers.
    a = (mlog((num * 5 ** (1 / 2) + (5 * num ** 2 + 4) ** (1 / 2)) / 2, 1.618)
         + mlog((num * 5 ** (1 / 2) + (5 * num ** 2 - 4) ** (1 / 2)) / 2, 1.618)) / 2
    return (a % 1 * 100 > 99) or (a % 1 * 100 < 1)
def mctime(rtsum, rk):
    from math import log as mlog
    # Exponential waiting time with rate rtsum (inverse-transform sampling).
    lk = rk.uniform_pos()
    tnr = (1.0 / rtsum) * mlog(1.0 / lk)
    return tnr