Example #1
def cohend(d1, d2):
	# calculate the size of samples
	n1, n2 = len(d1), len(d2)
	# calculate the variance of the samples
	s1, s2 = math.var(d1, ddof=1), math.var(d2, ddof=1)
	# calculate the pooled standard deviation
	s = math.sqrt(((n1 - 1) * s1 + (n2 - 1) * s2) / (n1 + n2 - 2))
	# calculate the means of the samples
	u1, u2 = math.mean(d1), math.mean(d2)
	# calculate the effect size
	return (u1 - u2) / s
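The comments above spell out the pooled-standard-deviation form of Cohen's d. As a sanity check, here is a minimal self-contained sketch of the same computation; it assumes the `math` object used above is a NumPy-like module (the standard-library `math` has no `mean` or `var`), so NumPy is used explicitly and the name `cohend_np` is hypothetical:

from math import sqrt

import numpy as np

def cohend_np(d1, d2):
    # pooled standard deviation with Bessel's correction (ddof=1)
    n1, n2 = len(d1), len(d2)
    s1, s2 = np.var(d1, ddof=1), np.var(d2, ddof=1)
    s = sqrt(((n1 - 1) * s1 + (n2 - 1) * s2) / (n1 + n2 - 2))
    return (np.mean(d1) - np.mean(d2)) / s

# means differ by 1 and the pooled standard deviation is ~2.58, so this prints ~0.39
print(cohend_np([2.0, 4.0, 6.0, 8.0], [1.0, 3.0, 5.0, 7.0]))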
Example #2
    def update_moving_ave(self, receiver):
        """
        Calculate the average RSSI using the standard mean of the finite_queue.

        Returns:
        The float average, or None if there are no entries.
        """

        if len(self.rssi_val[receiver]) < 1:
            self.ave_rssi[receiver] = None
        else:
            self.ave_rssi[receiver] = math.mean(self.rssi_val[receiver])
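The same None-guard pattern in isolation, as a minimal sketch using statistics.mean, with a hypothetical readings dict standing in for self.rssi_val:

from statistics import mean

readings = {"rx1": [-61.0, -63.5, -60.2], "rx2": []}   # hypothetical per-receiver RSSI samples

for receiver, samples in readings.items():
    avg = mean(samples) if samples else None   # None when a receiver has no entries
    print(receiver, avg)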
Example #3
def summarize_tuples(pairs, how='min'):
    """summarize collection of tuple with list values"""
    summarized_pairs = []
    for k, v in pairs.items():
        if how == 'min':
            summarized_value = min(v)
        elif how == 'max':
            summarized_value = max(v)
        elif how == 'mean':
            summarized_value = math.mean(v)
        else:
            raise ValueError(f"unsupported 'how': {how}")
        summarized_pairs.append((k[0], k[1], summarized_value))
    return summarized_pairs
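A hypothetical call illustrating the expected input (a dict keyed by 2-tuples, with list values) and the flattened 3-tuples the function returns; the keys and values are made up:

pairs = {("store_a", "widget"): [3, 1, 2], ("store_b", "widget"): [5, 4]}
print(summarize_tuples(pairs, how='min'))   # [('store_a', 'widget', 1), ('store_b', 'widget', 4)]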
Example #4
    def getAverageTime(self, startStation: str, endStation: str) -> float:
        if (startStation, endStation) in self.times:
            return math.mean(self.times[(startStation, endStation)])
        else:
            return None




# Your UndergroundSystem object will be instantiated and called as such:
# obj = UndergroundSystem()
# obj.checkIn(id,stationName,t)
# obj.checkOut(id,stationName,t)
# param_3 = obj.getAverageTime(startStation,endStation)
Example #5
    def median_item_price(self):
        total_items = self.repeated_price_list()   # assumed to return prices in sorted order
        dataLen = len(total_items)

        #if length is odd, take the middle value, which sits at zero-based
        #index length // 2 (integer division)
        if (dataLen % 2) == 1:
            return total_items[dataLen // 2]

        #if the length is even, take the mean of the middle two values, which
        #sit at zero-based indices length/2 - 1 and length/2
        elif (dataLen % 2) == 0:
            return math.mean([
                total_items[int(dataLen / 2 - 1)],
                total_items[int(dataLen / 2)]
            ])
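For reference, the same even/odd index arithmetic on a plain sorted list, using statistics.mean in place of the module-level math.mean assumed above; the helper name is hypothetical:

from statistics import mean

def median_of_sorted(values):
    n = len(values)
    if n % 2 == 1:
        return values[n // 2]                            # e.g. 5 items -> index 2
    return mean([values[n // 2 - 1], values[n // 2]])    # e.g. 4 items -> indices 1 and 2

print(median_of_sorted([1, 2, 3, 4]))   # 2.5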
Example #6
 def T(self):
     return self.K * math.mean(self._mi)
Example #7
def normalize(x):
    # print the input scaled by its root mean square (a small epsilon avoids division by zero)
    print(x / (math.sqrt(math.mean(math.square(x))) + 1e-5))
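The function above divides its input by the root mean square of its values, plus a small epsilon for numerical stability. A minimal NumPy equivalent, assuming the `math` module it calls is NumPy-like:

import numpy as np

x = np.array([3.0, -4.0])
# RMS of [3, -4] is sqrt((9 + 16) / 2) = sqrt(12.5) ~= 3.536
print(x / (np.sqrt(np.mean(np.square(x))) + 1e-5))   # ~[ 0.8485 -1.1314]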
Example #8
[x for x in range(3, 15, 3)]

"asdf+sdf".split("+")

a = (1, 2, 3)
a + (1, )

import math
math.ceil(10.3)

s = set()
s.add(3)
s
import math
math.mean([1, 2, 3])   # AttributeError: the standard-library math module has no mean(); statistics.mean works here
round(2.5)

arr = []

length = len(arr)
i, j = 0, length - 1
# lower-bound binary search for a target k (not defined in this snippet); with arr empty the loop never runs
while (i < j):
    mid = (i + j) // 2
    if arr[mid] < k:
        i = mid + 1
    elif arr[mid] >= k:
        j = mid

ls = [
    22, 22, 22, 16, 30, 22, 28, 27, 22, 4, 34, 40, 5, 22, 48, 41, 1, 42, 37,
Example #9
def geometric_mean(x, shift=0):
    """Calculates geometric mean with shift parameter."""
    return math.exp(math.mean(math.log(x + shift))) - shift
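A quick check of the exp-of-mean-of-logs identity with shift=0 (the shift presumably exists to accommodate zeros in the data), written with NumPy explicitly:

import numpy as np

x = np.array([1.0, 2.0, 8.0])
# exp(mean(log(x))) equals the cube root of the product 1 * 2 * 8 = 16
print(np.exp(np.mean(np.log(x))))     # ~2.5198
print(np.prod(x) ** (1 / len(x)))     # ~2.5198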
Example #10
def _transform_value(px, format):
  if format == FORMAT_RGB:
    return (int(round(px[0] / 255.0)), int(round(px[1] / 255.0)), int(round(px[2] / 255.0)))
  else:
    return int(round(math.mean(px) / 255.0))
Example #11
 def get_student_average_gpa(self, weighted=True):
     gpa = self.get_student_gpa(weighted=weighted)
     return math.mean(gpa)
Example #12
def pool(dim,
         tensor,
         kernel_size=3,
         stride=None,
         dilation=1,
         padding=0,
         bound='zero',
         reduction='mean',
         return_indices=False,
         affine=None):
    """Perform a pooling

    Parameters
    ----------
    dim : {1, 2, 3}
        Number of spatial dimensions
    tensor : (*batch, *spatial_in) tensor
        Input tensor
    kernel_size : int or sequence[int], default=3
        Size of the pooling window
    stride : int or sequence[int], default=`kernel_size`
        Strides between output elements.
    dilation : int or sequence[int], default=1
        Strides between elements of the kernel.
    padding : 'auto' or int or sequence[int], default=0
        Padding performed before the convolution.
        If 'auto', the padding is chosen such that the shape of the
        output tensor is `spatial_in // stride`.
    bound : str, default='zero'
        Boundary conditions used in the padding.
    reduction : {'mean', 'max', 'min', 'median', 'sum'} or callable, default='mean'
        Function to apply to the elements in a window.
    return_indices : bool, default=False
        Return input index of the min/max/median element.
        For other types of reduction, return None.
    affine : (..., D+1, D+1) tensor, optional
        Input orientation matrix

    Returns
    -------
    pooled : (*batch, *spatial_out) tensor
    indices : (*batch, *spatial_out, dim) tensor, if `return_indices`
    affine : (..., D+1, D+1) tensor, if `affine`

    """
    # move everything to the same dtype/device
    tensor = torch.as_tensor(tensor)

    # sanity checks + reshape for torch's conv
    batch = tensor.shape[:-dim]
    spatial_in = tensor.shape[-dim:]
    tensor = tensor.reshape([-1, *spatial_in])

    # Perform padding
    kernel_size = make_list(kernel_size, dim)
    stride = make_list(stride or None, dim)
    stride = [st or ks for st, ks in zip(stride, kernel_size)]
    dilation = make_list(dilation or 1, dim)
    padding = make_list(padding, dim)
    padding0 = padding  # save it to update the affine
    for i in range(dim):
        if isinstance(padding[i], str) and padding[i].lower() == 'auto':
            if kernel_size[i] % 2 == 0:
                raise ValueError('Cannot compute automatic padding '
                                 'for even-sized kernels.')
            padding[i] = ((kernel_size[i] - 1) * dilation[i] + 1) // 2

    use_torch = reduction in ('mean', 'avg', 'max') and dim in (1, 2, 3)

    if (not use_torch) or bound != 'zero' and sum(padding) > 0:
        # torch implementation -> handles zero-padding
        # our implementation -> needs explicit padding
        tensor = utils.pad(tensor, padding, bound, side='both')
        padding = [0] * dim

    return_indices0 = False
    pool_fn = reduction if callable(reduction) else None

    if reduction in ('mean', 'avg'):
        return_indices0 = True
        return_indices = False
        pool_fn = (F.avg_pool1d if dim == 1 else F.avg_pool2d
                   if dim == 2 else F.avg_pool3d if dim == 3 else None)
        if pool_fn:
            pool_fn0 = pool_fn
            pool_fn = lambda x, *a, **k: pool_fn0(
                x[:, None], *a, **k, padding=padding, dilation=dilation)[:, 0]
    elif reduction == 'max':
        pool_fn = (F.max_pool1d if dim == 1 else F.max_pool2d
                   if dim == 2 else F.max_pool3d if dim == 3 else None)
        if pool_fn:
            pool_fn0 = pool_fn
            pool_fn = lambda x, *a, **k: pool_fn0(
                x[:, None], *a, **k, padding=padding, dilation=dilation)[:, 0]

    if not pool_fn:
        if reduction not in ('min', 'max', 'median'):
            return_indices0 = True
            return_indices = False
        if reduction == 'mean':
            reduction = lambda x: math.mean(x, dim=-1)
        elif reduction == 'sum':
            reduction = lambda x: math.sum(x, dim=-1)
        elif reduction == 'min':
            reduction = lambda x: math.min(x, dim=-1)
        elif reduction == 'max':
            reduction = lambda x: math.max(x, dim=-1)
        elif reduction == 'median':
            reduction = lambda x: math.median(x, dim=-1)
        elif not callable(reduction):
            raise ValueError(f'Unknown reduction {reduction}')
        pool_fn = lambda *a, **k: _pool(*a, **k, reduction=reduction)

    outputs = []
    if return_indices:
        tensor, ind = pool_fn(tensor, kernel_size, stride=stride)
        ind = utils.ind2sub(ind, stride)
        ind = utils.movedim(ind, 0, -1)
        outputs.append(ind)
    else:
        tensor = pool_fn(tensor, kernel_size, stride=stride)
        if return_indices0:
            outputs.append(None)

    spatial_out = tensor.shape[-dim:]
    tensor = tensor.reshape([*batch, *spatial_out])
    outputs = [tensor, *outputs]

    if affine is not None:
        affine, _ = affine_conv(affine,
                                spatial_in,
                                kernel_size=kernel_size,
                                stride=stride,
                                padding=padding0,
                                dilation=dilation)
        outputs.append(affine)

    return outputs[0] if len(outputs) == 1 else tuple(outputs)
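A hypothetical call sketched from the signature and docstring alone; it assumes the surrounding module (torch, utils, make_list, and the rest of its imports) is available, so treat it as illustrative rather than guaranteed to run standalone:

import torch

img = torch.arange(16.).reshape(4, 4)   # a single 4x4 "image", no batch dimensions
# 2D max pooling with a 2x2 window; stride defaults to the kernel size
pooled = pool(2, img, kernel_size=2, reduction='max')
print(pooled.shape)   # expected: torch.Size([2, 2])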
Example #13
 def mean_friend_prediction(self):
     #see where to add this in the scheduler!!!!
     return (math.mean([f.prediction for f in self.friends]))