def mean_ad(candles: np.ndarray, period: int = 5, source_type: str = "hl2",
            sequential: bool = False) -> Union[float, np.ndarray]:
    """
    Mean Absolute Deviation

    :param candles: np.ndarray
    :param period: int - default: 5
    :param source_type: str - default: "hl2"
    :param sequential: bool - default: False

    :return: float | np.ndarray
    """
    if len(candles.shape) == 1:
        source = candles
    else:
        candles = slice_candles(candles, sequential)
        source = get_candle_source(candles, source_type=source_type)

    swv = sliding_window_view(source, window_shape=period)
    abs_diff = np.absolute(source - same_length(source, np.mean(swv, -1)))
    smv_abs_diff = sliding_window_view(abs_diff, window_shape=period)
    mean_abs_deviation = np.nanmean(smv_abs_diff, -1)
    res = same_length(source, mean_abs_deviation)

    return res if sequential else res[-1]
def safezonestop(candles: np.ndarray, period: int = 22, mult: float = 2.5, max_lookback: int = 3,
                 direction: str = "long", sequential: bool = False) -> Union[float, np.ndarray]:
    """
    Safezone Stops

    :param candles: np.ndarray
    :param period: int - default: 22
    :param mult: float - default: 2.5
    :param max_lookback: int - default: 3
    :param direction: str - default: "long"
    :param sequential: bool - default: False

    :return: float | np.ndarray
    """
    warmup_candles_num = get_config('env.data.warmup_candles_num', 240)
    if not sequential and len(candles) > warmup_candles_num:
        candles = candles[-warmup_candles_num:]

    high = candles[:, 3]
    low = candles[:, 4]

    last_high = np_shift(high, 1, fill_value=np.nan)
    last_low = np_shift(low, 1, fill_value=np.nan)

    if direction == "long":
        res = last_low - mult * talib.MINUS_DM(high, low, timeperiod=period)
        swv = sliding_window_view(res, window_shape=max_lookback)
        res = np.max(swv, axis=-1)
    else:
        res = last_high + mult * talib.PLUS_DM(high, low, timeperiod=period)
        swv = sliding_window_view(res, window_shape=max_lookback)
        res = np.min(swv, axis=-1)

    return np.concatenate((np.full((candles.shape[0] - res.shape[0]), np.nan), res), axis=0) if sequential else res[-1]
def test_subok(self):
    class MyArray(np.ndarray):
        pass

    arr = np.arange(5).view(MyArray)
    assert_(not isinstance(sliding_window_view(arr, 2, subok=False), MyArray))
    assert_(isinstance(sliding_window_view(arr, 2, subok=True), MyArray))
    # Default behavior
    assert_(not isinstance(sliding_window_view(arr, 2), MyArray))
def test_writeable(self):
    arr = np.arange(5)
    view = sliding_window_view(arr, 2, writeable=False)
    assert_(not view.flags.writeable)
    with pytest.raises(ValueError, match='assignment destination is read-only'):
        view[0, 0] = 3
    view = sliding_window_view(arr, 2, writeable=True)
    assert_(view.flags.writeable)
    view[0, 1] = 3
    assert_array_equal(arr, np.array([0, 3, 2, 3, 4]))
def er(candles: np.ndarray, period: int = 5, source_type: str = "close",
       sequential: bool = False) -> Union[float, np.ndarray]:
    """
    ER - The Kaufman Efficiency indicator

    :param candles: np.ndarray
    :param period: int - default: 5
    :param source_type: str - default: "close"
    :param sequential: bool - default: False

    :return: float | np.ndarray
    """
    warmup_candles_num = get_config('env.data.warmup_candles_num', 240)
    if not sequential and len(candles) > warmup_candles_num:
        candles = candles[-warmup_candles_num:]

    source = get_candle_source(candles, source_type=source_type)
    change = np.abs(np.diff(source, period))
    abs_dif = np.abs(np.diff(source))
    swv = sliding_window_view(abs_dif, window_shape=period)
    # Rolling sum of the absolute one-step changes; summing per window keeps
    # `volatility` the same length as `change` (a bare .sum() would collapse
    # it to a scalar).
    volatility = swv.sum(axis=-1)
    res = change / volatility

    return np.concatenate((np.full((candles.shape[0] - res.shape[0]), np.nan), res), axis=0) if sequential else res[-1]
def fwma(candles: np.ndarray, period: int = 5, source_type: str = "close",
         sequential: bool = False) -> Union[float, np.ndarray]:
    """
    Fibonacci's Weighted Moving Average (FWMA)

    :param candles: np.ndarray
    :param period: int - default: 5
    :param source_type: str - default: "close"
    :param sequential: bool - default: False

    :return: float | np.ndarray
    """
    if not sequential and len(candles) > 240:
        candles = candles[-240:]

    source = get_candle_source(candles, source_type=source_type)
    fibs = fibonacci(n=period, weighted=True)
    swv = sliding_window_view(source, window_shape=period)
    res = np.average(swv, weights=fibs, axis=-1)

    return np.concatenate((np.full((candles.shape[0] - res.shape[0]), np.nan), res), axis=0) if sequential else res[-1]
def forecasting_example():
    name = "C:\\Users\\Tony\\OneDrive - University of East Anglia\\Research\\Alex " \
           "Mcgregor Grant\\randomNoise.csv"
    # The builtin float is used here; the np.float alias has been removed from numpy.
    y = pd.read_csv(name, index_col=0, squeeze=True, dtype={1: float})
    forecast_horizon = np.arange(1, 2)
    forecaster = NaiveForecaster(strategy="last")
    forecaster.fit(y)
    y_pred = forecaster.predict(forecast_horizon)
    print("Next predicted value = ", y_pred)
    # https://github.com/alan-turing-institute/sktime/blob/main/examples/01_forecasting.ipynb
    # Reduce to a regression problem through windowing:
    # transform forecasting into regression.
    np_y = y.to_numpy()
    v = sliding_window_view(y, 100)
    print("Window shape =", v.shape)
    v_3d = np.expand_dims(v, axis=1)
    print("Window shape =", v.shape)
    print(v_3d.shape)
    z = v[:, 2]
    print(z.shape)
    regressor = CNNRegressor()
    classifier = CNNClassifier()
    regressor.fit(v_3d, z)
    p = regressor.predict(v_3d)
    # print(p)
    d = np.array([0.0])
    c = np.digitize(z, d)
    classifier = RandomIntervalSpectralForest()
    classifier.fit(v_3d, c)
    cls = classifier.predict(v_3d)
    print(cls)
def sinwma(candles: np.ndarray, period: int = 14, source_type: str = "close",
           sequential: bool = False) -> Union[float, np.ndarray]:
    """
    Sine Weighted Moving Average (SINWMA)

    :param candles: np.ndarray
    :param period: int - default: 14
    :param source_type: str - default: "close"
    :param sequential: bool - default: False

    :return: float | np.ndarray
    """
    warmup_candles_num = get_config('env.data.warmup_candles_num', 240)
    if not sequential and len(candles) > warmup_candles_num:
        candles = candles[-warmup_candles_num:]

    source = get_candle_source(candles, source_type=source_type)
    sines = np.array([np.sin((i + 1) * np.pi / (period + 1)) for i in range(0, period)])
    w = sines / sines.sum()
    swv = sliding_window_view(source, window_shape=period)
    res = np.average(swv, weights=w, axis=-1)

    return np.concatenate((np.full((candles.shape[0] - res.shape[0]), np.nan), res), axis=0) if sequential else res[-1]
def test_2d_with_axis(self):
    i, j = np.ogrid[:3, :4]
    arr = 10 * i + j
    arr_view = sliding_window_view(arr, 3, 0)
    expected = np.array([[[0, 10, 20],
                          [1, 11, 21],
                          [2, 12, 22],
                          [3, 13, 23]]])
    assert_array_equal(arr_view, expected)
def swma(candles: np.ndarray, period: int = 5, source_type: str = "close",
         sequential: bool = False) -> Union[float, np.ndarray]:
    """
    Symmetric Weighted Moving Average (SWMA)

    :param candles: np.ndarray
    :param period: int - default: 5
    :param source_type: str - default: False
    :param sequential: bool - default: False

    :return: float | np.ndarray
    """
    # Accept a plain 1-D price array as well as a full candles array.
    if len(candles.shape) == 1:
        source = candles
    else:
        candles = slice_candles(candles, sequential)
        source = get_candle_source(candles, source_type=source_type)

    triangle = symmetric_triangle(period)
    swv = sliding_window_view(source, window_shape=period)
    res = np.average(swv, weights=triangle, axis=-1)

    return same_length(candles, res) if sequential else res[-1]
def sinwma(candles: np.ndarray, period: int = 14, source_type: str = "close",
           sequential: bool = False) -> Union[float, np.ndarray]:
    """
    Sine Weighted Moving Average (SINWMA)

    :param candles: np.ndarray
    :param period: int - default: 14
    :param source_type: str - default: "close"
    :param sequential: bool - default: False

    :return: float | np.ndarray
    """
    # Accept a plain 1-D price array as well as a full candles array.
    if len(candles.shape) == 1:
        source = candles
    else:
        candles = slice_candles(candles, sequential)
        source = get_candle_source(candles, source_type=source_type)

    sines = np.array([np.sin((i + 1) * np.pi / (period + 1)) for i in range(period)])
    w = sines / sines.sum()
    swv = sliding_window_view(source, window_shape=period)
    res = np.average(swv, weights=w, axis=-1)

    return same_length(candles, res) if sequential else res[-1]
def er(candles: np.ndarray, period: int = 5, source_type: str = "close",
       sequential: bool = False) -> Union[float, np.ndarray]:
    """
    ER - The Kaufman Efficiency indicator

    :param candles: np.ndarray
    :param period: int - default: 5
    :param source_type: str - default: "close"
    :param sequential: bool - default: False

    :return: float | np.ndarray
    """
    candles = slice_candles(candles, sequential)
    source = get_candle_source(candles, source_type=source_type)
    change = np.abs(np.diff(source, period))
    abs_dif = np.abs(np.diff(source))
    swv = sliding_window_view(abs_dif, window_shape=period)
    # Per-window (rolling) sum so `volatility` stays aligned with `change`.
    volatility = swv.sum(axis=-1)
    res = change / volatility

    return same_length(candles, res) if sequential else res[-1]
def enhance(x, algo, image):
    # Pad the image up front so the x enhancement passes have room to grow.
    image = np.pad(image, 2 * x)
    for _ in range(x):
        # For each pixel, weight its 3x3 neighbourhood by BIN_POWERS (assumed
        # to be a 3x3 array of bit weights defined elsewhere) and use the
        # resulting index to look up the new pixel value in `algo`.
        window = sliding_window_view(image, (3, 3))
        image = algo[(window * BIN_POWERS).sum((3, 2))]
    return image
def test_1d(self):
    arr = np.arange(5)
    arr_view = sliding_window_view(arr, 2)
    expected = np.array([[0, 1],
                         [1, 2],
                         [2, 3],
                         [3, 4]])
    assert_array_equal(arr_view, expected)
def test_2d_repeated_axis(self):
    i, j = np.ogrid[:3, :4]
    arr = 10 * i + j
    arr_view = sliding_window_view(arr, (2, 3), (1, 1))
    expected = np.array([[[[0, 1, 2],
                           [1, 2, 3]]],
                         [[[10, 11, 12],
                           [11, 12, 13]]],
                         [[[20, 21, 22],
                           [21, 22, 23]]]])
    assert_array_equal(arr_view, expected)
def _local_top_values(values):
    n = 3
    rolling_mean = sliding_window_view(values, window_shape=n).mean(axis=1)
    rolling_mean = np.pad(rolling_mean, (n, n), 'constant', constant_values=np.nan)
    # -second derivative
    return 2 * values - (rolling_mean[:-n - 1] + rolling_mean[n + 1:])
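# Not part of the original function: a quick illustration of the rolling-mean
# building block used above. sliding_window_view(...).mean(axis=1) produces
# one mean per length-n window (the values here are made up).
import numpy as np
from numpy.lib.stride_tricks import sliding_window_view

vals = np.array([1.0, 2.0, 4.0, 7.0, 11.0])
print(sliding_window_view(vals, window_shape=3).mean(axis=1))
# [2.33333333 4.33333333 7.33333333]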
def _filter_angles_inner(sv, sv_prev, sv_next, angles, angles_prev, angles_next,
                         shift, shift_prev, shift_next):
    if len(sv) == 0 or np.isnan(sv).all():
        return np.zeros_like(sv)
    if len(sv_prev) == 0 or np.isnan(sv_prev).all():
        sv_prev = sv
        angles_prev = angles
        shift_prev = shift
    if len(sv_next) == 0 or np.isnan(sv_next).all():
        sv_next = sv
        angles_next = angles
        shift_next = shift

    prev_first_index = int(shift - shift_prev)
    next_first_index = int(shift - shift_next)

    median_radius = 2
    min_sv_radius = 2

    minima_prev = local_minima(sv_prev, min_sv_radius)
    minima_center = local_minima(sv, min_sv_radius)
    minima_next = local_minima(sv_next, min_sv_radius)

    angles_prev_masked = angles_prev.copy()
    angles_prev_masked[minima_prev] = np.nan
    angles_masked = angles.copy()
    angles_masked[minima_center] = np.nan
    angles_next_masked = angles_next.copy()
    angles_next_masked[minima_next] = np.nan

    angles_masked_rolling = sliding_window_view(
        angles_masked, window_shape=median_radius * 2 + 1)
    angles_prev_masked_rolling = sliding_window_view(
        shift_arr(angles_prev_masked, -prev_first_index),
        window_shape=median_radius * 2 + 1)
    angles_next_masked_rolling = sliding_window_view(
        shift_arr(angles_next_masked, -next_first_index),
        window_shape=median_radius * 2 + 1)

    stacked_rolling = np.hstack((angles_masked_rolling,
                                 angles_prev_masked_rolling,
                                 angles_next_masked_rolling))
    median = np.nanmedian(stacked_rolling, axis=1)
    return np.pad(median, (median_radius, median_radius), 'constant',
                  constant_values=np.nan)
def test_2d(self):
    i, j = np.ogrid[:3, :4]
    arr = 10 * i + j
    shape = (2, 2)
    arr_view = sliding_window_view(arr, shape)
    expected = np.array([[[[0, 1], [10, 11]],
                          [[1, 2], [11, 12]],
                          [[2, 3], [12, 13]]],
                         [[[10, 11], [20, 21]],
                          [[11, 12], [21, 22]],
                          [[12, 13], [22, 23]]]])
    assert_array_equal(arr_view, expected)
def test_2d_without_axis(self):
    i, j = np.ogrid[:4, :4]
    arr = 10 * i + j
    shape = (2, 3)
    arr_view = sliding_window_view(arr, shape)
    expected = np.array([
        [[[0, 1, 2], [10, 11, 12]],
         [[1, 2, 3], [11, 12, 13]]],
        [[[10, 11, 12], [20, 21, 22]],
         [[11, 12, 13], [21, 22, 23]]],
        [[[20, 21, 22], [30, 31, 32]],
         [[21, 22, 23], [31, 32, 33]]],
    ])
    assert_array_equal(arr_view, expected)
def rollingCorrelation(self):
    """Rolling Pearson correlation over self.Ys.

    TODO: this is by far the slowest step; consider numba for the calculation.
    """
    # print(Ys)
    # print(Ys.shape)
    # Ys = np.concatenate([self.Y.reshape(1, self.Ys.shape[1]), self.Ys], axis=0)
    slides = sliding_window_view(self.Ys, self.correlationWindowSize, axis=1)
    return slidingPearson(slides)
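# Illustration only (shapes assumed, not part of the original class): with
# self.Ys of shape (n_series, n_samples) and window size w, the view handed
# to slidingPearson has shape (n_series, n_samples - w + 1, w).
import numpy as np
from numpy.lib.stride_tricks import sliding_window_view

Ys = np.random.default_rng(0).normal(size=(4, 10))
slides = sliding_window_view(Ys, 5, axis=1)
print(slides.shape)  # (4, 6, 5)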
def main():
    X = np.loadtxt("2021/01/input.txt", dtype=int)

    # Calculate the array of differences
    X_diff = np.diff(X)
    # Find the number of positives
    res_1 = (X_diff > 0).sum()
    print(f"Result of part 1: {res_1}")

    # For Part 2, we create a sliding window of size 3 and take the sum along
    # the second axis (columns)
    Y = sliding_window_view(X, 3).sum(axis=1)
    # Calculate the array of differences
    Y_diff = np.diff(Y)
    # Find the number of positives
    res_2 = (Y_diff > 0).sum()
    print(f"Result of part 2: {res_2}")
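# Minimal sketch of the part-2 trick on a made-up sequence (the real input is
# read from 2021/01/input.txt above): sum each length-3 window, then count the
# positive differences between consecutive window sums.
import numpy as np
from numpy.lib.stride_tricks import sliding_window_view

X = np.array([1, 2, 5, 4, 8])
Y = sliding_window_view(X, 3).sum(axis=1)  # [ 8 11 17]
print((np.diff(Y) > 0).sum())              # 2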
def test_errors(self):
    i, j = np.ogrid[:4, :4]
    arr = 10 * i + j
    with pytest.raises(ValueError, match='cannot contain negative values'):
        sliding_window_view(arr, (-1, 3))
    with pytest.raises(
            ValueError,
            match='must provide window_shape for all dimensions of `x`'):
        sliding_window_view(arr, (1,))
    with pytest.raises(
            ValueError,
            match='Must provide matching length window_shape and axis'):
        sliding_window_view(arr, (1, 3, 4), axis=(0, 1))
    with pytest.raises(
            ValueError,
            match='window shape cannot be larger than input array'):
        sliding_window_view(arr, (5, 5))
def compute(self, img):
    '''
    Class function for the Local Binary Pattern descriptor.
    Takes an image (2-dim array) and computes the local binary pattern for
    all the pixels, creates the LBP image and returns the histogram of that
    image.

    Parameters:
    > `img`: ndarray. The image to obtain the descriptor from.
    > returns: ndarray float32. The LBP histogram for img.
    '''
    # First take the shape
    h, w = img.shape

    # Calculate all the possible windows for this image with the shape
    # specified by the radius.
    windows = sliding_window_view(img, window_shape=(self.radius, self.radius))

    # Reshape the windows matrix to be a flat array
    # of (radius, radius) little matrices to obtain LBP
    reshaped_windows = np.reshape(
        windows,
        newshape=(windows.shape[0] * windows.shape[1],
                  windows.shape[2], windows.shape[3]))

    # Calculate the corresponding value for every
    # window and return the resulting vector
    values = [self.binary_neighbours(window) for window in reshaped_windows]

    # Reshape that vector to be a new "LBP IMG"
    img_values = np.reshape(values, newshape=(windows.shape[0], windows.shape[1]))

    # Since windows didn't take the borders into account,
    # we pad the array with wrap mode, to get those borders
    # back in a thoughtful way
    lbp_img = np.pad(img_values, ((1, 1), (1, 1)), 'wrap')

    # Calculate the histogram for the lbp img and return it
    return (np.histogram(lbp_img, bins=256, density=False)[0]).astype('float32')
def get_spike_counts(self, bin_size=50, subset=None, rolling_window=None):
    """Get matrix of spike counts.

    Parameters
    ----------
    bin_size : int | None
        Size [ms] of the bins over which to count spikes. If None,
        will simply return total counts.
    subset : iterable, optional
        Subset of IDs to count spikes for. If None, all IDs are used.
    rolling_window : int, optional
        Average spike counts in a rolling window.

    Returns
    -------
    pd.DataFrame

    """
    if not isinstance(subset, type(None)):
        ids = utils.make_iterable(subset)
    else:
        ids = self._ids

    end_time = neuron.h.t

    if not end_time:
        raise ValueError('Looks like simulation has not yet been run.')

    if not bin_size:
        bin_size = end_time

    bins = np.arange(0, end_time + bin_size, bin_size)
    counts = np.zeros((len(ids), len(bins) - 1))

    # Collect spike counts
    for i, id in enumerate(ids):
        pp = self.idx[id]
        timings = list(pp.spk_timings)
        if timings:
            hist, _ = np.histogram(timings, bins)
            counts[i, :] = hist

    counts = pd.DataFrame(counts, index=ids, columns=bins[:-1])

    if rolling_window:
        avg = sliding_window_view(counts, rolling_window, axis=1).mean(axis=2)
        counts.iloc[:, :-(rolling_window - 1)] = avg

    return counts
def get_sliding_window_partition(data, labels, window_size, step=1):
    """Splits data into several windows. Deprecated (waste of memory).

    Parameters
    ----------
    data : np.array
        EEG data with shape (trials, channels, time)
    labels : np.array
        1d array of integer labels corresponding to left/right
    window_size : int
        Size in samples (not time) of the windows the data will be split into.
        If window_size corresponds to the number of recorded samples per
        trial, the function returns the input data and labels unchanged.
    step : int, default 1
        Step size of the sliding window

    Returns
    -------
    windowed_data : np.array
        Array of shape (windows, channels, time)
    windowed_labels : np.array
        1d array of labels corresponding to each window
    """
    raise DeprecationWarning('This function uses way too much memory')

    assert len(data.shape) == 3

    if data.shape[2] == window_size:
        return data, labels

    windowed_data = np.empty((0, data.shape[1], window_size))
    windowed_labels = np.empty((0,))

    for i in range(data.shape[0]):
        trial = data[i]
        # print(trial)
        windows = sliding_window_view(trial, (data.shape[1], window_size))[0]
        # print('windows')
        # print(windows)
        windowed_data = np.append(windowed_data, windows, axis=0)
        new_labels = np.array([labels[i] for _ in range(windows.shape[0])])
        # print('labels')
        # print(new_labels)
        windowed_labels = np.append(windowed_labels, new_labels, axis=0)

    return windowed_data[::step], windowed_labels[::step]
def kurtosis(candles: np.ndarray, period: int = 5, source_type: str = "hl2",
             sequential: bool = False) -> Union[float, np.ndarray]:
    """
    Kurtosis

    :param candles: np.ndarray
    :param period: int - default: 5
    :param source_type: str - default: "hl2"
    :param sequential: bool - default: False

    :return: float | np.ndarray
    """
    candles = slice_candles(candles, sequential)
    source = get_candle_source(candles, source_type=source_type)

    swv = sliding_window_view(source, window_shape=period)
    kurtosis = stats.kurtosis(swv, axis=-1)
    res = same_length(source, kurtosis)

    return res if sequential else res[-1]
def neighborhood(X, idcs, steps, axes, write=False):
    '''
    Pure `numpy` approach using `lib.stride_tricks.sliding_window_view`:
    create a sliding window view with the desired shape and return the
    subset of views that is aligned with the neighborhood.
    '''
    # Get sliding window view
    window_shape = tuple(2 * s + 1 for s in steps)
    S = sliding_window_view(X, window_shape, axis=axes, writeable=write)

    # Initialize all indices with empty slice
    indices = [slice(None)] * X.ndim

    # Update specified axes with adjusted index
    for axis, idx, step in zip(axes, idcs, steps):
        indices[axis] = idx - step

    # Get views over the specified neighborhood
    views = S[tuple(indices)]

    return views
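# Usage sketch for the helper above (grid values are made up): extract the
# 3x3 neighborhood centred on element (2, 2) of a 5x5 array. Assumes numpy
# and sliding_window_view are imported as the function requires.
import numpy as np

X = np.arange(25).reshape(5, 5)
print(neighborhood(X, idcs=(2, 2), steps=(1, 1), axes=(0, 1)))
# [[ 6  7  8]
#  [11 12 13]
#  [16 17 18]]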
def getNextState(state):
    """
    @param state: 2d array of 0 and 1 where 0 represents a dead cell and 1 represents a live cell
    @return: 2d array of 0 and 1 computed from the input state
    """
    state = np.array(state)
    h, w = state.shape
    padding = np.hstack((np.zeros((h, 1)), state, np.zeros((h, 1))))
    padding = np.vstack((np.zeros((1, w + 2)), padding, np.zeros((1, w + 2))))

    # n: numpy array that has the same shape as the input state;
    # each cell represents the number of live neighbours
    n = sliding_window_view(padding, (3, 3)).sum(axis=(2, 3)) - state

    # rules according to Wikipedia:
    # any live cell with 2 or 3 neighbours survives
    # any dead cell with 3 live neighbours becomes a live cell
    # all other live or dead cells die in the next generation
    return np.where(((state == 1) & ((n == 2) | (n == 3))) | ((state == 0) & (n == 3)), 1, 0).tolist()
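# Usage sketch (grid made up, assumes the imports used by getNextState): one
# step of a horizontal "blinker" should flip it to a vertical one.
blinker = [[0, 0, 0],
           [1, 1, 1],
           [0, 0, 0]]
print(getNextState(blinker))
# [[0, 1, 0], [0, 1, 0], [0, 1, 0]]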
def fwma(candles: np.ndarray, period: int = 5, source_type: str = "close",
         sequential: bool = False) -> Union[float, np.ndarray]:
    """
    Fibonacci's Weighted Moving Average (FWMA)

    :param candles: np.ndarray
    :param period: int - default: 5
    :param source_type: str - default: "close"
    :param sequential: bool - default: False

    :return: float | np.ndarray
    """
    candles = slice_candles(candles, sequential)
    source = get_candle_source(candles, source_type=source_type)
    fibs = fibonacci(n=period)
    swv = sliding_window_view(source, window_shape=period)
    res = np.average(swv, weights=fibs, axis=-1)

    return same_length(candles, res) if sequential else res[-1]
def ttm_trend(candles: np.ndarray, period: int = 5, source_type: str = "hl2",
              sequential: bool = False) -> Union[float, np.ndarray]:
    """
    TTM Trend

    :param candles: np.ndarray
    :param period: int - default: 5
    :param source_type: str - default: "hl2"
    :param sequential: bool - default: False

    :return: float | np.ndarray
    """
    candles = slice_candles(candles, sequential)
    source = get_candle_source(candles, source_type=source_type)

    swv = sliding_window_view(source, window_shape=period)
    trend_avg = np.mean(swv, axis=-1)

    # True (uptrend) where the close is above the rolling average of the source
    res = np.greater(candles[:, 2], same_length(source, trend_avg))

    return res if sequential else res[-1]