Code Example #1
# Import Candles
candles = get_candles(instrument, granularity, _from, _to)

# Main iteration sequence
###############################################################################
# Call each window.  Transform and collect all results
results = []
outcomes = []
for i in range(window, candles.shape[0] - search_outcomes):
    # Prepare data slices for linear regression and the outcome window
    closings = candles.loc[i - window:i, 'midclose'].values
    closings_outcomes = candles.loc[i:min(i +
                                          search_outcomes, candles.shape[0]),
                                    'midclose'].values
    # Flatten midclose values
    closings_flat = horizontal_transform(closings)
    # Create channel on flat midclose (c1 and c5 are nothing right now)
    channels = create_channels(closings_flat['closing'])
    c2 = (channels['c2'] + closings_flat['linregress']) + closings[0]
    c3 = (channels['c3'] + closings_flat['linregress']) + closings[0]
    c4 = (channels['c4'] + closings_flat['linregress']) + closings[0]
    # Calculate up down outcome on final value
    tmp = []
    for tar in [.25, .5, .75, 1, 1.25, 1.5, 2, 2.5]:
        distance = (channels['c4'][-1] - channels['c3'][-1]) * tar
        target_up = closings[-1] + distance
        target_down = closings[-1] - distance
        up_or_down = up_down_simple(closings_outcomes, target_up, target_down)
        tmp.append(up_or_down[0])
    outcomes.append(tmp)
    # Where in the channel is the closing value
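
Code Example #1 depends on the helper horizontal_transform, which is not shown here. Below is a minimal sketch of what it is assumed to do, inferred from how its 'closing', 'linregress', and 'slope' keys are used above (fit a line to the closings and subtract the trend, anchored at the first close); the actual implementation may differ.

import numpy as np
from scipy import stats

def horizontal_transform(closings):
    # Fit a straight line to the closing values
    x = np.arange(closings.shape[0])
    slope, intercept, _, _, _ = stats.linregress(x, closings)
    # Express the fitted trend relative to the first close so that
    # flattened + linregress + closings[0] reconstructs the original series
    linregress_line = slope * x + intercept - closings[0]
    flattened = closings - closings[0] - linregress_line
    return {'closing': flattened,
            'linregress': linregress_line,
            'slope': slope}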
Code Example #2
# Imports assumed by this function (minmaxscaler is taken to be an alias for
# sklearn's MinMaxScaler, which matches the fit/transform/inverse_transform,
# data_range_ and scale_ usage below)
import numpy as np
import pandas as pd
from scipy import stats
from sklearn.preprocessing import MinMaxScaler as minmaxscaler


def channel_statistics(closing_values, window_length, candles):
    print('gathering channel statistics')
    results = []
    history = []    
    # Pad / truncate the list of histogram peaks to exactly i entries
    pad = lambda a,i : a[0:i] if len(a) > i else a + [0] * (i-len(a))
    # Window Analysis
    ############################################################################### 

    # Prepare window of closing values and the outcome window
    closings = closing_values
    # Flatten closing values 
    closings_flat = horizontal_transform(closings)
    # Scale Flattened closing values to be between 0 and 1    
    mms = minmaxscaler()
    mms.fit(closings_flat['closing'].reshape(-1, 1))
    scaled = mms.transform(closings_flat['closing'].reshape(-1, 1)).ravel()
    # Create channels from flattened and scaled closing values
    channels = create_channels(scaled)
    # Calculate Outcomes Range from original closing values
    c6 = mms.inverse_transform(channels['c6'].reshape(-1, 1)).ravel()
    c4 = mms.inverse_transform(channels['c4'].reshape(-1, 1)).ravel()
    top    = c6[-1] + closings_flat['linregress'][-1] 
    bottom = c4[-1] + closings_flat['linregress'][-1] 
    _range = top - bottom
    # Collect histogram for the window and its peaks for results
    hist = np.histogram(scaled, 150)
    history.append(list(hist[0])) 
    hist = np.histogram(scaled, bins=10)
    # Collect histogram peaks into results
    if hist[0][0] > hist[0][1]:
        keep = [True]
    else:
        keep = [False]
    for h in range(1, hist[0].shape[0]-1):
        if hist[0][h] > hist[0][h+1] and hist[0][h] > hist[0][h-1]:
            keep.append(True)
        else:
            keep.append(False)
    if hist[0][-1] > hist[0][-2]:
        keep.append(True)
    else:
        keep.append(False)   
    keep = np.array(keep)
    x = (hist[1] + ((hist[1][1] - hist[1][0]) / 2))[:-1]
    # Collect Data
    results.append([mms.data_range_[0],
                    closings.std(),
                    closings.mean(),
                    stats.kurtosis(closings),
                    stats.skew(closings),
                    closings_flat['slope'],
                    mms.scale_[0],
                    scaled.std(),
                    scaled.mean(),
                    stats.kurtosis(scaled),
                    stats.skew(scaled),
                    candles.volume.mean(),
                    candles.volume.values[-1], 
                    channels['breakout'],
                    channels['slope'],
                    _range,
                    channels['closing_position'],
                    channels['c1'].mean(),
                    channels['c2'].mean(),
                    channels['c3'].mean(),
                    channels['c4'].mean(),
                    channels['c5'].mean(),
                    channels['c6'].mean(),
                    channels['c7'].mean(),
                    channels['d01'],
                    channels['d12'],
                    channels['d23'],
                    channels['d34'],
                    channels['d45'],
                    channels['d56'],
                    channels['d67'],
                    channels['d78']
                    ] + pad(list(x[keep]), 3))        
    # Results Configuration
    ###############################################################################
    # Assemble columns for results, outcomes, and histograms
    columns = ['closings_range',
               'closings_std', 
               'closings_mean',
               'closings_kurt', 
               'closings_skew',
               'closings_slope',
               'scaler', 
               'scaled_std', 
               'scaled_mean',
               'scaled_kurt',
               'scaled_skew',
               'volume_mean',
               'volume_final', 
               'breakout',
               'channel_slope',
               'channel_range',
               'channel_position',
               'c1',
               'c2',
               'c3',
               'c4',
               'c5',
               'c6',
               'c7',
               'd01',
               'd12',
               'd23',
               'd34',
               'd45',
               'd56',           
               'd67',
               'd78',
               'peak1',
               'peak2',
               'peak3',]
    # Put together dataframe of results, history, outcomes
    history = pd.DataFrame(np.array(history))
    results = pd.DataFrame(np.array(results), columns=columns)
    # Make sure all values are numeric
    results = results.apply(pd.to_numeric, errors='ignore')
    history = history.apply(pd.to_numeric, errors='ignore')
    # Rearrange by location (keep in order)
    results = results.reset_index(drop=True)
    history = history.reset_index(drop=True)
    # Add Additional Columns
    results['hist_kurtosis'] = history.kurtosis(axis=1)
    results['hist_skew'] = history.skew(axis=1)
    results['both_slopes'] = results.channel_slope / results.closings_slope
    return {'history': history.values.tolist()[0], 'results': results.values.tolist()[0]} 
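
A hypothetical call to channel_statistics(), assuming candles is the DataFrame returned by get_candles() with 'midclose' and 'volume' columns; the window length and slice below are illustrative only.

window_length = 500
window_candles = candles.loc[:window_length].reset_index(drop=True)
closing_values = window_candles['midclose'].values
out = channel_statistics(closing_values, window_length, window_candles)
print(len(out['results']), 'result features,', len(out['history']), 'histogram bins')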
Code Example #3
# Import Candles.  Create new candle set based on even time spacing
candles = get_candles(instrument, granularity, _from, _to)
# candles = convert_candles_by_timing(candles, 5)
# Trim candles from the start so the length divides evenly by large_skip
candles = candles[(candles.shape[0] -
                   int(candles.shape[0] / large_skip) * large_skip):]
candles = candles.reset_index(drop=True)
# Remove days when markets are not open
# Set outcome width
#outcome_width *= candles.midclose.mean()

# Call each frame.  Scale all values based on large * small window.
for i in range(large_skip, candles.shape[0] - search_window, reduce_frames):
    if i % 100000 == 0: print('{:.2f}'.format(i / candles.shape[0]))
    closings = candles.loc[i - large_skip + 1:i, 'midclose'].values
    closings_flattened = horizontal_transform(closings)
    scaled = minmaxscaler().fit_transform(
        closings_flattened['closing'].reshape(-1, 1))
    # Build a (bins x frames) image of rolling histograms; this assumes
    # bin_window == bins so each histogram fills exactly one column
    frame = np.empty((bin_window, large_window))
    frame_count = 0
    for j in range(small_window, scaled.shape[0] + 2, small_window):
        hist = np.histogram(scaled[j - bin_window:j], bins=bins)
        frame[:, frame_count] = hist[0][-1::-1]  # flip so the highest bin sits in the top row
        frame_count += 1
    results.append(frame.reshape(1, -1).tolist()[0])

    # Get outcomes at each position
    search = min(i + search_window, candles.shape[0])
    closings_outcomes = candles.loc[i:search, 'midclose'].values
    up_down = up_down_outcomes(closings_outcomes, outcome_width, search_window)
    bars_up.append(up_down['up'])
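
The outcome helpers up_down_simple() (Code Example #1) and up_down_outcomes() (above) are also external. The sketches below are inferred purely from the call sites, so the return shapes, field names, and the "no target hit" behaviour are assumptions rather than the author's implementation.

def up_down_simple(closings_outcomes, target_up, target_down):
    # Report which of the two price targets is touched first
    for price in closings_outcomes:
        if price >= target_up:
            return ('up',)
        if price <= target_down:
            return ('down',)
    return ('none',)

def up_down_outcomes(closings_outcomes, outcome_width, search_window):
    # Count the bars until price first moves outcome_width above / below the entry;
    # search_window is used as the "never reached" sentinel
    entry = closings_outcomes[0]
    up = down = search_window
    for bars, price in enumerate(closings_outcomes):
        if up == search_window and price >= entry + outcome_width:
            up = bars
        if down == search_window and price <= entry - outcome_width:
            down = bars
    return {'up': up, 'down': down}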