def __init__(self, client):
    """Initialize the tournament cog: wire up state, helper cogs and modifiers.

    Side effects: schedules ``self.update_reminders()`` on the running event
    loop and prints an initialization banner.
    """
    self.client = client
    self.tournament = Tournament()
    # Tournaments restored via the Tournament class for this client.
    self.queue = Tournament.get_tournaments(client)
    # Attribute names a tournament announcement is built from.
    self.attr = ['name', 'time', 'prize', 'host', 'roles', 'note']
    self.channels = Channel(client)
    self.roles = Role(client)
    self.checklist = None
    # Modifier spec list.  Dict entries carry a converter/callable under
    # 'value' used to parse the modifier's argument; the bare string entry
    # ('SpectatorsAllowed') is a flag-style modifier with no argument.
    self.modifiers = [{
        'name': 'RequiredRole',
        'value': commands.RoleConverter()
    }, {
        'name': 'MaxParticipants',
        'value': int
    }, 'SpectatorsAllowed', {
        'name': 'PrizeType',
        'value': ModifierUtils.convert_to_prize_type
    }, {
        'name': 'AutoGiveCoins',
        'value': int
    }, {
        'name': 'AssistingTO',
        'value': commands.MemberConverter()
    }]
    # NOTE(review): assumes an event loop is already running when the cog is
    # constructed — asyncio.create_task raises otherwise; confirm at startup.
    asyncio.create_task(self.update_reminders())
    print(self.__class__.__name__ + " cog initialized!")
async def SendFirstTournamentMessage(ctx):
    """DM a first-time tournament participant a walkthrough of the flow.

    Builds a three-field embed describing the pre-game chat channel, how to
    join the custom game, and the start of the match, then sends it to the
    invoking user.  Failures to DM (closed DMs, blocks) are ignored.
    """
    member = ctx.author
    chans = Channel(ctx.bot)

    intro = discord.Embed(
        title="Welcome to your first tournament!",
        description=(f"Hey {member.mention}, you have joined a tournament for the"
                     " first time! If you are confused, don't worry! I'm here"
                     " to remind you of the essential steps of a tournament."),
        color=discord.Color.green())

    # Step 1: where to hang out before the game fills up.
    intro.add_field(
        name="Before the game starts, you can chat with fellow participants.",
        value=("When you join a tournament, you get access to "
               f" {chans.t_chat.mention} (`{chans.t_chat.name}`)"
               ", where you can discuss with other players while waiting for the game"
               " to fill up!"),
        inline=False)

    # Step 2: how to enter the custom game in the app.
    intro.add_field(
        name="Once the host is ready, you will be able to join the game.",
        value=("In the channel, you will be given the name and password of a custom"
               " game to join in the Werewolf Online app! Open the Main Menu of the game,"
               " then go to Play > Custom Games. Find the game in the list and click it, then"
               " type the password you were given."),
        inline=False)

    # Step 3: good luck.
    intro.add_field(
        name="Once everyone has joined, the battle will begin!",
        value="Good luck and happy hunting!",
        inline=False)

    try:
        await member.send(embed=intro)
    except discord.HTTPException:
        # User cannot be DMed — nothing else to do.
        return
def channel_create(client, args, rmx):
    """Handle the "create channel" command.

    Creates a new channel named after the first whitespace-delimited token of
    ``args``, adds and promotes the invoking client in it, persists the
    channel, and makes it the client's active channel.  With no arguments,
    sends the command help instead.

    Returns True in every case (command was handled).
    """
    if args is None or len(args) == 0:
        client.send_message(enums.MessageType.HELP, help_regular["create"])
        return True
    channel_name = args.split(" ", 1)[0].strip()
    if channel_name in globals.channel_list:
        # FIX: corrected grammar of the warning ("exist" -> "exists").
        client.send_message(enums.MessageType.WARNING,
                            "The specified channel name already exists!"
                            "\nPlease, specify another channel name.")
    else:
        # Create in-memory channel, make the creator a member and a moderator.
        globals.channel_list[channel_name] = Channel(channel_name)
        globals.channel_list[channel_name].add_client(client.get_username())
        globals.channel_list[channel_name].promote_client(client.get_username())
        # Persist the channel and the moderator grant.
        persistence.channels.create_channel(channel_name)
        persistence.channels.add_channel_moderator(channel_name,
                                                   client.get_username())
        # Refresh the client's channel list and switch it to the new channel.
        client.send_channels()
        client.set_channel(channel_name)
        client.send_message(enums.MessageType.INFO,
                            "You have created the #%s channel."
                            "\nThe #%s channel is now your active channel!"
                            % (channel_name, channel_name))
    return True
def pred(ctx):
    """Command-check predicate.

    Level-1 authorized users pass everywhere; everyone else passes only in
    the bot-commands or tournament channel.  Raises InvalidChannel otherwise.
    """
    chans = Channel(ctx.bot)
    allowed = [chans.bot_cmds, chans.t_channel]
    try:
        authorized(ctx, level=1, to=True)
    except MissingPermissions:
        # Not staff: restrict to the whitelisted channels.
        if ctx.channel not in allowed:
            raise InvalidChannel
        return True
    return True
def get_results_bars(candles, window, search_interval):
    '''
    This function has been verified with the channel model. Results and
    outcomes collected by it seem to return the correct dfs.

    Walks the candle history bar by bar, fits a Channel on each trailing
    `window` of midclose prices, and records channel statistics plus
    long/short outcome rows from `outcomes()`.

    Returns (results, long, short) DataFrames, each indexed by the integer
    bar location.
    '''
    # Instantiation
    results = []
    long = []   # NOTE(review): shadows the py2-era name; kept as-is.
    short = []
    for i in range(window, candles.shape[0] - search_interval):
        # Print progress.
        if i % 10000 == 0:
            print('Percent complete: {:.2f}'.format(i / candles.shape[0]))
        # Prepare Slice candles for channel and outcome_interval
        closings = candles.loc[i - window:i, 'midclose'].values
        # Fetch channel transformation on window. Append to results
        channel = Channel(closings)
        results.append([
            i, channel.channel_slope, channel.closings_slope,
            channel.closing_position, channel.channel_range,
            channel.largest_spike, channel.largest_spike_5,
            channel.within_range
        ])
        # Get Outcomes.  Distances are multiples of one sixth of the channel
        # height (c1..c7 span divided by 6).
        average_channel_distance = ((channel.closings_c7[-1] \
                                     - channel.closings_c1[-1]) / 6)
        distance = np.arange(1, 21) * average_channel_distance
        outs = outcomes(candles, i, search_interval, distance)
        long.append([i] + outs['long_target'] + outs['long_loss'])
        short.append([i] + outs['short_target'] + outs['short_loss'])

    # Collect all window results into dataframe
    results_columns = [
        'location', 'channel_slope', 'closings_slope',
        'channel_closing_position', 'channel_range', 'largest_spike',
        'largest_spike_5', 'within_range'
    ]
    results = pd.DataFrame(np.array(results), columns=results_columns)
    results = results.set_index('location', drop=True)
    results.index = results.index.astype(int)

    # Assemble long and short into dataframe and return.  Each outcome row is
    # [location, t1..tN, l1..lN], so half the remaining width is targets.
    target_columns = []
    loss_columns = []
    for i in range(int((len(long[0]) - 1) / 2)):
        target_columns.append('t' + str(i + 1))
        loss_columns.append('l' + str(i + 1))
    columns = ['location'] + target_columns + loss_columns
    long = pd.DataFrame(np.array(long), columns=columns)
    long = long.set_index('location', drop=True)
    long.index = long.index.astype(int)
    short = pd.DataFrame(np.array(short), columns=columns)
    short = short.set_index('location', drop=True)
    short.index = short.index.astype(int)
    return results, long, short
def get_results_bars(candles, window, search_interval):
    """Collect per-bar channel statistics and simple up/down outcomes.

    For every bar (leaving `search_interval` bars of lookahead), fit a
    Channel over the trailing `window`, record its statistics, and record
    the up/down outcome rows from `simple_outcomes()` at distances that are
    multiples of one sixth of the channel range.

    Returns (results, up, down) DataFrames, each indexed by integer bar
    location.
    """
    stat_rows, up_rows, down_rows = [], [], []
    for loc in range(window, candles.shape[0] - search_interval):
        # Progress report every 10k bars.
        if loc % 10000 == 0:
            print('Percent complete: {:.2f}'.format(loc / candles.shape[0]))

        # Channel statistics over the trailing window.
        chan = Channel(candles, loc, window)
        stat_rows.append([
            loc, chan.channel_slope, chan.closings_slope,
            chan.closing_position, chan.channel_range, chan.largest_spike,
            chan.largest_spike_5, chan.within_range
        ])

        # Outcomes at 1..10 sixths of the channel range.
        distance = np.arange(1, 11) * (chan.channel_range / 6)
        outs = simple_outcomes(candles, loc, search_interval, distance)
        up_rows.append([loc] + outs['up'])
        down_rows.append([loc] + outs['down'])

    # Column names.
    stat_cols = [
        'location', 'channel_slope', 'closings_slope',
        'channel_closing_position', 'channel_range', 'largest_spike',
        'largest_spike_5', 'within_range'
    ]
    width = len(up_rows[0]) - 1
    up_cols = ['location'] + ['u' + str(k + 1) for k in range(width)]
    down_cols = ['location'] + ['d' + str(k + 1) for k in range(width)]

    def _frame(rows, cols):
        # Build a DataFrame indexed by integer location.
        df = pd.DataFrame(np.array(rows), columns=cols)
        df = df.set_index('location', drop=True)
        df.index = df.index.astype(int)
        return df

    return (_frame(stat_rows, stat_cols), _frame(up_rows, up_cols),
            _frame(down_rows, down_cols))
def get_mean_pos_std(values, window, mean_dict, std_dict, pos_dict, slope_dict):
    """Rolling channel statistics for one window size.

    Fits a Channel over each trailing `window` slice of `values` and
    collects the flattened mean/std, the standardized position distance and
    the slope.  The first `window` entries are NaN padding so the arrays
    align with `values`.  Results are stored under key `window` in the four
    output dicts (mutated in place); nothing is returned.
    """
    #print('Getting Channel mean, std and pos on {}'.format(window))
    pad = [np.nan] * window
    means, stds = list(pad), list(pad)
    positions, slopes = list(pad), list(pad)
    for end in range(window, values.shape[0]):
        chan = Channel(values[end - window:end])
        flat = chan.flattened
        means.append(flat.mean())
        stds.append(flat.std())
        positions.append(chan.position_distance_standard)
        slopes.append(chan.slope)
    pos_dict[window] = np.array(positions)
    mean_dict[window] = np.array(means)
    std_dict[window] = np.array(stds)
    slope_dict[window] = np.array(slopes)
def get_results_bars(candles, window, search_interval, peaks_window, distance):
    """Collect channel statistics, support peaks and long/short outcomes.

    For every bar, fit a Channel over the trailing `window`, record its
    statistics plus spread/volume and channel boundary values, gather
    support peaks over `peaks_window`, and record target/loss outcome rows
    for both directions from `outcomes()`.

    `distance` is either an explicit array of price distances, or a string
    to request auto-scaling (1..10 sixths of the current channel range).

    Returns a dict of DataFrames: results, long_target, long_loss,
    short_target, short_loss, peaks.
    """
    # Instantiation
    results = []
    long_target = []
    long_loss = []
    short_target = []
    short_loss = []
    peaks = []
    start = max(window, peaks_window)
    for i in range(start, candles.shape[0] - search_interval):
        # Print progress.
        if i % 10000 == 0:
            print('Percent complete: {:.2f}'.format(i / candles.shape[0]))
        # Fetch channel transformation on window. Append to results
        channel = Channel(candles, i, window)
        results.append([
            i, channel.channel_slope, channel.closings_slope,
            channel.closing_position, channel.channel_range,
            channel.largest_spike, channel.largest_spike_5,
            channel.within_range, candles.loc[i, 'spread'],
            candles.loc[i, 'volume'], channel.c1[-1], channel.c7[-1],
            channel.closings[-1]
        ])
        # Get Peaks
        peaks_collection = channel.get_supports(peaks_window)
        for peak in peaks_collection:
            peaks.append([i, peak])
        # Set distance for outcome.
        # FIX: original tested `type(distance) == 'str'`, comparing the type
        # object against a string literal — always False, so the automatic
        # distance branch could never run.
        if isinstance(distance, str):
            distance = np.arange(1, 11) * (channel.channel_range / 6)
        # Get long outcomes
        outs = outcomes('long', candles, i, search_interval, distance, False)
        long_target.append([i] + outs['target'])
        long_loss.append([i] + outs['loss'])
        # get short outcomes
        outs = outcomes('short', candles, i, search_interval, distance, False)
        short_target.append([i] + outs['target'])
        short_loss.append([i] + outs['loss'])

    # Assemble Dataframes
    results_columns = [
        'location', 'channel_slope', 'closings_slope',
        'channel_closing_position', 'channel_range', 'largest_spike',
        'largest_spike_5', 'within_range', 'spread', 'volume', 'c1', 'c7',
        'closing_value'
    ]
    results = pd.DataFrame(np.array(results), columns=results_columns)
    long_target = pd.DataFrame(np.array(long_target))
    long_loss = pd.DataFrame(np.array(long_loss))
    short_target = pd.DataFrame(np.array(short_target))
    short_loss = pd.DataFrame(np.array(short_loss))
    peaks = pd.DataFrame(np.array(peaks), columns=['location', 'peaks'])

    # Set indexes: the outcome frames were built without column names, so
    # column 0 (the location) becomes the index and is renamed.
    results = results.set_index('location', drop=True)
    long_target = long_target.set_index(0, drop=True)
    long_loss = long_loss.set_index(0, drop=True)
    short_target = short_target.set_index(0, drop=True)
    short_loss = short_loss.set_index(0, drop=True)
    long_target.index = long_target.index.rename('location')
    long_loss.index = long_loss.index.rename('location')
    short_target.index = short_target.index.rename('location')
    short_loss.index = short_loss.index.rename('location')
    # Set index type
    results.index = results.index.astype(int)
    long_target.index = long_target.index.astype(int)
    long_loss.index = long_loss.index.astype(int)
    short_target.index = short_target.index.astype(int)
    short_loss.index = short_loss.index.astype(int)
    # Return
    return {
        'results': results,
        'long_target': long_target,
        'long_loss': long_loss,
        'short_target': short_target,
        'short_loss': short_loss,
        'peaks': peaks
    }
def plot_currency_universe(cu, plot_index, currencies, ratios, interval):
    """Redraw one figure per currency with every instrument containing it.

    Each figure is a vertical GridSpec of subplots (one per paired
    instrument); instruments quoted the "wrong way" are inverted/reflected
    so they all move with the currency.  Channel lines are overlaid for the
    trailing spans that pass a normality test on the flattened channel.
    NOTE(review): parameter `cu` is never used in the visible body.
    """
    ####### Redraw Plots ######
    # Plots for the currency universe
    for currency in currencies:
        fig = plt.figure(str(currency) + '_set', clear=True,
                         tight_layout=True, facecolor='grey',
                         edgecolor='black')
        gs = gridspec.GridSpec(len(currencies) - 1, 1)
        gs.update(wspace=0, hspace=0)
        # First plot for x ticks
        a = plt.subplot(gs[0, :])
        a.set_facecolor('xkcd:pale grey')
        a.spines['bottom'].set_linewidth(2)
        a.spines['left'].set_linewidth(2)
        a.spines['top'].set_linewidth(2)
        a.spines['right'].set_linewidth(2)
        #plt.setp(a.get_xticklabels(), visible=True)
        # Axis stuff
        for c in range(1, len(currencies) - 1):
            b = plt.subplot(gs[c, :], sharex=a)
            #plt.setp(b.get_xticklabels(), visible=False)
            b.set_facecolor('xkcd:pale grey')
            b.spines['top'].set_visible(True)
            b.spines['right'].set_visible(True)
            b.spines['bottom'].set_linewidth(2)
            b.spines['left'].set_linewidth(2)
            b.spines['top'].set_linewidth(2)
            b.spines['right'].set_linewidth(2)

    ####### Plot Intruments ######
    for currency in currencies:
        fig = plt.figure(str(currency) + '_set')
        ax = plt.figure(str(currency) + '_set').get_axes()
        # Get Insturment List and which direction to align instrument
        pair_list = []
        shape_list = []
        for pair in ratios.columns:
            if currency.upper() in pair.split('_'):
                pair_list.append(pair)
                if currency.upper() == pair.split('_')[0]:
                    shape_list.append(1)
                else:
                    shape_list.append(-1)
        # Get Slope position for all values (instruments)
        currency_set = pd.DataFrame()
        for i in range(len(pair_list)):
            instrument = pair_list[i]
            shape = shape_list[i]
            if shape == 1:
                values = ratios.loc[plot_index, instrument]
            else:
                # Invert and reflect about the last value so the series
                # moves in the currency's direction.
                values = (ratios.loc[plot_index, instrument] * -1)\
                    + (2 * ratios.loc[plot_index, instrument].values[-1])
            currency_set[pair_list[i]] = values

        ####### Get Channels with standard devaiton distributions ######
        for c in range(currency_set.shape[1]):
            step = 10
            coll = []
            # NOTE(review): this loop's variable `i` is never used in the
            # slice below — the same full-length channel is recomputed every
            # iteration; presumably the intent was a trailing slice of
            # length i.  Confirm before relying on `coll`.
            for i in range(10, interval + step, step):
                channel = Channel(currency_set.iloc[:, c].values)
                test = channel.flattened / channel.channel_deviation
                k2, p = normaltest(test)
                alpha = 1e-3
                #print("p = {:g}".format(p))
                if p < alpha:
                    pass  #print(str(i) + , "The null hypothesis can be rejected")
                else:
                    coll.append(i)  #print(str(i))# + "The null hypothesis cannot be rejected")
            coll = np.array(coll)
            # Keep only the last span before each gap in the accepted list.
            keep = []
            for i in range(coll.shape[0] - 1):
                if coll[i + 1] > coll[i] + step:
                    keep.append(coll[i])
            if coll.shape[0] > 0:
                keep.append(coll[-1])
            keep = np.array(keep)

            # Plot currencies
            a = ax[c]
            currency_set.iloc[:, c].plot(ax=a, legend=False, linewidth=2.5)
            # Get ticks by pip for currencies
            max_ticks = currency_set.iloc[:, c].values.max()
            min_ticks = currency_set.iloc[:, c].values.min()
            if currency == 'jpy':
                ticks = np.arange(min_ticks, max_ticks, .1).round(6)
            elif currency == 'hkd':
                ticks = np.arange(min_ticks, max_ticks, .01).round(6)
            else:
                if 'JPY' in currency_set.columns[c].split('_'):
                    ticks = np.arange(min_ticks, max_ticks, .1).round(6)
                elif 'HKD' in currency_set.columns[c].split('_'):
                    ticks = np.arange(min_ticks, max_ticks, .01).round(6)
                else:
                    ticks = np.arange(min_ticks, max_ticks, .001).round(4)

            # Plot each normal section on top with channel lines]
            # NOTE(review): `channel` here is whatever the last loop
            # iteration left behind, and `k` is unused — every kept span
            # draws the same lines; verify intent.
            if keep.shape[0] > 0:
                for k in keep[::-1]:
                    a.plot(plot_index, channel.line, color='lightgrey')
                    a.plot(plot_index,
                           channel.line + (np.ones(plot_index.shape[0]) *
                                           (2 * channel.channel_deviation)),
                           color='black')
                    a.plot(plot_index,
                           channel.line + (np.ones(plot_index.shape[0]) *
                                           (-2 * channel.channel_deviation)),
                           color='black')
                    a.plot(plot_index, channel.line, color='grey')
            a.set_yticks(ticks)
            a.grid(which='both')

        ####### Finalize Plot ######
        for i in range(currency_set.shape[1]):
            ax[i].set_ylabel(currency_set.columns[i], rotation=90,
                             size='large')
            ax[i].yaxis.tick_right()
        #plt.pause(.01)
# NOTE(review): the triple-quote below opens a commented-out scratch region
# that continues beyond this block; it is preserved as found.
'''
start = 30000 stop = 60000 # Run through Iterations collecting stats and outcomes up = [] down = [] channel_coll = [] for location in candles.index.values[start:stop]: # Print for progress if location % 1000 == 0: print(location) # Get same sats on each interval at location for interval in intervals: values = candles.loc[location - interval:location, 'bidhigh'].values # Get Channel and staistics channel = Channel(values) channel_coll.append([ location, interval, candles.loc[location, 'timestamp'], candles.loc[location, 'bidhigh'], channel.slope, channel.channel_deviation, channel.position_distance, channel.position_distance_standard ]) # Get outcomes at location for one simple distance for now # distance = np.arange(1, 4) * channel.deviation distance = [.00350] outs = outcomes('short', candles, location, 15000, distance) up.append(outs['loss']) down.append(outs['target']) # Set results DataFrames results_columns = [
def evaluate_positions(timestamp):
    """Scan the configured portfolio for channel breakouts and place orders.

    For each instrument/granularity/window combination whose granularity
    check passes at `timestamp`, fit a Channel on recent candles and compare
    the closing position against the configured top/bottom breakout levels.
    On a breakout (and when the instrument is not already open in the
    matching account) a bracketed order is created and logged.

    Side effects: reloads the `ports` module, places orders, writes logs and
    prints verbose progress.  Returns None.
    """
    importlib.reload(ports)
    port = ports.portfolio()
    accounts = port['accounts']
    print('Timestamp: {}'.format(timestamp))
    for instrument in port['portfolio'].keys():
        print('\n---------Instrument-------------: {}'.format(instrument))
        for granularity in port['portfolio'][instrument].keys():
            print('---granularity---: {}'.format(granularity))
            print('Is granularity check passed: {}'.format(
                check_granularity(granularity, timestamp)))
            if check_granularity(granularity, timestamp):
                # get largest window in granularity — one candle fetch
                # covers every window below.
                largest_window = max(
                    port['portfolio'][instrument][granularity].keys())
                print('Largest window: {}'.format(largest_window))
                print('-------')
                candles = get_candles_by_count(instrument, granularity,
                                               largest_window + 1)
                for window in port['portfolio'][instrument][
                        granularity].keys():
                    print('--')
                    print('Window: {}'.format(window))
                    # Evaluate Channel, profile and filters for window.
                    # Check for breakouts and filter passing
                    closings = candles.midclose.values[-(window):]
                    print('Closings shape: {}'.format(closings.shape))
                    channel = Channel(candles, candles.shape[0] - 1, window)
                    print('Channel position: {}'.format(
                        channel.closing_position))
                    for direction in ['top', 'bottom']:
                        print('Direction: {}'.format(direction))
                        if direction == 'top':
                            breakout = 'short'
                            cond1 = channel.closing_position > port[
                                'portfolio'][instrument][granularity][
                                    window][direction]['position']
                        elif direction == 'bottom':
                            breakout = 'long'
                            cond1 = channel.closing_position < port[
                                'portfolio'][instrument][granularity][
                                    window][direction]['position']
                        print('Breakout Found: {}'.format(cond1))
                        # Placeholder filters (always pass for now).
                        cond2 = True
                        cond3 = True
                        cond4 = True
                        account = accounts[granularity][window][breakout]
                        print('Account: {}'.format(account))
                        if cond1 and cond2 and cond3 and cond4:
                            # do not place if instrument is already an open
                            # position in this account.
                            print('Instrument not already placed: {}'.format(
                                instrument not in
                                get_open_positions(account)))
                            if instrument not in get_open_positions(account):
                                # Get target and loss (configured in sixths
                                # of the channel range).
                                target = port['portfolio'][instrument][
                                    granularity][window][direction]['target']
                                stop = port['portfolio'][instrument][
                                    granularity][window][direction]['stop']
                                target *= channel.channel_range / 6
                                stop *= channel.channel_range / 6
                                if direction == 'bottom':
                                    # long position: buy at ask.
                                    target = candles.askclose.values[
                                        -1] + target
                                    stop = candles.askclose.values[-1] - stop
                                    qty = 100
                                else:
                                    # short position: sell at bid.
                                    target = candles.bidclose.values[
                                        -1] - target
                                    # FIX: original read candles.close here,
                                    # asymmetric with the bidclose used for
                                    # the target (and no 'close' column is
                                    # used anywhere else) — confirm column.
                                    stop = candles.bidclose.values[-1] + stop
                                    qty = -100
                                # Create Order
                                order = create_order(instrument, qty, target,
                                                     stop, account)
                                print('ORDER PLACED.')
                                # FIX: label/placeholders now match the four
                                # values actually passed (qty was silently
                                # dropped before).
                                print('target, stop, qty, askclose: '
                                      '{}, {}, {}, {}'.format(
                                          target, stop, qty,
                                          candles.askclose.values[-1]))
                                print('order number: {}'.format(order))
                                log_placement(
                                    order, candles, account, instrument,
                                    granularity, window,
                                    port['portfolio'][instrument][granularity]
                                    [window][direction]['target'],
                                    port['portfolio'][instrument][granularity]
                                    [window][direction]['stop'],
                                    channel.channel_range,
                                    channel.closing_position,
                                    channel.largest_spike_5,
                                    channel.channel_slope,
                                    channel.closings_slope, qty, target, stop)
                            else:
                                # FIX: message had two placeholders but was
                                # formatted with three args — the account was
                                # never shown.
                                msg = ('{} Breakout on {} found but position '
                                       'already open in account {}.')
                                log_eval(
                                    msg.format(direction, instrument,
                                               account))
    return
outcome_interval = window values = candles.loc[start - window + 1:start, 'midclose'].values volume = candles.loc[start - window + 1:start, 'volume'].values # values = candles_5.loc[start / 5 - window + 1: start / 5 , 'midclose'].values # Supports support_interval = window * 4 support_bins = 50 # Outcomes outcome_values = candles.loc[start:start + outcome_interval, 'midclose'].values # outcome_values = candles_5.loc[start / 5 : start / 5 + outcome_interval, 'midclose'].values # Get Channels channel = Channel(values) outcome_channel = Channel(outcome_values) # get period guess corr = autocorrelation(channel.scaled) corr_orig = corr.copy() margin = int(window * .10) corr = corr[margin:-margin] maximum = corr[:, 1].argmax() minimum = corr[:, 1].argmin() corr_period = min(int(window * .75), 2 * abs(corr[maximum, 0] - corr[minimum, 0])) ''' corr_peaks = np.arange((left[:-1] & right[1:]).shape[0])[left[:-1] & right[1:]] if corr_peaks.shape[0] == 1: corr_period = corr_peaks[0]
shape = shape_list[i] if shape == 1: values = ratios.loc[plot_index, instrument] else: values = (ratios.loc[plot_index, instrument] * -1)\ + (2 * ratios.loc[plot_index, instrument].values[-1]) currency_set[pair_list[i]] = values ####### Get Channels with standard devaiton distributions ###### for c in range(currency_set.shape[1]): step = 10 coll = [] for i in range(10, interval + step, step): channel = Channel(currency_set.iloc[:, c].values) test = channel.flattened / channel.channel_deviation k2, p = normaltest(test) alpha = 1e-3 #print("p = {:g}".format(p)) if p < alpha: pass #print(str(i) + , "The null hypothesis can be rejected") else: coll.append(i) #print(str(i))# + "The null hypothesis cannot be rejected") coll = np.array(coll) keep = [] for i in range(coll.shape[0] - 1): if coll[i + 1] > coll[i] + step: keep.append(coll[i]) if coll.shape[0] > 0: keep.append(coll[-1])
def run(data, granularity, path=path, interval=interval,
        normaltest_step=normaltest_step, normaltest_alpha=normaltest_alpha):
    """Refresh the live currency plot for one granularity.

    Loads the pickled currency frame for `granularity`, redraws each
    currency's series on the module-level `ax` axes, overlays channel lines
    on trailing spans that pass a normality test, marks the next-granularity
    boundary, and re-grids the subplots.

    NOTE(review): `data` is unused in the visible body; `ax` is a
    module-level axes array, not a parameter.
    """
    print(granularity)
    # Import Data
    df = pd.read_pickle(path + granularity + '.pkl')
    plot_index = np.arange(df.last_valid_index() - interval,
                           df.last_valid_index() + 1)
    # Plot Data
    for i in range(len(ax)):
        ax[i].cla()
        df.iloc[plot_index, 1 + i].plot(ax=ax[i])
    # Add labels
    for i in range(ax.shape[0]):
        ax[i].set_ylabel(df.columns[i + 1], rotation=90, size='large',
                         color='white')
        ax[i].yaxis.tick_right()
        ax[i].tick_params(axis='y', colors='white')
    # get channels for currency
    for i in range(1, df.columns.shape[0]):
        coll = []
        currency = df.columns[i]
        # Collect trailing span lengths whose flattened channel looks normal.
        for s in range(20, interval + normaltest_step, normaltest_step):
            channel = Channel(df.loc[df.last_valid_index() - s: \
                                     df.last_valid_index(), currency].values)
            k2, p = normaltest(channel.flattened / channel.channel_deviation)
            #print("p = {:g}".format(p))
            if p < normaltest_alpha:
                pass  # "The null hypothesis can be rejected")
            else:
                coll.append(s)  # "The null hypothesis cannot be rejected")
        coll = np.array(coll)
        # Get Positions for channel breaks: keep the last length before each
        # gap in the accepted list, plus the final one.
        keep = []
        for k in range(coll.shape[0] - 1):
            if coll[k + 1] > coll[k] + normaltest_step:
                keep.append(coll[k])
        if coll.shape[0] > 0:
            keep.append(coll[-1])
        keep = np.array(keep)
        # Plot chnnels by normal break
        for k in keep[::-1]:
            # NOTE(review): broad except that only prints — plotting errors
            # are swallowed by design here.
            try:
                channel = Channel(df.loc[df.last_valid_index() - k:,
                                         currency].values)
                df.loc[df.last_valid_index() - (k + 1):,
                       currency].plot(ax=ax.ravel()[i - 1], marker='|',
                                      markersize=.5)
                line = channel.line
                line_x = np.arange(df.last_valid_index() - (k),
                                   df.last_valid_index() + 1)
                ax.ravel()[i - 1].plot(line_x, line, color='grey',
                                       linewidth=.5)
                ax.ravel()[i - 1].plot(
                    line_x,
                    line + (np.ones(line.shape[0]) * 2 *
                            channel.channel_deviation),
                    color='black', linewidth=.5)
                ax.ravel()[i - 1].plot(
                    line_x,
                    line + (np.ones(line.shape[0]) * -2 *
                            channel.channel_deviation),
                    color='black', linewidth=.5)
            except Exception as e:
                print(e)
                print(line_x)
                print(df.loc[df.last_valid_index() - k:,
                             currency].index.values)
                print(channel.flattened.shape)
                print(line_x.shape)
    # Add line at next granularity for M15 and M5
    if granularity == 'M15':
        line_break = 3
    else:
        line_break = 5
    if granularity != 'M1':
        max_ticks = df.iloc[plot_index, i].max()
        min_ticks = df.iloc[plot_index, i].min()
        vert = int(df.last_valid_index() - interval / line_break)
        ys = ax[-1].get_ylim()
        ax.ravel()[-1].plot([vert, vert],
                            [ys[0], ys[0] + ((ys[1] - ys[0]) / 3)],
                            color='black', linewidth=.5)
    # Add grid to subplots
    for i in range(1, df.columns.shape[0]):
        max_ticks = df.iloc[plot_index, i].max()
        min_ticks = df.iloc[plot_index, i].min()
        # Different ticks for jpy and hkd
        if df.columns[i] == 'jpy':
            ticks = np.arange(min_ticks, max_ticks, .000005).round(6)
        elif df.columns[i] == 'hkd':
            ticks = np.arange(min_ticks, max_ticks, .00001).round(5)
        else:
            ticks = np.arange(min_ticks, max_ticks, .0005).round(4)
        # Create ticks — NOTE(review): grid(b=True) is the pre-3.5
        # matplotlib keyword; newer versions use `visible=`.
        ax.ravel()[i - 1].grid(which='both', linewidth=.5, color='grey',
                               b=True)
        ax.ravel()[i - 1].set_yticks(ticks)
    # Print and Return
    print(df.last_valid_index())
    plt.pause(.01)
fit_avg = 0 does_it_fit = [] # Window window = 1500 start = 124500 # + s * 10000 outcome_interval = window values = candles.loc[start - window + 1:start, 'midclose'].values # Supports support_interval = window * 2 support_bins = 50 # Outcomes outcome_values = candles.loc[start:start + outcome_interval, 'midclose'].values # Get Channels channel = Channel(values) outcome_channel = Channel(outcome_values) # get period guess corr = autocorrelation(channel.scaled) corr_orig = corr.copy() margin = int(window * .10) corr = corr[margin:-margin] maximum = corr[:, 1].argmax() minimum = corr[:, 1].argmin() corr_period = min(int(window * .75), 2 * abs(corr[maximum, 0] - corr[minimum, 0])) ''' corr_peaks = np.arange((left[:-1] & right[1:]).shape[0])[left[:-1] & right[1:]] if corr_peaks.shape[0] == 1: corr_period = corr_peaks[0]
def __init__(self, values, channel_std=2):
    '''
    Fit a sine wave to the channel-flattened `values` via least squares.

    Uses the autocorrelation of the flattened channel to seed the wave
    frequency, then optimizes frequency, phase shift and vertical shift
    with scipy's leastsq (amplitude is fixed at the channel height).

    Do i even need to flatten it?

    NOTE(review): `channel_std` is accepted but never used in this body.
    '''
    channel = Channel(values)
    autocorr = get_autocorrelation(channel.flattened)['autocor']
    # Trim 10% margins off both ends of the autocorrelation.
    margin = int(values.shape[0] * .10)
    corr = autocorr[margin:-margin]
    maximum = corr.argmax()  #[:, 1].argmax()
    minimum = corr.argmin()  #[:, 1].argmin()
    # Period guess: twice the max-to-min lag, capped at 75% of the length.
    corr_period = min(
        int(values.shape[0] * .75),
        2 * abs(corr[maximum] -
                corr[minimum]))  #abs(corr[maximum, 0] - corr[minimum, 0]))

    # Get corr peaks ( maybe good indicator of 'smoothness'
    smoothness = int(values.shape[0] * .25)
    corr_smoothed = corr  #[:, 1]
    corr_smoothed = pd.DataFrame(corr_smoothed).rolling(
        smoothness).mean().values.ravel()
    corr_smoothed = corr_smoothed[smoothness:]
    # Count local maxima of the smoothed autocorrelation (plus endpoints).
    left = (corr_smoothed[1:] > corr_smoothed[:-1])
    right = (corr_smoothed[1:] < corr_smoothed[:-1])
    auto_peaks = (left[:-1] & right[1:]).sum()
    if corr_smoothed[0] > corr_smoothed[1]:
        auto_peaks += 1
    if corr_smoothed[-1] > corr_smoothed[-2]:
        auto_peaks += 1

    # Assign first guesses for wave
    c0 = channel.flattened[0] - channel.channel_deviation * 2
    amplitude = channel.channel_deviation * 2
    frequency_guess = values.shape[0] / corr_period
    phase_shift_guess = -np.argmax(channel.flattened < c0)
    vertical_shift_guess = amplitude + c0
    # Get Real Wave: optimize [frequency, phase, vertical shift] with the
    # amplitude held fixed.
    t = np.linspace(0, 2 * np.pi, channel.flattened.shape[0])
    optimize_func = lambda x: amplitude * np.sin(x[0] * t + x[1]) + x[
        2] - channel.flattened
    est_frequency, est_phase_shift, est_vertical_shift = \
        leastsq(optimize_func,
                [frequency_guess, phase_shift_guess, vertical_shift_guess],
                full_output=True)[0]
    # assess fit
    wave = amplitude * np.sin(est_frequency * t +
                              est_phase_shift) + est_vertical_shift
    # Provide for the tangent
    cosine = amplitude * np.cos(est_frequency * t +
                                est_phase_shift) + est_vertical_shift
    #if desired, leastsq get me some info on how well each parm fts
    # wave_parameter_fits = leastsq(optimize_func, [frequency_guess, phase_shift_guess, vertical_shift_guess], full_output=True)[2]['qtf']

    # Public attributes.
    self.channel = Channel(values)
    self.amplitude = amplitude
    self.frequency = est_frequency
    self.phase_shift = est_phase_shift
    self.vertical_shift = est_vertical_shift
    self.channel_wave = wave
    self.phase_position = 0  # where in phase (%?) was last position
    self.cosine = cosine
    self.tangent = cosine[-1] / self.channel.channel_deviation / 2
    self.basis = values
    x = np.arange(values.shape[0])
    # Wave re-projected back onto the original (unflattened) trend line.
    self.linregress = self.channel.slope * x + self.channel.intercept
    self.wave = self.channel_wave + self.linregress
    self.fit = self.mse(self.wave, values)
############################################################################### if 0: ''' some possibel score measures: frequency of waves - want a few fit of wave ? ''' for i in range(100, 1000, 10): start = 0 end = i instrument = 'EUR_USD' inst = ratios.loc[start:end, instrument].astype(float) channel = Channel(inst.values) wave = Wave(inst.values) plt.figure() plt.plot(channel.flattened) plt.plot(channel.c5() - channel.line) plt.plot(channel.c1() - channel.line) plt.plot(wave.wave - wave.linregress) plt.show() plt.figure() plt.plot(channel.flattened) plt.plot(channel.c5() - channel.line) plt.plot(channel.c1() - channel.line) plt.plot(wave.wave - wave.linregress, 'o') plt.show()
########################################################################### spike_collector = np.array([[0, 0, 0, 0]]) for i in range( 100500, #max(channels_window, supports_window), candles.shape[0] - outcomes_window): # Get values start = i closings = candles.loc[start - channels_window + 1:start, 'midclose'].values volumes = candles.loc[start - channels_window + 1:start, 'volume'].values outcomes_values = candles.loc[start:start + outcomes_window, 'midclose'].values # Get weights channel = Channel(closings) weighted_closing = ((closings[1:] - closings[:-1]) / volumes[1:]).cumsum() w_channel = Channel(weighted_closing) weighted_weird = (closings[1:] - closings[:-1]).cumsum() / volumes[1:] # Analyze weighted channel spikes. Add to collector if abs((w_channel.scaled[-spike_window:] \ - w_channel.scaled[-1])).max() > spike_by_channel: spike_start = int(channels_window - abs((w_channel.scaled[-spike_window:] - w_channel.scaled[-1])).argmax()) spike_stop = int(channels_window) spike_direction = int( np.sign( (w_channel.scaled[-1] - w_channel.scaled[-spike_window:])[-abs( (w_channel.scaled[-spike_window:] -
# Normal Test normaltest_alpha = 1e-10 normaltest_step = 1 ############################################################################### # Front fill normal test ############################################################################### if 0: breaks = [] start = df.first_valid_index() for step in range(df.first_valid_index() + 10, df.last_valid_index() - 10, normaltest_step): try: channel = Channel(df.loc[start:step, currency].values) k2, p = normaltest(channel.flattened / channel.channel_deviation) #print("p = {:g}".format(p)) if p < normaltest_alpha: # "The null hypothesis can be rejected") start = step breaks.append(step) else: pass # coll.append(step) # "The null hypothesis cannot be rejected") except Exception as e: print(start, step, e) breaks = np.array(breaks) df[currency].plot() for _break in breaks:
# Create Diff DataFrame for currencies cur_diff = pd.DataFrame() for column in cur.columns: roll = cur[column].rolling(window=2) \ .apply(lambda x: (x[1] - x[0])).values cur_diff[column] = roll ############################################################################### ############################################################################### if 1: end = 600 pred = 1200 channel = Channel(cur.loc[:end, 'aud'].values, std_ratio = 2) wave = Wave(cur.loc[:end, 'aud'].values) plt.plot(cur.loc[:end, 'aud'].values) plt.figure() plt.plot(channel.flattened) plt.plot(np.zeros(channel.flattened.shape[0]), color='black') plt.plot(np.zeros(channel.flattened.shape[0]) + channel.channel_deviation * 2, color='black') plt.plot(np.zeros(channel.flattened.shape[0]) - channel.channel_deviation * 2, color='black') plt.plot(wave.channel_wave) plt.figure() plt.plot(cur.loc[:pred, 'aud'].values) plt.plot(cur.loc[:end, 'aud'].values) plt.plot(channel.c1(), color='black') plt.plot(channel.c3(), color='black') plt.plot(channel.c5(), color='black') plt.plot(wave.wave)
def get_results_bars(candles, window, search_interval, peaks_window, distance):
    """Scan ``candles`` in half-window steps and build per-stop feature rows.

    For each sampled bar location the function computes channel statistics,
    an autocorrelation-seeded least-squares sine ("wave") fit, support levels
    expressed relative to the channel, and long/short outcome labels over the
    next ``search_interval`` bars.

    Parameters
    ----------
    candles : pandas.DataFrame
        Must provide 'midclose', 'spread' and 'volume' columns.
    window : int
        Bars used for the channel transformation at each stop.
    search_interval : int
        Bars looked ahead when labelling outcomes.
    peaks_window : int
        Argument handed to ``channel.get_supports`` for peak extraction.
    distance : str or array-like
        Outcome distances; a string means "derive multiples of the channel
        range" (computed once, on the first iteration that sees a string).

    Returns
    -------
    dict
        'results', 'long_target', 'long_loss', 'short_target', 'short_loss'
        (DataFrames indexed by bar location) and 'peaks'.
    """
    # Instantiation
    results = []
    long_target = []
    long_loss = []
    short_target = []
    short_loss = []
    peaks = []
    start = max(window, peaks_window)
    for i in range(start, candles.shape[0] - search_interval, int(window / 2)):
        # Print progress.
        if i % 10000 == 0:
            print('Percent complete: {:.2f}'.format(i / candles.shape[0]))
        # Fetch channel transformation on window.
        values = candles.loc[i - window + 1:i, 'midclose'].values
        channel = Channel(values)
        # Get Wave information: autocorrelation of the scaled channel, with a
        # 10% margin trimmed from both ends.
        corr = autocorrelation(channel.scaled)
        margin = int(window * .10)
        corr = corr[margin:-margin]
        maximum = corr[:, 1].argmax()
        minimum = corr[:, 1].argmin()
        corr_period = min(int(window * .75),
                          2 * abs(corr[maximum, 0] - corr[minimum, 0]))
        # Get corr peaks ( maybe good indicator of 'smoothness' )
        smoothness = int(window * .25)
        corr_smoothed = corr[:, 1]
        corr_smoothed = pd.DataFrame(corr_smoothed).rolling(
            smoothness).mean().values.ravel()
        corr_smoothed = corr_smoothed[smoothness:]
        # Count local maxima of the smoothed autocorrelation, including the
        # two endpoints when they dominate their neighbour.
        left = (corr_smoothed[1:] > corr_smoothed[:-1])
        right = (corr_smoothed[1:] < corr_smoothed[:-1])
        auto_peaks = (left[:-1] & right[1:]).sum()
        if corr_smoothed[0] > corr_smoothed[1]:
            auto_peaks += 1
        if corr_smoothed[-1] > corr_smoothed[-2]:
            auto_peaks += 1
        # Assign first guesses for wave
        amplitude = (channel.c7[0] - channel.c1[0]) / 2
        frequency_guess = window / corr_period
        phase_shift_guess = -np.argmax(channel.scaled < channel.c1)
        vertical_shift_guess = amplitude + channel.c1[0]
        # Fit the real wave: least-squares sine fit on the scaled channel.
        t = np.linspace(0, 2 * np.pi, channel.scaled.shape[0])
        optimize_func = lambda x: amplitude * np.sin(x[0] * t + x[1]) + x[
            2] - channel.scaled
        # FIX: the original ran the identical leastsq optimization twice --
        # once for the parameters and once for the 'qtf' diagnostics. Run it
        # once and unpack both from the same result.
        fit_output = leastsq(
            optimize_func,
            [frequency_guess, phase_shift_guess, vertical_shift_guess],
            full_output=True)
        est_frequency, est_phase_shift, est_vertical_shift = fit_output[0]
        wave_parameter_fits = fit_output[2]['qtf']
        # assess fit (mean squared error of the fitted wave)
        wave = amplitude * np.sin(
            est_frequency * t + est_phase_shift) + est_vertical_shift
        fit = ((wave - channel.scaled)**2).mean()
        # Get Supports
        # NOTE(review): 'start' is fixed at loop entry, so this support
        # window does NOT advance with i. Looks like it was meant to be 'i';
        # preserved as-is to avoid a silent behavior change -- confirm.
        support_interval = window * 2
        support_values = candles.loc[start - support_interval:start,
                                     'midclose'].values
        supports = channel.get_supports(support_values)
        support_by_channel = (supports - channel.closings_c1[-1]) / (
            channel.closings_c7[-1] - channel.closings_c1[-1]).tolist()
        # Pad/truncate to exactly three support features.
        if len(support_by_channel) == 0:
            support_by_channel = [0, 0, 0]
        elif len(support_by_channel) == 1:
            support_by_channel = [
                support_by_channel[0], support_by_channel[0],
                support_by_channel[0]
            ]
        elif len(support_by_channel) == 2:
            support_by_channel = [
                support_by_channel[0], support_by_channel[1],
                support_by_channel[1]
            ]
        elif len(support_by_channel) >= 4:
            support_by_channel = support_by_channel[:3]
        # Build up results
        results.append([
            i, channel.channel_slope, channel.closings_slope,
            channel.closing_position, channel.channel_range,
            channel.channel_degree, channel.linear_p_value,
            channel.largest_spike, channel.largest_spike_5,
            channel.within_range, candles.loc[i, 'spread'],
            candles.loc[i, 'volume'], channel.c1[-1], channel.c7[-1], wave[-1],
            wave[-1] - wave[-2], fit, amplitude, est_frequency,
            est_phase_shift, est_vertical_shift, wave_parameter_fits[0],
            wave_parameter_fits[1], wave_parameter_fits[2],
            support_by_channel[0], support_by_channel[1],
            support_by_channel[2], corr[:, 1].max(), corr[:, 1].mean(),
            auto_peaks
        ])
        # Get Peaks
        # NOTE(review): here get_supports receives the int 'peaks_window'
        # while above it received an array of values -- verify the intended
        # overload against the Channel class.
        peaks_collection = channel.get_supports(peaks_window)
        for peak in peaks_collection:
            peaks.append([i, peak])
        # Set distance for outcome
        if isinstance(distance, str):  # FIX: idiomatic isinstance check
            distance = np.array([.25, .5, .75, 1, 1.25, 1.5, 2
                                 ]) * (channel.channel_range)
        # Get long outcomes
        outs = outcomes('long', candles, i, search_interval, distance, False)
        long_target.append([i] + outs['target'])
        long_loss.append([i] + outs['loss'])
        # get short outcomes
        outs = outcomes('short', candles, i, search_interval, distance, False)
        short_target.append([i] + outs['target'])
        short_loss.append([i] + outs['loss'])
    # Assemble Dataframes
    results_columns = [
        'location', 'channel_slope', 'closings_slope',
        'channel_closing_position', 'channel_range', 'channel_degree',
        'linear_p_value', 'largest_spike', 'largest_spike_5', 'within_range',
        'spread', 'volume', 'c1', 'c7', 'wave_position', 'wave_tangent',
        'wave_fit', 'amplitude', 'frequency', 'phase_shift', 'vertical_shift',
        'frequency_fit', 'phase_fit', 'vert_fit', 'support_0', 'support_1',
        'support_2', 'auto_max', 'auto_mean', 'auto_peaks'
    ]
    results = pd.DataFrame(np.array(results), columns=results_columns)
    long_target = pd.DataFrame(np.array(long_target))
    long_loss = pd.DataFrame(np.array(long_loss))
    short_target = pd.DataFrame(np.array(short_target))
    short_loss = pd.DataFrame(np.array(short_loss))
    peaks = pd.DataFrame(np.array(peaks), columns=['location', 'peaks'])
    # Set indexes
    results = results.set_index('location', drop=True)
    long_target = long_target.set_index(0, drop=True)
    long_loss = long_loss.set_index(0, drop=True)
    short_target = short_target.set_index(0, drop=True)
    short_loss = short_loss.set_index(0, drop=True)
    # Correct Indexes
    long_target.index = long_target.index.rename('location')
    long_loss.index = long_loss.index.rename('location')
    short_target.index = short_target.index.rename('location')
    short_loss.index = short_loss.index.rename('location')
    # Set index type
    results.index = results.index.astype(int)
    long_target.index = long_target.index.astype(int)
    long_loss.index = long_loss.index.astype(int)
    short_target.index = short_target.index.astype(int)
    short_loss.index = short_loss.index.astype(int)
    # Return
    return {
        'results': results,
        'long_target': long_target,
        'long_loss': long_loss,
        'short_target': short_target,
        'short_loss': short_loss,
        'peaks': peaks
    }
def plot_currency_indicators(currencies, cu, ratios, plot_index,
                             indicator_index, interval, windows, color_list):
    """Build one multi-panel diagnostic figure per currency.

    Layout (5x5 GridSpec, one named figure per currency):
      row 0: raw price with normally-distributed sections highlighted
      row 1: mean channel position across the supplied windows
      row 2: mean channel slope across the supplied windows
      row 3: shifted currency set (JPY/HKD excluded for other currencies)
      row 4: one panel per window with per-instrument channel positions

    Parameters
    ----------
    currencies : iterable of str
        Lower-case currency codes; also used as figure names.
    cu : pandas.DataFrame
        One price column per currency.
    ratios : pandas.DataFrame
        Instrument ratio series, columns named like 'EUR_USD'.
    plot_index, indicator_index : index-like
        Row ranges used for plotting and for indicator computation.
    interval : int
        Maximum look-back swept by the normality test.
    windows : numpy.ndarray
        Channel windows for the position / slope panels.
    color_list : sequence
        Colors for the multi-window mean panels.
    """
    # Plots for each currency indicators
    for currency in currencies:
        fig = plt.figure(currency,
                         clear=True,
                         tight_layout=True,
                         facecolor='grey')
        gs = gridspec.GridSpec(5, 5)
        gs.update(wspace=0, hspace=0)
        # Arange subplots sizing: four full-width rows plus a bottom row of
        # five panels that share the y axis.
        ax1 = plt.subplot(gs[0, :])
        ax8 = plt.subplot(gs[1, :], sharex=ax1)
        ax9 = plt.subplot(gs[2, :], sharex=ax1)
        ax10 = plt.subplot(gs[3, :], sharex=ax1)
        ax2 = plt.subplot(gs[4, 0])
        ax3 = plt.subplot(gs[4, 1], sharey=ax2)
        ax4 = plt.subplot(gs[4, 2], sharey=ax2)
        ax5 = plt.subplot(gs[4, 3], sharey=ax2)
        ax6 = plt.subplot(gs[4, 4], sharey=ax2)
        # Axis stuff
        plt.setp(ax3.get_yticklabels(), visible=False)
        plt.setp(ax4.get_yticklabels(), visible=False)
        plt.setp(ax5.get_yticklabels(), visible=False)
        plt.setp(ax6.get_yticklabels(), visible=False)
        plt.setp(ax1.get_xticklabels(), visible=False)
        plt.setp(ax8.get_xticklabels(), visible=False)
        plt.setp(ax9.get_xticklabels(), visible=False)
        plt.setp(ax10.get_xticklabels(), visible=False)
        ax1.set_facecolor('xkcd:pale grey')
        ax2.set_facecolor('xkcd:pale grey')
        ax3.set_facecolor('xkcd:pale grey')
        ax4.set_facecolor('xkcd:pale grey')
        ax5.set_facecolor('xkcd:pale grey')
        ax6.set_facecolor('xkcd:pale grey')
        ax8.set_facecolor('xkcd:pale grey')
        ax9.set_facecolor('xkcd:pale grey')
        ax10.set_facecolor('xkcd:pale grey')

    ###########################################################################
    # Plot currencies. Color by Standard Normal organizatio. With regression
    # line.
    ###########################################################################
    if 1:
        for currency in currencies:
            # Set ax and figure
            ax = plt.figure(currency).get_axes()
            # Get ticks by pip for currencies
            max_ticks = cu.loc[plot_index, currency].values.max()
            min_ticks = cu.loc[plot_index, currency].values.min()
            if currency == 'hkd':
                ticks = np.arange(min_ticks, max_ticks, .00001).round(6)
            elif currency == 'jpy':
                ticks = np.arange(min_ticks, max_ticks, .000005).round(6)
            else:
                ticks = np.arange(min_ticks, max_ticks, .0001).round(4)
            # Sweep look-back lengths; keep those whose flattened channel
            # still looks normally distributed (normaltest fails to reject).
            step = 10
            coll = []
            for i in range(10, interval + step, step):
                channel = Channel(cu.loc[cu.last_valid_index() - i: \
                                         cu.last_valid_index(),
                                         currency].values)
                k2, p = normaltest(channel.flattened)
                alpha = 1e-3
                if p < alpha:
                    # Null hypothesis rejected: section is not normal.
                    pass
                else:
                    # Cannot reject normality: keep this look-back.
                    coll.append(i)
            coll = np.array(coll)
            # Collapse contiguous runs, keeping the last look-back of each.
            keep = []
            for i in range(coll.shape[0] - 1):
                if coll[i + 1] > coll[i] + step:
                    keep.append(coll[i])
            if coll.shape[0] > 0:
                keep.append(coll[-1])
            keep = np.array(keep)
            # Plot currencies
            a = ax[0]
            cu.loc[plot_index, currency].plot(ax=a)
            # Plot each normal section on top, longest first, with the
            # channel regression line in grey.
            for k in keep[::-1]:
                channel = Channel(cu.loc[cu.last_valid_index() - k:,
                                         currency].values)
                cu.loc[cu.last_valid_index() - k:, currency].plot(ax=a)
                a.plot(np.arange(cu.last_valid_index() - (k + 1),
                                 cu.last_valid_index()),
                       channel.line,
                       color='grey')
                cu.loc[cu.last_valid_index() - k:, currency].plot(ax=a,
                                                                  marker='.')
            plt.setp(a.get_xticklabels(), visible=True)
            a.set_title('Currency Price')
            a.set_yticks(ticks)
            a.grid(which='both')
            print(currency)

    ###########################################################################
    # Graph shifted Currency Sets. Align inverse positions
    ###########################################################################
    if 1:
        for currency in currencies:
            # Set ax and figure
            ax = plt.figure(currency).get_axes()
            # Get Insturment List and which direction to align instrument
            pair_list = []
            shape_list = []
            for pair in ratios.columns:
                if currency.upper() in pair.split('_'):
                    pair_list.append(pair)
                    if currency.upper() == pair.split('_')[0]:
                        shape_list.append(1)
                    else:
                        shape_list.append(-1)
            # Get Slope position for all values (instruments)
            currency_set = pd.DataFrame()
            has_jpy = pd.DataFrame()
            for i in range(len(pair_list)):
                instrument = pair_list[i]
                shape = shape_list[i]
                if shape == 1:
                    values = ratios.loc[plot_index, instrument]
                else:
                    # Invert the pair and shift it so it ends on the same
                    # level as the original series.
                    values = (ratios.loc[plot_index, instrument] * -1)\
                        + (2 * ratios.loc[plot_index, instrument].values[-1])
                # Don't include jpy, hkd inside shifted currency sets for
                # others
                if currency != 'JPY' and currency != 'HKD':
                    if 'JPY' in instrument.split(
                            '_') or 'HKD' in instrument.split('_'):
                        has_jpy[pair_list[i]] = values
                    else:
                        currency_set[pair_list[i]] = values
                else:
                    currency_set[pair_list[i]] = values
            try:
                # Plot Values
                a = ax[3]
                (currency_set -
                 currency_set.loc[currency_set.first_valid_index()]).plot(
                     ax=a)
                a.plot(plot_index,
                       np.ones(plot_index.shape[0]) * 0,
                       color='grey')
                a.set_title('Shifted Currency Set - excluding JPY and HKD')
            # FIX: narrowed the original bare 'except:'; an empty
            # currency_set is the expected (best-effort) failure here.
            except Exception:
                print(
                    'had nothing for jpy, hkd - see line 118, plot_indicaotrs'
                )

    ###########################################################################
    # Plot currency set average positions and slopes over multiple windows
    ###########################################################################
    if 1:
        for currency in currencies:
            # Set ax and figure
            ax = plt.figure(currency).get_axes()
            slopes_mean = pd.DataFrame()
            position_mean = pd.DataFrame()
            for w in range(windows.shape[0]):
                win = np.array([windows[w]])
                # Get Insturment List and which direction to align instrument
                pair_list = []
                shape_list = []
                for pair in ratios.columns:
                    if currency.upper() in pair.split('_'):
                        pair_list.append(pair)
                        if currency.upper() == pair.split('_')[0]:
                            shape_list.append(1)
                        else:
                            shape_list.append(-1)
                # Get Slope position for all values (instruments)
                positions = pd.DataFrame()
                deviations = pd.DataFrame()
                slopes = pd.DataFrame()
                for i in range(len(pair_list)):
                    instrument = pair_list[i]
                    shape = shape_list[i]
                    if shape == 1:
                        values = ratios.loc[indicator_index, instrument]
                    else:
                        values = (ratios.loc[indicator_index, instrument] * -1)\
                            + (2 * ratios.loc[indicator_index,
                                              instrument].values[-1])
                    pos = get_channel_mean_pos_std(values.values, win)
                    positions[instrument] = pos['pos'].values.ravel()
                    deviations[instrument] = pos['std'].values.ravel()
                    slopes[instrument] = pos['slope'].values.ravel()
                # Arange Index to match currency locations
                slopes_mean[win[0]] = slopes.mean(axis=1)
                position_mean[win[0]] = positions.mean(axis=1)
                # Plot positions for each window (last 15 plotted points),
                # with grey guide lines at +2 / 0 / -2.
                end_values = -15
                positions.index = indicator_index
                a = ax[w + 4]
                positions.loc[plot_index[end_values:]].plot(ax=a,
                                                            legend=False)
                positions.loc[plot_index[end_values:]].mean(axis=1).plot(
                    ax=a, color='black', legend=False)
                a.plot(plot_index[end_values:],
                       np.ones(plot_index.shape[0])[end_values:] * 2,
                       color='grey')
                a.plot(plot_index[end_values:],
                       np.ones(plot_index.shape[0])[end_values:] * -2,
                       color='grey')
                a.plot(plot_index[end_values:],
                       np.ones(plot_index.shape[0])[end_values:] * 0,
                       color='grey')
                a.set_title(windows[w])
            # Arange Index to match currency locations
            position_mean.index = indicator_index
            slopes_mean.index = indicator_index
            # Plot currency set position average over mulitple windows
            a = ax[1]
            # FIX: the pandas plot keyword is 'color'; the original passed
            # 'colors', which modern pandas/matplotlib rejects.
            position_mean.loc[plot_index].plot(ax=a,
                                               color=color_list,
                                               legend=False)
            a.plot(plot_index, np.ones(plot_index.shape[0]) * 2, color='grey')
            a.plot(plot_index, np.ones(plot_index.shape[0]) * -2,
                   color='grey')
            a.plot(plot_index, np.ones(plot_index.shape[0]) * 0, color='grey')
            a.set_title(
                'Mean of Currency Set Channel Positions on Mulitple Windows')
            a.grid(axis='x')
            # Plot currency set Slope average over mulitple windows
            a = ax[2]
            slopes_mean.loc[plot_index].plot(ax=a, color=color_list)
            a.plot(plot_index, np.ones(plot_index.shape[0]) * 0, color='grey')
            a.set_title('Mean of Currency Set Slopes on Mulitple Windows')
            a.grid(axis='x')
short_loss = []
# Walk every bar (once enough history exists for the channel / supports
# windows) and build a per-bar feature row from the plain and
# volume-weighted channels.
# NOTE(review): fragment is truncated -- the enclosing definition and the
# remainder of the 'tmp' feature row are not visible in this view.
for i in range(max(channels_window, supports_window),
               candles.shape[0] - outcomes_window):
    if i % 10000 == 0:
        print('Percent complete: {:.2f}'.format(i / candles.shape[0]))
    # Get values
    start = i
    closings = candles.loc[start - channels_window + 1:start,
                           'midclose'].values
    volumes = candles.loc[start - channels_window + 1:start, 'volume'].values
    outcomes_values = candles.loc[start:start + outcomes_window,
                                  'midclose'].values
    # Get weights
    channel = Channel(closings)
    # Cumulative sum of per-bar price changes divided by volume.
    weighted_closing = ((closings[1:] - closings[:-1]) / volumes[1:]).cumsum()
    w_channel = Channel(weighted_closing)
    weighted_weird = (closings[1:] - closings[:-1]).cumsum() / volumes[1:]
    # weigthed spikes
    weighted_spikes = w_channel.scaled[-1] - w_channel.scaled[
        -spike_windows]
    # Append to results
    tmp = [
        i, channel.channel_slope, channel.closings_slope,
        channel.closing_position, channel.channel_range,
        channel.largest_spike,
# Get Indicators rolling_pos, rolling_dev = get_rolling(cu, currencies, windows) correlation = get_correlation(cu, currencies, windows) channel_pos, channel_dev = get_channels(cu, currencies, windows) cu = cu.reset_index() ratios = ratios.reset_index() eur = cu.eur.copy() ############################################################################### # Complete all indicators over backfilled cu ############################################################################### if 0: make_channel_values = eur.loc[:1700].values channel = Channel(make_channel_values) channel_upper = get_distribution_boundary(channel.flattened, .02)['upper_bound'] channel_lower = get_distribution_boundary(channel.flattened, .02)['lower_bound'] channel_window = 60 channel_position = channel_pos.loc[channel_pos.currency == 'eur']\ .loc[channel_pos.windows == channel_window, 'channel_pos'] channel_position = channel_position.reset_index(drop=True) channel_position = pd.DataFrame( np.insert(channel_position.values, 0, [np.nan] * channel_window)) channel_deviation = channel_dev.loc[channel_dev.currency == 'eur']\ .loc[channel_dev.windows == channel_window, 'channel_deviation'] channel_deviation = channel_deviation.reset_index(drop=True)
from classes.channel import Channel # Client list client_list = {} # Channel list channel_list = {"general": Channel("general")}
Rolling Std. ( and mean/variance)
Rolling Position ( and mean/variance)
Against Currency
'''
# NOTE(review): this fragment begins inside a module docstring whose opening
# quotes are outside this view.

# Plot
# -----------------------------------------------------------------------------
# 4x3 grid: bottom row shows the raw series / diff / autocorrelation; the
# left column shows the mean waves (raw and flattened).
fig, ax = plt.subplots(4, 3, figsize=(10, 10), sharex=True)
color_list = plt.cm.Blues(np.linspace(.25, .75, windows.shape[0]))

# Currency (All Top Row)
# -----------------------------------------------------------------------------
# Raw series with its flattened channel on a twin axis, first differences,
# and the autocorrelation panel.
ax[3, 0].plot(cur1)
ax1 = ax[3, 0].twinx()
ax1.plot(Channel(cur1).flattened, color='orange')
ax[3, 1].plot(cur1_diff)
ac(cur1, ax=ax[3, 2])

# Mean (Left Column)
# -----------------------------------------------------------------------------
# Normal
mean_waves.plot(color=color_list, ax=ax[0, 0])
mean_waves.mean(axis=1).plot(color='black', ax=ax[0, 0])
ax1 = ax[0, 0].twinx()
# Cross-window standard deviation on a twin axis.
mean_waves.std(axis=1).plot(color='orange', ax=ax1)
# Flattened
mean_waves_flat.plot(color=color_list, ax=ax[1, 0])
mean_waves_flat.mean(axis=1).plot(color='black', ax=ax[1, 0])
ax1 = ax[1, 0].twinx()
mean_waves_flat.std(axis=1).plot(color='orange', ax=ax1)