async def prefix_remove(self, ctx: Context, prefix: Prefix) -> None:
    """Removes a prefix from the list of custom prefixes.

    This is the inverse of the 'prefix add' command. You can use
    this to remove prefixes from the default set as well.

    You must have Manage Server permission to use this command.
    """
    # BUG FIX: the original code did cast(str, Prefix) — casting the *class*
    # instead of the argument — so list.remove() could never match and the
    # command always replied "I do not have this prefix registered."
    parsed = cast(str, prefix)
    current_prefixes = self.bot.get_raw_guild_prefixes(ctx.guild.id)
    try:
        current_prefixes.remove(parsed)
    except ValueError:
        await ctx.send("I do not have this prefix registered.")
        return

    try:
        await self.bot.set_guild_prefixes(ctx.guild, current_prefixes)
    except Exception as e:
        # Surface the failure to the user with a cross mark plus the reason.
        await ctx.send(f"{ctx.tick(False)} {e}")
    else:
        await ctx.send(ctx.tick(True))
def planes_2_json():
    """Read the sample planes source file, parse each non-empty line, and
    dump the parsed records (as plain dicts) to resources/planes.json."""
    source_path = '{0}/planes_sample.txt'.format(Context.get(Parameter.BASE_RESOURCE_PATH))
    records = [
        parse_line_from_planes_source(raw).__dict__
        for raw in read_txt(source_path)
        if raw
    ]
    write_2_json(records, 'resources/planes.json')
def main(function, lookback, start, end, tickers, file, provider, plot_vs, plot_pct_levels, verbose):
    """ Tool for analyzing and plotting market internals

    <lookback>: Integer to specify lookback period

    <function>: Available analysis methods
        'hilo': to calculate number of stocks at X-day highs/lows.
        'dma': calculate number of stocks below/above any moving average.
    """
    context = Context(start, end, tickers, file, provider, verbose)
    df_list = context.data_frames
    click.echo("Fetching data for {:d} tickers".format(len(df_list)))
    plot_vs_df = None
    if plot_vs:
        # Optional benchmark series plotted alongside the analysis output.
        plot_vs_df = context.data_provider.get_data([plot_vs], from_date=context.start_date, to_date=context.end_date)[0]
    if function == 'hilo':
        hilo_analysis(lookback, context.start_date, context.end_date, df_list, plot_vs_df, plot_pct_levels.split(","))
    if function == 'dma':
        # Similar to SPXA50R http://stockcharts.com/h-sc/ui?s=$SPXA50R
        dma_analysis(lookback, context.start_date, context.end_date, df_list, plot_vs_df, plot_pct_levels.split(","))
    if context.data_provider.errors > 0:
        # BUG FIX: previously formatted provider.errors — 'provider' is the raw
        # CLI argument, not the data-provider object the guard above checks.
        logger.warning("Missing data for {:d} tickers.".format(context.data_provider.errors))
def planes_2_db():
    """Parse the ACFTREF planes source file and persist every record
    through the planes controller."""
    source_path = '{0}/ACFTREF.txt'.format(Context.get(Parameter.BASE_RESOURCE_PATH))
    controller = PlanesController()
    for raw_line in read_txt(source_path):
        if not raw_line:
            continue
        controller.create(parse_line_from_planes_source(raw_line))
async def prefix_clear(self, ctx: Context) -> None:
    """Drops every custom prefix for this guild.

    Once cleared, the bot listens to mention prefixes only.

    You must have Manage Server permission to use this command.
    """
    no_prefixes = []
    await self.bot.set_guild_prefixes(ctx.guild, no_prefixes)
    confirmation = ctx.tick(True)
    await ctx.send(confirmation)
def __init__(self):
    """Initialize the service: bind the application context and cache,
    create the validator, and build the SMS client from app config."""
    self.ctx = Context.inst()
    self.cache = self.ctx.cache
    self.valid_srv = Validate()
    sms_conf = current_app.config.get('SMS_CNF')
    self.sms_srv = SMS(a_key_id=sms_conf['KEY_ID'],
                       a_key_secret=sms_conf['KEY_SECRET'])
def create(self, title):
    """Create a new row in the 'ticket_types' table.

    :param title: ticket type title
    :return: response from the MySQL database
    """
    MySQLConnector.INSTANCE.execute_query('use {0};'.format(
        Context.get(Parameter.DB_NAME)))
    # BUG FIX: the original interpolated the title *unquoted*
    # ("... value (Economy);"), which is invalid SQL for any string value.
    # The value is now quoted, with backslashes and single quotes escaped.
    # NOTE(review): this is still string-built SQL and injection-prone —
    # switch to a parameterized query if MySQLConnector supports one.
    safe_title = str(title).replace('\\', '\\\\').replace("'", "\\'")
    return MySQLConnector.INSTANCE.execute_query(
        "INSERT INTO ticket_types(title) VALUES ('{0}');".format(safe_title))
def main(function, start, end, tickers, file, provider, verbose):
    """Simple tool (based on https://github.com/pmorissette/ffn) for intermarket analysis.

    <function>: Available analysis methods:
        'average': display average combined returns
        'heat': display correlations heatmap
        'scatter': display scatter matrix
    """
    context = Context(start, end, tickers, file, provider, verbose)
    df_list = context.data_frames
    if len(df_list) < 1:
        click.echo("No dataframes. Exiting.")
        return
    # One Close series per dataframe, named after its ticker symbol.
    closes = []
    for df in df_list:
        closes.append(df['Close'].rename(df['Ticker'][0]))
    if function == 'heat':
        g = ffn.GroupStats(*closes)
        g.plot_correlation()
        plt.show()
    elif function == 'scatter':
        g = ffn.GroupStats(*closes)
        g.plot_scatter_matrix()
        plt.show()
    elif function == 'average':
        col = "Close"
        # Renamed from 'tickers' (was shadowing the CLI parameter).
        plot_title = "Average: " + ", ".join([df['Ticker'][0] for df in df_list])
        rebased_merged = ffn.core.merge(*[ffn.core.rebase(c) for c in closes])
        average = pd.DataFrame(columns=[col])
        for index, row in rebased_merged.iterrows():
            # BUG FIX: DataFrame.set_value was deprecated in pandas 0.21 and
            # removed in 1.0; .at is the supported scalar setter.
            average.at[index, col] = row.values.mean()
        average = ta.add_ma(average, 200)
        average.plot()
        plt.title(plot_title)
        plt.show()
    else:
        click.echo("{:s} not recognized".format(function))
    if context.data_provider.errors > 0:
        # BUG FIX: previously formatted provider.errors — 'provider' is the raw
        # CLI argument, not the data-provider object the guard above checks.
        logger.warning("Missing data for {0} tickers.".format(context.data_provider.errors))
def get(self, request, alias):
    """Render the supplier-organisation page for the given alias (404 if
    no active org matches)."""
    supplier_org = get_object_or_404(SupplierOrg, alias=alias, active=True)
    groups = Group.objects.filter(
        active=True,
        supplierorggroupconn__supplier_org=supplier_org,
    )
    form = SupplierOrgContactForm()
    form.set_initial(supplier_org.id)
    template_context = {
        "form": form,
        "org": supplier_org,
        "groups": groups,
        "context": Context.get(request),
    }
    return render(request, "supplier/org.html", template_context)
def get(self, request, alias):
    """Render the supplier detail page: best lots, full lot list, and a
    pre-filled contact form (404 if no active supplier matches)."""
    supplier = get_object_or_404(Supplier, alias=alias, active=True)
    all_lots = Lot.objects.filter(active=True, supplier=supplier)
    top_lots = Lot.objects.filter(active=True, supplier=supplier, best=True)[:5]
    form = SupplierContactForm()
    form.set_initial(supplier.id)
    payload = {
        "form": form,
        "supplier": supplier,
        "best_lots": top_lots,
        "lots": all_lots,
        "context": Context.get(request),
    }
    return render(request, "supplier/supplier.html", payload)
def get(self, request, alias):
    """Render a lot page with its contact/credit/leasing/rent forms and the
    recommended-lots block (404 if no active lot matches)."""
    lot = get_object_or_404(Lot, alias=alias, active=True)
    contact_form = SupplierContactForm()
    contact_form.set_initial(lot.supplier.id)
    credit_form = LotCreditForm()
    credit_form.set_initial(lot.id)
    leasing_form = LotLeasingForm()
    leasing_form.set_initial(lot.id)
    rent_form = LotRentForm()
    rent_form.set_initial(lot.id)
    return render(request, "lot/lot.html", {
        "lot": lot,
        "form_contact": contact_form,
        "form_credit": credit_form,
        "form_leasing": leasing_form,
        "form_rent": rent_form,
        "recommended_lots": Lot.get_recommended(lot.id),
        "context": Context.get(request),
    })