def get_interval_and_start_time(request):
    """Build a template context describing the requested time window.

    Reads ``interval`` (minutes), ``datetime_start`` and ``move_interval``
    from the query string, and returns the context produced by
    ``get_names_for_all_column_headers()`` augmented with:

    - ``'datetime_start'``: start time string, ``"%Y-%m-%d %H:%M:%S"``
    - ``'start_timestamp'``: local timestamp of the (possibly shifted) start
    - ``'interval'``: window length in seconds

    Fix: the original assigned ``context = {}`` and immediately overwrote
    it with the header-names context — the dead assignment is removed.
    """
    context = get_names_for_all_column_headers()
    move_interval = request.GET.get('move_interval', None)
    datetime_start = request.GET.get('datetime_start', None)
    interval = request.GET.get('interval', None)
    if not interval:
        interval = 10 * 60  # default interval is 10 minutes
    else:
        interval = int(interval) * 60  # converting minutes (more suitable) to seconds
    if not datetime_start or datetime_start.lower() == 'now':
        datetime_start = utils.strftime(utils.get_local_now(),
                                        "%Y-%m-%d %H:%M:%S")
    start_timestamp = utils.datelocal_totimestamp(
        utils.strptime(datetime_start, "%Y-%m-%d %H:%M:%S"))
    # 'back'/'forward' shift the window by exactly one interval; the
    # truthiness pre-check in the original was redundant with ==.
    if move_interval == 'back':
        start_timestamp -= interval
    elif move_interval == 'forward':
        start_timestamp += interval
    context['datetime_start'] = datetime_start
    context['start_timestamp'] = start_timestamp
    context['interval'] = interval
    return context
def _write(self, d):
    """Timestamp *d*, serialize it to JSON, and send it to the SQS queue.

    Returns the status of the raw ``SendMessage`` request.
    """
    # if not self.queue:
    #     self.initConnection()
    d['timestamp'] = utils.strftime()
    payload = jsontools.dumps(d)
    msg = Message()
    msg.set_body(payload)
    self.logger.debug("writing to aws: %s" % payload)
    request_params = {"MessageBody": msg.get_body_encoded()}
    return self.sqs_conn.get_status('SendMessage', request_params,
                                    self.queuename)
def report(self):
    """Log a trade report for this session's filled orders and publish it.

    Fetches the final state of every buy/sell order that was placed,
    logs per-order details and the aggregate pay/income/profit, records
    the profit via ``add_profit()`` and sends a summary via ``wx_report()``.
    Returns early (with a warning) when nothing was bought.
    """
    # Re-fetch each placed order to get its final state.
    orders = [
        self.trade_client.get_order(order.order_id)
        for order in self.buy_id + self.sell_id
        if order.order_id
    ]
    # finished_at is in milliseconds; amount/fee fields arrive as strings.
    # NOTE(review): 'currency' strips the last 4 chars of the symbol —
    # assumes a 4-char quote suffix such as 'usdt'; confirm for other quotes.
    order_info = [{
        'symbol': order.symbol,
        'time': strftime(order.finished_at / 1000, fmt='%Y-%m-%d %H:%M:%S.%f'),
        'price': round(float(order.filled_cash_amount) / float(order.filled_amount), 6),
        'amount': round(float(order.filled_amount), 6),
        'fee': round(float(order.filled_fees), 6),
        'currency': order.symbol[:-4].upper(),
        'vol': float(order.filled_cash_amount),
        'direct': order.type.split('-')[0]}
        for order in orders
        if order.state == 'filled'
    ]
    buy_info = list(filter(lambda x: x['direct']=='buy', order_info))
    sell_info = list(filter(lambda x: x['direct']=='sell', order_info))
    # Total cash spent on buys; nothing bought -> nothing to report.
    pay = round(sum([each['vol'] for each in buy_info]), 4)
    if pay <= 0:
        logger.warning(f'NO REPORT for User {self.account_id}')
        return
    income = round(sum([each['vol'] - each['fee'] for each in sell_info]), 4)
    profit = round(income - pay, 4)
    # NOTE(review): raises ZeroDivisionError if usdt_balance is 0 — confirm
    # callers guarantee a positive balance here.
    percent = round(profit / self.usdt_balance * 100, 4)
    logger.info(f'REPORT for user {self.account_id}')
    logger.info('Buy')
    for each in buy_info:
        currency = each['currency']
        symbol_name = '/'.join([currency, 'USDT'])
        vol = each['vol']
        amount = each['amount']
        price = each['price']
        # NOTE(review): buy fee is rescaled by price before logging —
        # presumably a unit conversion; confirm the intended fee currency.
        fee = round(each['fee'] * price, 6)
        each['fee'] = fee
        logger.info(f'{symbol_name}: use {vol} USDT, get {amount} {currency}, price {price}, fee {fee} {currency}, at {each["time"]}')
    logger.info('Sell')
    for each in sell_info:
        currency = each['currency']
        symbol_name = '/'.join([currency, 'USDT'])
        vol = each['vol']
        amount = each['amount']
        price = each['price']
        fee = each['fee']
        logger.info(f'{symbol_name}: use {amount} {currency}, get {vol} USDT, price {price}, fee {fee} USDT, at {each["time"]}')
    logger.info(f'Totally pay {pay} USDT, get {income} USDT, profit {profit} USDT, {percent}%')
    # Persist the result and push a summary notification.
    add_profit(self.account_id, pay, income, profit, percent)
    total_profit, month_profit = get_profit(self.account_id)
    wx_report(self.account_id, self.wxuid, self.username, pay,
              income, profit, percent, buy_info, sell_info, total_profit, month_profit)
def edit_deployment(request, project_id, deployment_id):
    """Render the edit form for one deployment within a project."""
    deployment = get_query("deployment")(deployment_id)
    # An unset end time is displayed as the literal string "None".
    if isinstance(deployment.time_end, (int, long)):
        end_text = utils.strftime(utils.timestamp_todate(deployment.time_end))
    else:
        end_text = "None"
    start_text = utils.strftime(utils.timestamp_todate(deployment.time_start))
    edit_url = reverse("project:edit-deployment",
                       args=(project_id, deployment_id))
    return render_project_form(
        request=request,
        project_id=project_id,
        post_form=EditDeploymentForm(data=request.POST, instance=deployment),
        get_form=EditDeploymentForm(
            instance=deployment,
            initial={'time_start': start_text, 'time_end': end_text}),
        template_path="project/edit-deployment.html",
        success_url="%s?new_element=True" % edit_url)
def do_lsblocked(self, args):
    '''List the banned (blocked) users.'''
    entries = [
        unicode('* %s (%s, %s)' % (u.jid,
                                   utils.strftime(u.add_date, timezone),
                                   u.reason))
        for u in BlockedUser.all()
    ]
    entries.sort()
    count = len(entries)
    # Header first, total count last; reply as one UTF-8 message.
    entries.insert(0, u'封禁列表:')
    entries.append(u'共 %d 个 JID 被封禁。' % count)
    self.msg.reply(u'\n'.join(entries).encode('utf-8'))
def do_old(self, args):
    '''Query chat history. Takes one optional numeric argument (default:
    the last 20 entries). The special argument OFFLINE (case-insensitive)
    shows offline messages (up to 100).'''
    s = self.sender
    # q stays False when the arguments are invalid; a valid query may
    # still yield zero rows, which produces a different reply below.
    q = False
    if not args:
        q = Log.gql("WHERE type = 'chat' ORDER BY time DESC LIMIT 20")
    elif len(args) == 1:
        try:
            n = int(args[0])
            if n > 0:
                q = Log.gql("WHERE type = 'chat' ORDER BY time DESC LIMIT %d" % n)
        except ValueError:
            # Non-numeric argument: only 'OFFLINE' is accepted — it selects
            # messages between the sender's last offline and online times.
            if args[0].upper() == 'OFFLINE':
                q = Log.gql("WHERE time < :1 AND time > :2 AND type = 'chat' ORDER BY time DESC LIMIT 100", s.last_online_date, s.last_offline_date)
            else:
                pass
    if q is not False:
        r = []
        # The query is newest-first; reverse to chronological order.
        q = list(q)
        q.reverse()
        if q:
            # Include the date in timestamps when the oldest entry is
            # more than 24 hours old.
            if datetime.datetime.today() - q[0].time > datetime.timedelta(hours=24):
                show_date = True
            else:
                show_date = False
            for l in q:
                message = '%s %s %s' % (
                    utils.strftime(l.time, timezone, show_date),
                    s.nick_pattern % l.nick,
                    l.msg
                )
                r.append(message)
        if r:
            self.msg.reply(u'\n'.join(r).encode('utf-8'))
        else:
            self.msg.reply('没有符合的聊天记录。')
    else:
        self.msg.reply('Oops, 参数不正确哦。')
def get_end(self):
    """Return the formatted end time, or "" when no end time is set."""
    if self.time_end is None:
        return ""
    return strftime(timestamp_todate(self.time_end))
def get_start(self):
    """Return the formatted start time, or "" when no start time is set."""
    if self.time_start is None:
        return ""
    return strftime(timestamp_todate(self.time_start))
def save_inference_results(ign_on_time_list, data_list, path_list,
                           output_dict, directory, args, key='trn'):
    """Save per-trip clustering/inference results to disk.

    For each trip (keyed by its ignition-on time) this writes, under
    ``directory``: a CSV joining the input data with its cluster assignment
    and negative-log-likelihood vector, a signal plot, and a lon/lat path
    plot colored by segment. Finally all trips are concatenated into one
    ``<key>_inputData_and_clusterID.csv``. Existing files are not rewritten.

    key: 'trn' (training) or 'tst' (test) — selects the output file names.
    """
    assert key in ['trn', 'tst']
    # we will save the whole input data & their corresponding
    # cluster assignment and log-likelihood results
    df_result = pd.DataFrame()
    # output_folder/vin/parameter###/nC=#_result/path_and_signals(key)
    directory2 = directory + 'path_and_signals({})/'.format(key)
    utils.maybe_exist(directory2)
    # get output from output_dict
    cluster_assignment_list = output_dict['cluster_assignment']
    nll_vector_list = output_dict['nll_vector']
    # adjust output vector lengths according to the window size (args.ws)
    cluster_assignment_list = \
        utils.adjust_output_list(cluster_assignment_list, args.ws)
    nll_vector_list = \
        utils.adjust_output_list(nll_vector_list, args.ws)
    print('> Save summary statistics per cluster ... ({})'.format(key), end='')
    _save_summary_statistics_per_cluster(
        data_list, cluster_assignment_list, directory, args)
    print('> Save segmentation results for all trips ... ({})'.format(key))
    # we will iterate over these lists in lockstep, one tuple per trip
    iterator = zip(ign_on_time_list, data_list, path_list,
                   cluster_assignment_list, nll_vector_list)
    for counter, (ign_on_time, data_df, path_df,
                  cluster_assignment, nll_vector) in enumerate(iterator):
        # trip key (file-name prefix) and figure title
        prefix_string = directory2 + utils.strftime(ign_on_time)
        fig_title = 'ign_on_time: {}'.format(utils.strftime(ign_on_time))
        # dataframe joining input data, path and cluster outputs
        this_df = _concat_data(ign_on_time, data_df, path_df,
                               cluster_assignment, nll_vector)
        # save this df (skip when already on disk)
        this_data_path = prefix_string + '_inputData_and_clusterID.csv'
        if not os.path.exists(this_data_path):
            this_df.to_csv(this_data_path, index=None)
        df_result = df_result.append(this_df)
        # draw signal plot
        sig_path = prefix_string + '_signal.png'
        if not os.path.exists(sig_path):
            plot_clustering_result(data_df, cluster_assignment, nll_vector,
                                   figsize=(15, 10), title=fig_title,
                                   save_to=sig_path)
        # draw path plot (lon/lat track colored by cluster segment)
        xy_path = prefix_string + '_path.png'
        if not os.path.exists(xy_path):
            longitude = path_df.longitude.values
            latitude = path_df.latitude.values
            plot_path_by_segment(longitude, latitude,
                                 cluster_assignment=cluster_assignment,
                                 title=fig_title, save_to=xy_path,
                                 show=False)
        # single-line progress indicator ('\r' rewrites the same line)
        print('\r[{}] process: {} / {} ({:.1f}%)'
              .format(key, counter + 1, len(ign_on_time_list),
                      (counter + 1) / len(ign_on_time_list) * 100), end='')
    print('')
    # SAVE ALL DATA
    print('> Save Input Data and its cluster assignment results ...')
    data_path = directory + key + "_inputData_and_clusterID.csv"
    if not os.path.exists(data_path):
        df_result.to_csv(data_path, index=None)