def summary_html(results, output_dir, template_dir):
    """Generate a custom aggregated HTML test report.

    :param results: list of execution-result dicts, each with a ``result`` key
        (True = pass, False = fail)
    :param output_dir: directory the rendered HTML report is written to
    :param template_dir: directory containing the jinja2 templates
        (fixes the original docstring, which named it ``template_parent``)
    """
    env = jinja2.Environment(
        loader=jinja2.FileSystemLoader(template_dir),
        extensions=(),
        autoescape=True,
    )
    # NOTE(review): Exe_Cfg / exe_owner / exe_time / exe_min are module-level
    # globals defined elsewhere in this file — confirm they are set before call.
    owner_ch_name = Exe_Cfg.OWNERNAME_DICT
    # BUG FIX: get_template()'s second positional argument is `parent` (a
    # template *name* used for relative-path resolution), not a directory.
    # The FileSystemLoader already knows template_dir, so pass only the name.
    template = env.get_template("summary_template.html")
    script_count = len(results)
    pass_count = len(py_.filter_(results, {'result': True}))
    fail_count = len(py_.filter_(results, {'result': False}))
    html = template.render({
        "results": results,
        'project': owner_ch_name[exe_owner],
    })
    report_html = "%s_%s_%s_test%s_pass%s_fail%s.html" % (
        exe_owner, exe_time, exe_min, script_count, pass_count, fail_count)
    output_file = Path(output_dir, report_html)  # aggregated report path
    with open(output_file, 'w', encoding="utf-8") as f:
        f.write(html)
    print(output_file)
def get_switch_alias_tower_id(xls_path, sheet_name, ftu_fault):
    """Read FTU rows from an .xls sheet and keep those matching reported faults.

    :param xls_path: path to the workbook to read
    :param sheet_name: sheet to scan (matched case-insensitively)
    :param ftu_fault: list of dicts with a 'sim' key identifying faulted FTUs
    :return: list of row dicts ({'sim', '_id', 'switch_alias', 'line_py'})
        whose SIM appears in ftu_fault and whose switch_alias is positive
    """
    book = xlrd.open_workbook(xls_path)
    startrowidx = 1  # row 0 is the header row
    alist = []
    ret = []
    for sheet in book.sheets():
        if sheet.name.lower() != sheet_name.lower():
            continue
        sim_col = -1
        id_col = -1
        switch_alias_col = -1
        line_py_col = -1
        # BUG FIX: the original scanned range(100), which raises IndexError on
        # sheets with fewer than 100 columns; scan only the columns that exist.
        for i in range(sheet.ncols):
            header = sheet.cell_value(0, i)
            if header.lower() == 'sim':
                sim_col = i
            if header.lower() == '_id':
                id_col = i
            if header.lower() == 'switch_alias':
                switch_alias_col = i
            if header.lower() == 'line_py':
                line_py_col = i
            if sim_col > -1 and id_col > -1 and switch_alias_col > -1 and line_py_col > -1:
                break
        for row in range(startrowidx, sheet.nrows):
            alist.append({
                # SIM cells come back as floats from xlrd; normalize to a
                # digits-only string so they compare equal to ftu_fault entries.
                'sim': str(int(sheet.cell_value(row, sim_col))),
                '_id': str(sheet.cell_value(row, id_col)),
                'switch_alias': int(sheet.cell_value(row, switch_alias_col)),
                'line_py': str(sheet.cell_value(row, line_py_col)),
            })
    # Keep only rows whose SIM appears in the reported FTU faults.
    def filt(x):
        return _.find_index(ftu_fault, {'sim': x['sim']}) > -1
    ret = _.filter_(alist, filt)
    # Drop rows without a positive switch alias.
    def filt_gtz(x):
        return x['switch_alias'] > 0
    ret = _.filter_(ret, filt_gtz)
    return ret
def create_xls(filepath, line_py, data):
    """Write a one-row 0/1 presence matrix for the given line's FTUs.

    Cell (0, alias-1) is 1 when `data` contains an entry for that
    switch_alias on `line_py`, else 0. Any existing file is replaced.
    """
    if os.path.exists(filepath):
        print('File exist, removing...')
        os.remove(filepath)

    workbook = xlwt.Workbook()
    sheet = workbook.add_sheet('Sheet1')

    line_rows = _.filter_(data, lambda entry: entry['line_py'] == line_py)

    # Known lines and their FTU counts; unknown lines produce an empty row.
    ftu_count = {'jfyk': 8, 'pzz': 10}.get(line_py, 0)

    for alias in range(1, ftu_count + 1):
        present = _.find_index(line_rows, {'switch_alias': alias}) > -1
        sheet.write(0, alias - 1, 1 if present else 0)

    workbook.save(filepath)
def _try_place(acc: MatrixInt, desired_length: int) -> MatrixInt:
    """Return a deep copy of `acc` with one ship of `desired_length` placed.

    Picks a random straight run of free cells (horizontal or vertical) long
    enough to hold the ship, then a random sub-run of exactly the desired
    length, and marks those cells as deck cells in the copy.
    """
    free_cells = get_available_cells(acc, FIELD_DIMENSIONS)
    runs = (find_straight_segments(free_cells, True)
            + find_straight_segments(free_cells, False))
    long_enough = [run for run in runs if len(run) >= desired_length]
    segment = choice(long_enough)

    # Every window of exactly desired_length inside the chosen run.
    slack = len(segment) - desired_length
    windows = [segment[offset:offset + desired_length]
               for offset in range(slack + 1)]
    placement = choice(windows)

    field = py_.clone_deep(acc)
    for cell in placement:
        field[cell.i][cell.j] = CellState.CELL_DECK.value
    return field
from pydash import py_
from directory_utilities import get_json_from_file, write_json_to_file

# Move every trade that has a completed "sell" into the archive file, then
# strip those closed trades out of the active trades file.

trades_file_directory = "../database/trades.json"
trades = get_json_from_file(trades_file_directory)

archived_trades_file_directory = "../database/archive/archived-trades.json"
archived_trades = get_json_from_file(archived_trades_file_directory, [])

# Closed trades not yet present in the archive.
new_archived_trades = py_.filter_(
    trades["trades"],
    lambda trade: "sell" in trade and trade not in archived_trades)

if new_archived_trades:
    archived_trades += new_archived_trades
    write_json_to_file(archived_trades_file_directory, archived_trades)
    print("Archived {} closed trades.".format(len(new_archived_trades)))
else:
    print("No closed trades to archive.")

# Drop closed trades from the active list; rewrite only if something changed.
num_of_initial_trades = len(trades["trades"])
open_trades = py_.filter_(trades["trades"], lambda trade: "sell" not in trade)
trades["trades"] = open_trades

if len(open_trades) < num_of_initial_trades:
    write_json_to_file(trades_file_directory, trades)
    print("Removed {} closed trades from active trades.".format(
        num_of_initial_trades - len(open_trades)))
else:
    print("No closed trades to remove from active trades.")
from pydash import py_
from directory_utilities import get_json_from_file, write_json_to_file

# Archive closed trades (those carrying a "sell" record), then remove them
# from the active trades database.

trades_file_directory = "../database/trades.json"
trades = get_json_from_file(trades_file_directory)

archived_trades_file_directory = "../database/archive/archived-trades.json"
archived_trades = get_json_from_file(archived_trades_file_directory, [])


def _is_new_closed(trade):
    # Closed (has a sell) and not already archived.
    return "sell" in trade and trade not in archived_trades


new_archived_trades = py_.filter_(trades["trades"], _is_new_closed)

if not new_archived_trades:
    print("No closed trades to archive.")
else:
    archived_trades += new_archived_trades
    write_json_to_file(archived_trades_file_directory, archived_trades)
    print("Archived {} closed trades.".format(len(new_archived_trades)))

num_of_initial_trades = len(trades["trades"])
trades["trades"] = py_.filter_(trades["trades"],
                               lambda trade: "sell" not in trade)
removed_count = num_of_initial_trades - len(trades["trades"])

if removed_count > 0:
    write_json_to_file(trades_file_directory, trades)
    print("Removed {} closed trades from active trades.".format(removed_count))
else:
    print("No closed trades to remove from active trades.")
from pydash import py_
from directory_utilities import get_json_from_file

# Summarize profit across all completed (fully closed) archived trades.

archived_trades_file_directory = "../database/archive/archived-trades.json"
archived_trades = get_json_from_file(archived_trades_file_directory, [])

completed_archived_trades = py_.filter_(
    archived_trades,
    lambda trade: trade["sell"]["dateClosed"] is not None)

# BUG FIX: the original divided by len(completed_archived_trades) (and by
# total_buy_btc) unconditionally, crashing with ZeroDivisionError when the
# archive is empty or contains no completed trades.
if not completed_archived_trades:
    print("Total completed trades: 0\n")
else:
    # Net profit per trade: sale price minus purchase price and both commissions.
    profit_btc = round(
        py_.sum_by(
            completed_archived_trades,
            lambda trade: (trade["sell"]["price"] - trade["buy"]["price"]
                           - trade["buy"]["commissionPaid"]
                           - trade["sell"]["commissionPaid"])),
        8)
    avg_profit_btc = round(profit_btc / len(completed_archived_trades), 8)

    # Total cost basis (purchase price plus both commissions) vs. gross sales.
    total_buy_btc = py_.sum_by(
        completed_archived_trades,
        lambda trade: (trade["buy"]["price"]
                       + trade["buy"]["commissionPaid"]
                       + trade["sell"]["commissionPaid"]))
    total_sell_btc = py_.sum_by(completed_archived_trades,
                                lambda trade: trade["sell"]["price"])
    profit_margin = round(100 * total_sell_btc / total_buy_btc - 100, 2)

    print("Total completed trades: {}\n".format(len(completed_archived_trades)))
    print("Total profit: {} BTC".format(profit_btc))
    print("Average profit: {} BTC per trade\n".format(avg_profit_btc))
    print("Total profit margin: {}%".format(profit_margin))
from pydash import py_ from directory_utilities import get_json_from_file archived_trades_file_directory = "../database/archive/archived-trades.json" archived_trades = get_json_from_file(archived_trades_file_directory, []) completed_archived_trades = py_.filter_(archived_trades, lambda trade: trade["sell"]["dateClosed"] is not None) profit_btc = round( py_.sum_by( completed_archived_trades, lambda trade: ( trade["sell"]["price"] - trade["buy"]["price"] - trade["buy"]["commissionPaid"] - trade["sell"]["commissionPaid"] ) ), 8 ) avg_profit_btc = round(profit_btc / len(completed_archived_trades), 8) total_buy_btc = py_.sum_by( completed_archived_trades, lambda trade: ( trade["buy"]["price"] + trade["buy"]["commissionPaid"] + trade["sell"]["commissionPaid"] )