def test_drawconfig(self):
    """Drawing the loaded schedule fills the grid rows and pads the rest.

    Loads a known-good schedule file, then checks that each scheduled row
    of the grid widget shows the schedule entry's file basename and its
    timestamp, and that every remaining row (up to 10) still holds the
    dash placeholder string.
    """
    lib.load_config(self.mockframe,
                    filepath="./test_files/test_schedule_1_mock_good.xlsx")
    n_rows = len(self.mockframe.schedule)
    for i in range(n_rows):
        shown_file = self.mockframe.grid["grid"][i][0].get()
        self.assertEqual(
            Path(self.mockframe.schedule.iloc[i, 0]).name, shown_file)
        shown_time = pd.Timestamp(self.mockframe.grid["grid"][i][1].get())
        self.assertEqual(self.mockframe.schedule.iloc[i, 1], shown_time)
    # Test that the other lines contain ----
    # Bug fix: the original seeded i = 0 and iterated range(i + 1, 10),
    # which silently skipped row 0 whenever the schedule was empty.
    # range(n_rows, 10) covers every unused row and is identical for
    # non-empty schedules.
    for index in range(n_rows, 10):
        temp = self.mockframe.grid["grid"][index][0].get()
        self.assertEqual(temp, " ".join(["-"] * 20))
# NOTE(review): this chunk is collapsed onto one physical line and is cut at
# both ends — it opens inside the docstring of a stop()-style method (the
# enclosing `def`/`class` is not visible) and the profiling branch continues
# past the end of the chunk. Reformatted; code tokens unchanged.
        :return: None
        """
        # Flag presumably polled by the main loop to shut down — TODO confirm
        # against the enclosing class, which is outside this chunk.
        self.running = False


if __name__ == '__main__':
    # parse arguments and load config
    parser = argparse.ArgumentParser()
    parser.add_argument('--config', help='configuration yaml', default=None)
    parser.add_argument('--console', dest='console', help='run in console', action='store_true')
    parser.add_argument('--profile', dest='profile', help='enable profiler', action='store_true')
    parser.add_argument('--repair', dest='repair', help='init repair', action='store_true')
    cmd_args = parser.parse_args()
    # hand the loaded configuration to the application before start-up
    Main.set_config( load_config(cmd_args.config) )
    from sqlite3_helper import check_and_repair
    if cmd_args.console:
        # command line start
        if cmd_args.profile:
            # start with profiling
            import cProfile
            import io
            import pstats
            pr = cProfile.Profile(builtins=False)
            pr.enable()
            Main()
            pr.disable()
from .generate_anchors import generate_anchors
from lib import load_config
import numpy as np
from .anchor_nms_pf import anchor_nms

# Module-level config; proposal_layer reads the TEST section below.
cfg = load_config()


def proposal_layer(rpn_cls_prob_reshape, rpn_bbox_pred, im_info, _feat_stride=(16, )):
    """
    Turn RPN outputs into proposal ROIs (text-proposal style).

    'rpn_cls_prob_reshape': probabilities after softmax,
        shape (1, H, W, Ax2)
    'rpn_bbox_pred': regression output, i.e. y and height,
        shape [1, H, W, 20]
    'im_info': image info, a 3-vector containing height, width and
        the rescaling factor
    cfg_key: string, "TEST"
    _feat_stride = [16,]
    anchor_scales = [16,]
    cfg_key = 'TEST'
    Returns
    ----------
    rpn_rois : (1 x H x W x A, 5) e.g. [0, x1, y1, x2, y2]
    """
    # NOTE(review): the function body is cut off in this chunk; only the
    # setup below is visible.
    _anchors = generate_anchors()  # generate the 10 base anchors
    _num_anchors = _anchors.shape[0]  # 10 anchors
    assert rpn_cls_prob_reshape.shape[0] == 1, \
        'Only single item batches are supported'
    nms_thresh = cfg.TEST.RPN_NMS_THRESH  # NMS parameter; threshold is 0.7 per the original comment
# NOTE(review): cut-at-both-ends fragment of a script entry point: CLI
# parsing, config load, then an optional cProfile-based start. `parser`
# and `Main` are defined before this chunk; the pstats report over the
# StringIO buffer continues past it. Reformatted; code tokens unchanged.
parser.add_argument('--config', help='configuration yaml', default=None)
parser.add_argument('--console', dest='console', help='run in console', action='store_true')
parser.add_argument('--profile', dest='profile', help='enable profiler', action='store_true')
parser.add_argument('--repair', dest='repair', help='init repair', action='store_true')
cmd_args = parser.parse_args()
# hand the loaded configuration to the application before start-up
Main.set_config(load_config(cmd_args.config))
from sqlite3_helper import check_and_repair
if cmd_args.console:
    # command line start
    if cmd_args.profile:
        # start with profiling
        import cProfile
        import io
        import pstats
        pr = cProfile.Profile(builtins=False)
        pr.enable()
        Main()
        pr.disable()
        s = io.StringIO()
import argparse from lib import load_config from lib.aggregate import AggMetadata import lib.aggregates result = dict() parser = argparse.ArgumentParser() parser.add_argument('--config', '--config', help='configuration yaml', default=None) parser.add_argument('format', help='output format [text (default)|json]') cmd_args = parser.parse_args() configuration = load_config(cmd_args.config) # load global metadata metadata = AggMetadata(database_dir=configuration.database_dir) result['last_sync'] = metadata.last_sync() # fetch aggregators result['aggregators'] = dict() for agg_class in lib.aggregates.get_aggregators(): result['aggregators'][agg_class.__name__] = {'resolutions': list()} for resolution in agg_class.resolutions(): result['aggregators'][agg_class.__name__]['resolutions'].append( resolution) # output result if cmd_args.format.find('json') > -1: # json format
""" import ujson import datetime import argparse from lib import load_config from lib.aggregate import AggMetadata import lib.aggregates result = dict() parser = argparse.ArgumentParser() parser.add_argument('--config', '--config', help='configuration yaml', default=None) parser.add_argument('format', help='output format [text (default)|json]') cmd_args = parser.parse_args() configuration = load_config(cmd_args.config) # load global metadata metadata = AggMetadata(database_dir=configuration.database_dir) result['last_sync'] = metadata.last_sync() # fetch aggregators result['aggregators'] = dict() for agg_class in lib.aggregates.get_aggregators(): result['aggregators'][agg_class.__name__] = {'resolutions': list()} for resolution in agg_class.resolutions(): result['aggregators'][agg_class.__name__]['resolutions'].append(resolution) # output result if cmd_args.format.find('json') > -1: # json format print(ujson.dumps(result))
def test_loadconfig(self):
    """Loading a known-good schedule populates schedule and credentials."""
    good_file = "./test_files/test_schedule_1_mock_good.xlsx"
    lib.load_config(self.mockframe, filepath=good_file)
    # The parsed DataFrame and credentials must match the fixtures exactly.
    assert_frame_equal(self.mockframe.schedule, self.goodConfig)
    self.assertEqual(self.mockframe.credentials, self.goodCredentials)