def get_test_lines(test_case, test_params):
    """
    Create a list of strings corresponding to the lines in a single test case.

    Uses TensorFlow to compute the expected results for the given parameters,
    and provides the code to call the test fixture to run the test.
    """
    output, max_input_val = get_result_and_size(test_case, test_params)
    # Name of the generated C++ test fixture and of this individual test.
    fixture_name = TEST_CASE_TPL.format(
        test_type=helpers.to_camel_case(test_case.test_type),
        window=test_case.window,
        stride=test_case.stride,
        direction=helpers.to_camel_case(test_case.direction))
    test_name = TEST_NAME_TPL.format(padding=test_params.padding,
                                     in_s=test_params.in_shape)
    shape_init = IN_SHAPE_INIT_TPL.format(test_params.in_shape)
    # Assemble the generated C++ test body line by line.
    return [
        "TYPED_TEST({}, {}) {{".format(fixture_name, test_name),
        " using DataType = typename TestFixture::DataType;",
        " const std::vector<DataType> exp_out = {};".format(
            helpers.format_tensor(output)),
        " const std::array<int, 4> in_shape = {};".format(shape_init),
        " const auto padding = PaddingMode::{};".format(test_params.padding),
        " const auto params = getPoolingParams<{}, {}>(in_shape, padding);".
        format(test_case.window, test_case.stride),
        " const DataType max_input_val = {:.1f};".format(max_input_val),
        " this->test_pool(exp_out, params, max_input_val);",
        "}",
    ]
def output_for_test_case(test_case):
    """
    Create a list of strings corresponding to separate lines in the full test
    case. The output contains headers, includes, setup and all the tests for
    the test case.
    """
    scriptname = os.path.basename(__file__)
    fixture_name = TEST_CASE_TPL.format(
        test_type=helpers.to_camel_case(test_case.test_type),
        direction=helpers.to_camel_case(test_case.direction),
        operation=helpers.to_camel_case(test_case.operation))
    # File preamble: license, autogen warning, includes and the typed test
    # suite declaration for this direction/operation pair.
    lines = [
        helpers.get_license(),
        helpers.get_dont_modify_comment(scriptname=scriptname),
        INCLUDES,
        TYPED_TEST_CASE_DECL_TPL.format(
            test_case=fixture_name,
            direction=DIRECTION_MAP[test_case.direction],
            operation=OPERATION_MAP[test_case.operation]),
    ]
    for params in test_params_for_test_case(test_case):
        lines.extend(get_test_lines(test_case, params))
    # Trailing newline so the generated file ends cleanly.
    lines.append("\n")
    return lines
def get_test_lines(test_case, test_params):
    """
    Create a list of strings corresponding to the lines in a single test case.

    Uses TensorFlow to compute the expected results for the given parameters,
    and provides the code to call the test fixture to run the test.

    The forward and gradient directions share most of the generated lines;
    only the expected-tensor name, the two extra gradient tensors and the
    fixture call differ, so the shared lines are built once instead of
    duplicating the whole list in each branch (the original repeated ~10
    near-identical lines per branch).
    """
    is_forward = test_case.direction == 'forward'
    if is_forward:
        output, mean, variance, max_input_val = get_result(
            test_case, test_params)
    else:
        (output, mean, variance, max_input_val, grad_scale,
         grad_offset) = get_result(test_case, test_params)

    camel_case_type = helpers.to_camel_case(test_case.test_type)
    test_case_name = TEST_CASE_TPL.format(
        test_type=camel_case_type,
        direction=helpers.to_camel_case(test_case.direction),
        operation=helpers.to_camel_case(test_case.operation))
    test_name = TEST_NAME_TPL.format(in_s=test_params.in_shape)
    in_shape_init = IN_SHAPE_INIT_TPL.format(test_params.in_shape)

    # Forward tests check the normalized output; gradient tests check the
    # input gradient plus the scale/offset gradients.
    exp_name = "exp_out" if is_forward else "exp_grad"
    test_lines = [
        "TYPED_TEST({}, {}) {{".format(test_case_name, test_name),
        " using DataType = typename TestFixture::DataType;",
        " const std::vector<DataType> {} = {};".format(
            exp_name, helpers.format_tensor(output)),
        " const std::vector<DataType> mean = {};".format(
            helpers.format_tensor(mean)),
        " const std::vector<DataType> variance = {};".format(
            helpers.format_tensor(variance)),
    ]
    if not is_forward:
        test_lines.extend([
            " const std::vector<DataType> grad_scale = {};".format(
                helpers.format_tensor(grad_scale)),
            " const std::vector<DataType> grad_offset = {};".format(
                helpers.format_tensor(grad_offset)),
        ])
    extra_args = "" if is_forward else "grad_scale, grad_offset, "
    test_lines.extend([
        " const std::array<int, 4> in_shape = {};".format(in_shape_init),
        " const auto params = getBatchNormParams(in_shape, DataFormat::{});"
        .format(test_params.data_format),
        " const DataType max_input_val = {:.1f};".format(max_input_val),
        " this->test_batchnorm({}, mean, variance, {}params, max_input_val);".
        format(exp_name, extra_args),
        "}",
    ])
    return test_lines
def logger_from_dict(config, use_visdom=True):
    """
    Create a logger from a dictionary.

    :param config: Dictionary containing the experiment hyper-parameters
    :param use_visdom: Whether to use Visdom
    :return: Logger
    """
    # Experiment name starts with the current date, then one "Key_Value-"
    # segment per config entry (sorted for a deterministic name).
    experiment_name = datetime.datetime.now().strftime("%Y-%m-%d-")
    ordered_config = collections.OrderedDict(sorted(config.items()))
    for key, value in ordered_config.items():
        key = helpers.to_camel_case(key.replace(' ', '_'))
        # BUG FIX: the original tested `value is str`, which compares the
        # value against the `str` type object by identity and is never true
        # for an actual string — so string values were never camel-cased.
        if isinstance(value, str):
            value = helpers.to_camel_case(value.replace(' ', '_'))
        # Skip entries that would bloat the name without identifying the run.
        if key == 'Transforms' or key == 'trainType':
            continue
        if value is not None and key is not None:
            experiment_name += "{}_{}-".format(key, value)
    # Delete last dash
    experiment_name = experiment_name[:-1]
    # Create logger
    log = logger.Experiment(name=experiment_name,
                            use_visdom=use_visdom,
                            visdom_opts={
                                'server': 'http://localhost',
                                'port': 8097
                            },
                            time_indexing=False,
                            xlabel='Epoch')
    log.log_config(config)
    # create parent metric for training metrics (easier interface)
    log.ParentWrapper(
        tag='train',
        name='parent',
        children=[log.AvgMetric(name='loss'),
                  log.AvgMetric(name='acc')])
    # same for validation metrics (note all children inherit tag from parent)
    log.ParentWrapper(
        tag='val',
        name='parent',
        children=[log.AvgMetric(name='loss'),
                  log.AvgMetric(name='acc')])
    # Add a best metric for the validation accuracy
    log.ParentWrapper(tag='best',
                      name='parent',
                      children=[log.BestMetric(name='acc')])
    return log
def get_test_lines(test_case, test_params):
    """
    Create a list of strings corresponding to the lines in a single test case.

    Uses TensorFlow to compute the expected results for the given parameters,
    and provides the code to call the test fixture to run the test.
    """
    expected = get_result(test_case, test_params)
    fixture_name = TEST_CASE_TPL.format(
        test_type=helpers.to_camel_case(test_case.test_type),
        direction=helpers.to_camel_case(test_case.direction))
    test_name = TEST_NAME_TPL.format(in_s=test_params.in_size)
    # Generated C++ test: declare the expected tensor, then call the fixture.
    return [
        "TYPED_TEST({}, {}) {{".format(fixture_name, test_name),
        " using DataType = typename TestFixture::DataType;",
        " const std::vector<DataType> exp_out = {};".format(
            helpers.format_tensor(expected)),
        " this->test_pointwise(exp_out);",
        "}",
    ]
def output_for_test_case(test_case):
    """
    Create a list of strings corresponding to separate lines in the full test
    case. The output contains headers, includes, setup and all the tests for
    the test case.
    """
    fixture_name = TEST_CASE_TPL.format(
        test_type=helpers.to_camel_case(test_case.test_type),
        window=test_case.window,
        stride=test_case.stride,
        direction=helpers.to_camel_case(test_case.direction))
    # Start with the typed test suite declaration, then append each
    # generated test produced by the test case's parameter generator.
    lines = [
        TYPED_TEST_SUITE_DECL_TPL.format(
            test_case=fixture_name,
            operation=OPERATOR_MAP[test_case.test_type],
            direction=DIRECTION_MAP[test_case.direction])
    ]
    for params in test_case.param_gen(test_case):
        lines.extend(get_test_lines(test_case, params))
    lines.append("\n")
    return lines
def output_for_test_case(test_case):
    """
    Create a list of strings corresponding to separate lines in the full test
    case. The output contains headers, includes, setup and all the tests for
    the test case.
    """
    fixture_name = TEST_CASE_TPL.format(
        test_type=helpers.to_camel_case(test_case.test_type))
    # Suite declaration first, then every generated test for this case.
    lines = [TYPED_TEST_SUITE_DECL_TPL.format(test_case=fixture_name)]
    for params in test_case.param_gen(test_case):
        lines.extend(get_test_lines(test_case, params))
    lines.append("\n")
    return lines
def output_for_test_case(test_case):
    """
    Create a list of strings corresponding to separate lines in the full test
    case. The output contains headers, includes, setup and all the tests for
    the test case.
    """
    scriptname = os.path.basename(__file__)
    fixture_name = TEST_CASE_TPL.format(
        test_type=helpers.to_camel_case(test_case.test_type),
        window=test_case.window,
        stride=test_case.stride)
    # File preamble: license, autogen warning, includes, data-type helpers
    # and the typed test suite declaration for this window/stride pair.
    lines = [
        helpers.get_license(),
        helpers.get_dont_modify_comment(scriptname=scriptname),
        INCLUDES,
        DATA_TYPES,
        TYPED_TEST_SUITE_DECL_TPL.format(test_case=fixture_name,
                                         window=test_case.window,
                                         stride=test_case.stride)
    ]
    for params in test_params_for_test_case(test_case):
        lines.extend(get_test_lines(test_case, params))
    return lines