def load(cls, db_path, db_type):
    """Build a Predictor by loading a saved model from a database.

    :param db_path: see load_from_db
    :param db_type: see load_from_db
    """
    meta_net_def = load_from_db(db_path, db_type)
    predict_init_net = GetNet(
        meta_net_def, predictor_constants.PREDICT_INIT_NET_TYPE)
    predict_net = prepare_prediction_net(db_path, db_type)
    params = GetBlobs(meta_net_def, predictor_constants.PARAMETERS_BLOB_TYPE)
    return cls(predict_net, params, predict_init_net)
def load(cls, db_path, db_type, int_features=False):
    """Build a Predictor by loading a saved model from a database.

    :param db_path: see load_from_db
    :param db_type: see load_from_db
    :param int_features: bool indicating if int_features are present
    """
    predict_net = prepare_prediction_net(db_path, db_type)
    meta_net_def = load_from_db(db_path, db_type)
    params = GetBlobs(meta_net_def, predictor_constants.PARAMETERS_BLOB_TYPE)
    return cls(predict_net, params, int_features)
def load(cls, db_path, db_type, int_features=False):
    """Build a Predictor by loading a saved model from a database.

    :param db_path: see load_from_db
    :param db_type: see load_from_db
    :param int_features: bool indicating if int_features are present
    """
    meta_net_def = load_from_db(db_path, db_type)
    predict_init_net = GetNet(
        meta_net_def, predictor_constants.PREDICT_INIT_NET_TYPE)
    predict_net = prepare_prediction_net(db_path, db_type)
    params = GetBlobs(meta_net_def, predictor_constants.PARAMETERS_BLOB_TYPE)
    return cls(predict_net, params, predict_init_net, int_features)
def plan(
    cls,
    net,
    simulator_parameters: PolicyEvaluatorParameters,
    db_type: str,
) -> str:
    """Assemble the evaluation plan into *net*.

    Appends every value-input model net (with its blobs remapped under a
    per-model prefix) and the propensity/policy net, then returns the name
    of the policy net's output blob.

    :param net: net to append the loaded model nets to
    :param simulator_parameters: see PolicyEvaluatorParameters
    :param db_type: database type passed to the loaders
    :return: name of the output blob ('ActionProbabilities')
    """
    for blob_name, blob_value in simulator_parameters.global_value_inputs:
        workspace.FeedBlob(blob_name, blob_value)

    for value_model in simulator_parameters.value_input_models:
        # Skip models without a usable path (guard clause instead of nesting).
        if value_model.path is None or len(value_model.path) == 0:
            continue
        value_net = prepare_prediction_net(value_model.path, db_type)
        # By default, inputs are not remapped, so let's force a remapping
        meta = load_from_db(value_model.path, db_type)
        value_net_inputs = GetBlobs(
            meta, predictor_constants.INPUTS_BLOB_TYPE
        )
        remap = {
            inp: '{}_{}'.format(value_model.name, inp)
            for inp in value_net_inputs
        }
        logger.debug("REMAP VALUE MODEL: {}".format(remap))
        value_net, _ = core.clone_and_bind_net(
            value_net,
            'new_{}'.format(value_model.name),
            '{}_'.format(value_model.name),
            blob_remap=remap,
        )
        net.AppendNet(value_net)

    policy_net = prepare_prediction_net(
        simulator_parameters.propensity_net_path, db_type
    )
    meta = load_from_db(simulator_parameters.propensity_net_path, db_type)
    policy_net_outputs = GetBlobs(
        meta, predictor_constants.OUTPUTS_BLOB_TYPE
    )
    assert policy_net_outputs == ['ActionProbabilities']
    net.AppendNet(policy_net)
    return 'ActionProbabilities'
def LoadModel(path):
    """Load a saved predictor model from *path* (minidb format) and execute
    its init nets on GPU, materializing the model blobs in the workspace."""
    meta_net_def = pred_exp.load_from_db(path, 'minidb')
    global_init_net = core.Net(
        pred_utils.GetNet(meta_net_def,
                          predictor_constants.GLOBAL_INIT_NET_TYPE))
    predict_init_net = core.Net(
        pred_utils.GetNet(meta_net_def,
                          predictor_constants.PREDICT_INIT_NET_TYPE))
    # Move both init nets to GPU before running them.
    predict_init_net.RunAllOnGPU()
    global_init_net.RunAllOnGPU()
    assert workspace.RunNetOnce(predict_init_net)
    assert workspace.RunNetOnce(global_init_net)
def LoadModel(path, model):
    """Load a pretrained model from *path* (minidb format) and execute its
    init nets on GPU.

    NOTE(review): ``model`` is unused in this body — confirm callers rely
    on the signature only.
    """
    log.info("Loading path: {}".format(path))
    meta_net_def = pred_exp.load_from_db(path, 'minidb')
    global_init_net = core.Net(pred_utils.GetNet(
        meta_net_def, predictor_constants.GLOBAL_INIT_NET_TYPE))
    predict_init_net = core.Net(pred_utils.GetNet(
        meta_net_def, predictor_constants.PREDICT_INIT_NET_TYPE))
    # Move both init nets to GPU before running them.
    predict_init_net.RunAllOnGPU()
    global_init_net.RunAllOnGPU()
    assert workspace.RunNetOnce(predict_init_net)
    assert workspace.RunNetOnce(global_init_net)
def load(cls, db_path, db_type):
    """Creates DiscreteActionPredictor by loading from a database.

    Loading happens inside a fresh, uniquely-named workspace so blobs read
    from the DB cannot clobber blobs in the caller's workspace.

    :param db_path: see load_from_db
    :param db_type: see load_from_db
    """
    previous_workspace = workspace.CurrentWorkspace()
    workspace_id = str(uuid.uuid4())
    workspace.SwitchWorkspace(workspace_id, True)
    try:
        net = prepare_prediction_net(db_path, db_type)
        meta = load_from_db(db_path, db_type)
        inputs = GetBlobs(meta, predictor_constants.INPUTS_BLOB_TYPE)
        outputs = GetBlobs(meta, predictor_constants.OUTPUTS_BLOB_TYPE)
        parameters = GetBlobs(meta, predictor_constants.PARAMETERS_BLOB_TYPE)
    finally:
        # Bug fix: always restore the caller's workspace. Previously an
        # exception in any load step left the process stuck in the temporary
        # workspace.
        workspace.SwitchWorkspace(previous_workspace)
    return cls(net, inputs, outputs, parameters, workspace_id)
def LoadModel(path, model):
    """Load a pretrained model from *path* (minidb format), run its init
    nets, and move the iteration counter back to a CPU context.

    NOTE(review): ``model`` is unused in this body — confirm callers rely
    on the signature only.
    """
    log.info("Loading path: {}".format(path))
    meta_net_def = pred_exp.load_from_db(path, 'minidb')
    init_net = core.Net(pred_utils.GetNet(
        meta_net_def, predictor_constants.GLOBAL_INIT_NET_TYPE))
    predict_init_net = core.Net(pred_utils.GetNet(
        meta_net_def, predictor_constants.PREDICT_INIT_NET_TYPE))
    predict_init_net.RunAllOnGPU()
    init_net.RunAllOnGPU()
    # Bug fix: the init nets were built and moved to GPU but never executed,
    # so the blobs they create (including "optimizer_iteration") were never
    # materialized before the FetchBlob below. Run them once, matching the
    # sibling LoadModel implementations in this file.
    assert workspace.RunNetOnce(predict_init_net)
    assert workspace.RunNetOnce(init_net)
    # Hack: fix iteration counter which is in CUDA context after load model
    itercnt = workspace.FetchBlob("optimizer_iteration")
    workspace.FeedBlob(
        "optimizer_iteration",
        itercnt,
        device_option=core.DeviceOption(caffe2_pb2.CPU, 0))
def getTrainingGPUs(path, dbtype):
    """Return the set of GPU ids used while training the saved model.

    SaveModel stores blobs with a ``gpu_X/`` prefix, so the ids can be
    recovered by parsing the inputs of the saved nets.
    """
    meta_net_def = pred_exp.load_from_db(path, dbtype)

    def _is_number(s):
        # A gpu suffix is numeric; anything else is not a gpu id.
        try:
            float(s)
        except ValueError:
            return False
        return True

    gpus = set()
    for kv in meta_net_def.nets:
        for op in kv.value.op:
            # Only ops with both inputs and outputs carry prefixed blob names.
            if not (op.input and op.output):
                continue
            candidate = op.input[-1].split('/')[0].split('_')[-1]
            if _is_number(candidate):
                gpus.add(candidate)
    return gpus
def test_load_device_scope(self):
    """Nets loaded under a DeviceScope must inherit its device options."""
    # Seed the workspace with the parameters the export references.
    for param, value in self.params.items():
        workspace.FeedBlob(param, value)

    pem = pe.PredictorExportMeta(
        predict_net=self.predictor_export_meta.predict_net,
        parameters=self.predictor_export_meta.parameters,
        inputs=self.predictor_export_meta.inputs,
        outputs=self.predictor_export_meta.outputs,
        shapes=self.predictor_export_meta.shapes,
        net_type='dag',
    )

    db_type = 'minidb'
    db_file = tempfile.NamedTemporaryFile(
        delete=False, suffix=".{}".format(db_type))
    pe.save_to_db(
        db_type=db_type,
        db_destination=db_file.name,
        predictor_export_meta=pem)

    workspace.ResetWorkspace()
    with core.DeviceScope(core.DeviceOption(caffe2_pb2.CPU, 1)):
        meta_net_def = pe.load_from_db(
            db_type=db_type,
            filename=db_file.name,
        )
        init_net = core.Net(
            pred_utils.GetNet(meta_net_def, pc.GLOBAL_INIT_NET_TYPE))
        predict_init_net = core.Net(
            pred_utils.GetNet(meta_net_def, pc.PREDICT_INIT_NET_TYPE))

        # Every op of both init nets must carry the scoped device option.
        all_ops = (list(init_net.Proto().op) +
                   list(predict_init_net.Proto().op))
        for op in all_ops:
            self.assertEqual(1, op.device_option.device_id)
            self.assertEqual(caffe2_pb2.CPU, op.device_option.device_type)
def test_load_device_scope(self):
    """Verify that nets loaded under a DeviceScope pick up its device
    options on every op."""
    # Seed the workspace with the parameters the export references.
    for param, value in self.params.items():
        workspace.FeedBlob(param, value)
    pem = pe.PredictorExportMeta(
        predict_net=self.predictor_export_meta.predict_net,
        parameters=self.predictor_export_meta.parameters,
        inputs=self.predictor_export_meta.inputs,
        outputs=self.predictor_export_meta.outputs,
        shapes=self.predictor_export_meta.shapes,
        net_type='dag',
    )
    db_type = 'minidb'
    db_file = tempfile.NamedTemporaryFile(
        delete=False, suffix=".{}".format(db_type))
    pe.save_to_db(
        db_type=db_type,
        db_destination=db_file.name,
        predictor_export_meta=pem)
    workspace.ResetWorkspace()
    # Load inside a CPU/device-1 scope; the loader should stamp this
    # device option onto the reconstructed nets.
    with core.DeviceScope(core.DeviceOption(caffe2_pb2.CPU, 1)):
        meta_net_def = pe.load_from_db(
            db_type=db_type,
            filename=db_file.name,
        )
        init_net = core.Net(pred_utils.GetNet(meta_net_def,
                                              pc.GLOBAL_INIT_NET_TYPE))
        predict_init_net = core.Net(pred_utils.GetNet(
            meta_net_def, pc.PREDICT_INIT_NET_TYPE))
        # check device options
        # NOTE(review): this asserts ``cuda_gpu_id`` even though the scoped
        # device is CPU id 1 — on older Caffe2 protos the scoped id landed in
        # the ``cuda_gpu_id`` field; confirm the field name matches the proto
        # version this file targets (a sibling test uses ``device_id``).
        for op in list(init_net.Proto().op) + list(predict_init_net.Proto().op):
            self.assertEqual(1, op.device_option.cuda_gpu_id)
            self.assertEqual(caffe2_pb2.CPU, op.device_option.device_type)
def LoadModel(path, model):
    """Load a pretrained model from *path* (minidb format).

    Runs the saved global-init and predict-init nets on GPU, then moves the
    iteration counter blob back to a CPU context.

    NOTE(review): ``model`` is unused in this body — confirm callers rely
    on the signature only.
    """
    log.info("Loading path: {}".format(path))
    meta_net_def = pred_exp.load_from_db(path, 'minidb')

    global_init_net = core.Net(pred_utils.GetNet(
        meta_net_def, predictor_constants.GLOBAL_INIT_NET_TYPE))
    predict_init_net = core.Net(pred_utils.GetNet(
        meta_net_def, predictor_constants.PREDICT_INIT_NET_TYPE))

    predict_init_net.RunAllOnGPU()
    global_init_net.RunAllOnGPU()

    assert workspace.RunNetOnce(predict_init_net)
    assert workspace.RunNetOnce(global_init_net)

    # Hack: fix iteration counter which is in CUDA context after load model
    iteration_count = workspace.FetchBlob("optimizer_iteration")
    workspace.FeedBlob(
        "optimizer_iteration",
        iteration_count,
        device_option=core.DeviceOption(caffe2_pb2.CPU, 0),
    )
def LoadModel(path, model):
    '''
    Load pretrained model from file
    '''
    # NOTE(review): ``model`` is unused in this body — confirm callers rely
    # on the signature only.
    log.info("Loading path: {}".format(path))
    meta_net_def = pred_exp.load_from_db(path, 'minidb')
    # Reconstruct the two init nets stored in the export: the global init
    # net (loads parameters) and the predict init net.
    init_net = core.Net(pred_utils.GetNet(
        meta_net_def, predictor_constants.GLOBAL_INIT_NET_TYPE))
    predict_init_net = core.Net(pred_utils.GetNet(
        meta_net_def, predictor_constants.PREDICT_INIT_NET_TYPE))
    # Move both init nets to GPU before executing them once.
    predict_init_net.RunAllOnGPU()
    init_net.RunAllOnGPU()
    assert workspace.RunNetOnce(predict_init_net)
    assert workspace.RunNetOnce(init_net)
    # Hack: fix iteration counter which is in CUDA context after load model
    itercnt = workspace.FetchBlob("optimizer_iteration")
    workspace.FeedBlob(
        "optimizer_iteration",
        itercnt,
        device_option=core.DeviceOption(caffe2_pb2.CPU, 0)
    )
def test_meta_net_def_net_runs(self):
    """End-to-end check: export a predictor to a minidb file, reload it,
    and verify the predict-init, global-init, and predict nets behave as
    expected."""
    for param, value in viewitems(self.params):
        workspace.FeedBlob(param, value)

    # extra_init_net fills "data" with ones; it is bundled into the export.
    extra_init_net = core.Net('extra_init')
    extra_init_net.ConstantFill('data', 'data', value=1.0)

    pem = pe.PredictorExportMeta(
        predict_net=self.predictor_export_meta.predict_net,
        parameters=self.predictor_export_meta.parameters,
        inputs=self.predictor_export_meta.inputs,
        outputs=self.predictor_export_meta.outputs,
        shapes=self.predictor_export_meta.shapes,
        extra_init_net=extra_init_net,
        net_type='dag',
    )

    db_type = 'minidb'
    db_file = tempfile.NamedTemporaryFile(
        delete=False, suffix=".{}".format(db_type))
    pe.save_to_db(
        db_type=db_type,
        db_destination=db_file.name,
        predictor_export_meta=pem)

    workspace.ResetWorkspace()
    meta_net_def = pe.load_from_db(
        db_type=db_type,
        filename=db_file.name,
    )

    # Loading by itself must not create any blobs.
    self.assertNotIn("data", workspace.Blobs())
    self.assertNotIn("y", workspace.Blobs())

    init_net = pred_utils.GetNet(meta_net_def, pc.PREDICT_INIT_NET_TYPE)
    # The predict-init net 0-fills the external blobs and runs extra_init_net.
    workspace.RunNetOnce(init_net)
    self.assertIn("data", workspace.Blobs())
    self.assertIn("y", workspace.Blobs())

    print(workspace.FetchBlob("data"))
    np.testing.assert_array_equal(
        workspace.FetchBlob("data"), np.ones(shape=(1, 5)))
    np.testing.assert_array_equal(
        workspace.FetchBlob("y"), np.zeros(shape=(1, 10)))

    # Load parameters from DB via the global init net.
    workspace.RunNetOnce(
        pred_utils.GetNet(meta_net_def, pc.GLOBAL_INIT_NET_TYPE))

    # Run the net with a reshaped input and verify we are
    # producing good numbers (with our custom implementation)
    workspace.FeedBlob("data", np.random.randn(2, 5).astype(np.float32))
    predict_net = pred_utils.GetNet(meta_net_def, pc.PREDICT_NET_TYPE)
    self.assertEqual(predict_net.type, 'dag')
    workspace.RunNetOnce(predict_net)
    expected = (
        workspace.FetchBlob("data").dot(self.params["y_w"].T) +
        self.params["y_b"])
    np.testing.assert_array_almost_equal(workspace.FetchBlob("y"), expected)
def test_meta_net_def_net_runs(self):
    """Round-trip a PredictorExportMeta through a minidb file and verify
    that the loaded init, global-init, and predict nets run correctly."""
    # Seed the workspace with the parameters the export references.
    for param, value in viewitems(self.params):
        workspace.FeedBlob(param, value)
    # extra_init_net fills "data" with ones; it is bundled into the export.
    extra_init_net = core.Net('extra_init')
    extra_init_net.ConstantFill('data', 'data', value=1.0)
    pem = pe.PredictorExportMeta(
        predict_net=self.predictor_export_meta.predict_net,
        parameters=self.predictor_export_meta.parameters,
        inputs=self.predictor_export_meta.inputs,
        outputs=self.predictor_export_meta.outputs,
        shapes=self.predictor_export_meta.shapes,
        extra_init_net=extra_init_net,
        net_type='dag',
    )
    db_type = 'minidb'
    db_file = tempfile.NamedTemporaryFile(
        delete=False, suffix=".{}".format(db_type))
    pe.save_to_db(
        db_type=db_type,
        db_destination=db_file.name,
        predictor_export_meta=pem)
    workspace.ResetWorkspace()
    meta_net_def = pe.load_from_db(
        db_type=db_type,
        filename=db_file.name,
    )
    # Loading by itself must not create any blobs.
    self.assertTrue("data" not in workspace.Blobs())
    self.assertTrue("y" not in workspace.Blobs())
    init_net = pred_utils.GetNet(meta_net_def, pc.PREDICT_INIT_NET_TYPE)
    # 0-fills externalblobs blobs and runs extra_init_net
    workspace.RunNetOnce(init_net)
    self.assertTrue("data" in workspace.Blobs())
    self.assertTrue("y" in workspace.Blobs())
    print(workspace.FetchBlob("data"))
    # "data" ends up all ones (ConstantFill value=1.0); "y" is 0-filled.
    np.testing.assert_array_equal(
        workspace.FetchBlob("data"), np.ones(shape=(1, 5)))
    np.testing.assert_array_equal(
        workspace.FetchBlob("y"), np.zeros(shape=(1, 10)))
    # Load parameters from DB
    global_init_net = pred_utils.GetNet(meta_net_def, pc.GLOBAL_INIT_NET_TYPE)
    workspace.RunNetOnce(global_init_net)
    # Run the net with a reshaped input and verify we are
    # producing good numbers (with our custom implementation)
    workspace.FeedBlob("data", np.random.randn(2, 5).astype(np.float32))
    predict_net = pred_utils.GetNet(meta_net_def, pc.PREDICT_NET_TYPE)
    self.assertEqual(predict_net.type, 'dag')
    workspace.RunNetOnce(predict_net)
    np.testing.assert_array_almost_equal(
        workspace.FetchBlob("y"),
        workspace.FetchBlob("data").dot(self.params["y_w"].T) +
        self.params["y_b"])