def load_model(location):
    """
    Load any Turi Create model that was previously saved.

    This function assumes the model (can be any model) was previously saved in
    Turi Create model format with model.save(filename).

    Parameters
    ----------
    location : string
        Location of the model to load. Can be a local path or a remote URL.
        Because models are saved as directories, there is no file extension.

    Examples
    ----------
    >>> model.save('my_model_file')
    >>> loaded_model = tc.load_model('my_model_file')
    """
    # Check if the location is a dir_archive, if not, use glunpickler to load
    # as pure python model
    # If the location is a http location, skip the check, and directly proceed
    # to load model as dir_archive. This is because
    # 1) exists() does not work with http protocol, and
    # 2) GLUnpickler does not support http
    protocol = file_util.get_protocol(location)
    dir_archive_exists = False
    if protocol == "":
        # Local path: expand ~ / relative components before probing the archive.
        model_path = file_util.expand_full_path(location)
        dir_archive_exists = file_util.exists(
            os.path.join(model_path, "dir_archive.ini"))
    else:
        model_path = location
        if protocol in ["http", "https", "s3"]:
            # Remote protocols where exists() cannot be checked up front;
            # optimistically assume the archive is there and let the loader fail.
            dir_archive_exists = True
        else:
            # Non-local protocol with a POSIX-style path (e.g. hdfs); join with
            # posixpath so we never emit backslashes on Windows.
            import posixpath
            dir_archive_exists = file_util.exists(
                posixpath.join(model_path, "dir_archive.ini"))
    if not dir_archive_exists:
        raise IOError("Directory %s does not exist" % location)

    _internal_url = _make_internal_url(location)
    saved_state = glconnect.get_unity().load_model(_internal_url)
    saved_state = _wrap_function_return(saved_state)
    # The archive version could be both bytes/unicode
    key = u"archive_version"
    archive_version = (saved_state[key]
                       if key in saved_state
                       else saved_state[key.encode()])
    if archive_version < 0:
        raise ToolkitError("File does not appear to be a Turi Create model.")
    elif archive_version > 1:
        # Format is newer than anything this release knows how to read.
        raise ToolkitError(
            "Unable to load model.\n\n"
            "This model looks to have been saved with a future version of Turi Create.\n"
            "Please upgrade Turi Create before attempting to load this model file."
        )
    elif archive_version == 1:
        # Current archive format: dispatch on the stored model name.
        name = saved_state["model_name"]
        if name in MODEL_NAME_MAP:
            cls = MODEL_NAME_MAP[name]
            if "model" in saved_state:
                if name in [
                    "activity_classifier",
                    "object_detector",
                    "style_transfer",
                    "drawing_classifier",
                ]:
                    # NOTE(review): imported for its side effects only —
                    # presumably registers the TensorFlow-backed native
                    # extensions before the model object is built; confirm.
                    import turicreate.toolkits.libtctensorflow

                # this is a native model
                return cls(saved_state["model"])
            else:
                # this is a CustomModel; its payload lives in side_data and
                # carries its own version number, which is stripped before
                # handing the remaining data to the class loader.
                model_data = saved_state["side_data"]
                model_version = model_data["model_version"]
                del model_data["model_version"]

                # The toolkits below were once CustomModels but are now native:
                # rebuild a native extension object from the old custom payload.
                if name == "activity_classifier":
                    import turicreate.toolkits.libtctensorflow

                    model = _extensions.activity_classifier()
                    model.import_from_custom_model(model_data, model_version)
                    return cls(model)
                if name == "object_detector":
                    import turicreate.toolkits.libtctensorflow

                    model = _extensions.object_detector()
                    model.import_from_custom_model(model_data, model_version)
                    return cls(model)
                if name == "style_transfer":
                    import turicreate.toolkits.libtctensorflow

                    model = _extensions.style_transfer()
                    model.import_from_custom_model(model_data, model_version)
                    return cls(model)
                if name == "drawing_classifier":
                    import turicreate.toolkits.libtctensorflow

                    model = _extensions.drawing_classifier()
                    model.import_from_custom_model(model_data, model_version)
                    return cls(model)
                if name == "one_shot_object_detector":
                    import turicreate.toolkits.libtctensorflow

                    # The embedded detector may itself be saved in either the
                    # native ("detector_model") or the legacy custom layout.
                    od_cls = MODEL_NAME_MAP["object_detector"]
                    if "detector_model" in model_data["detector"]:
                        model_data["detector"] = od_cls(
                            model_data["detector"]["detector_model"])
                    else:
                        model = _extensions.object_detector()
                        model.import_from_custom_model(
                            model_data["detector"],
                            model_data["_detector_version"])
                        model_data["detector"] = od_cls(model)
                    return cls(model_data)

                # Ordinary CustomModel: delegate to the class's own loader.
                return cls._load_version(model_data, model_version)
        elif hasattr(_extensions, name):
            # Unity extension model with no Python wrapper class registered.
            return saved_state["model"]
        else:
            raise ToolkitError(
                "Unable to load model of name '%s'; model name not registered."
                % name)
    else:
        # very legacy model format. Attempt pickle loading
        import sys

        sys.stderr.write(
            "This model was saved in a legacy model format. Compatibility cannot be guaranteed in future versions.\n"
        )
        if _six.PY3:
            raise ToolkitError(
                "Unable to load legacy model in Python 3.\n\n"
                "To migrate a model, try loading it using Turi Create 4.0 or\n"
                "later in Python 2 and then re-save it. The re-saved model should\n"
                "work in Python 3.")
        if "graphlab" not in sys.modules:
            # Old pickles reference the pre-rename "graphlab" package; alias
            # every turicreate module under that name before unpickling.
            sys.modules["graphlab"] = sys.modules["turicreate"]
            # backward compatibility. Otherwise old pickles will not load
            sys.modules["turicreate_util"] = sys.modules["turicreate.util"]
            sys.modules["graphlab_util"] = sys.modules["turicreate.util"]
            # More backwards compatibility with the turicreate namespace code.
            for k, v in list(sys.modules.items()):
                if "turicreate" in k:
                    sys.modules[k.replace("turicreate", "graphlab")] = v
        # legacy loader
        import pickle

        model_wrapper = pickle.loads(saved_state[b"model_wrapper"])
        return model_wrapper(saved_state[b"model_base"])
def load_model(location):
    """
    Load any Turi Create model that was previously saved.

    This function assumes the model (can be any model) was previously saved in
    Turi Create model format with model.save(filename).

    Parameters
    ----------
    location : string
        Location of the model to load. Can be a local path or a remote URL.
        Because models are saved as directories, there is no file extension.

    Examples
    ----------
    >>> model.save('my_model_file')
    >>> loaded_model = tc.load_model('my_model_file')
    """
    # Decide whether 'location' points at a dir_archive. For http(s) we cannot
    # probe ahead of time (exists() does not speak http, and GLUnpickler does
    # not support it either), so we assume the archive is present and let the
    # loader report any failure.
    protocol = file_util.get_protocol(location)
    if protocol == '':
        # Plain local path: normalize it, then probe for the archive marker.
        expanded = file_util.expand_full_path(location)
        archive_found = file_util.exists(os.path.join(expanded, 'dir_archive.ini'))
    elif protocol in ('http', 'https'):
        archive_found = True
    else:
        # Other remote protocols use POSIX-style paths regardless of host OS.
        import posixpath
        archive_found = file_util.exists(posixpath.join(location, 'dir_archive.ini'))

    if not archive_found:
        raise IOError("Directory %s does not exist" % location)

    unity = glconnect.get_unity()
    saved_state = _wrap_function_return(unity.load_model(_make_internal_url(location)))

    # The archive version key may round-trip as either unicode or bytes.
    version_key = u'archive_version'
    if version_key in saved_state:
        archive_version = saved_state[version_key]
    else:
        archive_version = saved_state[version_key.encode()]

    if archive_version < 0:
        raise ToolkitError("File does not appear to be a Turi Create model.")
    if archive_version > 1:
        raise ToolkitError("Unable to load model.\n\n"
                           "This model looks to have been saved with a future version of Turi Create.\n"
                           "Please upgrade Turi Create before attempting to load this model file.")
    if archive_version == 1:
        # Current format: look up the wrapper class by the stored model name.
        model_cls = MODEL_NAME_MAP[saved_state['model_name']]
        if 'model' in saved_state:
            # Native model: the saved object can be wrapped directly.
            return model_cls(saved_state['model'])
        # CustomModel: side_data carries the payload plus its own version tag,
        # which is removed before delegating to the class-specific loader.
        side_data = saved_state['side_data']
        payload_version = side_data.pop('model_version')
        return model_cls._load_version(side_data, payload_version)

    # archive_version == 0: very old pickle-based format.
    import sys
    sys.stderr.write("This model was saved in a legacy model format. Compatibility cannot be guaranteed in future versions.\n")
    if _six.PY3:
        raise ToolkitError("Unable to load legacy model in Python 3.\n\n"
                           "To migrate a model, try loading it using Turi Create 4.0 or\n"
                           "later in Python 2 and then re-save it. The re-saved model should\n"
                           "work in Python 3.")
    if 'graphlab' not in sys.modules:
        # Old pickles were written against the pre-rename "graphlab" package;
        # alias every turicreate module under that name so they resolve.
        sys.modules['graphlab'] = sys.modules['turicreate']
        # backward compatibility. Otherwise old pickles will not load
        sys.modules["turicreate_util"] = sys.modules['turicreate.util']
        sys.modules["graphlab_util"] = sys.modules['turicreate.util']
        # More backwards compatibility with the turicreate namespace code.
        for module_name, module in list(sys.modules.items()):
            if 'turicreate' in module_name:
                sys.modules[module_name.replace('turicreate', 'graphlab')] = module

    # Legacy loader: the pickled wrapper is a callable that rebuilds the model.
    import pickle
    wrapper = pickle.loads(saved_state[b'model_wrapper'])
    return wrapper(saved_state[b'model_base'])