def import_or_alias(python_path: str) -> Union[Callable, str]:
    """Import an object from a full python path or assume it is an alias
    for some object in TensorFlow (e.g. "categorical_crossentropy").

    Args:
        python_path: str, full python path or alias.

    Returns:
        callable/str, imported object, or the alias string unchanged.

    Raises:
        ImportError: object not found in the resolved module.
    """
    if python_path.startswith("tf.keras"):
        # "tf.keras" is shorthand, not an importable module path.
        original_path = python_path
        python_path = python_path.replace("tf.keras", "tensorflow.keras")
        # Log the pre-replacement path and the actual replacement target
        # (the old message claimed "tensorflow.python.keras" incorrectly).
        logger.warning(f"fixing import {original_path} - replacing tf.keras "
                       "with tensorflow.keras")
    try:
        module_path, obj_name = python_path.rsplit(".", 1)
    except ValueError:
        # No dot at all: treat the string as an alias and return it as-is.
        return python_path
    module = importlib.import_module(module_path)
    if not hasattr(module, obj_name):
        raise ImportError(
            f"object: {obj_name} was not found in module {module_path}")
    return getattr(module, obj_name)
def _force_monitor_to_mode(monitor: str, metrics_names: List[str], to_val: bool, service_name: str) -> str: """Force a monitor quantity to either train or validation mode. For example 'loss' - train, 'val_loss' - validation. Args: monitor: str, metric to monitor. metrics_names: list[str], 'metrics' names. to_val: bool, validation if true, else false. service_name: str, corresponding service (for warning purposes). Returns: str, monitor maybe forced. Raises: ValueError, monitor not in 'metrics' names. """ val_metrics_names = [f"val_{mm}" for mm in metrics_names] if (monitor not in metrics_names) and (monitor not in val_metrics_names): raise ValueError( f"monitor: {monitor} not found in model metrics names: " f"{metrics_names + val_metrics_names}") if to_val and not monitor.startswith("val_"): monitor = f"val_{monitor}" logger.warning(f"corrected 'monitor' to validation verison: {monitor} " f"for service: {service_name}") elif not to_val and monitor.startswith("val_"): monitor = monitor[4:] logger.warning(f"corrected 'monitor' to train verison: {monitor} " f"for service: {service_name}") return monitor
def import_obj_with_search_modules(python_path: str,
                                   search_modules: List[str] = None,
                                   search_both_cases=False) -> Callable:
    """Import a object (variable, class, function, etc...) from a python path.

    search_modules are used to potentially shorten the python path by
    eliminating the module part of the string. Optionally check both
    uncapitalized and capitalized version of the object name.

    Args:
        python_path: str, full python path or object name (in conjunction
            with search_modules).
        search_modules: list[str], python modules against short path.
        search_both_cases: bool, try capitalized and uncapitalized object
            name.

    Returns:
        callable, imported object.

    Raises:
        ImportError: object not found.
    """
    if python_path.startswith("tf.keras"):
        # "tf.keras" is shorthand, not an importable module path.
        original_path = python_path
        python_path = python_path.replace("tf.keras", "tensorflow.keras")
        # Log the pre-replacement path and the actual replacement target
        # (the old message claimed "tensorflow.python.keras" incorrectly).
        logger.warning(f"fixing import {original_path} - replacing tf.keras "
                       "with tensorflow.keras")
    try:
        module_path, obj_name = python_path.rsplit(".", 1)
    except ValueError:
        # No dot: short object name, to be resolved via search_modules.
        module_path = ""
        obj_name = python_path
    if module_path:
        module = importlib.import_module(module_path)
        if not hasattr(module, obj_name):
            raise ImportError(
                f"object: {obj_name} was not found in module {module_path}")
        return getattr(module, obj_name)
    # Short name: the search modules are required. Check the precondition
    # before doing any casing work.
    if search_modules is None:
        raise ImportError(
            f"object: {obj_name} not found, no module provided.")
    if search_both_cases:
        # Try the original casing first, then the flipped-case variant.
        if _capitalize(obj_name) == obj_name:
            obj_names = [_capitalize(obj_name), _uncapitalize(obj_name)]
        else:
            obj_names = [_uncapitalize(obj_name), _capitalize(obj_name)]
    else:
        obj_names = [obj_name]
    for search_module_path in search_modules:
        module = importlib.import_module(search_module_path)
        for name in obj_names:
            if hasattr(module, name):
                return getattr(module, name)
    raise ImportError(f"object: {obj_name} was not found in the searched "
                      f"modules {search_modules}")
def _maybe_fix_tensorflow_python_path(python_path: str) -> str: """Fix a common mistake python path mistake tf.keras vs tensorflow.keras. Args: python_path: str, python path. Returns: str, python path. """ if python_path.startswith("tf.keras"): python_path = python_path.replace("tf.keras", "tensorflow.keras") logger.warning(f"fixing import {python_path} - replacing tf.keras " "with tensorflow.python.keras") return python_path
def _create_tensorboard(artifact_dir: str,
                        cfg_services: dict) -> callbacks.TensorBoard:
    """Create a TensorBoard callback.

    Args:
        artifact_dir: str, path to artifact directory.
        cfg_services: dict, services subsection of config.

    Returns:
        TensorBoard, Tensorboard callback.
    """
    params = cfg_services["tensorboard"]
    if "log_dir" in params:
        # Any user-supplied value is overridden below; warn so the user
        # knows their setting is ignored.
        logger.warning(
            "'log_dir' automatically handled for 'tensorboard' service")
    params["log_dir"] = os.path.join(artifact_dir, TENSORBOARD)
    return callbacks.TensorBoard(**params)
def _force_monitor_to_mode(monitor: str, to_val: bool, service_name: str) -> str: """Force a monitor quantity to either train or validation mode. For example 'loss' - train, 'val_loss' - validation. Args: monitor: str, metric to monitor. to_val: bool, validation if true, else false. service_name: str, corresponding service (for warning purposes). Returns: str, monitor maybe forced. """ if to_val and not monitor.startswith("val_"): monitor = f"val_{monitor}" logger.warning(f"corrected 'monitor' to validation verison: {monitor} " f"for service: {service_name}") elif not to_val and monitor.startswith("val_"): monitor = monitor[4:] logger.warning(f"corrected 'monitor' to train verison: {monitor} " f"for service: {service_name}") return monitor