Example #1
    def _init(cls):
        if cls.keras is None:
            keras = get_module('keras') or get_module('tf.keras')
            cls.keras = keras

        if cls._callback_cls is None:

            class TrackMetricsCallback(cls.keras.callbacks.Callback):
                def __init__(self, session: Optional[Session] = None):
                    super(TrackMetricsCallback, self).__init__()

                    self.session = session

                def on_epoch_end(self, epoch, logs=None):
                    self._log_epoch_metrics(epoch, logs)

                def _get_learning_rate(self):
                    # The optimizer's `lr` may be a schedule callable; if it
                    # is missing or not callable, fall back to None.
                    lr_schedule = getattr(self.model.optimizer, 'lr', None)
                    try:
                        return lr_schedule(self.model.optimizer.iterations)
                    except Exception:
                        return None

                def _log_epoch_metrics(self, epoch, logs):
                    if not logs:
                        return

                    track_func = self.session.track \
                        if self.session is not None \
                        else track

                    train_logs = {
                        k: v
                        for k, v in logs.items() if not k.startswith('val_')
                    }
                    for name, value in train_logs.items():
                        track_func(value,
                                   name=name,
                                   epoch=epoch,
                                   subset='train')

                    val_logs = {
                        k: v
                        for k, v in logs.items() if k.startswith('val_')
                    }
                    for name, value in val_logs.items():
                        track_func(value,
                                   name=name[4:],
                                   epoch=epoch,
                                   subset='val')

                    lr = self._get_learning_rate()
                    if lr is not None:
                        track_func(lr, name='lr', epoch=epoch, subset='train')

            cls._callback_cls = TrackMetricsCallback
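
For context, a minimal usage sketch of such a callback (hedged: `TrackMetricsCallback` below stands in for whatever factory exposes `cls._callback_cls`; the model and data are placeholders):

import numpy as np
from tensorflow import keras

# Tiny model and random data, only to exercise the callback (illustration).
x = np.random.rand(64, 4).astype('float32')
y = np.random.rand(64, 1).astype('float32')

model = keras.Sequential([keras.layers.Dense(8, activation='relu'),
                          keras.layers.Dense(1)])
model.compile(optimizer='adam', loss='mse')

# `TrackMetricsCallback()` is an assumption here; in the snippet above it is
# only reachable through `cls._callback_cls`.
model.fit(x, y, validation_split=0.25, epochs=2,
          callbacks=[TrackMetricsCallback()])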
Example #2
    def load(path: str) -> Tuple[bool, Any]:
        # Get model archive
        model_path = path

        if not os.path.isfile(path):
            # Try to get absolute path
            working_dir = os.getcwd()
            model_path = os.path.join(working_dir, path)
            if not os.path.isfile(model_path):
                return False, None

        # Open model archive
        model_arch = zipfile.ZipFile(model_path, 'r')

        # Read meta file
        try:
            meta_file = model_arch.read('model.json')
            meta_info = json.loads(meta_file)
        except Exception:
            return False, None

        # Load the model
        if meta_info['model']['lib'] == 'keras':
            keras_models = get_module('keras.models')

            # Create model architecture
            arch = meta_info['model']['arch']
            model = keras_models.model_from_json(model_arch.read(arch))

            # Extract the weights into a temporary file and load them from disk
            with tempfile.NamedTemporaryFile() as weights_file:
                weights_file_name = meta_info['model']['weights']
                weights_file.write(model_arch.read(weights_file_name))
                weights_file.flush()
                model.load_weights(weights_file.name)

            return True, model
        if meta_info['model']['lib'] == 'pytorch':
            torch = get_module('torch')

            # Extract the serialized checkpoint into a temporary file and load it
            with tempfile.NamedTemporaryFile() as model_file:
                model_file_name = meta_info['model']['model']
                model_file.write(model_arch.read(model_file_name))
                model_file.flush()
                model = torch.load(model_file.name)

            return True, model['model']

        return False, None
Example #3
File: utils.py  Project: xjohnxjohn/aim
def get_vals_hist(t_vals, num_bin):
    """Return a `num_bin`-bin histogram of `t_vals` as [counts, bin_edges] lists."""
    np = get_module('numpy')
    t_vals_hist = np.histogram(t_vals, num_bin)
    return [t_vals_hist[0].tolist(),
            t_vals_hist[1].tolist(),
            ]
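
A quick usage sketch for the helper above (the import path is a guess; only NumPy is required):

import random

from aim.utils import get_vals_hist  # hypothetical import path

# Bin 1000 values into 20 buckets; the helper returns plain Python lists.
values = [random.gauss(0, 1) for _ in range(1000)]
counts, edges = get_vals_hist(values, 20)
assert len(counts) == 20 and len(edges) == 21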
Example #4
def is_docker_installed():
    try:
        docker = get_module('docker')
        client = docker.from_env()
        client.ping()
        return True
    except Exception:
        return False
Example #5
File: utils.py  Project: youtang1993/aim
def get_unique(a):
    """Return the unique elements of `a`, preserving first-seen order."""
    np = get_module('numpy')
    s = set()
    unique = []
    for element in a:
        if element not in s:
            unique.append(element)
            s.add(element)
    return np.array(unique)
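
For illustration, the helper keeps first-seen order, unlike `np.unique`, which sorts its output:

from aim.utils import get_unique  # hypothetical import path

print(get_unique([3, 1, 3, 2, 1]).tolist())  # [3, 1, 2], insertion order kept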
Example #6
    def get_correlation(self) -> list:
        """Return the correlation matrix of the tracked value as nested lists."""
        np = get_module('numpy')
        if isinstance(self.value, list):
            return np.corrcoef(self.value).tolist()
        elif type(self.value).__name__ == 'Tensor':
            t_val = get_pt_tensor(self.value.detach())
            return np.corrcoef(t_val.numpy().T).tolist()
        else:
            return []
Example #7
File: interfaces.py  Project: vb99/aim
    def _init(cls):
        """
        Imports TensorFlow and creates `Profiler` object
        """
        if cls.tf is None:
            cls.tf = get_module('tensorflow')

        if cls.profiler is None:
            cls.profiler = Profiler()
Example #8
    def is_docker_installed():
        # Check if docker is installed
        docker = get_module('docker')
        client = docker.from_env()

        try:
            client.ping()
            return True
        except Exception:
            return False
Example #9
    def pull(self, version=AIM_CONTAINER_IMAGE_DEFAULT_TAG):
        """
        Pulls the image for the given tag from Docker Hub and returns status
        """
        docker = get_module('docker')
        try:
            self.client.images.pull(self.get_image_name(version))
        except docker.errors.APIError:
            return False
        return True
Example #10
    def save_blobs(self, path: str, abs_path: str = None) -> bool:
        """
        Saves the image at the given absolute path and returns status
        """
        self.path = path
        self.abs_path = abs_path
        if type(self.media_data).__name__ == 'Tensor':
            torchvision_utils = get_module('torchvision.utils')

            # Save pytorch tensor as an image
            torchvision_utils.save_image(self.media_data, abs_path)

            return True

        return False
Example #11
    def __init__(self, repo, dev=False):
        self.name = '{}_{}'.format(AIM_CONTAINER_PREFIX, repo.hash)
        self.ports = {}
        self.volumes = {
            repo.path: {'bind': '/store', 'mode': 'rw'},
            repo.name: {'bind': '/var/lib/postgresql/data', 'mode': 'rw'},
        }
        self.env = [
            'PROJECT_NAME={}'.format(repo.name),
            'PROJECT_PATH={}'.format(repo.root_path),
        ]

        docker = get_module('docker')
        self.client = docker.from_env()

        self.dev = dev
Example #12
    def save_blobs(self, path: str, abs_path: str = None) -> dict:
        # Save torch model to path
        if self.lib == 'pytorch':
            torch = get_module('torch')

            model_path = '{}.pt'.format(path)
            _, _, model_file_name = model_path.rpartition('/')

            # Save model and optimizer
            torch.save({
                'model': self.model.state_dict(),
                'opt': self.opt,
            }, model_path)

            # Specify meta information
            model_save_meta = {
                'lib': 'pytorch',
                'model': model_file_name,
            }

            return model_save_meta
        elif self.lib == 'keras':
            weights_path = '{}.weights.h5'.format(path)
            arch_path = '{}.arch.json'.format(path)

            _, _, weights_file_name = weights_path.rpartition('/')
            _, _, arch_file_name = arch_path.rpartition('/')

            # Save weights
            self.model.save_weights(weights_path)

            # Save model architecture
            with open(arch_path, 'w') as f:
                f.write(self.model.to_json())

            # Specify meta information
            model_save_meta = {
                'lib': 'keras',
                'weights': weights_file_name,
                'arch': arch_file_name,
            }

            return model_save_meta

        return {}
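
The dict returned above is the per-library metadata that the loader in Example #2 reads back from the archive's `model.json`. A sketch of what the keras branch produces (field values follow the code above; the call site and wrapper name are assumptions):

# Assumed call on the checkpoint/model wrapper that owns `save_blobs`:
meta = model_wrapper.save_blobs('checkpoints/run-1/model')
# keras branch returns:
# {'lib': 'keras',
#  'weights': 'model.weights.h5',
#  'arch': 'model.arch.json'}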
Example #13
    def __init__(self, repo, dev=False):
        self.name = '{}_{}'.format(AIM_CONTAINER_PREFIX, repo.hash)
        self.ports = {
            '{}/tcp'.format(AIM_BOARD_PORT_CLIENT): AIM_BOARD_PORT_CLIENT,
            '{}/tcp'.format(AIM_BOARD_PORT_SERVER): AIM_BOARD_PORT_SERVER,
            '{}/tcp'.format(AIM_BOARD_PORT_WS): AIM_BOARD_PORT_WS,
        }
        self.volumes = {
            repo.path: {
                'bind': '/store',
                'mode': 'rw'
            },
        }
        self.env = ['PROJECT_NAME={}'.format(repo.name)]

        docker = get_module('docker')
        self.client = docker.from_env()

        self.dev = dev
Example #14
    def get_layers(cls, model, parent_name=None):
        """Recursively collect the leaf layers of a PyTorch model together with
        30-bin histograms of their weight and bias tensors."""
        np = get_module('numpy')

        layers = {}
        if is_pytorch_module(model):
            for name, m in model.named_children():
                layer_name = '{}__{}'.format(parent_name, name) \
                    if parent_name \
                    else name
                layer_name += '.{}'.format(type(m).__name__)

                if len(list(m.named_children())):
                    layers.update(cls.get_layers(m, layer_name))
                else:
                    layers[layer_name] = {}

                    if hasattr(m, 'weight') \
                            and m.weight is not None \
                            and hasattr(m.weight, 'data'):
                        weight_arr = get_pt_tensor(m.weight.data).numpy()
                        weight_hist = np.histogram(weight_arr, 30)
                        layers[layer_name]['weight'] = [
                            weight_hist[0].tolist(),
                            weight_hist[1].tolist(),
                        ]

                    if hasattr(m, 'bias') \
                            and m.bias is not None \
                            and hasattr(m.bias, 'data'):
                        bias_arr = get_pt_tensor(m.bias.data).numpy()
                        bias_hist = np.histogram(bias_arr, 30)
                        layers[layer_name]['bias'] = [
                            bias_hist[0].tolist(),
                            bias_hist[1].tolist(),
                        ]

        return layers
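
A minimal sketch of calling the layer-inspection helper on a small PyTorch model (hedged: `ModelInterface` stands in for whichever class owns the classmethod above):

import torch.nn as nn

model = nn.Sequential(
    nn.Linear(4, 8),   # has weight and bias -> two 30-bin histograms
    nn.ReLU(),         # no parameters -> empty dict
)

layers = ModelInterface.get_layers(model)  # assumed owner class name
# e.g. {'0.Linear': {'weight': [...], 'bias': [...]}, '1.ReLU': {}}
for name, hists in layers.items():
    print(name, list(hists.keys()))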
Example #15
File: interfaces.py  Project: vb99/aim
    def _init(cls):
        """
        Import `keras` and implement keras wrapper
        """
        super()._init()

        # Import `keras`
        if cls.keras is None:
            cls.keras = get_module('keras')

        # Keep references for use inside the wrapper layer classes below
        keras_interface = cls
        tf = cls.tf

        # Implement `keras` layer wrapper for profiler
        # `label_tracking_start` method
        if cls._label_layer_cls is None:

            class ProfilerLabelLayer(cls.keras.layers.Layer):
                def __init__(self, key, gradient, **kwargs):
                    self.key = key
                    self.gradient = gradient
                    super(ProfilerLabelLayer, self).__init__(**kwargs)

                def call(self, inp):
                    profiler_start_f = keras_interface.PROFILER_NODE_START
                    x = tf.py_function(func=keras_interface._profiler_node(
                        profiler_start_f, self.key),
                                       inp=[inp],
                                       Tout=inp.dtype)

                    if not self.gradient:
                        x = tf.stop_gradient(x)

                    # Set node shape
                    x.set_shape(inp.get_shape())
                    return x

                def compute_output_shape(self, input_shape):
                    return input_shape

            cls._label_layer_cls = ProfilerLabelLayer

        # Implement `keras` layer wrapper for profiler
        # `label_tracking_stop` method
        if cls._loop_layer_cls is None:

            class ProfilerLoopLayer(cls.keras.layers.Layer):
                def __init__(self, key, gradient, **kwargs):
                    self.key = key
                    self.gradient = gradient
                    super(ProfilerLoopLayer, self).__init__(**kwargs)

                def call(self, inp):
                    profiler_end_f = keras_interface.PROFILER_NODE_END
                    x = tf.py_function(func=keras_interface._profiler_node(
                        profiler_end_f, self.key),
                                       inp=[inp],
                                       Tout=inp.dtype)

                    if not self.gradient:
                        x = tf.stop_gradient(x)

                    # Set node shape
                    x.set_shape(inp.get_shape())
                    return x

                def compute_output_shape(self, input_shape):
                    return input_shape

            cls._loop_layer_cls = ProfilerLoopLayer

        # Implement `keras` layer wrapper for profiler `cycle_end` method
        if cls._cycle_layer_cls is None:

            class ProfilerCycleLayer(cls.keras.layers.Layer):
                def __init__(self, **kwargs):
                    super(ProfilerCycleLayer, self).__init__(**kwargs)

                def call(self, inp):
                    profiler_cycle_f = keras_interface.PROFILER_NODE_CYCLE_END
                    x = tf.py_function(
                        func=keras_interface._profiler_node(profiler_cycle_f),
                        inp=[inp],
                        Tout=inp.dtype)

                    # Set node shape
                    x.set_shape(inp.get_shape())
                    return x

                def compute_output_shape(self, input_shape):
                    return input_shape

            cls._cycle_layer_cls = ProfilerCycleLayer

        # Implement `keras` layer that does nothing, but passes input
        # to the next layer. This layer is used when profiler is disabled.
        if cls._neutral_layer_cls is None:

            class ProfilerNeutralLayer(cls.keras.layers.Layer):
                def call(self, inp):
                    return tf.stop_gradient(inp)

                def compute_output_shape(self, input_shape):
                    return input_shape

            cls._neutral_layer_cls = ProfilerNeutralLayer
Example #16
File: model.py  Project: youtang1993/aim
    def save_blobs(self, path: str, abs_path: str = None) -> dict:
        # Save torch model to path
        if self.lib == 'pytorch':
            torch = get_module('torch')

            model_path = '{}.pt'.format(path)
            _, _, model_file_name = model_path.rpartition('/')

            # Save model and optimizer
            torch.save({
                'model': self.model.state_dict(),
                'opt': self.opt,
            }, model_path)

            # Specify meta information
            model_save_meta = {
                'lib': 'pytorch',
                'model': model_file_name,
            }

            return model_save_meta
        elif self.lib == 'keras':
            weights_path = '{}.weights.h5'.format(path)
            arch_path = '{}.arch.json'.format(path)

            _, _, weights_file_name = weights_path.rpartition('/')
            _, _, arch_file_name = arch_path.rpartition('/')

            # Save weights
            self.model.save_weights(weights_path)

            # Save model architecture
            with open(arch_path, 'w') as f:
                f.write(self.model.to_json())

            # Specify meta information
            model_save_meta = {
                'lib': 'keras',
                'weights': weights_file_name,
                'arch': arch_file_name,
            }

            return model_save_meta
        elif self.lib == 'tensorflow':
            tf = get_module('tensorflow')
            saver = tf.train.Saver(save_relative_paths=True)

            saver.save(self.model, path)

            _, _, model_path = path.rpartition('/')

            # Specify meta information
            model_save_meta = {'lib': 'tensorflow', 'name': model_path}

            return model_save_meta
        elif self.lib == 'tensorflow-est':
            tf = get_module('tensorflow')

            self.model.export_saved_model(path, self.fn)
            _, _, model_path = path.rpartition('/')
            model_path = os.path.join(model_path, os.listdir(path=path)[0])

            # Specify meta information
            model_save_meta = {'lib': 'tensorflow-est', 'name': model_path}
            return model_save_meta
        return {}
Example #17
File: model.py  Project: youtang1993/aim
    def load(path: str) -> Tuple[bool, Any]:
        # Get model archive
        model_path = path

        if not os.path.isfile(path):
            # Try to get absolute path
            working_dir = os.getcwd()
            model_path = os.path.join(working_dir, path)
            if not os.path.isfile(model_path):
                return False, None

        # Create temporary directory
        tmp_copy_dir = tempfile.TemporaryDirectory()
        copy_dir_name = tmp_copy_dir.name
        shutil.copy2(model_path, copy_dir_name)

        # Open model archive
        model_arch = zipfile.ZipFile(model_path, 'r')

        # Read meta file
        try:
            meta_file = model_arch.read('model.json')
            meta_info = json.loads(meta_file)
        except Exception:
            return False, None

        # Delete directory if not working with tensorflow
        if 'tensorflow' not in meta_info['model']['lib']:
            tmp_copy_dir.cleanup()

        # Load the model
        if meta_info['model']['lib'] == 'keras':
            keras_models = get_module('keras.models')

            # Create model architecture
            arch = meta_info['model']['arch']
            model = keras_models.model_from_json(model_arch.read(arch))

            # Extract the weights into a temporary file and load them from disk
            with tempfile.NamedTemporaryFile() as weights_file:
                weights_file_name = meta_info['model']['weights']
                weights_file.write(model_arch.read(weights_file_name))
                weights_file.flush()
                model.load_weights(weights_file.name)

            return True, model
        if meta_info['model']['lib'] == 'pytorch':
            torch = get_module('torch')

            # Extract the serialized checkpoint into a temporary file and load it
            with tempfile.NamedTemporaryFile() as model_file:
                model_file_name = meta_info['model']['model']
                model_file.write(model_arch.read(model_file_name))
                model_file.flush()
                model = torch.load(model_file.name)

            return True, model['model']
        if meta_info['model']['lib'] == 'tensorflow':
            tf = get_module('tensorflow')

            model_name = meta_info['model']['name']

            # Unzip copied .aim file in created directory
            file_path = Path(copy_dir_name)
            files = (x for x in file_path.iterdir() if x.is_file())
            zip_file = next(files)
            with zipfile.ZipFile(zip_file, 'r') as zip_ref:
                zip_ref.extractall(copy_dir_name)

            # Restore session
            sess = tf.Session()
            saver = tf.train.import_meta_graph(
                os.path.join(copy_dir_name, '{}.meta'.format(model_name)))
            saver.restore(sess, os.path.join(copy_dir_name, model_name))
            tmp_copy_dir.cleanup()
            return True, sess
        if meta_info['model']['lib'] == 'tensorflow-est':
            tf = get_module('tensorflow')

            model_name = meta_info['model']['name']

            # Unzip copied .aim file in created directory
            file_path = Path(copy_dir_name)
            files = (x for x in file_path.iterdir() if x.is_file())
            zip_file = next(files)
            with zipfile.ZipFile(zip_file, 'r') as zip_ref:
                zip_ref.extractall(copy_dir_name)

            imported = tf.saved_model.load(
                os.path.join(copy_dir_name, model_name))
            tmp_copy_dir.cleanup()
            return True, imported

        return False, None
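
For reference, an assumed call site for the loader above (`Model` as the owning class and the `.aim` archive path are assumptions):

ok, loaded = Model.load('checkpoints/my_model.aim')  # assumed owner class
if not ok:
    raise RuntimeError('could not load model archive')
# 'keras' branch: `loaded` is a keras model with restored weights.
# 'pytorch' branch: `loaded` is the saved state_dict (the 'model' entry).
# 'tensorflow' branch: `loaded` is a restored tf.Session.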
Example #18
    def _get_docker_client(cls):
        if cls._docker_client is None:
            docker = get_module('docker')
            cls._docker_client = docker.from_env()
        return cls._docker_client