Example 1
0
    def export_weights(self):
        """
        Collect TensorFlow weights into a dict for use with MXNet.

        Returns
        -------
        tf_export_params : dict
            Maps each weight name (trailing ":0" stripped) to its value
            as a contiguous numpy array.
        """
        # global_variables() (not trainable_variables()) so that
        # running_mean and running_variance are included as well.
        all_vars = _tf.global_variables()
        all_vals = self.sess.run(all_vars)

        exported = {}
        for variable, value in zip(all_vars, all_vals):
            key = variable.name.replace(":0", "")
            if value.ndim == 1:
                exported[key] = value
            elif value.ndim == 4:
                # 4-dim values are conv kernels: convert TF layout to Core ML.
                exported[key] = _utils.convert_conv2d_tf_to_coreml(value)
            # NOTE(review): values of any other rank are silently skipped —
            # presumably none exist in this graph; confirm against the model.

        # Force a contiguous memory layout on every exported array.
        return {name: _np.ascontiguousarray(arr)
                for name, arr in exported.items()}
Example 2
0
    def export_weights(self):
        """
        Retrieve weights from the TF model, convert to the format Core ML
        expects and store in a dictionary.

        Returns
        -------
        net_params : dict
            Dictionary of weights, where the key is the name of the
            layer (e.g. `drawing_conv0_weight`) and the value is the
            respective weight of type `numpy.ndarray`.
        """
        with self.dc_graph.as_default():
            variables = _tf.trainable_variables()
            values = self.sess.run(variables)

        net_params = {}
        for variable, value in zip(variables, values):
            key = variable.name.replace(":0", "")
            if "bias" in variable.name:
                net_params[key] = value
            elif "dense" in variable.name:
                if "drawing_dense0_weight" in variable.name:
                    # To make the output of TF pool3 (NHWC) compatible with
                    # Core ML (NCHW): decompose the FC weights to NHWC,
                    # transpose to NCHW, then reshape back to FC form.
                    fc_weight = _np.reshape(value, (3, 3, 64, 128))
                    fc_weight = _np.transpose(fc_weight, (2, 0, 1, 3))
                    fc_weight = _np.reshape(fc_weight, (576, 128))
                    net_params[key] = _np.transpose(fc_weight, (1, 0))
                else:
                    net_params[key] = value.transpose(1, 0)
            else:
                # np.transpose won't change the underlying memory layout
                # but in turicreate we will force it.
                net_params[key] = _utils.convert_conv2d_tf_to_coreml(value)

        return net_params
Example 3
0
 def export_weights(self):
     """
     Export trainable TF variables as a dict of contiguous numpy arrays.

     Keys are variable names with the ":0" suffix stripped.  Conv and
     dense weights are converted to the Core ML layout; everything else
     (biases etc.) is copied through unchanged.
     """
     variables = _tf.trainable_variables()
     values = self.sess.run(variables)

     exported = {}
     for variable, value in zip(variables, values):
         key = variable.name.split(':')[0]
         if 'weight' not in variable.name:
             exported[key] = _np.array(value)
         elif 'conv' in variable.name:
             exported[key] = _utils.convert_conv2d_tf_to_coreml(value)
         else:
             exported[key] = _utils.convert_dense_tf_to_coreml(value)

     # Ensure every exported array is contiguous in memory.
     return {name: _np.ascontiguousarray(arr)
             for name, arr in exported.items()}
Example 4
0
    def export_weights(self):
        """
        Export trainable variables from the model graph (`self.st_graph`).

        Returns a dict mapping each variable name (":0" suffix removed)
        to a contiguous numpy array, with conv / dense weights converted
        to the Core ML layout first.
        """
        # TensorFlow is imported lazily so the module loads without it.
        _tf = _lazy_import_tensorflow()

        with self.st_graph.as_default():
            variables = _tf.trainable_variables()
            values = self.sess.run(variables)

        exported = {}
        for variable, value in zip(variables, values):
            key = variable.name.split(":")[0]
            if "weight" not in variable.name:
                exported[key] = _np.array(value)
            elif "conv" in variable.name:
                exported[key] = _utils.convert_conv2d_tf_to_coreml(value)
            else:
                exported[key] = _utils.convert_dense_tf_to_coreml(value)

        # Ensure a contiguous memory layout for every exported array.
        return {name: _np.ascontiguousarray(val)
                for name, val in exported.items()}