def make_placeholder(x, sym_input_dict, gear):
    """define a tensorflow placeholder for xdl input x.
  Args:
  x: xdl embedding info returned by xdl.embedding()
  Returns:
  a tf placeholder
  Raises:
  None
  """
    import xdl.python.sparse_engine.embedding as emb
    if x.shape is None or len(x.shape) == 0:
        raise Exception("no shape info")
    emb_info = emb.get_embedding_info(x)
    if emb_info is not None:
        # embedding input: expose it as a [batch_size, emb_dim] float32 symbol
        emb_info._output_tensor = x
        name = create_name(emb_info.name)
        add_var_mapping(emb_info.var, name)
        sym_input_dict[name] = x
        return mx.sym.Variable(name,
                               shape=[x.shape[0], emb_info.emb_dim],
                               dtype='float32',
                               __create_by_xdl__=True)
    else:
        # dense input: keep the xdl shape and convert the dtype to its mxnet equivalent
        global _GEAR_INPUTS
        name = create_name("input")
        if gear:
            name += '_gear'
            _GEAR_INPUTS.append(name)
        import xdl.python.backend.mxnet.convert_utils as cu
        sym_input_dict[name] = x
        return mx.sym.Variable(name,
                               shape=x.shape,
                               dtype=cu.XDL2MX.convert_type(x.dtype),
                               __create_by_xdl__=True)
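
# Usage sketch (illustrative only, not part of the module; mx, create_name,
# add_var_mapping and _GEAR_INPUTS are module-level names of the surrounding
# mxnet backend). A converter loop would roughly do:
#
#   sym_input_dict = {}
#   emb_sym = make_placeholder(emb_output, sym_input_dict, gear=False)
#   dense_sym = make_placeholder(dense_tensor, sym_input_dict, gear=False)
#   # sym_input_dict now maps every generated symbol name to its xdl tensor,
#   # which the caller can use to bind real data; emb_output and dense_tensor
#   # are assumed names for whatever the training script provides.
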
def make_placeholder(x):
    """define a tensorflow placeholder for xdl input x.
  Args:
    x: a xdl dense or embedding tensor
    Returns:
    a tf placeholder
  Raises:
    None
  """
    emb_info = get_embedding_info(x)
    if emb_info is not None:
        # embedding input: combiners other than 'tile' produce [None, emb_dim]
        # outputs, while the 'tile' combiner produces [None, tile_length]
        if emb_info.combiner != 'tile':
            placeholder = tf.placeholder(tf.float32,
                                         name=emb_info.name,
                                         shape=[None, emb_info.emb_dim])
        else:
            placeholder = tf.placeholder(tf.float32,
                                         name=emb_info.name,
                                         shape=[None, emb_info.tile_length])
        emb_info._output_tensor = x
        add_var_mapping(emb_info.var, placeholder.name)
        add_to_collection(BACKPROP_VARS, (placeholder.name, placeholder))
        return placeholder
    else:
        # dense input: make the batch dimension dynamic (None), keep the rest
        if x.shape is None:
            shape = None
        elif len(x.shape) == 0:
            shape = x.shape
        else:
            shape = [None]
            if len(x.shape) > 1:
                shape.extend(x.shape[1:])
        return tf.placeholder(XDL2TF.convert_type(x.dtype), shape=shape)
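
# Usage sketch (illustrative only, not part of the module): assuming the tf
# backend converts each xdl input before calling the user's model function,
# the surrounding code would roughly do:
#
#   placeholders = [make_placeholder(t) for t in xdl_inputs]
#   loss = model_fn(*placeholders)
#
# Embedding inputs become [None, emb_dim] (or [None, tile_length]) float32
# placeholders recorded in the BACKPROP_VARS collection so gradients can be
# routed back to the xdl sparse variables; dense inputs get a placeholder with
# a dynamic batch dimension. xdl_inputs and model_fn are assumed names.
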
def make_placeholder(x):
  """define a tensorflow placeholder for xdl input x.
  Args:
    x: a xdl dense or embedding tensor
    Returns:
    a tf placeholder
  Raises:
    None
  """
  emb_info = get_embedding_info(x)
  if emb_info is not None:
    # embedding input: a [None, emb_dim] float32 placeholder, recorded in
    # BACKPROP_VARS so gradients can be routed back to the xdl variable
    placeholder = tf.placeholder(
      tf.float32,
      name=emb_info.name,
      shape=[None, emb_info.emb_dim])
    emb_info._output_tensor = x
    add_var_mapping(emb_info.var, placeholder.name)
    add_to_collection(BACKPROP_VARS, (placeholder.name, placeholder))
    return placeholder
  else:
    # dense input: reuse the xdl shape as-is and convert the dtype to tf
    return tf.placeholder(XDL2TF.convert_type(x.dtype), shape=x.shape)
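
# Note (editorial sketch): unlike the variant above, this version keeps the xdl
# shape verbatim for dense inputs, batch dimension included, and does not
# special-case the 'tile' combiner. A call such as
#
#   ph = make_placeholder(dense_tensor)   # dense_tensor is an assumed name
#
# therefore yields a placeholder with exactly dense_tensor's shape and the
# tf dtype converted by XDL2TF.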