def fill_pf_list(pf_list, n_pf, fill_with_fnc=(identity, ())):
    """Fill the pre-process function list up to the required length.

    Args:
        pf_list (list): input list of pre-process functions. May be None,
            a single function, or a list; falsy entries are replaced by
            ``fill_with_fnc``.
        n_pf (int): required number of pre-process functions.
        fill_with_fnc: value used to fill missing slots.
            NOTE(review): the default is the tuple ``(identity, ())`` —
            presumably a (function, args) pair; confirm against callers,
            since the sibling variant of this helper defaults to a bare
            ``identity``.

    Returns:
        list of pre-process functions with length ``n_pf``.

    Raises:
        ValueError: if ``pf_list`` has more than ``n_pf`` entries.
    """
    # PEP 8: compare against None with `is`, not `==`.
    if pf_list is None:
        return [fill_with_fnc] * n_pf

    # Normalize a single function to a list, then replace falsy entries
    # (None, etc.) with the fill value.
    pf_list = [pf if pf else fill_with_fnc
               for pf in utils.make_list(pf_list)]

    if len(pf_list) > n_pf:
        raise ValueError('Invalid number of preprocessing functions')
    # Pad the tail up to the required length.
    return pf_list + [fill_with_fnc] * (n_pf - len(pf_list))
def __init__(self, input_len, im_size, n_channels, cat_n_class_list,
             n_continuous=0, n_discrete=0, mutual_info_weight=1.0,
             max_grad_norm=10.):
    """
    Args:
        input_len (int): length of input random vector
        im_size (int or list with length 2): size of generated image
        n_channels (int): number of image channels
        cat_n_class_list (int or list of int): number of classes for each
            discrete (categorical) latent code; must provide at least
            ``n_discrete`` entries
        n_continuous (int): number of continuous structured latent variables
        n_discrete (int): number of discrete structured latent variables
        mutual_info_weight (float): weight for the mutual information
            regularization term
        max_grad_norm (float): maximum norm used for gradient clipping
    """
    im_size = L.get_shape2D(im_size)
    self.in_len = input_len
    self.im_h, self.im_w = im_size
    self.n_channels = n_channels
    self.n_code = input_len

    # Validate inputs with explicit raises rather than `assert`, which is
    # stripped under `python -O`.
    if n_discrete < 0 or n_continuous < 0:
        raise ValueError('n_discrete and n_continuous must be non-negative')
    self.n_continuous = n_continuous
    self.n_discrete = n_discrete
    self.cat_n_class_list = utils.make_list(cat_n_class_list)
    if len(self.cat_n_class_list) < self.n_discrete:
        raise ValueError(
            'cat_n_class_list must provide a class count for each discrete code')
    self.n_latent = n_continuous + n_discrete

    self._lambda = mutual_info_weight
    self._max_grad_norm = max_grad_norm
    self.layers = {}
def __init__(self, data_name_list, data_dir='', shuffle=True,
             batch_dict_name=None, load_fnc_list=None):
    """
    Args:
        data_name_list (list of str): list of filenames or part of filenames
            of each data channel
        data_dir (list of str): list of directories of each data channel;
            a single directory is broadcast to all channels
        shuffle (bool): whether to shuffle data or not
        batch_dict_name (list of str): list of keys for each channel of
            batch data
        load_fnc_list (list): list of pre-process functions for each channel
            of data
    """
    data_name_list = utils.make_list(data_name_list)
    load_fnc_list = utils.make_list(load_fnc_list)
    data_dir = utils.make_list(data_dir)

    # A single directory applies to every data channel.
    if len(data_dir) == 1:
        data_dir = data_dir * len(load_fnc_list)

    dataflow_list = []
    self._load_fnc_list = []
    # Keep channels up to the first missing name/function pair.
    for name, fnc in zip(data_name_list, load_fnc_list):
        if name is None or fnc is None:
            break
        dataflow_list.append(name)
        self._load_fnc_list.append(fnc)

    self._n_dataflow = len(dataflow_list)
    self._shuffle = shuffle
    self._batch_dict_name = batch_dict_name

    self._data_id = 0
    self.setup(epoch_val=0, batch_size=1)
    self._load_file_list(dataflow_list, data_dir)
    self._cur_file_name = [[] for _ in range(len(self._file_name_list))]
def __init__(self, data_name_list, data_dir='', shuffle=True,
             batch_dict_name=None, load_fnc_list=None):
    """
    Args:
        data_name_list (list of str): list of filenames or part of filenames
            of each data channel
        data_dir (list of str): list of directories of each data channel;
            a single directory is broadcast to all channels
        shuffle (bool): whether to shuffle data or not
        batch_dict_name (list of str): list of keys for each channel of
            batch data
        load_fnc_list (list): list of pre-process functions for each channel
            of data
    """
    data_name_list = utils.make_list(data_name_list)
    load_fnc_list = utils.make_list(load_fnc_list)
    data_dir = utils.make_list(data_dir)

    # Pad data_dir with the same path only when data_dir is a single input.
    if len(data_dir) == 1:
        data_dir = [data_dir[0] for i in range(len(load_fnc_list))]

    # Keep channels up to the first missing name/function pair.
    dataflow_list = []
    self._load_fnc_list = []
    for data_name, load_fnc in zip(data_name_list, load_fnc_list):
        if data_name is not None and load_fnc is not None:
            dataflow_list.append(data_name)
            self._load_fnc_list.append(load_fnc)
        else:
            break
    self._n_dataflow = len(dataflow_list)

    self._shuffle = shuffle
    self._batch_dict_name = batch_dict_name

    self._data_id = 0
    self.setup(epoch_val=0, batch_size=1)
    self._load_file_list(dataflow_list, data_dir)
    self._cur_file_name = [[] for i in range(len(self._file_name_list))]
def fill_pf_list(pf_list, n_pf, fill_with_fnc=identity):
    """Fill the pre-process function list up to the required length.

    Args:
        pf_list (list): input list of pre-process functions. May be None,
            a single function, or a list; falsy entries are replaced by
            ``fill_with_fnc``.
        n_pf (int): required number of pre-process functions.
        fill_with_fnc: function used to fill missing slots.

    Returns:
        list of pre-process functions with length ``n_pf``.

    Raises:
        ValueError: if ``pf_list`` has more than ``n_pf`` entries.
    """
    # Fix: honor fill_with_fnc here — previously this branch hard-coded
    # `identity`, silently ignoring the caller's fill function.
    # Also compare against None with `is` per PEP 8.
    if pf_list is None:
        return [fill_with_fnc] * n_pf

    # Normalize a single function to a list, then replace falsy entries.
    pf_list = [pf if pf else fill_with_fnc
               for pf in utils.make_list(pf_list)]

    if len(pf_list) > n_pf:
        raise ValueError('Invalid number of preprocessing functions')
    # Pad the tail up to the required length.
    return pf_list + [fill_with_fnc] * (n_pf - len(pf_list))