Example #1
def make_options_from_flags(FLAGS):
  """Build an Options object, letting command-line flags override the defaults."""
  if FLAGS.json_config is not None:
    options = read_options_from_file(FLAGS.json_config)
  else:
    options = Options()  # default values defined in config.Options

  if FLAGS.shuffle is not None:
    options.shuffle = FLAGS.shuffle
  if FLAGS.net_mode is not None:
    options.net_mode = FLAGS.net_mode
  if FLAGS.data_mode is not None:
    options.data_mode = FLAGS.data_mode
  if FLAGS.load_mode is not None:
    options.load_mode = FLAGS.load_mode
  if FLAGS.fix_level is not None:
    options.fix_level = FLAGS.fix_level
  if FLAGS.init_learning_rate is not None:
    options.base_lr = FLAGS.init_learning_rate
  # presumably 'sgd' and 0.00004 are the flag defaults, so these fire only on explicit overrides
  if FLAGS.optimizer != 'sgd':
    options.optimizer = FLAGS.optimizer
  if FLAGS.weight_decay != 0.00004:
    options.weight_decay = FLAGS.weight_decay

  if FLAGS.global_label is not None:
    options.data_mode = 'global_label'
    options.global_label = FLAGS.global_label
  if options.load_mode != 'normal':
    if FLAGS.backbone_model_path is not None:
      options.backbone_model_path = FLAGS.backbone_model_path
  else:
    options.backbone_model_path = None

  return options
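
For reference, a minimal sketch of how the flags consumed by make_options_from_flags might be declared. The flag names come from the example itself; the use of absl.flags, the types, and the defaults (None so that "unset" is detectable, plus 'sgd' and 0.00004 to match the override checks) are assumptions, not confirmed by the source.

from absl import flags

FLAGS = flags.FLAGS
flags.DEFINE_string('json_config', None, 'Path to a JSON options file.')
flags.DEFINE_boolean('shuffle', None, 'Whether to shuffle the training data.')
flags.DEFINE_string('net_mode', None, 'Network mode.')
flags.DEFINE_string('data_mode', None, 'Data mode.')
flags.DEFINE_string('load_mode', None, 'Checkpoint loading mode.')
flags.DEFINE_string('fix_level', None, 'Which layers to freeze.')
flags.DEFINE_float('init_learning_rate', None, 'Initial learning rate.')
flags.DEFINE_string('optimizer', 'sgd', 'Optimizer name.')
flags.DEFINE_float('weight_decay', 0.00004, 'Weight decay coefficient.')
flags.DEFINE_integer('global_label', None, 'Single label applied to all examples.')
flags.DEFINE_string('backbone_model_path', None, 'Path to a backbone checkpoint.')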
Example #2
def setup_datasets(flags_obj, shuffle=True):
    options_tr = Options()
    tr_dataset = MegaFaceDataset(options_tr)

    options_te = Options()
    options_te.data_mode = 'normal'
    te_dataset = MegaFaceDataset(options_te, read_ratio=0.1)

    if 'strip' in options_tr.data_mode:
        tr_dataset = strip_blend(tr_dataset, te_dataset, options_tr.strip_N)

    print('build tf dataset')

    ptr_class = MegaFaceImagePreprocessor(options_tr)
    tf_train = ptr_class.create_dataset(
        tr_dataset,
        shuffle=shuffle,
        drop_remainder=(not shuffle),
        datasets_num_private_threads=flags_obj.datasets_num_private_threads,
        tf_data_experimental_slack=flags_obj.tf_data_experimental_slack)
    print('tf_train done')

    pte_class = MegaFaceImagePreprocessor(options_te)
    tf_test = pte_class.create_dataset(te_dataset, shuffle=False)
    print('tf_test done')

    print('datasets built')

    return tf_train, tf_test, tr_dataset, te_dataset
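
A hypothetical usage sketch (assuming the example's own imports are in scope and that create_dataset returns tf.data.Dataset objects). FakeFlags is a stand-in for the real flags object, which only needs the two attributes setup_datasets reads; both names and values here are assumptions.

class FakeFlags:
    # Only the attributes setup_datasets actually touches.
    datasets_num_private_threads = None
    tf_data_experimental_slack = False

tf_train, tf_test, tr_dataset, te_dataset = setup_datasets(FakeFlags(), shuffle=True)
for batch in tf_train.take(1):  # standard tf.data.Dataset iteration
    print(batch)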
Example #3
  # model_path = home_dir + 'data/imagenet_models/benign_all'
  options.backbone_model_path = model_path

  options.net_mode = 'normal'

  # options.load_mode = 'bottom_affine'
  options.load_mode = 'all'

  options.num_epochs = 60

  # options.data_mode = 'poison'
  options.data_mode = 'normal'
  # label_list = list(range(20))
  options.poison_fraction = 1
  options.cover_fraction = 1
  # options.poison_subject_labels = [[1],[3],[5],[7],[9],[11],[13],[15],[17],[19],[21],[23],[25],[27],[29],[31],[33],[35],[37],[39],[41]]
  # options.poison_object_label = [0,2,4,6,8,10,12,14,16,18,20,22,24,26,28,30,32,34,36,38,40]
  # options.poison_cover_labels = [[11,12],[13,14]]
  # options.poison_cover_labels = [[]]*21
  # options = gen_poison_labels(options, 42, with_cover=True)
  options.poison_subject_labels = [[1]]
  options.poison_object_label = [0]
  # options.poison_cover_labels = [[]]

  outfile_prefix = 'out_with_cover'
  options.poison_pattern_file = None
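
The commented-out lists above pair every odd label with the even label just below it (1 -> 0, 3 -> 2, ..., 41 -> 40), which suggests what the commented-out gen_poison_labels(options, 42, with_cover=True) call produces. A hypothetical reconstruction consistent with those lists, not the repository's actual implementation:

def gen_poison_labels(options, num_classes, with_cover=False):
  # Assumed behavior: map each odd label onto the preceding even label,
  # matching the commented lists for num_classes = 42.
  options.poison_subject_labels = [[l] for l in range(1, num_classes, 2)]
  options.poison_object_label = list(range(0, num_classes, 2))
  if with_cover:
    # One plausible reading of the commented '[[]]*21' line: empty cover sets.
    options.poison_cover_labels = [[]] * (num_classes // 2)
  return options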
Example #4
def testtest(params):
  """Debug helper: dump CIFAR-10 to a .mat file; the code after exit(0) is an input-pipeline smoke test."""
  print(FLAGS.net_mode)
  print(FLAGS.batch_size)
  print(FLAGS.num_epochs)
  print(params.batch_size)
  print(params.num_epochs)

  options = Options()
  options.data_mode = 'normal'
  options.data_subset = 'train'
  dataset = CifarDataset(options)
  model = Model_Builder('cifar10', dataset.num_classes, options, params)


  labels, images = dataset.data
  images = np.asarray(images)
  data_dict = dict()
  data_dict['labels'] = labels
  data_dict['images'] = images
  save_to_mat('cifar-10.mat', data_dict)

  exit(0)  # everything below this point is unreachable input-pipeline debug code


  p_class = dataset.get_input_preprocessor()
  preprocessor = p_class(options.batch_size,
                         model.get_input_shapes('train'),
                         options.batch_size,
                         model.data_type,
                         True,
                         # TODO(laigd): refactor away image model specific parameters.
                         distortions=params.distortions,
                         resize_method='bilinear')

  ds = preprocessor.create_dataset(batch_size=options.batch_size,
                                   num_splits=1,
                                   batch_size_per_split=options.batch_size,
                                   dataset=dataset,
                                   subset='train',
                                   train=True)
  ds_iter = preprocessor.create_iterator(ds)
  input_list = ds_iter.get_next()
  print(input_list)
  # input_list = preprocessor.minibatch(dataset, subset='train', params=params)
  # img, lb = input_list
  # lb = input_list['img_path']
  lb = input_list
  print(lb)

  b = 0
  show = False

  local_var_init_op = tf.local_variables_initializer()
  table_init_ops = tf.tables_initializer()  # the iterator initializer is included here
  with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    sess.run(local_var_init_op)
    sess.run(table_init_ops)

    for i in range(330):
      print('%d: ' % i)
      if b == 0 or b+options.batch_size > dataset.num_examples_per_epoch('train'):
        show = True
      b = b+options.batch_size
      rst = sess.run(lb)
      # rst = rst.decode('utf-8')
      print(len(rst))
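
save_to_mat is not shown in this example; a one-line sketch assuming it simply wraps scipy.io.savemat (the helper name is from the example, the implementation is a guess):

import scipy.io

def save_to_mat(filename, data_dict):
  # Assumed helper: write a dict of arrays/lists to a MATLAB .mat file.
  scipy.io.savemat(filename, data_dict)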