Example #1
def main(unused_argv):
  set_random_seeds()

  get_datapath()  # set the dataset path
  get_steps()  # set step counts according to data_size

  tf.logging.set_verbosity(tf.logging.INFO)
  print('Now the mode of this model is {}!'.format(FLAGS.mode))

  # If log_dir does not exist, create it.
  if not os.path.exists(FLAGS.log_dir): os.makedirs(FLAGS.log_dir)

  if FLAGS.mode == 'decode':
    FLAGS.branch_batch_size = FLAGS.beam_size  # for beam search
    FLAGS.TS_mode = False

  hps = make_hps() # make a hps namedtuple

  # Vocabulary
  vocab = Vocab(hps.vocab_path, hps.vocab_size)
  # Train or Inference
  if hps.mode == 'train':
    batcher = Batcher(hps.data_path, vocab, hps)
    eval_hps = hps._replace(mode='eval')
    eval_batcher = Batcher(hps.eval_data_path, vocab, eval_hps)

    model = GSNModel(hps, vocab)
    train(model, batcher, eval_batcher, vocab, hps)
  elif hps.mode == 'decode':
    decode_mdl_hps = hps._replace(max_dec_steps=1)
    batcher = Batcher(hps.test_data_path, vocab, decode_mdl_hps)  # for test

    model = GSNModel(decode_mdl_hps, vocab)
    decoder = BeamSearchDecoder(model, batcher, vocab)
    decoder._decode()
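
For orientation: get_steps() in this example apparently derives step counts from the dataset size before the hyperparameter tuple is built. A minimal, self-contained sketch of that idea follows; the signature and arguments (data_size, batch_size, num_epochs) are illustrative assumptions, not the original project's API.

import math

def get_steps(data_size, batch_size, num_epochs):
    # Hypothetical sketch: total training steps needed to cover the dataset
    # num_epochs times with batches of batch_size examples.
    steps_per_epoch = math.ceil(data_size / batch_size)
    return steps_per_epoch * num_epochs

# e.g. 100000 examples, batch size 16, 10 epochs -> 62500 total steps
total_steps = get_steps(100000, 16, 10)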
Example #2
def evaluation(request, *args, **kwargs):
    if not request.session.get('is_login', False) or request.session['user_type'] != 'accounts':
        return redirect("/index")
    error_msg, page, page_list, projects_dict, project_id = initialize(args, request, True)
    if not project_id:
        return redirect('/account/summary')
    eva_info = utils.get_eva_info(project_id)
    steps = utils.get_steps(project_id)
    step_path = utils.steps_path(utils.get_eva_info(project_id, top=True))
    if request.method == 'GET':
        return render(request, 'user_evaluation.html',
                      {"projects_dict": projects_dict, "project_id": project_id,
                       "eva_info": eva_info,
                       "steps": steps, "step_path": step_path,
                       "page_list": page_list, "page": page, "error_msg": error_msg})
    if request.method == 'POST':
        for num in [1,2,3,4,5]:
            if request.FILES.get("file"+str(num)):
                with open("database/evaluation.pk", 'rb') as f:
                    eva = pickle.load(f)
                eva[project_id][num-1] = 1
                with open("database/evaluation.pk", 'wb') as f:
                    f.write(pickle.dumps(eva))
                utils.save_file(project_id, request, "file"+str(num), "evaluation%d"%num)
                return redirect("/account/evaluation&id=%d&page=%d"%(project_id, page))
        error_msg = '未选择文件'  # "No file selected"
        return render(request, 'user_evaluation.html',
                      {"projects_dict": projects_dict, "project_id": project_id, "eva_info": eva_info,
                       'page_list': page_list, 'page': page, 'error_msg': error_msg})
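
The upload branch above relies on utils.save_file(project_id, request, "file"+str(num), "evaluation%d" % num) to persist the uploaded file. A minimal sketch of such a helper, using the standard Django UploadedFile.chunks() API, might look like the following; the target directory layout is an assumption.

import os

def save_file(project_id, request, field_name, save_name):
    # Hypothetical sketch: stream the uploaded file from request.FILES into a
    # per-project directory. The database/files/<id>/ layout is an assumption.
    uploaded = request.FILES[field_name]
    target_dir = os.path.join("database", "files", str(project_id))
    os.makedirs(target_dir, exist_ok=True)
    with open(os.path.join(target_dir, save_name), "wb") as out:
        for chunk in uploaded.chunks():
            out.write(chunk)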
Example #3
def stop(request, *args, **kwargs):
    if not request.session.get('is_login', False) or request.session['user_type'] != 'accounts':
        return redirect("/index")
    error_msg = ''
    project_id = utils.get_id(args)
    page = utils.get_page(args)
    if request.method == "GET":
        project_info = None  # avoid a NameError when no project matches
        for pid, item in utils.get_projects(request.session['account']):
            if pid == project_id:
                project_info = item
        return render(request, "user_projectstop.html",
                      {"project_info": project_info, "project_id": project_id,
                       "page": page, "error_msg": error_msg})
    if request.method == "POST":
        data = {
            "name": request.POST.get("name"),
            "leader": request.POST.get("leader"),
            "teammates": request.POST.get("teammates"),
            "start": request.POST.get("start"),
            "cost": request.POST.get("cost"),
        }
        if int(data['cost']) < 0:
            data['cost'] = '0'
        if not error_msg:
            error_msg = "项目申停成功"  # "Project stop request submitted successfully"
            _projects_dict = utils.get_projects(request.session['account'])
            page_list = utils.get_pages(page, _projects_dict)
            projects_dict, page, project_id = utils.adjust_info(_projects_dict, page, project_id)
            steps = utils.get_steps(project_id)
            step_path = utils.steps_path(utils.get_eva_info(project_id, top=True))
            return render(request, "user_projectprocess.html",
                          {"projects_dict": projects_dict, "project_id": project_id,
                           "steps": steps, "step_path": step_path, "page_list": page_list, "page": page,
                           'error_msg': error_msg})
Example #4
def process(request, *args, **kwargs):
    if not request.session.get('is_login', False) or request.session['user_type'] != 'accounts':
        return redirect("/index")
    error_msg, page, page_list, projects_dict, project_id = initialize(args, request, True)
    if not project_id:
        return redirect('/account/summary')
    steps = utils.get_steps(project_id)
    step_path = utils.steps_path(utils.get_eva_info(project_id, top=True))
    if request.method == 'GET':
        return render(request, 'user_projectprocess.html',
                      {"projects_dict": projects_dict, "project_id": project_id,
                       "steps": steps, "step_path": step_path,
                       "page_list": page_list, "page": page, 'error_msg': error_msg})
    if request.method == "POST":
        data = {
            "mission" : request.POST.get("mission"),
            "flow": request.POST.get("flow"),
            "start" : request.POST.get("start"),
            "process" : request.POST.get("process"),
            "teammates": request.POST.get("teammates").strip().split(',') if request.POST.get("teammates") else None,
            "end" : request.POST.get("end"),
        }
        data, error_msg = utils.check_error(data, pro_name=False, budget=False)
        if not data.get("mission", None):
            error_msg = '请输入正确的任务名称'  # "Please enter a valid task name"
        elif data.get("flow", None) not in ['1','2','3','4','5']:
            error_msg = '请输入正确的任务所属流程'  # "Please enter a valid flow stage for the task"
        # utils.add_steps(project_id, data)
        # return redirect("/account/process&id=%d&page=%d"%(project_id, page))
        if not error_msg:
            utils.add_steps(project_id, data)
            return redirect("/account/process&id=%d&page=%d" % (project_id, page))
        else:
            return render(request, 'user_projectprocess.html',
                          {"projects_dict": projects_dict, "project_id": project_id,
                           "steps": steps, "step_path": step_path, "page_list": page_list, "page": page,
                           'error_msg': error_msg})
Example #5
def main(queue, name):
    import keras
    import utils
    import model as MyModels

    assert name + '_model' in MyModels.__dict__.keys()

    config = utils.MyConfig(type=name, train_epochs=1000, train_batch_size=16)
    model = MyModels.__dict__[name + '_model'](input_shape=(256, 256) +
                                               (config.input_channels, ))

    try:
        model.load_weights(config.model_path, by_name=True)
    except Exception as inst:
        # Loading pretrained weights is optional; training continues from scratch.
        print(inst)
        # exit(-2)

    model.summary()

    callback_list = [
        keras.callbacks.ModelCheckpoint(config.model_path,
                                        monitor='loss2acc',
                                        save_best_only=True,
                                        mode='max',
                                        save_weights_only=False),
        keras.callbacks.TensorBoard(log_dir='tf_tmp/')
    ]
    my_metric = lambda x, y: MyModels.loss2acc(x, y, True)
    my_metric.__name__ = 'loss2acc'
    model.compile(optimizer=keras.optimizers.Adam(lr=1e-3),
                  loss=['mse'],
                  metrics=[my_metric])
    dbg = False
    model.fit_generator(
        utils.gen_from_dir(config, mode=True),
        steps_per_epoch=1 if dbg else utils.get_steps(config, train=True),
        epochs=2 if dbg else config.train_epochs,
        callbacks=callback_list,
        validation_steps=utils.get_steps(config, train=False),
        validation_data=utils.gen_from_dir(config, mode=False))

    # model.save(config.model_path)
    queue.put({'model_path': config.model_path})
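
In this Keras training loop, utils.get_steps(config, train=True/False) supplies steps_per_epoch and validation_steps for the generators produced by utils.gen_from_dir. A plausible, self-contained sketch is given below; the directory-counting approach and the argument names (data_dir, batch_size) are assumptions, since the real helper presumably reads them from the config object.

import math
import os

def get_steps(data_dir, batch_size):
    # Hypothetical sketch: one step per batch of image files found in data_dir.
    image_files = [f for f in os.listdir(data_dir)
                   if f.lower().endswith(('.png', '.jpg', '.jpeg'))]
    return max(1, math.ceil(len(image_files) / batch_size))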
Example #6
def process(request, *args, **kwargs):
    if not request.session.get(
            'is_login', False) or request.session['user_type'] != 'admin':
        return redirect("/index")
    error_msg, page, page_list, projects_dict, project_id = initialize(
        args, request)
    steps = utils.get_steps(project_id)
    step_list = []
    for obj in steps:
        step_list.append(obj[1]['flow'])
    step_path = utils.steps_path(utils.get_eva_info(project_id, top=True))
    if request.method == 'GET':
        return render(
            request, 'admin_projectprocess.html', {
                "projects_dict": projects_dict,
                "project_id": project_id,
                "steps": steps,
                "step_path": step_path,
                "page_list": page_list,
                "page": page,
                'error_msg': error_msg
            })
    if request.method == "POST":
        data = {
            "mission": request.POST.get("mission"),
            "flow": request.POST.get("flow"),
            "leader": request.POST.get("leader"),
            "start": request.POST.get("start"),
            "process": request.POST.get("process"),
            "teammates": request.POST.get("teammates"),
            "end": request.POST.get("end"),
        }
        if not data.get("mission", None):
            error_msg = '请输入正确的任务名称'  # "Please enter a valid task name"
        elif data.get("flow", None) not in ['1', '2', '3', '4', '5']:
            error_msg = '请输入正确的任务所属流程'  # "Please enter a valid flow stage for the task"
        # utils.add_steps(project_id, data)
        # return redirect("/account/process&id=%d&page=%d"%(project_id, page))
        if not error_msg:
            utils.add_steps(project_id, data)
            return redirect("/admin/process&id=%d&page=%d" %
                            (project_id, page))
        else:
            return render(
                request, 'admin_projectprocess.html', {
                    "projects_dict": projects_dict,
                    "project_id": project_id,
                    "steps": steps,
                    "step_path": step_path,
                    "page_list": page_list,
                    "page": page,
                    'error_msg': error_msg
                })
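
Across these Django views, utils.get_steps(project_id) returns the per-project task records that the templates render (Example #6 reads obj[1]['flow'] from each entry), and utils.add_steps(project_id, data) appends a new one. Given the pickle-backed stores used directly elsewhere in this module (database/evaluation.pk, database/finance.pk), a plausible sketch follows; the file name database/steps.pk and the record layout are assumptions, not the project's actual schema.

import pickle

STEPS_PATH = "database/steps.pk"  # assumed name, mirroring the other *.pk stores

def _load_steps():
    try:
        with open(STEPS_PATH, "rb") as f:
            return pickle.load(f)
    except FileNotFoundError:
        return {}

def get_steps(project_id):
    # List of (step_id, step_data) pairs for one project; [] if none recorded.
    return _load_steps().get(project_id, [])

def add_steps(project_id, data):
    # Append one task record (the dict built in the POST handlers above).
    steps = _load_steps()
    project_steps = steps.setdefault(project_id, [])
    project_steps.append((len(project_steps) + 1, data))
    with open(STEPS_PATH, "wb") as f:
        pickle.dump(steps, f)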
Example #7
def finance(request, *args, **kwargs):
    if not request.session.get('is_login', False) or request.session['user_type'] != 'accounts':
        return redirect("/index")
    error_msg, page, page_list, projects_dict, project_id = initialize(args, request, True)
    if not project_id:
        return redirect('/account/summary')
    steps = utils.get_steps(project_id)
    step_path = utils.steps_path(utils.get_eva_info(project_id, top=True))
    if request.method == 'GET':
        with open("database/finance.pk", 'rb') as f:
            finance = pickle.load(f)
        records = enumerate(finance.get(project_id, []))
        return render(request, 'user_finance.html',
                      {"records": records, "projects_dict": projects_dict, "project_id": project_id,
                       "steps": steps, "step_path": step_path,
                       "page_list": page_list, "page": page, "error_msg": error_msg})
Example #8
def train_albert(train_df, val_df, fold_i, augment=False):
    max_l = Config.Albert.max_len
    _train_generator = AlbertDataGenerator(train_df, augment=augment)
    train_dataset = tf.data.Dataset.from_generator(_train_generator.generate,
                                                   output_types=({
                                                       'ids':
                                                       tf.int32,
                                                       'att':
                                                       tf.int32,
                                                       'tti':
                                                       tf.int32
                                                   }, {
                                                       'sts':
                                                       tf.int32,
                                                       'ets':
                                                       tf.int32
                                                   }))
    train_dataset = train_dataset.padded_batch(Config.Train.batch_size,
                                               padded_shapes=({
                                                   'ids': [None],
                                                   'att': [None],
                                                   'tti': [None]
                                               }, {
                                                   'sts': [None],
                                                   'ets': [None]
                                               }))
    train_dataset = train_dataset.repeat().prefetch(
        tf.data.experimental.AUTOTUNE)

    _val_generator = AlbertDataGenerator(val_df, augment=False)
    val_dataset = tf.data.Dataset.from_generator(_val_generator.generate,
                                                 output_types=({
                                                     'ids': tf.int32,
                                                     'att': tf.int32,
                                                     'tti': tf.int32
                                                 }, {
                                                     'sts': tf.int32,
                                                     'ets': tf.int32
                                                 }))
    val_dataset = val_dataset.padded_batch(Config.Train.batch_size,
                                           padded_shapes=({
                                               'ids': [None],
                                               'att': [None],
                                               'tti': [None]
                                           }, {
                                               'sts': [None],
                                               'ets': [None]
                                           }))
    val_dataset = val_dataset.repeat().prefetch(tf.data.experimental.AUTOTUNE)

    model = get_albert()
    if fold_i == 0:
        model.summary()
    model_name = f'weights_v{Config.version}_f{fold_i + 1}.h5'

    train_spe = get_steps(train_df)
    val_spe = get_steps(val_df)

    cbs = [
        WarmUpCosineDecayScheduler(6e-5,
                                   1200,
                                   warmup_steps=300,
                                   hold_base_rate_steps=200,
                                   verbose=0),
        keras.callbacks.ModelCheckpoint(str(Config.Train.checkpoint_dir /
                                            Config.model_type / model_name),
                                        verbose=1,
                                        save_best_only=True,
                                        save_weights_only=True)
    ]
    model.fit(train_dataset,
              epochs=2,
              verbose=1,
              validation_data=val_dataset,
              callbacks=cbs,
              steps_per_epoch=train_spe,
              validation_steps=val_spe)

    print(f'Loading checkpoint {model_name}...')
    model.load_weights(
        str(Config.Train.checkpoint_dir / Config.model_type / model_name))

    _val_generator = AlbertDataGenerator(val_df, augment=False)
    val_dataset = tf.data.Dataset.from_generator(_val_generator.generate,
                                                 output_types=({
                                                     'ids': tf.int32,
                                                     'att': tf.int32,
                                                     'tti': tf.int32
                                                 }, {
                                                     'sts': tf.int32,
                                                     'ets': tf.int32
                                                 }))
    val_dataset = val_dataset.padded_batch(Config.Train.batch_size,
                                           padded_shapes=({
                                               'ids': [max_l],
                                               'att': [max_l],
                                               'tti': [max_l]
                                           }, {
                                               'sts': [max_l],
                                               'ets': [max_l]
                                           }))
    val_dataset = val_dataset.prefetch(tf.data.experimental.AUTOTUNE)
    s_idx, e_idx = model.predict(val_dataset, verbose=1)
    s_idx = np.argmax(s_idx, axis=-1)
    e_idx = np.argmax(e_idx, axis=-1)
    jaccard_score = get_jaccard_from_df(val_df, s_idx, e_idx, 'albert',
                                        'albert.csv')
    print(
        f'\n>>> Fold {fold_i + 1}: jaccard_score for albert: {jaccard_score}\n'
    )
    return jaccard_score
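
Here get_steps(train_df) and get_steps(val_df) supply steps_per_epoch and validation_steps for the repeated tf.data pipelines, so the helper presumably maps DataFrame length to a batch count. A minimal sketch under that assumption (in the example above the batch size would be Config.Train.batch_size):

import math

def get_steps(df, batch_size=32):
    # Hypothetical sketch: number of batches needed to cover the DataFrame once.
    return max(1, math.ceil(len(df) / batch_size))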