Example #1
def slave(worker_id):
    # Assumes zmq, settings, LOGGER and run_config are available at module
    # level, as in the surrounding project.
    import logging
    # Silence matplotlib's chatty logger in the worker process.
    logging.getLogger("matplotlib").disabled = True

    # Setup ZMQ.
    context = zmq.Context()
    sock = context.socket(zmq.REQ)
    sock.connect(settings.ZMQ["master_address"])

    while True:

        LOGGER.debug("%s: Available" % worker_id)
        sock.send_pyobj({"msg": "available"})

        # Retrieve work and run the computation.
        job = sock.recv_pyobj()
        if job.get("msg") == "quit":
            LOGGER.debug("%s: Received a quit msg, exiting" % worker_id)
            break

        LOGGER.debug("%s: Running config %s" % (worker_id, job["data"][1]))
        result = run_config(job["data"])

        LOGGER.debug("%s: Sending result back" % worker_id)
        sock.send_pyobj({"msg": "result", "result": result})
        LOGGER.debug("%s: Done sending result" % worker_id)

        # The master acknowledges the result; a b"quit" reply stops the worker.
        msg = sock.recv()
        if msg == b"quit":
            LOGGER.debug("%s Received msg %s" % (worker_id, msg))
            break
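The worker above expects a master that binds a REP socket at settings.ZMQ["master_address"], hands out jobs in reply to "available" messages, and acknowledges results. A minimal single-worker sketch of that side (the master function, jobs iterable and bind_address argument are illustrative assumptions, not names from the original project):

import zmq

def master(jobs, bind_address):
    context = zmq.Context()
    sock = context.socket(zmq.REP)
    sock.bind(bind_address)

    results = []
    pending = iter(jobs)
    while True:
        msg = sock.recv_pyobj()
        if msg["msg"] == "result":
            results.append(msg["result"])
            sock.send(b"ack")  # the worker checks this reply for b"quit"
            continue
        # "available": hand out the next job, or tell the worker to quit
        job = next(pending, None)
        if job is None:
            sock.send_pyobj({"msg": "quit"})
            break
        sock.send_pyobj({"msg": "job", "data": job})
    return results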
Example #2
def work(x):
    # Assumes time, sys and run_config are imported at module level.
    start = time.time()
    print("Running config", file=sys.stderr)
    result = run_config(x)
    # Convert elapsed seconds to h:m:s.
    m, s = divmod(time.time() - start, 60)
    h, m = divmod(m, 60)
    print("Result: %s, took %d:%02d:%02d" % (result, h, m, s), file=sys.stderr)
    return result
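Because work takes a single argument and returns its result, it drops straight into a process pool. A hypothetical usage sketch, assuming a configs list of job inputs defined elsewhere:

import multiprocessing

with multiprocessing.Pool(processes=4) as pool:
    results = pool.map(work, configs)  # configs: assumed list of run_config inputs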
Example #3
    def test_2d_feature_importances_(self):
        pc = get_preproc_config(lags=3, horizon=1)
        d = prepare_data(pc)
        mockConfigSVR = get_mock_svr(d, pc)

        r = run_config([d, mockConfigSVR, 'val'])
        self.assertEqual(len(r.feature_scores), 3)
        self.assertEqual(r.feature_scores[0], ('lag1', 2))
Example #4
    def test_3d_test_mode(self):
        pc = get_preproc_config(lags=3, horizon=1)
        d = prepare_data(pc, dim="3d")
        mockConfigLSTM = get_mock_lstm(d, pc)

        r = run_config([d, mockConfigLSTM, 'test'])

        self.assertTrue(r.train_mse > 0.0)
        self.assertTrue(r.test_mse > 0.0)
        self.assertTrue(r.train_mae > 0.0)
        self.assertTrue(r.test_mae > 0.0)
        # yhat are scaled
        self.assertTrue(r.yhat_is[0] <= 1.0)
        self.assertTrue(r.yhat_oos[0] <= 1.0)
Example #5
    def test_2d_val_mode(self):
        pc = get_preproc_config(lags=3, horizon=1)
        d = prepare_data(pc)
        mockConfigSVR = get_mock_svr(d, pc)

        r = run_config([d, mockConfigSVR, 'val'])

        self.assertTrue(r.train_mse > 0.0)
        self.assertTrue(r.test_mse > 0.0)
        self.assertTrue(r.train_mae > 0.0)
        self.assertTrue(r.test_mae > 0.0)
        # yhat are scaled
        self.assertTrue(r.yhat_is[0] <= 1.0)
        self.assertTrue(r.yhat_oos[0] <= 1.0)
Example #6
    def test_3d_val_mode(self):
        pc = get_preproc_config(lags=3, horizon=1)
        d = prepare_data(pc, dim="3d")
        mockConfigLSTM = get_mock_lstm(d, pc)

        r = run_config([d, mockConfigLSTM, 'val'])

        self.assertTrue(r.train_mse > 0.0)
        self.assertTrue(r.test_mse > 0.0)
        self.assertTrue(r.train_mae > 0.0)
        self.assertTrue(r.test_mae > 0.0)
        # yhat are scaled
        self.assertTrue(r.yhat_is[0] <= 1.0)
        self.assertTrue(r.yhat_oos[0] <= 1.0)
Example #7
def get_forecasts(learner, dataset_name, prepr_name, c, pc):

    # Look up the learner's config class dynamically, e.g. "ConfigLSTM".
    LearnerConfig = getattr(importlib.import_module("learner_configs"),
                            "Config%s" % learner)

    # load data
    dim = "3d" if LearnerConfig is ConfigLSTM else "2d"
    data = prepare_data(pc, dim=dim)
    LOGGER.debug("Prepared data")
    learner_config = LearnerConfig(c, pc)
    result_test = run_config([data, learner_config, 'test'])
    LOGGER.debug(f"Ran config: {c}")
    actual = data.testYref
    pred = data.revert(result_test.yhat_oos, "test")
    rmse = np.sqrt(mean_squared_error(actual, pred))
    print(f"{learner}, {dataset_name}, {prepr_name}, RMSE: {rmse:.3f}")

    return actual.flatten(), pred.flatten()
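A hypothetical invocation, assuming a hyperparameter dict c and preprocessing config pc built elsewhere in the project; the dataset and preprocessor names are placeholders:

actual, pred = get_forecasts("SVR", "my_dataset", "standard", c, pc)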
Example #8
def get_val_results(d, learner_config_space, pc):
    """Search for best parameters on the validation set
    :param pc: preprocessing config
    """

    mse_scores = Counter()
    results = {}
    if pc['n_jobs'] == 1:
        for c in learner_config_space.generate_config():
            x = run_config([d, c, 'val'])
            mse_scores[x.config_vals] = x.test_mse
            results[x.config_vals] = x
    else:
        inputs = ([d, c, 'val']
                  for c in learner_config_space.generate_config())
        # A with-block ensures the pool is cleaned up once results are consumed.
        with multiprocessing.Pool(pc['n_jobs']) as pool:
            for x in pool.imap(run_config, inputs):
                mse_scores[x.config_vals] = x.test_mse
                results[x.config_vals] = x

    return mse_scores, results
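Since mse_scores is a Counter, most_common() sorts entries by value in descending order, so the lowest-MSE config sits at the tail. A hypothetical follow-up (config_space stands in for a real learner config space):

mse_scores, results = get_val_results(d, config_space, pc)
best_vals, best_mse = mse_scores.most_common()[-1]  # lowest validation MSE
best_result = results[best_vals]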
Example #9
def commands2name(commands):
    return ('ht-' + ('test-' if 'test.py' in ''.join(commands) else '')
            + '.'.join(command2name(cmd) for cmd in commands).lower())


def commands2command(commands):
    return 'cd /cephfs/haotang/attention-learn-to-route && pip install tensorboard_logger && bash parallel.sh ' + (
        ' '.join(["'%s'" % cmd for cmd in commands]))


if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('commands', default=[], type=str, nargs='+')
    parser.add_argument('--name', default=None, type=str)
    parser.add_argument('--cpu_request', default=3, type=int)
    parser.add_argument('--cpu_limit', default=4, type=int)
    parser.add_argument('--memory_request', default=8, type=int)
    parser.add_argument('--memory_limit', default=10, type=int)
    parser.add_argument('--gpu_request', default=1, type=int)
    parser.add_argument('--gpu_limit', default=1, type=int)
    parser.add_argument('--public_flag', action='store_true', default=False)
    args = parser.parse_args()

    name = args.name or commands2name(args.commands)
    public_flag = args.public_flag
    command = commands2command(args.commands)
    resources = args.__dict__

    run_config(name, public_flag, command, resources)
Example #10
# Select the run: argv[1] is the entry number; if missing, use the last entry.
entry_num = int(sys.argv[1]) if len(sys.argv) > 1 else None

with open("results.json") as f:
    results = json.load(f)
entry = get_entry(entry_num, results)

LearnerConfig = getattr(importlib.import_module("learner_configs"),
                        "Config%s" % entry["learner"])

best_config = LearnerConfig(entry['best_learner_config'], entry['preproc_config'])

print(f"{entry['learner']}: best config: {entry['best_learner_config']}")

dim = "3d" if entry['learner'] == "LSTM" else "2d"
data = prepare_data(entry['preproc_config'], dim=dim)
res = run_config([data, best_config, 'test'])

print('\t'.join(str(x) for x in [
    {'date': entry['date'],
     'best_learner_config': entry['best_learner_config'],
     'preproc_config': entry['preproc_config']},
      entry['mse']['train']['mean'],
      entry['mae']['train']['mean'],
      entry['mse']['val']['mean'],
      entry['mae']['val']['mean'],
      res.test_mse,
      res.test_mae
]))

# print the top five feature scores
for k in entry['feature_scores'][:5]:
    print(k)