Example no. 1
# Note: this snippet assumes module-level definitions from the surrounding file:
# json, os, sys, a logger `log`, the Namespace helper `NS`, and `data_keys`,
# a compiled regex matching dataset entry names that refer to serialized data files.
def call_run(run_fn):
    import numpy as np

    # Benchmark parameters arrive as a JSON document on stdin.
    params = NS.from_dict(json.loads(sys.stdin.read()))

    def load_data(name, path, **ignored):
        # Replace file paths with the arrays they point to for matching dataset keys.
        if isinstance(path, str) and data_keys.match(name):
            return name, np.load(path, allow_pickle=True)
        return name, path

    print(params.dataset)
    # Apply load_data to every (name, value) pair in the dataset namespace.
    ds = NS.walk(params.dataset, load_data)

    config = params.config
    # framework_params is converted to a plain dict before being handed to the framework.
    config.framework_params = NS.dict(config.framework_params)

    try:
        result = run_fn(ds, config)
        res = dict(result)
        # Large arrays are saved as .npy files; only their paths go into the JSON result.
        for name in ['predictions', 'truth', 'probabilities']:
            arr = result[name]
            if arr is not None:
                res[name] = os.path.join(config.result_dir, '.'.join([name, 'npy']))
                np.save(res[name], arr, allow_pickle=True)
    except Exception as e:
        log.exception(e)
        res = dict(
            error_message=str(e),
            models_count=0
        )

    # The result token marks where the machine-readable result starts on stdout.
    print(config.result_token)
    print(json.dumps(res, separators=(',', ':')))
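
The caller's side of this protocol is not shown in these examples. Below is a minimal sketch, under the assumption that the parameters are piped to the subprocess's stdin as JSON and that the line following result_token on stdout carries the JSON result; invoke_in_venv, python_bin, and exec_script are hypothetical names, not part of automlbenchmark's API.

import json
import subprocess
import uuid


def invoke_in_venv(python_bin, exec_script, params):
    # `params` is a plain dict shaped like the document call_run reads from stdin.
    token = str(uuid.uuid1())
    params['config']['result_token'] = token
    proc = subprocess.run([python_bin, exec_script],
                          input=json.dumps(params),
                          capture_output=True,
                          text=True)
    # Everything before the token line is ordinary framework logging; the line
    # immediately after the token is the compact JSON printed by call_run.
    lines = proc.stdout.splitlines()
    result_line = lines[lines.index(token) + 1]
    return json.loads(result_line)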
Example no. 2
def _add_default_params(framework):
    # Ensure every framework definition carries a plain-dict `params` attribute.
    if "params" not in framework:
        framework.params = dict()
    else:
        framework.params = Namespace.dict(framework.params)
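
A brief usage sketch, assuming Namespace is the automlbenchmark helper the function refers to (the amlb.utils import path is an assumption):

from amlb.utils import Namespace  # assumed import path

fw = Namespace(name='FEDOT', params=Namespace(n_jobs=4))
_add_default_params(fw)
# fw.params is now a plain dict: {'n_jobs': 4}

fw_bare = Namespace(name='FEDOT')
_add_default_params(fw_bare)
# fw_bare.params is now an empty dict: {}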
Example no. 3
def call_run(run_fn):
    import numpy as np

    # Same structure as Example no. 1, but with the stdin parameters inlined.
    params = NS.from_dict({
        "dataset": {
            "train": {
                "X_enc": "/tmp/dress/train.X_enc.npy",
                "y_enc": "/tmp/dress/train.y_enc.npy"
            },
            "test": {
                "X_enc": "/tmp/dress/test.X_enc.npy",
                "y_enc": "/tmp/dress/test.y_enc.npy"
            }
        },
        "config": {
            "framewor k": "FEDOT",
            "framework_params": {},
            "type": "classification",
            "name": "Australian",
            "fold": 0,
            "metrics": ["auc", "logloss", "acc"],
            "metric": "auc",
            "seed": 3029240368,
            "max_runtime_seconds": 600,
            "cores": 4,
            "max_mem_size_mb": 91763,
            "min_vol_size_mb": -1,
            "input_dir": "/home/rosneft_user_2500/.openml/cache",
            "output_dir":
            "/home/rosneft_user_2500/bench/automlbenchmark/results/fedot.small.test.local.20201225T163641",
            "output_predictions_file":
            "/home/rosneft_user_2500/bench/automlbenchmark/results/fedot.small.test.local.20201225T163641/predictions/fedot.Australian.0.csv",
            "result_token": "5e433616-46cf-11eb-a671-7957e32fc18d",
            "result_dir": "/tmp/iris"
        }
    })

    def load_data(name, path, **ignored):
        if isinstance(path, str) and data_keys.match(name):
            return name, np.load(path, allow_pickle=True)
        return name, path

    print(params.dataset)
    ds = NS.walk(params.dataset, load_data)

    config = params.config
    config.framework_params = NS.dict(config.framework_params)

    try:
        result = run_fn(ds, config)
        res = dict(result)
        for name in ['predictions', 'truth', 'probabilities']:
            arr = result[name]
            if arr is not None:
                res[name] = os.path.join(config.result_dir,
                                         '.'.join([name, 'npy']))
                np.save(res[name], arr, allow_pickle=True)
    except Exception as e:
        log.exception(e)
        res = dict(error_message=str(e), models_count=0)

    print(config.result_token)
    print(json.dumps(res, separators=(',', ':')))
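
Both this example and Example no. 1 rely on two helpers defined outside the snippet: the `data_keys` regex and `NS.walk`. The following is an illustrative stand-in on plain dicts, not automlbenchmark's implementation; it only reproduces the behaviour the snippets depend on (entries whose names look like data arrays get their .npy paths replaced by loaded arrays).

import re

import numpy as np

# Stand-in for the module-level data_keys regex: it is assumed to match dataset
# entry names such as 'X', 'y', 'data', 'X_enc', 'y_enc'.
data_keys = re.compile(r'^(X|y|data)(_.+)?$')


def walk(mapping, fn):
    # Minimal stand-in for NS.walk on plain dicts: apply fn to every
    # (name, value) pair and recurse into nested mappings.
    out = {}
    for name, value in mapping.items():
        if isinstance(value, dict):
            out[name] = walk(value, fn)
        else:
            name, value = fn(name, value)
            out[name] = value
    return out


def load_data(name, path):
    if isinstance(path, str) and data_keys.match(name):
        return name, np.load(path, allow_pickle=True)
    return name, path


dataset = {'train': {'X_enc': '/tmp/dress/train.X_enc.npy',
                     'y_enc': '/tmp/dress/train.y_enc.npy'}}
# walk(dataset, load_data) returns the same structure with each .npy path
# replaced by the numpy array loaded from it.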
Example no. 4
              target_is_encoded=False)


# Note: json_loads, json_dumps, read_csv, write_csv, the `ns` namespace helper,
# and the copy/os/sys modules are assumed to be imported at module level; `run`
# is the framework entry point whose definition is truncated above.
if __name__ == '__main__':
    params = json_loads(sys.stdin.read(), as_namespace=True)

    def load_data(path):
        return read_csv(path, as_data_frame=False, header=False)

    ds = ns(train=ns(X_enc=load_data(params.dataset.train.X_enc),
                     y=load_data(params.dataset.train.y).squeeze()),
            test=ns(X_enc=load_data(params.dataset.test.X_enc),
                    y=load_data(params.dataset.test.y).squeeze()))
    config = params.config
    config.framework_params = ns.dict(config.framework_params)
    result = run(ds, config)

    # Store arrays as CSV files under result_dir and return their paths in the result.
    res = copy.copy(result)
    res.predictions = os.path.join(config.result_dir, 'predictions')
    res.truth = os.path.join(config.result_dir, 'truth')
    write_csv(result.predictions.reshape(-1, 1), res.predictions)
    write_csv(result.truth.reshape(-1, 1), res.truth)
    if result.probabilities is not None:
        res.probabilities = os.path.join(config.result_dir, 'probabilities')
        write_csv(result.probabilities, res.probabilities)

    print(config.result_token)
    print(json_dumps(res, style='compact'))