Example #1
    def from_yaml(self, yaml_file, training=True):
        "Initiate from YAML file"

        print(yaml_file)
        file_exists(yaml_file)

        with open(yaml_file) as f:
            y = yaml.load(f, Loader=yaml.FullLoader)

        # Pick dataset paths from the training or testing section of the config
        if training:
            mom_file = y["training"]["mom_file"]
            nj_file = y["training"]["nj_file"]
        else:
            mom_file = y["testing"]["mom_file"]
            nj_file = y["testing"]["nj_file"]
        delta_cut = y["delta_cut"]
        delta_near = y["delta_near"]
        model_base_dir = y["model_base_dir"]
        model_dir = y["model_dir"]
        training_reruns = y["training"]["training_reruns"]
        all_legs = bool_convert(y["all_legs"])
        all_pairs = bool_convert(y["all_pairs"])
        # Optional settings fall back to defaults when absent from the config
        scaling = y.get("scaling", "standardise")
        layers = y["training"].get("layers", [20, 40, 20])
        lr = y["training"].get("lr", 0.01)
        activation = y["training"].get("activation", "tanh")
        loss = y["training"].get("loss", "mean_squared_error")
        epochs = y["training"].get("epochs", 1000000)
        high_precision = bool_convert(y["training"].get(
            "high_precision", "False"))
        model_dataset = bool_convert(y["training"].get("model_dataset",
                                                       "False"))

        return FKSModelRun(
            mom_file=mom_file,
            nj_file=nj_file,
            delta_cut=delta_cut,
            delta_near=delta_near,
            model_base_dir=model_base_dir,
            model_dir=model_dir,
            training_reruns=training_reruns,
            all_legs=all_legs,
            all_pairs=all_pairs,
            scaling=scaling,
            layers=layers,
            lr=lr,
            activation=activation,
            loss=loss,
            epochs=epochs,
            high_precision=high_precision,
            model_dataset=model_dataset,
        )
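For context, a config file in the layout this loader reads might look like the sketch below. Only the key structure is taken from the lookups in from_yaml above; every path and value is an illustrative placeholder.

import yaml

# Key layout mirrors the accesses in from_yaml; paths and values are placeholders.
example_config = """
delta_cut: 0.01
delta_near: 0.02
model_base_dir: ./models/
model_dir: fks_run
all_legs: 'False'
all_pairs: 'False'
scaling: standardise
training:
  mom_file: ./data/momenta_train.npy
  nj_file: ./data/njet_train.npy
  training_reruns: 3
  layers: [20, 40, 20]
  lr: 0.01
testing:
  mom_file: ./data/momenta_test.npy
  nj_file: ./data/njet_test.npy
"""

y = yaml.load(example_config, Loader=yaml.FullLoader)
print(y["training"]["layers"])   # -> [20, 40, 20]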
Example #2
    def load_data(self):
        """Load the momenta and NJet arrays from file."""

        file_exists(self.mom_file)
        file_exists(self.nj_file)

        momenta = np.load(self.mom_file, allow_pickle=True)
        print('############### Momenta loaded ###############')

        nj = np.load(self.nj_file, allow_pickle=True)
        print('############### NJet loaded ###############')

        momenta = momenta.tolist()
        print('Training on {} PS points'.format(len(momenta)))

        # Infer the leg count from the first phase-space point
        # (the two initial-state momenta are not counted)
        self.nlegs = len(momenta[0]) - 2

        return momenta, nj
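The exact layout of the two .npy files depends on how they were generated; the sketch below only illustrates the two properties this loader relies on: both files are loadable with np.load, and each phase-space point is a list of four-momenta whose length fixes nlegs. All numbers are dummies.

import numpy as np

# Two phase-space points, each with 5 four-momentum entries; dummy values.
momenta = np.array([
    [[500., 0., 0., 500.], [500., 0., 0., -500.],
     [300., 100., 50., 20.], [400., -80., -50., 30.], [300., -20., 0., -50.]],
    [[500., 0., 0., 500.], [500., 0., 0., -500.],
     [250., 60., 10., 40.], [450., -30., -10., 10.], [300., -30., 0., -50.]],
])
nj = np.array([1.2e-3, 4.5e-4])  # one matrix-element value per point (dummy)

np.save("momenta_example.npy", momenta)
np.save("njet_example.npy", nj)

reloaded = np.load("momenta_example.npy", allow_pickle=True).tolist()
print(len(reloaded[0]) - 2)  # -> 3, mirroring the nlegs calculation above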
    parser.add_argument('-v',
                        '--verbose',
                        help="Verbose",
                        # note: argparse's type=bool turns any non-empty string into True
                        type=bool,
                        required=False)
    args = parser.parse_args()

    return args
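The parse() function above is shown only from its final argument onward. A minimal version consistent with the attributes accessed in the __main__ block below might look like the following sketch; the argument names are inferred from those accesses, and every flag, default, and help string is an assumption.

import argparse

def parse():
    """Sketch of the truncated parser; flags and defaults are assumptions."""
    parser = argparse.ArgumentParser(description="Train FKS models over several reruns")
    parser.add_argument('-y', '--yaml_file', default="False",
                        help="YAML config file; the string 'False' means no config")
    parser.add_argument('--model_base_dir', default="./models/")
    parser.add_argument('--model_dir', default="fks_run")
    parser.add_argument('--training_reruns', type=int, default=1)
    parser.add_argument('--out_base_dir', default="./output")
    parser.add_argument('--out_dir', default="fks_run")
    parser.add_argument('-v', '--verbose', type=bool, required=False)
    return parser.parse_args()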


if __name__ == "__main__":

    args = parse()

    if args.yaml_file != "False":
        file_exists(args.yaml_file)

        with open(args.yaml_file) as f:
            # load the config without shadowing the yaml module
            y = yaml.load(f, Loader=yaml.FullLoader)

        args.model_base_dir = y["model_base_dir"]
        args.model_dir = y["model_dir"]
        args.training_reruns = y["training"]["training_reruns"]
        args.out_dir = y["model_dir"]

    for i in range(args.training_reruns):
        print('Working on training run {}'.format(i))
        # per-rerun model directory, e.g. <model_base_dir><model_dir>_0/
        mod_dir = args.model_base_dir + args.model_dir + '_{}/'.format(i)
        output = args.out_base_dir + '/' + args.out_dir
        if not os.path.exists(output):
            os.mkdir(output)