Example no. 1
0
def generate_atoms_inst_set(s):
    res = []

    for inst in s[0]:
        values = [int(i.nb) for i in inst]

        # depth
        if len(values) > depth() + 1:
            raise SemanticError('depth of ' + str(inst) +
                                ' greater than the depth of the problem (' +
                                str(depth()) + ')')

        # incorrect agents
        if any(i > nb_agts() for i in values):
            raise SemanticError('incorrect agents in ' + str(inst) + ' (max ' +
                                str(nb_agts()) + ')')

        atom = Atom(values[-1], values[:-1])

        # introspective
        if atom.is_instrospective():
            raise SemanticError('introspective atoms are forbidden (found ' +
                                str(atom) + ' in ' + str(s) + ')')

        res.append(atom)

    return res
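
# A minimal sketch of the Atom interface assumed by generate_atoms_inst_set
# and the other generator functions in this file, reconstructed from their
# call sites; the project's real class (with precede_by, eatm, is_initial and
# the PDDL rendering in __str__) is not shown here. The spelling of
# is_instrospective follows the original code.
class Atom:
    def __init__(self, agt, vis_list):
        self.agt = agt            # the agent whose secret the atom is about
        self.vis_list = vis_list  # chain of observing agents, outermost first

    def depth(self):
        # a depth-0 atom is a plain secret; each observer adds one level
        return len(self.vis_list)

    def is_instrospective(self):
        # assumed: introspective = the same agent occurring in two adjacent
        # positions of the visibility chain
        return any(a == b for a, b in zip(self.vis_list, self.vis_list[1:]))
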
def print_domain_file(base, file):
    file.write(';; Gossip problem - PDDL domain file\n')
    file.write(';; depth ' + str(depth()) + ', ' + str(nb_agts()) +
               ' agents\n\n')

    file.write('(define (domain gossip)\n')
    file.write('\t(:requirements\n')
    file.write('\t\t:strips :disjunctive-preconditions :equality\n')
    file.write('\t)\n\n')

    file.write('\t(:predicates\n')
    file.write('\t\t' + ' '.join(str(atom)
                                 for atom in base.get_atoms_of_depth(0)) + '\n')
    file.write('\t\t' + ' '.join(visibility_predicate(d)
                                 for d in range(1, depth() + 1)) + '\n')
    file.write('\t)\n')

    file.write('\n\t(:action call\n')
    file.write('\t\t:parameters (?i ?j)\n')
    file.write('\t\t:effect (and\n')
    file.write(add_ag(str_cond_effects_call(base)) + '\t\t)\n')
    file.write('\t)\n')

    file.write(')\n')
def generate_atoms_inst_set(s):
    res = []

    for inst in s[0]:
        values = [int(i.nb) for i in inst]

        # depth
        if len(values) > depth() + 1:
            raise SemanticError('depth of ' + str(inst) +
                                ' greater than the depth of the problem (' +
                                str(depth()) + ')')

        # incorrect agents
        if any(i > nb_agts() for i in values):
            raise SemanticError('incorrect agents in ' + str(inst) +
                                ' (max ' + str(nb_agts()) + ')')

        atom = Atom(values[-1], values[:-1])

        # introspective
        if atom.is_instrospective():
            raise SemanticError('introspective atoms are forbidden (found ' +
                                str(atom) + ' in ' + str(s) + ')')

        res.append(atom)

    return res
Example no. 4
0
def generate_atoms_non_inst_set(s):
    # initialise domains of all variables (the whole list of agents for each)
    ordering_agts = [i.name for i in s[0]]

    if len(ordering_agts) > depth() + 1:
        raise SemanticError('depth of ' + str(s[0]) +
                            ' greater than the depth of the problem (' +
                            str(depth()) + ')')

    doms = {i: agts() for i in ordering_agts}

    (un_csts, bin_csts) = classify_csts(s)

    # apply unary constraints to reduce domains
    apply_un_csts(doms, un_csts)

    # get all possible assignments from the binary constraints
    # and convert them into atoms
    res = []

    for assign in generate_assignments_bin_csts(doms, bin_csts, {}):
        atom = atom_from_assignment(assign, ordering_agts)

        if atom.is_instrospective():
            raise SemanticError('introspective atoms are forbidden (found ' +
                                str(atom) + ' in ' + str(s) + ')')

        res.append(atom)

    return res
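
# A concrete illustration of the domain bookkeeping above (hedged: assumes
# agts() returns the list of agent numbers, here [1, 2, 3]):
ordering_agts_demo = ['x', 'y']
doms_demo = {v: [1, 2, 3] for v in ordering_agts_demo}
# a unary constraint such as  x != 1  would shrink doms_demo['x'] to [2, 3];
# a binary constraint such as x != y  is enforced while enumerating
# assignments, so {'x': 2, 'y': 2} would be skipped.
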
def generate_atoms_non_inst_set(s):
    # initialise domains of all variables (the whole list of agents for each)
    ordering_agts = [i.name for i in s[0]]

    if len(ordering_agts) > depth() + 1:
        raise SemanticError('depth of ' + str(s[0]) +
                            ' greater than the depth of the problem (' +
                            str(depth()) + ')')

    doms = {i: agts() for i in ordering_agts}

    (un_csts, bin_csts) = classify_csts(s)

    # apply unary constraints to reduce domains
    apply_un_csts(doms, un_csts)

    # get all possible assignments from the binary constraints
    # and convert them into atoms
    res = []

    for assign in generate_assignments_bin_csts(doms, bin_csts, {}):
        atom = atom_from_assignment(assign, ordering_agts)

        if atom.is_instrospective():
            raise SemanticError('introspective atoms are forbidden (found ' +
                                str(atom) + ' in ' + str(s) + ')')

        res.append(atom)

    return res
Example no. 6
0
    def __init__(self):
        all_atoms = generate_all_atoms_up_to(depth())

        # list of dictionaries, one for each depth between 0 and DEPTH
        self.values = []
        for d in range(0, depth()+1):
            self.values.append({})

        for at in all_atoms:
            self.values[at.depth()][at] = True
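        # Illustration of the resulting layout (assuming depth() == 1):
        #   self.values[0] -> {atom: True for every depth-0 atom}
        #   self.values[1] -> {atom: True for every depth-1 atom}
        # i.e. values[d] behaves as a set of the atoms of depth d.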
Example no. 7
0
def str_cond_effect(atom):
    # precondition: either i or j knows this atom
    # i and j must be different from the first agent of the atom
    b_diff = ''
    e_diff = ''

    if len(atom.vis_list) > 0:
        b_diff = '(and (not (= ?i ' + str(atom.vis_list[0]) + ')) ' + \
                 '(not (= ?j ' + str(atom.vis_list[0]) + ')) '
        e_diff = ')'

    pre = b_diff + \
          '(or ' + \
          '(and ' + ' '.join(str(eat)
                             for eat in Atom.eatm(Atom.precede_by(atom, ['?i']))) + ') ' + \
          '(and ' + ' '.join(str(eat)
                             for eat in Atom.eatm(Atom.precede_by(atom, ['?j']))) + ')' + \
          ')' + e_diff + ' '

    # effect: any non-introspective sequence of i and j followed by the atom
    add = '(and ' + \
          ' '.join([str(Atom.precede_by(atom, seq))
                    for seq in generate_all_sequences_up_to('?i', '?j', depth() - atom.depth())]) + \
          ')'

    return '(when ' + pre + add + ')'
def str_cond_effect(atom):
    # precondition: either i or j knows this atom
    # i and j must be different from the first agent of the atom
    b_diff = ''
    e_diff = ''

    if len(atom.vis_list) > 0:
        b_diff = '(and (not (= ?i ' + str(atom.vis_list[0]) + ')) ' + \
                 '(not (= ?j ' + str(atom.vis_list[0]) + ')) '
        e_diff = ')'

    pre = b_diff + \
          '(or ' + \
          '(and ' + ' '.join(str(eat)
                             for eat in Atom.eatm(Atom.precede_by(atom, ['?i']))) + ') ' + \
          '(and ' + ' '.join(str(eat)
                             for eat in Atom.eatm(Atom.precede_by(atom, ['?j']))) + ')' + \
          ')' + e_diff + ' '

    # effect: any non-introspective sequence of i and j followed by the atom
    add = '(and ' + \
          ' '.join([str(Atom.precede_by(atom, seq))
                    for seq in generate_all_sequences_up_to('?i', '?j', depth() - atom.depth())]) + \
          ')'

    return '(when ' + pre + add + ')'
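
# Both versions call generate_all_sequences_up_to, which is not shown in this
# file. A plausible sketch, assuming it yields every sequence over {?i, ?j} of
# length 0..max_len with no adjacent repeats (those would be introspective):
from itertools import product

def generate_all_sequences_up_to(i, j, max_len):
    res = [[]]  # the empty prefix keeps the atom itself among the effects
    for n in range(1, max_len + 1):
        for seq in product([i, j], repeat=n):
            if all(a != b for a, b in zip(seq, seq[1:])):
                res.append(list(seq))
    return res
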
def str_goal(base):
    res = ''

    for d in range(0, depth() + 1):
        res += '\t\t' + base.repr_depth(d) + '\n'

    return res
def print_problem_file(base, file):
    output = ""
    output += ';; Gossip problem - PDDL problem file\n'
    output += ';; depth ' + str(depth()) + ', ' + str(nb_agts()) + ' agents\n\n'

    output += '(define (problem gossip)\n'
    output += '\t(:domain gossip)\n\n'

    output += '\t(:objects ' + ' '.join(str(i) for i in agts()) + ' ' + \
              ' '.join('s' + str(i) for i in agts()) + ')\n\n'

    output += '\t(:init\n'
    output += '\t\t' + ' '.join(
        str(atom) for atom in base.get_atoms_of_depth(0)) + '\n'
    output += '\t\t' + ' '.join(
        str(atom)
        for atom in base.get_atoms_of_depth(1) if atom.is_initial()) + '\n'
    output += '\t)\n\n'

    output += '\t(:goal (and\n'
    output += str_goal(base) + '\t))\n'

    output += ')\n'

    # rename the secret predicates and map agent numbers to letters
    for i in agts():
        output = output.replace(f"(s{i})", f"(ps{i})")

    agt_str = "abcdefgh"
    for i in agts():
        output = output.replace(f" {i} ", f" {agt_str[i-1]} ")
        output = output.replace(f" {i})", f" {agt_str[i-1]})")

    file.write(output)
def str_goal(base):
    res = ''

    for d in range(0, depth() + 1):
        res += '\t\t' + base.repr_depth(d) + '\n'

    return res
Example no. 12
0
def setup(
    args, n_class: int
) -> Tuple[Any, Any, Any, List[List[Callable]], List[List[float]], Callable]:
    print("\n>>> Setting up")
    cpu: bool = args.cpu or not torch.cuda.is_available()
    device = torch.device("cpu") if cpu else torch.device("cuda")

    if args.weights:
        if cpu:
            net = torch.load(args.weights, map_location='cpu')
        else:
            net = torch.load(args.weights)
        print(f">> Restored weights from {args.weights} successfully.")
    else:
        net_class = getattr(__import__('networks'), args.network)
        net = net_class(args.modalities, n_class).to(device)
        net.init_weights()
    net.to(device)

    optimizer: Any  # silence the type checker: Adam and SGD are not the same type
    if args.use_sgd:
        optimizer = torch.optim.SGD(net.parameters(),
                                    lr=args.l_rate,
                                    momentum=0.99,
                                    weight_decay=5e-4)
    else:
        optimizer = torch.optim.Adam(net.parameters(),
                                     lr=args.l_rate,
                                     betas=(0.9, 0.99),
                                     amsgrad=False)

    # print(args.losses)
    list_losses = eval(args.losses)
    # For compatibility reasons, avoid changing all the previous
    # configuration files
    if depth(list_losses) == 1:
        list_losses = [list_losses]

    nd: str = "whd" if args.three_d else "wh"

    loss_fns: List[List[Callable]] = []
    for i, losses in enumerate(list_losses):
        print(f">> {i}th list of losses: {losses}")
        tmp: List[Callable] = []
        for loss_name, loss_params, _, _, fn, _ in losses:
            loss_class = getattr(__import__('losses'), loss_name)
            tmp.append(loss_class(**loss_params, fn=fn, nd=nd))
        loss_fns.append(tmp)

    loss_weights: List[List[float]] = [
        map_(itemgetter(5), losses) for losses in list_losses
    ]

    scheduler = getattr(__import__('scheduler'),
                        args.scheduler)(**eval(args.scheduler_params))

    return net, optimizer, device, loss_fns, loss_weights, scheduler
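
# In setup(), depth() is the project's nested-list helper, unrelated to the
# zero-argument gossip depth() used elsewhere in this file. A minimal sketch
# of its assumed behaviour:
def depth(e) -> int:
    # depth([1, 2]) == 1 and depth([[1], [2]]) == 2: used above to wrap
    # single-level loss/folder configurations into a one-element list
    if isinstance(e, list) and e:
        return 1 + depth(e[0])
    return 0
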
def str_cond_effects_call(base):
    res = ''

    # for every atom of every depth
    for d in range(0, depth()):
        for atom in base.get_atoms_of_depth(d):
            # generate conditional effect
            res += '\t\t\t' + str_cond_effect(atom) + '\n'

    return res
Example no. 14
0
def str_cond_effects_call(base):
    res = ''

    # for every atom of every depth
    for d in range(0, depth()):
        for atom in base.get_atoms_of_depth(d):
            # generate conditional effect
            res += '\t\t\t' + str_cond_effect(atom) + '\n'

    return res
Example no. 15
0
def print_domain_file(base, file):

    output = ""
    output += ';; Gossip problem - PDDL domain file\n'
    output += ';; depth ' + str(depth()) + ', ' + str(nb_agts()) + ' agents\n\n'

    output += '(define (domain gossip)\n'
    output += '\t(:requirements\n'
    output += '\t\t:strips :disjunctive-preconditions :equality\n'
    output += '\t)\n\n'

    output += '\t(:predicates\n'
    output += '\t\t' + ' '.join(str(atom)
                                 for atom in base.get_atoms_of_depth(0)) + '\n'
    output += '\t\t' + ' '.join(visibility_predicate(d)
                                 for d in range(1, depth()+1)) + '\n'
    output += '\t)\n'

    output += '\n\t(:action call\n'
    output += '\t\t:parameters (?i ?j)\n'
    output += '\t\t:effect (and\n'
    output += str_cond_effects_call(base) + '\t\t)\n'
    output += '\t)\n'

    # create dummy action for FS planner
    output += '\n\t(:action dummy\n'
    output += '\t\t:parameters (?i ?j)\n'
    output += '\t\t:effect (and\n'
    output += '\t\t' + ' '.join(str(atom)
                                 for atom in base.get_atoms_of_depth(0)) + '\t\t)\n'
    output += '\t)\n'
    output += ')\n'

    # rename the secret predicates and map agent numbers to letters
    for i in agts():
        output = output.replace(f"(s{i})", f"(ps{i})")

    agt_str = "abcdefgh"
    for i in agts():
        output = output.replace(f" {i} ", f" {agt_str[i-1]} ")
        output = output.replace(f" {i})", f" {agt_str[i-1]})")

    file.write(output)
def print_domain_file(base, file):
    file.write(';; Gossip problem - PDDL domain file\n')
    file.write(';; depth ' + str(depth()) + ', ' +
               str(nb_agts()) + ' agents\n\n')

    file.write('(define (domain gossip)\n')
    file.write('\t(:requirements\n')
    file.write('\t\t:strips :disjunctive-preconditions :equality\n')
    file.write('\t)\n\n')

    file.write('\t(:predicates\n')
    file.write('\t\t' + ' '.join(str(atom)
                                 for atom in base.get_atoms_of_depth(0)) + '\n')
    file.write('\t\t' + ' '.join(visibility_predicate(d)
                                 for d in range(1, depth()+1)) + '\n')
    file.write('\t)\n')

    file.write('\n\t(:action call\n')
    file.write('\t\t:parameters (?i ?j)\n')
    file.write('\t\t:effect (and\n')
    file.write(str_cond_effects_call(base) + '\t\t)\n')
    file.write('\t)\n')

    file.write(')\n')
def print_problem_file(base, file):
    file.write(';; Gossip problem - PDDL problem file\n')
    file.write(';; depth ' + str(depth()) + ', ' + str(nb_agts()) + ' agents\n\n')

    file.write('(define (problem gossip)\n')
    file.write('\t(:domain gossip)\n\n')

    file.write('\t(:objects ' + ' '.join(str(i) for i in agts()) + ')\n\n')

    file.write('\t(:init\n')
    file.write('\t\t' + ' '.join(str(atom) for atom in base.get_atoms_of_depth(0)) + '\n')
    file.write('\t\t' + ' '.join(str(atom) for atom in base.get_atoms_of_depth(1)
                            if atom.is_initial()) + '\n')
    file.write('\t)\n\n')

    file.write('\t(:goal (and\n')
    file.write(str_goal(base) + '\t))\n')

    file.write(')\n')
def print_problem_file(base, file):
    file.write(';; Gossip problem - PDDL problem file\n')
    file.write(';; depth ' + str(depth()) + ', ' + str(nb_agts()) + ' agents\n\n')

    file.write('(define (problem gossip)\n')
    file.write('\t(:domain gossip)\n\n')

    # declaring s as a constant gives errors, so list the depth-0 atoms
    # (stripped of their parentheses) among the objects instead
    s_objects = '\t\t' + ' '.join(str(atom)
                                  for atom in base.get_atoms_of_depth(0)) + '\n'
    s_objects = s_objects.replace('(', '').replace(')', '')
    file.write('\t(:objects ' + ' '.join("ag" + str(i) for i in agts()) +
               '\n' + s_objects + ')\n\n')

    file.write('\t(:init\n')
    file.write('\t\t' + ' '.join(str(atom) for atom in base.get_atoms_of_depth(0)) + '\n')
    init_S = '\t\t' + ' '.join(str(atom) for atom in base.get_atoms_of_depth(1)
                               if atom.is_initial()) + '\n'
    init_S = numToAgnum(init_S)
    file.write(init_S)
    file.write('\t)\n\n')

    file.write('\t(:goal (and\n')
    goal_S = str_goal(base) + '\t))\n'
    goal_S = numToAgnum(goal_S)
    file.write(goal_S)

    file.write(')\n')
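
# A hedged driver showing how the printers fit together (assumes these
# functions are importable; the file names are illustrative):
set_parameters(2, 4)  # depth 2, 4 agents
base = Goal()
with open('domain.pddl', 'w') as f:
    print_domain_file(base, f)
with open('problem.pddl', 'w') as f:
    print_problem_file(base, f)
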
Example no. 19
0
def get_loaders(
        args,
        data_folder: str,
        batch_size: int,
        n_class: int,
        debug: bool,
        in_memory: bool,
        dimensions: int,
        use_spacing: bool = False
) -> Tuple[List[DataLoader], List[DataLoader]]:
    losses_list = eval(args.losses)
    if depth(losses_list) == 1:
        losses_list = [losses_list]

    list_bounds_generators: List[List[Callable]] = []
    for losses in losses_list:
        tmp = []

        for _, _, bounds_name, bounds_params, fn, _ in losses:
            if bounds_name is None:
                tmp.append(lambda *a: torch.zeros(n_class, 1, 2))
                continue

            bounds_class = getattr(__import__('bounds'), bounds_name)
            tmp.append(bounds_class(C=args.n_class, fn=fn, **bounds_params))
        list_bounds_generators.append(tmp)

    list_folders_list = eval(args.folders)
    # For compatibility reasons, avoid changing all the previous
    # configuration files
    if depth(list_folders_list) == 1:
        list_folders_list = [list_folders_list]
    # print(folders_list)

    # Prepare the datasets and dataloaders
    print()
    train_loaders = []
    for i, (train_topfolder, folders_list, bounds_generators) in \
            enumerate(zip(args.training_folders, list_folders_list, list_bounds_generators)):

        folders, trans, are_hots = zip(*folders_list)
        print(f">> {i}th training loader: {train_topfolder} with {folders}")

        # Create partial functions: Easier for readability later (see the difference between train and validation)
        gen_dataset = partial(SliceDataset,
                              transforms=trans,
                              are_hots=are_hots,
                              debug=debug,
                              K=n_class,
                              in_memory=in_memory,
                              bounds_generators=bounds_generators,
                              box_prior=args.box_prior,
                              box_priors_arg=args.box_prior_args,
                              dimensions=dimensions)
        data_loader = partial(DataLoader,
                              num_workers=min(cpu_count(), batch_size + 5),
                              pin_memory=True,
                              collate_fn=custom_collate)

        train_folders: List[Path] = [
            Path(data_folder, train_topfolder, f) for f in folders
        ]
        # I assume all files have the same name inside their folder: makes things much easier
        train_names: List[str] = map_(lambda p: str(p.name),
                                      train_folders[0].glob("*"))
        t_spacing_p: Path = Path(data_folder, train_topfolder, "spacing.pkl")
        train_spacing_dict: Dict[str, Tuple[float, ...]] = pickle.load(
            open(t_spacing_p, 'rb')) if use_spacing else None
        train_set = gen_dataset(train_names,
                                train_folders,
                                spacing_dict=train_spacing_dict)
        if args.group_train:
            train_sampler = PatientSampler(train_set,
                                           args.grp_regex,
                                           shuffle=True)
            train_loader = data_loader(train_set, batch_sampler=train_sampler)
        else:
            train_loader = data_loader(train_set,
                                       batch_size=batch_size,
                                       shuffle=True,
                                       drop_last=False)

        train_loaders.append(train_loader)

        if i == args.val_loader_id or (args.val_loader_id == -1 and
                                       (i + 1) == len(args.training_folders)):
            print(
                f">> Validation dataloader (id {args.val_loader_id}), {train_topfolder} {folders}"
            )
            val_folders: List[Path] = [
                Path(data_folder, args.validation_folder, f) for f in folders
            ]
            val_names: List[str] = map_(lambda p: str(p.name),
                                        val_folders[0].glob("*"))
            v_spacing_p: Path = Path(data_folder, args.validation_folder,
                                     "spacing.pkl")
            val_spacing_dict: Dict[str, Tuple[float, ...]] = pickle.load(
                open(v_spacing_p, 'rb')) if use_spacing else None
            val_set = gen_dataset(val_names,
                                  val_folders,
                                  spacing_dict=val_spacing_dict)
            val_sampler = PatientSampler(
                val_set, args.grp_regex, shuffle=False) if args.group else None
            val_batch_size = 1 if val_sampler else batch_size
            val_loader = data_loader(val_set,
                                     batch_sampler=val_sampler,
                                     batch_size=val_batch_size)

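    # Note: val_loader is only bound inside the validation branch above, so
    # args.val_loader_id (or the last training folder when it is -1) must be
    # selected by the loop at least once.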
    return train_loaders, [val_loader]
Example no. 20
0
def get_loaders(args, data_folder: str, batch_size: int, n_class: int,
                debug: bool,
                in_memory: bool) -> Tuple[List[DataLoader], List[DataLoader]]:
    png_transform = transforms.Compose([
        lambda img: img.convert('L'),
        lambda img: np.array(img)[np.newaxis, ...],
        lambda nd: nd / 255,  # max <= 1
        lambda nd: torch.tensor(nd, dtype=torch.float32)
    ])
    color_transform = transforms.Compose([
        lambda img: img.convert('RGB'),
        lambda img: np.asarray(img),
        lambda arr: np.rollaxis(arr, 2, 0),
        lambda nd: nd / 255,  # max <= 1
        lambda nd: torch.tensor(nd, dtype=torch.float32)
    ])
    npy_transform = transforms.Compose([
        lambda npy: np.array(npy)[np.newaxis, ...],
        lambda nd: torch.tensor(nd, dtype=torch.float32)
    ])
    gt_transform = transforms.Compose([
        lambda img: img.convert('L'),
        lambda img: np.array(img)[np.newaxis, ...],
        lambda nd: torch.tensor(nd, dtype=torch.int64),
        partial(class2one_hot, C=n_class),
        itemgetter(0)
    ])
    dummy_gt = transforms.Compose([
        lambda img: np.array(img), lambda nd: torch.zeros(
            (n_class, *(nd.shape)), dtype=torch.int64)
    ])

    losses_list = eval(args.losses)
    if depth(losses_list) == 1:
        losses_list = [losses_list]

    list_bounds_generators: List[List[Callable]] = []
    for losses in losses_list:
        tmp = []

        for _, _, bounds_name, bounds_params, fn, _ in losses:
            if bounds_name is None:
                tmp.append(lambda *a: torch.zeros(n_class, 1, 2))
                continue

            bounds_class = getattr(__import__('bounds'), bounds_name)
            tmp.append(bounds_class(C=args.n_class, fn=fn, **bounds_params))
        list_bounds_generators.append(tmp)

    list_folders_list = eval(args.folders)
    # For compatibility reasons, avoid changing all the previous
    # configuration files
    if depth(list_folders_list) == 1:
        list_folders_list = [list_folders_list]
    # print(folders_list)

    # Prepare the datasets and dataloaders
    print()
    train_loaders = []
    # val_loader = None
    for i, (train_topfolder, folders_list, bounds_generators) in \
            enumerate(zip(args.training_folders, list_folders_list, list_bounds_generators)):

        folders, trans, are_hots = zip(*folders_list)
        print(f">> {i}th training loader: {train_topfolder} with {folders}")

        # Create partial functions: Easier for readability later (see the difference between train and validation)
        gen_dataset = partial(SliceDataset,
                              transforms=trans,
                              are_hots=are_hots,
                              debug=debug,
                              C=n_class,
                              in_memory=in_memory,
                              bounds_generators=bounds_generators)
        data_loader = partial(DataLoader,
                              num_workers=batch_size + 5,
                              pin_memory=True)

        train_folders: List[Path] = [
            Path(data_folder, train_topfolder, f) for f in folders
        ]
        # I assume all files have the same name inside their folder: makes things much easier
        train_names: List[str] = map_(lambda p: str(p.name),
                                      train_folders[0].glob("*"))
        train_set = gen_dataset(train_names, train_folders)
        if args.group_train:
            train_sampler = PatientSampler(train_set,
                                           args.grp_regex,
                                           shuffle=True)
            train_loader = data_loader(train_set, batch_sampler=train_sampler)
        else:
            train_loader = data_loader(train_set,
                                       batch_size=batch_size,
                                       shuffle=True,
                                       drop_last=True)

        train_loaders.append(train_loader)

        if i == args.val_loader_id or (args.val_loader_id == -1 and
                                       (i + 1) == len(args.training_folders)):
            print(
                f">> Validation dataloader (id {args.val_loader_id}), {train_topfolder} {folders}"
            )
            val_folders: List[Path] = [
                Path(data_folder, args.validation_folder, f) for f in folders
            ]
            val_names: List[str] = map_(lambda p: str(p.name),
                                        val_folders[0].glob("*"))
            val_set = gen_dataset(val_names, val_folders)
            val_sampler = PatientSampler(
                val_set, args.grp_regex, shuffle=False) if args.group else None
            val_batch_size = 1 if val_sampler else batch_size
            val_loader = data_loader(val_set,
                                     batch_sampler=val_sampler,
                                     batch_size=val_batch_size)

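    # Note: val_loader is only bound inside the validation branch above, so
    # args.val_loader_id (or the last training folder when it is -1) must be
    # selected by the loop at least once.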
    return train_loaders, [val_loader]
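
# map_ above is the project's eager map helper; a minimal sketch of the
# assumed implementation:
from typing import Callable, Iterable, List, TypeVar

A = TypeVar('A')
B = TypeVar('B')

def map_(fn: Callable[[A], B], iterable: Iterable[A]) -> List[B]:
    # return a list rather than a lazy map object, since the call sites index
    # and reuse the results
    return list(map(fn, iterable))
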
d = 1  # default depth (assumed; prevents a NameError when run without arguments)
na = 6

try:
    if len(sys.argv) == 2:
        raise ParameterError('wrong number of parameters.')

    if len(sys.argv) > 2:
        d = int(sys.argv[1])
        na = int(sys.argv[2])

    if d <= 0 or na <= 1:
        raise ParameterError('wrong value for <depth> or <number of agents>')

    set_parameters(d, na)

    print('Generating atoms for depth ' + str(depth()) + ' and ' +
          str(nb_agts()) + ' agents...')
    base = Goal()

    # negative goals
    if len(sys.argv) > 3:
        ast = parseSet(sys.argv[3])
        print('Generating negative goals ' + str(ast) + '...')
        update_negative_goals(base, ast)

except ParameterError as e:
    print('Error: ' + str(e))
    print('Usage: python gp_generator.py ' +
          '<depth> <number of agents> ["<description of negative goals>"] ' +
          'with <depth> >= 1 and <number of agents> >= 2')
    sys.exit(1)
d = 1  # default depth (assumed; prevents a NameError when run without arguments)
na = 6

try:
    if len(sys.argv) == 2:
        raise ParameterError('wrong number of parameters.')

    if len(sys.argv) > 2:
        d = int(sys.argv[1])
        na = int(sys.argv[2])

    if d <= 0 or na <= 1:
        raise ParameterError('wrong value for <depth> or <number of agents>')

    set_parameters(d, na)

    print('Generating atoms for depth ' + str(depth()) + ' and ' +
          str(nb_agts()) + ' agents...')
    base = Goal()

    # negative goals
    if len(sys.argv) > 3:
        ast = parseSet(sys.argv[3])
        print('Generating negative goals ' + str(ast) + '...')
        update_negative_goals(base, ast)

except ParameterError as e:
    print('Error: ' + str(e))
    print('Usage: python gp_generator.py ' +
          '<depth> <number of agents> ["<description of negative goals>"] ' +
          'with <depth> >= 1 and <number of agents> >= 2')
    sys.exit(1)