Example 1
def write_stream_statistics(externals, verbose):
    # TODO: estimate conditional to affecting history on skeleton
    # TODO: estimate conditional to first & attempt and success
    # TODO: relate to success for the full future plan
    # TODO: Maximum Likelihood Exponential - average (biased in general)
    if not externals:
        return
    if verbose:
        #dump_online_statistics(externals)
        dump_total_statistics(externals)
    pddl_name = externals[0].pddl_name  # TODO: ensure the same
    previous_data = load_data(pddl_name)
    data = {}
    for external in externals:
        if not hasattr(external, 'instances'):
            continue  # TODO: SynthesizerStreams
        #total_calls = 0 # TODO: compute these values
        previous_statistics = previous_data.get(external.name, {})
        data[external.name] = merge_data(external, previous_statistics)

    if not SAVE_STATISTICS:
        return
    filename = get_data_path(pddl_name)
    ensure_dir(filename)
    write_pickle(filename, data)
    if verbose:
        print('Wrote:', filename)
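
Example 1 calls a handful of helpers that are not shown here (get_data_path, ensure_dir, write_pickle, load_data). A minimal sketch of what such helpers might look like, assuming pickle-based serialization and one file per PDDL domain name; the DATA_DIR constant and the exact file layout are assumptions, not taken from the original code.

import os
import pickle

DATA_DIR = 'statistics/'  # assumed location; the real path may differ

def get_data_path(pddl_name):
    # Assumed naming scheme: one pickle file per PDDL domain name.
    return os.path.join(DATA_DIR, '{}.pkl'.format(pddl_name))

def ensure_dir(filename):
    # Create the parent directory of filename if it does not exist yet.
    directory = os.path.dirname(filename)
    if directory and not os.path.exists(directory):
        os.makedirs(directory)

def write_pickle(filename, data):
    # Serialize data to filename using pickle.
    with open(filename, 'wb') as f:
        pickle.dump(data, f)

def load_data(pddl_name):
    # Return previously saved statistics, or an empty dict if none exist.
    filename = get_data_path(pddl_name)
    if not os.path.exists(filename):
        return {}
    with open(filename, 'rb') as f:
        return pickle.load(f)
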
Example 2
def write_stream_statistics(externals, verbose):
    # TODO: estimate conditional to affecting history on skeleton
    # TODO: estimate conditional to first & attempt and success
    # TODO: relate to success for the full future plan
    # TODO: Maximum Likelihood Exponential - average (biased in general)
    if not externals:
        return
    if verbose:
        #dump_local_statistics(externals)
        dump_total_statistics(externals)
    pddl_name = externals[0].pddl_name  # TODO: ensure the same
    previous_data = load_data(pddl_name)
    data = {}
    for external in externals:
        #total_calls = 0 # TODO: compute these values
        previous_statistics = previous_data.get(external.name, {})
        # TODO: compute distribution of successes given feasible
        # TODO: can estimate probability of success given feasible
        # TODO: single tail hypothesis testing (probability that came from this distribution)

        if not hasattr(external, 'instances'):
            continue  # TODO: SynthesizerStreams
        distribution = []
        for instance in external.instances.values():
            if instance.results_history:
                #attempts = len(instance.results_history)
                #successes = sum(map(bool, instance.results_history))
                #print(instance, successes, attempts)
                # TODO: also first attempt, first success
                last_success = -1
                for i, results in enumerate(instance.results_history):
                    if results:
                        distribution.append(i - last_success)
                        #successful = (0 <= last_success)
                        last_success = i
        combined_distribution = previous_statistics.get('distribution',
                                                        []) + distribution
        #print(external, distribution)
        #print(external, Counter(combined_distribution))
        # TODO: count num failures as well
        # Alternatively, keep metrics on the lower bound and use somehow
        # Could assume that it is some other distribution beyond that point
        data[external.name] = {
            'calls': external.total_calls,
            'overhead': external.total_overhead,
            'successes': external.total_successes,
            'distribution': combined_distribution,
        }

    filename = get_data_path(pddl_name)
    ensure_dir(filename)
    write_pickle(filename, data)
    if verbose:
        print('Wrote:', filename)
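
The 'distribution' saved in Example 2 is a list of gaps, in attempts, between successive successes of a stream instance. One way such data could be used, in the spirit of the "Maximum Likelihood Exponential - average" TODO, is to treat each gap as a geometric sample and estimate the per-attempt success probability as the inverse of the mean gap. The sketch below is an illustration of that idea, not code from the original project; the prior smoothing terms are assumptions.

def estimate_success_probability(distribution, prior_mean=2.0, prior_weight=1.0):
    # Each entry is the number of attempts needed for one success
    # (a geometric sample), so the sample-average gap estimates 1/p.
    # The original TODO notes this estimator is biased in general;
    # the prior terms here are assumed smoothing, not from the original.
    total = sum(distribution) + prior_mean * prior_weight
    count = len(distribution) + prior_weight
    mean_gap = total / count
    return 1.0 / mean_gap

# Example: successes observed after gaps of 1, 3, and 2 attempts
# print(estimate_success_probability([1, 3, 2]))
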
Example 3
def save(self, data_dir):
    from learn_tools.analyze_experiment import get_label
    if data_dir is None:
        return False
    # domain = learner.func
    # data_dir = os.path.join(MODEL_DIRECTORY, domain.name)
    # name = learner.name
    name = get_label(self.algorithm)
    mkdir(data_dir)
    learner_path = os.path.join(
        data_dir, '{}.pk{}'.format(name, get_python_version()))
    print('Saved learner:', learner_path)
    write_pickle(learner_path, self)
    return True
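
Example 3 pickles the learner to a file named '<name>.pk<python-version>'. The matching loader is not shown; below is a minimal sketch of one under the same naming scheme. The function name load_learner is hypothetical.

import os
import pickle

def load_learner(data_dir, name, python_version):
    # Rebuild the path used in save() and unpickle the learner object.
    learner_path = os.path.join(
        data_dir, '{}.pk{}'.format(name, python_version))
    if not os.path.exists(learner_path):
        return None
    with open(learner_path, 'rb') as f:
        return pickle.load(f)
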
Example 4
import numpy as np

def update_learner(learner, learner_path, result):
    # Discard if a planning failure?
    if learner.validity_learner is not None:
        X_online, Y_online, _ = learner.validity_learner.func.example_from_result(
            result, validity=True)
        learner.validity_learner.retrain(newx=np.array([X_online]),
                                         newy=np.array([Y_online]))
        learner.validity_learner.results.append(result)
    #if result['score'] is not None:
    X_online, Y_online, W_online = learner.func.example_from_result(
        result, validity=False, binary=False)
    print('Score: {:.3f} | Weight: {:.3f}'.format(Y_online, W_online))
    learner.retrain(newx=np.array([X_online]),
                    newy=np.array([Y_online]),
                    new_w=np.array([W_online]))
    learner.results.append(result)
    # policy.save(TBD)
    write_pickle(learner_path, learner)
    print('Saved', learner_path)
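
update_learner is written to be called once per new result: it retrains the learner incrementally on that single example and checkpoints the updated learner to disk. A hedged sketch of a surrounding online loop is shown below; collect_result and the episode count are hypothetical placeholders, not part of the original code.

def run_online(learner, learner_path, num_episodes=10):
    # Hypothetical driver loop: gather one result at a time and fold it
    # into the learner, persisting a checkpoint after every update.
    for episode in range(num_episodes):
        result = collect_result(learner)  # placeholder for running one trial
        if result is None:
            continue
        update_learner(learner, learner_path, result)
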