Example #1
def report_time_spent(ga_engine):
    global _time_of_prev_gen
    now = time.time()
    gen = ga_engine.getCurrentGeneration()
    print "Time spent on generation %s, total %s." % \
      (SimpleTimer.period_to_string(_time_of_prev_gen[1], now),
       SimpleTimer.period_to_string(_time_of_prev_gen[0], now))
    _time_of_prev_gen[1] = now
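
# A minimal sketch of how report_time_spent could be wired up, assuming the
# engine is Pyevolve's GSimpleGA (getCurrentGeneration and the stepCallback
# slot are part of that API). _time_of_prev_gen is assumed to hold
# [start_of_run, time_of_previous_generation].
_time_of_prev_gen = [time.time(), time.time()]
# ga = GSimpleGA.GSimpleGA(genome)
# ga.stepCallback.set(report_time_spent)
# ga.evolve(freq_stats=10)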
Example #3
def load_all_except_values(pathfile=_PATH_TO_GS2_TXT):
    timer = SimpleTimer()
    all_files = []
    for (path, contents) in parse_all_generator(pathfile):
        print "Parsed %s." % path
        # Drop the bulky "Value" entries; each section is a (heading, dict) pair.
        for section in contents:
            section[1].pop("Value", None)
        all_files.append((path, contents))
    return all_files
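
# Hypothetical usage, assuming _PATH_TO_GS2_TXT points at an existing list of
# GS2 files:
if __name__ == "__main__":
    all_files = load_all_except_values()
    print "Loaded %d files." % len(all_files)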
Example #4
def store_in_hdf5(user_series, h5_file_path, complib='bzip2', complevel=9):
    timer = SimpleTimer()
    storer = PandasH5Storer(h5_file_path, complevel=complevel, complib=complib)
    storer.store_list("user_ids",
                      [int(userid) for userid in user_series.keys()])
    for (userid, timeseries) in user_series.iteritems():
        storer.store_pd_user(int(userid), timeseries)
    print "All series stored in database."
Example #5
def all_unique_except_values(parsed_files):
    """Given a list of parsed files as (name, sections) tuples, return a dict
    of dicts. The outer dict uses section headings as keys and the inner dict
    as values. The inner dict is essentially a union of the sections in the GS2
    files: the keys are GS2 keys and the values are sets of unique values from
    the GS2 files. The "Value" element is dropped.

    The output from this function can be used to e.g. identify overall start
    and end times, count the number of users, etc."""
    timer = SimpleTimer()
    all_sections = collections.defaultdict(dict)
    for (path, sections) in parsed_files:
        _all_unique_from_single_file(sections, all_sections)
    return all_sections
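
# Illustrative usage (a sketch, reusing load_all_except_values from Example #3
# and assuming both functions live in the same module):
if __name__ == "__main__":
    summary = all_unique_except_values(load_all_except_values())
    for (heading, union) in summary.iteritems():
        print "%s: %s" % (heading, sorted(union.keys()))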
Example #6
def run_one(arg):
    """Run one evolution. Arg is a tuple containing user ID and run number."""
    user_id, run_number = arg
    # Note that the PID printed below is the PID in which this function is
    # running, which is different from the PID of the evolution.
    print "Launching evolution for user %d run %d (pid %d) at %s..." % \
      (user_id, run_number, os.getpid(), time.asctime())
    sys.stdout.flush()
      
    timer = SimpleTimer(output_stream=None)
    out_dir = os.path.join(SG_SIM_PATH, "id_%d" % user_id)
    model = os.path.join(SG_MODELS_PATH, "load_prediction.py")
    postfix = "run_%d" % run_number
    generations = 50
    pop_size = 400
    mutation = 0.05
    crossover = 0.5
    # NB Total-load sims:
    total = " --total-load"
    data_seed = 12
    
    stdout_path = os.path.join(out_dir, "output_run_%d.txt" % run_number)
    os.system("test -d %s || mkdir -p %s" % (out_dir, out_dir))
    os.system("python %s " % model + \
              " --userid=%d" % user_id + \
              " --out-dir=%s --out-postfix=%s " % (out_dir, postfix) + \
              " --generations=%d --pop-size=%d " % (generations, pop_size) + \
              " --mutation=%f --crossover=%f " % (mutation, crossover) + \
              " --no-show-plot --save-plot " + \
              total + \
              " --data-seed=%d " % data_seed + \
              " >%s" % stdout_path)

    print "Evolution completed for user %d run %d. %s" \
      % (user_id, run_number, timer.end())
    sys.stdout.flush()
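
# An alternative sketch of the same launch using subprocess instead of
# os.system, which avoids shell-quoting pitfalls. The paths, flags and
# parameter values mirror run_one above; this is not the project's original
# code.
import subprocess

def run_one_subprocess(user_id, run_number):
    out_dir = os.path.join(SG_SIM_PATH, "id_%d" % user_id)
    if not os.path.isdir(out_dir):
        os.makedirs(out_dir)
    args = ["python", os.path.join(SG_MODELS_PATH, "load_prediction.py"),
            "--userid=%d" % user_id,
            "--out-dir=%s" % out_dir,
            "--out-postfix=run_%d" % run_number,
            "--generations=50", "--pop-size=400",
            "--mutation=0.05", "--crossover=0.5",
            "--no-show-plot", "--save-plot", "--total-load",
            "--data-seed=12"]
    stdout_path = os.path.join(out_dir, "output_run_%d.txt" % run_number)
    with open(stdout_path, "w") as out:
        subprocess.call(args, stdout=out)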
Example #8
def run(model_creator_class):
    """Main entry point for specific models. model_creator is an instance of a
    class used to set up the model and the data."""
    get_options()
    if not is_mpi_slave(options):
        timer = SimpleTimer()
    prev_handler = np.seterrcall(float_err_handler)
    prev_err = np.seterr(all='call')
    np.seterr(under='ignore')
    random.seed(options.seed)
    np.random.seed(options.seed)
    model_creator = model_creator_class(options)
    model = model_creator.get_model()
    if not is_mpi_slave(options):
        _print_sim_context(model.dataset)
    _run_models([model], model.dataset)
    ul.tempfeeder_exp().close()
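
# A minimal sketch of the handler installed via np.seterrcall above; numpy
# calls it with the error type as a string and an integer flag. The real
# float_err_handler may do more (this is an assumption, not the original).
def float_err_handler(type_, flag):
    print "Floating point error '%s' (flag %d) at %s." % \
      (type_, flag, time.asctime())
    sys.stdout.flush()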
Example #9
def list_all_status_codes(parsed_files):
    """Given a list of parsed files as (name, sections) tuples, return two
    dicts containing all the different texts found after and in between the
    double slashes in the 'Value' field of the 'Time-series' section of the
    input files."""
    timer = SimpleTimer()
    betweens = collections.defaultdict(int)
    afters = collections.defaultdict(int)
    for (path, sections) in parsed_files:
        for (section_idx, (name, section)) in enumerate(sections):
            if name != "Time-series":
                continue
            unit = section.get("Unit")
            if unit is None or (unit != ["kWh"]
                                and unit != ["Grader Celsius"]):
                print "Skipping time-series with unit", unit, "in file", path
                continue
            values = section.get("Value")
            if values is not None:
                for value in values:
                    if '/' in value:
                        try:
                            (val, between, after) = value.split('/')
                        except ValueError as ve:
                            raise ValueError(
                                "Error parsing values in section number %d of "
                                "file %s. Did not find the expected two "
                                "slashes. Error message was: '%s'" %
                                (section_idx, path, ve))
                        betweens[between] += 1
                        afters[after] += 1
                    else:
                        float(value)  # Should be a single measurement if no slashes
                        betweens['No value'] += 1
                        afters['No value'] += 1
    return (betweens, afters)
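
# A hedged worked example with invented input: values of the form
# "<measurement>/<between>/<after>" are tallied, while plain numbers count as
# 'No value'.
if __name__ == "__main__":
    example = [("example.gs2",
                [("Time-series",
                  {"Unit": ["kWh"],
                   "Value": ["1.25/X/", "0.75"]})])]
    (betweens, afters) = list_all_status_codes(example)
    print dict(betweens)  # {'X': 1, 'No value': 1}
    print dict(afters)    # {'': 1, 'No value': 1}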
Example #10
def parse_all_print_info(pathfile=_PATH_TO_GS2_TXT):
    timer = SimpleTimer()
    for (path, contents) in parse_all_generator(pathfile):
        print "Parsed ", path, "found %d sections." % len(contents)
    params.run_one(arg)
    
def make_runs(user_ids, num_runs):
    """Create a list of (user_id, run_number) pairs that can be sent via
    pool.map to the run_one function."""
    return [(user, run) for user in user_ids for run in range(num_runs)]
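
# For example, two users and two runs each expand to four jobs:
#   make_runs([100, 101], 2) -> [(100, 0), (100, 1), (101, 0), (101, 1)]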

def run_simulations(runs):
    """Run all the simulations provided in runs by sending them on to the
    run_one function."""
    num_parallel_processes = 12
    pool = Pool(processes=num_parallel_processes)
    pool.map(run_one_wrapper, runs, chunksize=1)

if __name__ == "__main__":
    # if socket.gethostname() == "tanzenmusik.idi.ntnu.no":
    #     user_ids = tempfeeder_exp().user_ids[25:50]
    # else:
    #     user_ids = tempfeeder_exp().user_ids[0:25]

    user_ids = [tempfeeder_exp().user_ids[0]]
    num_runs = 12

    print "Master pid is %d " % os.getpid()
    timer = SimpleTimer(output_stream=None)
    # Close the shared data feeder before forking workers, presumably so the
    # child processes don't inherit an open store handle.
    tempfeeder_exp().close()
    runs = make_runs(user_ids, num_runs)
    run_simulations(runs)
    print "All simulations complete. %s" % timer.end()
    tempfeeder_exp().close()
Example #12
# Try smoothing/cleansing different time series lengths
for hindsight_days in [1]:
    # Select data
    num_hours = 24 * hindsight_days
    data = dataset["Load"][-num_hours:].copy()
    # Some output and rough timing
    print "Cleansing %d hours of data with smoothness %.2f, z-score %.2f..." % (num_hours, smoothness, zscore)
    sys.stdout.flush()
    start_time = time.time()
    # This is the part that takes time
    smoother = _get_smoother()(data, smoothness)
    cleaner = cln.RegressionCleaner(smoother, zscore)
    cleaned, _ = cleaner.get_cleaned_data(
        method=cln.RegressionCleaner.replace_with_bound)
    # Wrap up and plot the result
    end_time = time.time()
    print "Done in %s." % SimpleTimer.period_to_string(start_time, end_time)

    print cleaned
    sys.stdout.flush()
    plt.figure()
    data.plot(style="r", label="Raw load")

    spline = pd.TimeSeries(data=smoother.splev(range(len(cleaned))),
                           index=cleaned.index)
    spline.plot(style="g", label="Smoothing spline")

    # THE SAUSAGE! (the shaded confidence band around the spline)
    lower, upper = cleaner.get_confidence_interval()
    ax = plt.gca()
    ax.fill_between(cleaned.index, lower, upper, facecolor="g", alpha=0.1)

    cleaned.plot(style="b", label="Cleaned load")