def main():
  common.setup()

  # CHANGE ME
  zone = 'us-central1-a'
  machtype = 'n1-standard-4-d'
  image = 'projects/google/images/ubuntu-12-04-v20120621'
  # Persistent disk, if any.
  disk = ''
  # If this is set, only this slave will have the disk mounted, and it'll be rw.
  # Otherwise, all slaves get the disk mounted ro.
  rw_disk_instance = ''

  print 'Packaging up the stuff the coordinator will need...'
  # tar will insert directories, so flatten the view a bit
  subprocess.call(['cp', 'coordinator/coordinator.py', '.'])
  subprocess.call(['cp', 'coordinator/hadoop_cluster.py', '.'])
  subprocess.call(['tar', 'czf', 'coordinator.tgz', 'hadoop', 'gcelib',
                   'hadoop-tools.jar', 'cfg.py', 'util.py', 'coordinator.py',
                   'hadoop_cluster.py', 'start_setup.sh'])
  subprocess.call(['rm', 'coordinator.py', 'hadoop_cluster.py'])
  # Push to a fixed place for now
  subprocess.call(['gsutil', 'cp', 'coordinator.tgz',
                   cfg.gs_coordinators_tarball])
  subprocess.call(['rm', 'coordinator.tgz'])
  print

  print 'Launching coordinator...'
  util.api.insert_instance(
      name=cfg.coordinator, zone=zone,
      machineType=machtype, image=image,
      serviceAccounts=gce_shortcuts.service_accounts([cfg.compute_scope,
                                                      cfg.rw_storage_scope]),
      networkInterfaces=gce_shortcuts.network(),
      metadata=gce_shortcuts.metadata({
          'startup-script': open('start_setup.sh').read(),
          'bootstrap.sh': open('coordinator/bootstrap.sh').read(),
          'tarball': cfg.gs_coordinators_tarball,
          'gs_bucket': cfg.gs_bucket,
          'zone': zone,
          'machine_type': machtype,
          'image': image,
          'disk': disk,
          'rw_disk_instance': rw_disk_instance,
          'secret': cfg.secret
      }),
      blocking=True
  )
  print

  print 'Waiting for coordinator to come online...'
  while True:
    status, _ = util.get_status(cfg.coordinator)
    print status[1]
    if status == util.InstanceState.SNITCH_READY:
      break
    time.sleep(cfg.poll_delay_secs)
  print

  print 'Controller is ready to receive commands.'
Example #2
def update(address, contract_name):
    file_name = "challenges/" + contract_name + ".py"
    if not os.path.exists(file_name) or not os.path.isfile(file_name):
        print("Challenge validator not found for contract: " + contract_name)
        return redirect(
            url_for('view',
                    _external=True,
                    _scheme='https',
                    address=address,
                    contract=contract_name))

    status_blob = util.get_status(address,
                                  util.get_contract_number(contract_name))
    contract_addr = status_blob[2].strip()
    status = status_blob[0].lower()
    if "unfinished" in status:
        return render_template('grade.html',
                               address=address,
                               contract_name=contract_name)
    else:
        return redirect(
            url_for('dashboard',
                    _external=True,
                    _scheme='https',
                    address=address))
Example #3
    def test_util_get_status(self):
        # Setup
        path = constants.DB_PATH + self.MOCK_USER + "/" + self.MOCK_CHALLENGE
        if os.path.exists(path):
            os.remove(path)

        # Validate
        assert util.get_status(self.MOCK_USER, self.MOCK_CHALLENGE)[0] == "Not Started"
        util.write_address(self.MOCK_USER, self.MOCK_CHALLENGE, self.MOCK_CONTRACT_ADDRESS)
        assert util.get_status(self.MOCK_USER, self.MOCK_CHALLENGE)[0] == "Deployed / Unfinished"
        util.mark_finished(self.MOCK_USER, self.MOCK_CHALLENGE)
        assert util.get_status(self.MOCK_USER, self.MOCK_CHALLENGE)[0] == "Done!"

        # Cleanup
        if os.path.exists(path):
            os.remove(path)
Example #4
    def test_get_status(self):
        ns = ("Not Started", "red")
        dep = ("Deployed / Unfinished", "black", self.MOCK_CONTRACT_ADDRESS)
        fin = ("Done!", "green", self.MOCK_CONTRACT_ADDRESS)
        with run.app.app_context():
            stat = util.get_status(self.MOCK_USER, 1)
            self.assertTupleEqual(stat, ns)
            util.exists(self.MOCK_USER)
            stat = util.get_status(self.MOCK_USER, 1)
            self.assertTupleEqual(stat, ns)
            util.write_address(self.MOCK_USER, 1, self.MOCK_CONTRACT_ADDRESS)
            stat = util.get_status(self.MOCK_USER, 1)
            self.assertTupleEqual(stat, dep)
            util.mark_finished(self.MOCK_USER, "01_naive_programmer")
            stat = util.get_status(self.MOCK_USER, 1)
            self.assertTupleEqual(stat, fin)
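Taken together, the two tests above pin down the shape util.get_status returns in the challenge examples: ('Not Started', 'red') before deployment, ('Deployed / Unfinished', 'black', address) after write_address, and ('Done!', 'green', address) after mark_finished. Below is a minimal, hypothetical file-backed sketch of that contract; the storage layout is an assumption, and the challenge-id normalization implied by Example #4 (which mixes 1 and "01_naive_programmer") is omitted.

import os

DB_PATH = "db/"  # hypothetical storage root, standing in for constants.DB_PATH

def write_address(user, challenge, address):
    """Record the deployed contract address for one user's challenge (sketch)."""
    os.makedirs(os.path.join(DB_PATH, user), exist_ok=True)
    with open(os.path.join(DB_PATH, user, str(challenge)), "w") as fh:
        fh.write(address + "\n")

def mark_finished(user, challenge):
    """Append a 'finished' marker to the user's challenge record (sketch)."""
    with open(os.path.join(DB_PATH, user, str(challenge)), "a") as fh:
        fh.write("finished\n")

def get_status(user, challenge):
    """Return ('Not Started', 'red'), ('Deployed / Unfinished', 'black', addr),
    or ('Done!', 'green', addr), matching the assertions in the tests above."""
    path = os.path.join(DB_PATH, user, str(challenge))
    if not os.path.exists(path):
        return ("Not Started", "red")
    lines = open(path).read().splitlines()
    if "finished" in lines[1:]:
        return ("Done!", "green", lines[0])
    return ("Deployed / Unfinished", "black", lines[0])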
Example #5
def fit_time(measure_type, occtime, season=None):
    conn = uo.connect('interval_ion')
    with conn:
        df_bs = pd.read_sql('SELECT * FROM {0}_id_station'.format(measure_type), conn)
        df_area = pd.read_sql('SELECT * FROM area', conn)
        df_tz = pd.read_sql('SELECT Building_Number, rawOffset FROM EUAS_timezone', conn)
    df_tz.set_index('Building_Number', inplace=True)
    df_area.set_index('Building_Number', inplace=True)
    bs_pair = zip(df_bs['Building_Number'], df_bs['ICAO'])
    sns.set_style("whitegrid")
    sns.set_context("talk", font_scale=1)
    value_lb_dict = {'electric': 'Electric_(KWH)', 'gas':
                     'Gas_(CubicFeet)'}
    multiplier_dict = {'electric':  3.412, 'gas': 1.026}
    col = value_lb_dict[measure_type]
    m = multiplier_dict[measure_type]
    ylabel = {'electric': 'electric (kBtu/sq.ft)', 'gas': 'gas kBtu/sq.ft'}
    print len(bs_pair)
    sns.set_style("whitegrid")
    # palette = sns.cubehelix_palette(len(bs_pair))
    palette = sns.color_palette('husl', len(bs_pair))
    sns.set_palette(palette)
    colors_rgb = [util.float2hex(x) for x in palette]
    sns.set_context("talk", font_scale=1)
    jsondir = os.getcwd() + '/input/FY/interval/ion_0627/piecewise_all/json_{0}/'.format(occtime)
    # csvdir = os.getcwd() + '/input/FY/interval/ion_0627/piecewise_all/csv/'
    for i, (b, s) in enumerate(bs_pair):
        print b, s
        try:
            area = df_area.ix[b, 'Gross_Sq.Ft']
        except KeyError:
            print 'No area found'
            continue
        offset = df_tz.loc[b, 'rawOffset']
        df = join_interval(offset, b, s, area, col, m, measure_type, conn, season=season)
        df.to_csv(homedir + 'temp/{0}.csv'.format(b))
        df = df[df[col] >= 0]
        points = df[col]
        outliers = show_outlier(points, b, 'upper', measure_type, 1.5)
        df['outlier'] = outliers
        df = df[~np.array(outliers)]
        df['status_week_day_night'] = \
            df.apply(lambda r: util.get_status(r['hour'], r['day']), axis=1)
        min_time = df['Timestamp'].min()
        max_time = df['Timestamp'].max()
        gr = df.groupby('status_week_day_night')
        bx = plt.axes()
        d0 = plot_piece(gr, bx, occtime, colors_rgb[i], measure_type, b, s, scatter=False, annote=True, jsondir=jsondir, season=season)
    plt.xlabel('Temperature_F')
    # plt.show()
    if season is None:
        path = os.getcwd() + '/input/FY/interval/ion_0627/piecewise_all/{0}_{1}.png'.format(measure_type, occtime)
    else:
        path = os.getcwd() + '/input/FY/interval/ion_0627/piecewise_all/{0}_{1}_{2}.png'.format(measure_type, occtime, season)
    P.savefig(path, dpi = my_dpi, figsize = (2000/my_dpi, 500/my_dpi), bbox_inches='tight')
    shutil.copy(path, path.replace('input/FY/interval/ion_0627/piecewise_all', 'plot_FY_weather/html/interval/lean/all'))
    plt.close()
    return
Example #6
def solve_game_by_brute_force():
    while True:

        if get_status() != GameState.GS_INGAME:
            break
        board, _ = scan_game_board()
        solve_at_least_one_pair(board)
        # With sleep(1) a reshuffle may still be in progress, leaving new_board
        # temporarily wrong, but a few more passes through the while loop fix that.
        time.sleep(1)
        if get_status() != GameState.GS_INGAME:
            break
        new_board, _ = scan_game_board()
        if if_all_zeros(new_board):
            print("All tiles have been cleared")
            print(new_board)
            break
        if board == new_board:
            print("No moves left; a reshuffle is needed")
            resort()
            time.sleep(8)
Example #7
def fit_time(measure_type, occtime):
    conn = uo.connect('interval_ion')
    with conn:
        df_bs = pd.read_sql('SELECT * FROM {0}_id_station'.format(measure_type), conn)
        df_area = pd.read_sql('SELECT * FROM area', conn)
    df_area.set_index('Building_Number', inplace=True)
    bs_pair = zip(df_bs['Building_Number'], df_bs['ICAO'])
    sns.set_style("whitegrid")
    sns.set_context("talk", font_scale=1)
    value_lb_dict = {'electric': 'Electric_(KWH)', 'gas':
                     'Gas_(CubicFeet)'}
    multiplier_dict = {'electric':  3.412, 'gas': 1.026}
    col = value_lb_dict[measure_type]
    m = multiplier_dict[measure_type]
    ylabel = {'electric': 'electric (kBtu/sq.ft)', 'gas': 'gas kBtu/sq.ft'}
    print len(bs_pair)
    sns.set_style("whitegrid")
    # palette = sns.cubehelix_palette(len(bs_pair))
    palette = sns.color_palette('husl', len(bs_pair))
    sns.set_palette(palette)
    colors_rgb = [util.float2hex(x) for x in palette]
    sns.set_context("talk", font_scale=1)
    jsondir = os.getcwd() + '/input/FY/interval/ion_0627/piecewise_all/json_{0}/'.format(occtime)
    # csvdir = os.getcwd() + '/input/FY/interval/ion_0627/piecewise_all/csv/'
    for i, (b, s) in enumerate(bs_pair[:5]):
        print b, s
        try:
            area = df_area.ix[b, 'Gross_Sq.Ft']
        except KeyError:
            print 'No area found'
            continue
        df = join_interval(b, s, area, col, m, measure_type, conn)
        # df.to_csv(homedir + 'temp/{0}.csv'.format(b))
        df = df[df[col] >= 0]
        points = df[col]
        outliers = show_outlier(points, b, 'upper', measure_type, 5)
        df['outlier'] = outliers
        df = df[~np.array(outliers)]
        df['status_week_day_night'] = \
            df.apply(lambda r: util.get_status(r['hour'], r['day']), axis=1)
        min_time = df['Timestamp'].min()
        max_time = df['Timestamp'].max()
        gr = df.groupby('status_week_day_night')
        bx = plt.axes()
        d0 = plot_piece(gr, bx, occtime, colors_rgb[i], measure_type, b, s, scatter=False, annote=True, jsondir=jsondir)
    plt.xlabel('Temperature_F')
    # plt.show()
    path = os.getcwd() + '/input/FY/interval/ion_0627/piecewise_all/{0}_{1}.png'.format(measure_type, occtime)
    P.savefig(path, dpi = my_dpi, figsize = (2000/my_dpi, 500/my_dpi), bbox_inches='tight')
    shutil.copy(path, path.replace('input/FY/interval/ion_0627/piecewise_all', 'plot_FY_weather/html/interval/lean/all'))
    plt.close()
    return
Example #8
  def monitor_instance(self, name, wait_for_state=InstanceState.RUNNING):
    """Blockingly poll an instance until it reaches the requested state."""
    # get_status() doesn't know about this state
    assert wait_for_state is not InstanceState.HADOOP_READY
    while True:
      status, err = util.get_status(name)
      self.update_state(name, status)
      if status == InstanceState.BROKEN:
        self.instance_fail(name, err)
        return False
      if status >= wait_for_state:
        break
      time.sleep(cfg.poll_delay_secs)
    return True
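monitor_instance compares states with >=, and the coordinator launchers elsewhere on this page also print status[1], which suggests each state is an ordered, indexable value. Below is a hypothetical sketch of an InstanceState consistent with that usage; the state names come from these examples, while the tuple encoding, ordinals, and labels are assumptions.

class InstanceState(object):
    """Hypothetical lifecycle states as (ordinal, label) tuples: tuple comparison
    gives the ordering used by `status >= wait_for_state`, and `status[1]` is a
    human-readable label to print."""
    BROKEN = (-1, 'broken')             # terminal failure, handled before the >= check
    PROVISIONING = (0, 'provisioning')  # assumed intermediate state
    RUNNING = (1, 'running')
    SNITCH_READY = (2, 'snitch ready')
    HADOOP_READY = (3, 'hadoop ready')  # not reported by get_status itself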
Example #9
def deploy(address, contract):
    status = util.get_status(address, util.get_contract_number(contract))
    if "not started" in status[0].lower():
        return render_template('deploy.html',
                               deployed=False,
                               address=address,
                               contract=contract)
    else:
        return redirect(
            url_for('view',
                    _external=True,
                    _scheme='https',
                    address=address,
                    contract=contract))
Example #10
    def monitor_instance(self, name, wait_for_state=InstanceState.RUNNING):
        """Blockingly poll an instance until it reaches the requested state."""
        # get_status() doesn't know about this state
        assert wait_for_state is not InstanceState.HADOOP_READY
        while True:
            status, err = util.get_status(name)
            self.update_state(name, status)
            if status == InstanceState.BROKEN:
                self.instance_fail(name, err)
                return False
            if status >= wait_for_state:
                break
            time.sleep(cfg.poll_delay_secs)
        return True
Example #11
def view(address, contract):
    status = util.get_status(address, util.get_contract_number(contract))
    if "not started" in status[0].lower():
        return "Not started!"
    contract_code = open("challenges/" + contract + ".sol").read().strip()
    contract_desc = json.loads(
        open("challenges/" + contract + ".json").read().strip())["description"]
    return render_template('view.html',
                           deployed=True,
                           done=("done" in status[0].lower()),
                           status=status,
                           address=address,
                           contract=contract,
                           contract_code=contract_code,
                           contract_desc=contract_desc)
Example #12
def update(address, contract):
    contract_addr = util.get_status(address, contract)[2].strip()
    checks = json.loads(
        open("challenges/" + contract + ".json").read().strip()).get(
            "post_check", [])
    contract_bal = ethereum.EasyWeb3().balance(contract_addr)
    for check in checks:
        print 50000000000000000, contract_bal, int(check["balance_lt"])
        print type(int(check["balance_lt"])), type(contract_bal)
        if "balance_lt" in check:
            if int(check["balance_lt"]) <= int(contract_bal):
                print 50000000000000000, contract_bal, int(check["balance_lt"])
                return redirect(request.referrer)
                #return redirect("/dashboard?address=" + address)
    util.mark_finished(address, contract)
    return redirect(request.referrer)
Example #13
def dashboard(address):
    challenges = {}
    for challenge_id in constants.CHALLENGES:
        challenges[challenge_id] = json.loads(
            open("challenges/" + challenge_id + ".json").read().strip())
        challenges[challenge_id]["code"] = open("challenges/" + challenge_id +
                                                ".sol").read().strip()
        challenge_id_int = int(challenge_id.split("_")[0])
        challenges[challenge_id]["status"] = util.get_status(
            address, challenge_id_int)
        challenges[challenge_id]["deployed"] = (len(
            challenges[challenge_id]["status"]) == 3)
    return render_template('dashboard.html',
                           address=address,
                           challenge_ids=constants.CHALLENGES,
                           challenges=challenges,
                           exists=util.exists(address))
Example #14
def main():
    # This state machine is not really robust: an unexpected state such as a
    # room-entry timeout will leave the program stuck in an error.
    status = GameState.GS_INLOBBY
    waiting_time = 0
    while True:
        if status == GameState.GS_INLOBBY:
            enter_room_from_lobby()
        elif status == GameState.GS_INROOM:
            if waiting_time == 0:
                click_start()
                waiting_time = 1
            elif waiting_time < 20:
                time.sleep(1)
                waiting_time += 1
                print("Waiting {} seconds...".format(waiting_time))
            else:
                exit_room()
        elif status == GameState.GS_INGAME:
            print("About to start the game")
            time.sleep(2)  # the tiles are not visible right at the start; wait two seconds
            # solve_game_by_brute_force()
            solve_game_with_grace()
            time.sleep(2)
            exit_room()

        pre_status = status
        status = get_status()
        print("Current state: {}".format(status))
        # Reset the timer once we leave the in-room waiting state.
        if pre_status == GameState.GS_INROOM and status != GameState.GS_INROOM:
            waiting_time = 0
        # On an unknown state, try to restore window focus and keep running;
        # this is not guaranteed to work.
        if status == GameState.GS_UNKNOWN:
            hWnd = win32gui.FindWindow(None, "QQ游戏 - 连连看角色版")
            if hWnd == 0:
                hWnd = get_lobby_hWnd()
                assert hWnd != 0
                win32gui.SetForegroundWindow(hWnd)
                win32gui.SetActiveWindow(hWnd)
                status = GameState.GS_INLOBBY
                continue
            else:
                win32gui.SetForegroundWindow(hWnd)
                win32gui.SetActiveWindow(hWnd)
                status = GameState.GS_INROOM
Example #15
def dashboard():
    address = request.args.get("address", None).strip()
    if "|" in address:
        return "Error"  # todo full validation
    challenges = {}
    for challenge_id in config.challenges:
        challenges[challenge_id] = json.loads(
            open("challenges/" + challenge_id + ".json").read().strip())
        challenges[challenge_id]["code"] = open("challenges/" + challenge_id +
                                                ".sol").read().strip()
        challenges[challenge_id]["status"] = util.get_status(
            address, challenge_id)
        challenges[challenge_id]["deployed"] = (len(
            challenges[challenge_id]["status"]) == 3)
    return render_template('dashboard.html',
                           address=address,
                           challenge_ids=config.challenges,
                           challenges=challenges,
                           exists=util.exists(address))
Example #16
def solve_game_with_grace():
    board, _ = scan_game_board()
    check_status_ticker = 1
    while True:
        solved_one = solve_one_pair_with_grace(board)
        if if_all_zeros(board):
            print("All tiles have been cleared")
            break
        elif solved_one is False:
            for row in board:
                print(row)
            print("No moves left; a reshuffle is needed")
            resort()
            time.sleep(8)  # wait for the hint text to disappear; or maybe it would be better to just let this round be lost?
            board, _ = scan_game_board()
        elif check_status_ticker % 20 == 0:
            status = get_status()
            if status != GameState.GS_INGAME:
                print("Unexpected problem in-game; current state is {}".format(status))
                break
        else:
            check_status_ticker += 1
Example #17
    def launch_slave2(self, name):
        """Check to see if the slave's Hadoop daemons can be started yet."""
        # Don't continuously monitor it; just poke it and yield if it's not ready.
        # That way we cycle through all pending slaves quickly and promote the ones
        # from SNITCH_READY to HADOOP_READY as fast as possible.
        # The flow, though, is monitor -> wait for masters -> start slave

        # Monitor
        if self.instances[name] != InstanceState.SNITCH_READY:
            status, err = util.get_status(name)
            self.update_state(name, status)
            if status == InstanceState.BROKEN:
                self.instance_fail(name, err)
                return

        # Are masters up?
        if self.instances[name] == InstanceState.SNITCH_READY:
            if self.masters_up():
                self.start_slave(name)
                # Done!
                return

        # If we fall-through, schedule it for later
        self.other_scheduler.schedule(self.launch_slave2, (name, ))
Example #18
  def launch_slave2(self, name):
    """Check to see if the slave's Hadoop daemons can be started yet."""
    # Don't continuously monitor it; just poke it and yield if it's not ready.
    # That way we cycle through all pending slaves quickly and promote the ones
    # from SNITCH_READY to HADOOP_READY as fast as possible.
    # The flow, though, is monitor -> wait for masters -> start slave

    # Monitor
    if self.instances[name] != InstanceState.SNITCH_READY:
      status, err = util.get_status(name)
      self.update_state(name, status)
      if status == InstanceState.BROKEN:
        self.instance_fail(name, err)
        return

    # Are masters up?
    if self.instances[name] == InstanceState.SNITCH_READY:
      if self.masters_up():
        self.start_slave(name)
        # Done!
        return

    # If we fall-through, schedule it for later
    self.other_scheduler.schedule(self.launch_slave2, (name,))
Example #19
def debug():
    print("In debug")
    print(get_status())
    exit_room()
Example #20
def main():
    common.setup()

    # CHANGE ME
    zone = 'us-central1-a'
    machtype = 'n1-standard-4-d'
    image = 'projects/google/images/ubuntu-12-04-v20120621'
    # Persistent disk, if any.
    disk = ''
    # If this is set, only this slave will have the disk mounted, and it'll be rw.
    # Otherwise, all slaves get the disk mounted ro.
    rw_disk_instance = ''

    print 'Packaging up the stuff the coordinator will need...'
    # tar will insert directories, so flatten the view a bit
    subprocess.call(['cp', 'coordinator/coordinator.py', '.'])
    subprocess.call(['cp', 'coordinator/hadoop_cluster.py', '.'])
    subprocess.call([
        'tar', 'czf', 'coordinator.tgz', 'hadoop', 'gcelib',
        'hadoop-tools.jar', 'cfg.py', 'util.py', 'coordinator.py',
        'hadoop_cluster.py', 'start_setup.sh'
    ])
    subprocess.call(['rm', 'coordinator.py', 'hadoop_cluster.py'])
    # Push to a fixed place for now
    subprocess.call(
        ['gsutil', 'cp', 'coordinator.tgz', cfg.gs_coordinators_tarball])
    subprocess.call(['rm', 'coordinator.tgz'])
    print

    print 'Launching coordinator...'
    util.api.insert_instance(
        name=cfg.coordinator,
        zone=zone,
        machineType=machtype,
        image=image,
        serviceAccounts=gce_shortcuts.service_accounts(
            [cfg.compute_scope, cfg.rw_storage_scope]),
        networkInterfaces=gce_shortcuts.network(),
        metadata=gce_shortcuts.metadata({
            'startup-script': open('start_setup.sh').read(),
            # Key modified to avoid dots, which are disallowed in v1beta13.
            'bootstrap_sh': open('coordinator/bootstrap.sh').read(),
            'tarball': cfg.gs_coordinators_tarball,
            'gs_bucket': cfg.gs_bucket,
            'zone': zone,
            'machine_type': machtype,
            'image': image,
            'disk': disk,
            'rw_disk_instance': rw_disk_instance,
            'secret': cfg.secret
        }),
        blocking=True)
    print

    print 'Waiting for coordinator to come online...'
    while True:
        status, _ = util.get_status(cfg.coordinator)
        print status[1]
        if status == util.InstanceState.SNITCH_READY:
            break
        time.sleep(cfg.poll_delay_secs)
    print

    print 'Controller is ready to receive commands.'
Example #21
def fit(measure_type, year=None):
    conn = uo.connect('interval_ion')
    with conn:
        df_bs = pd.read_sql('SELECT * FROM {0}_id_station'.format(measure_type), conn)
        df_area = pd.read_sql('SELECT * FROM area', conn)
    df_area.set_index('Building_Number', inplace=True)
    bs_pair = zip(df_bs['Building_Number'], df_bs['ICAO'])
    sns.set_style("whitegrid")
    sns.set_palette("Set2", 2)
    sns.set_context("talk", font_scale=1)
    # col_wrap_dict = {'hour': 6, 'month': 4, 'day': 5, 'status':2}
    # upper = {'electric': 600, 'gas': 2500}
    value_lb_dict = {'electric': 'Electric_(KWH)', 'gas':
                     'Gas_(CubicFeet)'}
    multiplier_dict = {'electric':  3.412, 'gas': 1.026}
    col = value_lb_dict[measure_type]
    m = multiplier_dict[measure_type]
    ylabel = {'electric': 'electric (kBtu/sq.ft)', 'gas': 'gas kBtu/sq.ft'}
    # test = ['TN0088ZZ', 'TX0057ZZ', 'NY0281ZZ', 'NY0304ZZ', 'MO0106ZZ']
    # test = ['NM0050ZZ']
    # bs_pair = [x for x in bs_pair if x[0] in test]
    lines = ['Building_Number,week night save%,weekend day save%,weekend night save%,aggregate save%,CVRMSE week day,CVRMSE week night,CVRMSE weekend day,CVRMSE weekend night']
    print len(bs_pair)
    # bs_pair = bs_pair[:1]
    for b, s in bs_pair:
        print b, s
        try:
            area = df_area.ix[b, 'Gross_Sq.Ft']
        except KeyError:
            print 'No area found'
            continue
        df = join_interval(b, s, area, col, m, measure_type, conn, year)
        if len(df) == 0:
            continue
        df.to_csv(homedir + 'temp/{0}.csv'.format(b))
        df = df[df[col] >= 0]
        points = df[col]
        # outliers = show_outlier(points, b, 'upper', measure_type, 5)
        outliers = show_outlier(points, b, 'upper', measure_type, 1.5)
        df['outlier'] = outliers
        df = df[~np.array(outliers)]
        df['status_week_day_night'] = \
            df.apply(lambda r: util.get_status(r['hour'], r['day']), axis=1)
        min_time = df['Timestamp'].min()
        max_time = df['Timestamp'].max()
        sns.set_style("whitegrid")
        colors = sns.color_palette('Paired', 16)
        colors_rgb = [util.float2hex(x) for x in colors]
        sns.set_context("talk", font_scale=1)
        gr = df.groupby('status_week_day_night')
        f, axarr = plt.subplots(2, 2, sharex=True, sharey=True)
        d0 = plot_piece(gr, axarr[0, 0], 'week day', colors_rgb[0], measure_type, b, s)
        if d0 is not None:
            axarr[0, 0].set_title('{0}\nbreak point {1}F, CV(RMSE): {2:.3f}'.format('week day', d0['breakpoint'], d0['CV(RMSE)']))
        d1 = plot_piece(gr, axarr[0, 1], 'week night', colors_rgb[1], measure_type, b, s)
        d2 = plot_piece(gr, axarr[1, 0], 'weekend day', colors_rgb[2], measure_type, b, s)
        d3 = plot_piece(gr, axarr[1, 1], 'weekend night', colors_rgb[3], measure_type, b, s)
        save, err = compute_saving_all(d0, d1, d2, d3, axarr)
        plt.suptitle('{0} -- {1}'.format(min_time, max_time))
        f.text(0.5, 0.04, 'Temperature_F', ha='center', va='center')
        if year is None:
            path = os.getcwd() + '/input/FY/interval/ion_0627/piecewise/{1}/{0}_{1}.png'.format(b, measure_type)
        else:
            path = os.getcwd() + '/input/FY/interval/ion_0627/piecewise/{1}/{0}_{1}_{2}.png'.format(b, measure_type, int(year))
        P.savefig(path, dpi = my_dpi, figsize = (2000/my_dpi, 500/my_dpi), bbox_inches='tight')
        shutil.copy(path, path.replace('input/FY/interval/ion_0627/piecewise', 'plot_FY_weather/html/interval/lean'))
        plt.close()
        lines.append(','.join([b] + save + err))
    if year is None:
        table_path = os.getcwd() + '/input/FY/interval/ion_0627/table/{0}_save.csv'.format(measure_type)
    else:
        table_path = os.getcwd() + '/input/FY/interval/ion_0627/table/{0}_save_{1}.csv'.format(measure_type, int(year))
    with open(table_path, 'w+') as wt:
        wt.write('\n'.join(lines))
    return
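In the interval-data fitting examples, util.get_status(hour, day) is a different helper from the ones above: it buckets each reading into the four occupancy groups ('week day', 'week night', 'weekend day', 'weekend night') that plot_piece is called with. Below is a hypothetical sketch of such a classifier; only the four labels come from the code, while the 8:00-18:00 daytime window and Monday-based day numbering are assumptions.

def get_status(hour, day):
    """Classify one interval reading into an occupancy group (hypothetical cutoffs)."""
    weekday = day < 5          # assumes day is 0..6 with Monday == 0
    daytime = 8 <= hour < 18   # assumed occupied hours
    if weekday:
        return 'week day' if daytime else 'week night'
    return 'weekend day' if daytime else 'weekend night'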
Example #22
def fit(measure_type, year=None, season=None):
    conn = uo.connect('interval_ion')
    with conn:
        df_bs = pd.read_sql('SELECT * FROM {0}_id_station'.format(measure_type), conn)
        df_area = pd.read_sql('SELECT * FROM area', conn)
        df_tz = pd.read_sql('SELECT Building_Number, rawOffset FROM EUAS_timezone', conn)
    df_tz.set_index('Building_Number', inplace=True)
    df_area.set_index('Building_Number', inplace=True)
    bs_pair = zip(df_bs['Building_Number'], df_bs['ICAO'])
    sns.set_style("whitegrid")
    sns.set_palette("Set2", 2)
    sns.set_context("talk", font_scale=1)
    # col_wrap_dict = {'hour': 6, 'month': 4, 'day': 5, 'status':2}
    # upper = {'electric': 600, 'gas': 2500}
    value_lb_dict = {'electric': 'Electric_(KWH)', 'gas':
                     'Gas_(CubicFeet)'}
    multiplier_dict = {'electric':  3.412, 'gas': 1.026}
    col = value_lb_dict[measure_type]
    m = multiplier_dict[measure_type]
    ylabel = {'electric': 'electric (kBtu/sq.ft)', 'gas': 'gas kBtu/sq.ft'}
    # test = ['TN0088ZZ', 'TX0057ZZ', 'NY0281ZZ', 'NY0304ZZ', 'MO0106ZZ']
    # test = ['FL0067ZZ']
    # bs_pair = [x for x in bs_pair if x[0] in test]
    lines = ['Building_Number,week night save%,weekend day save%,weekend night save%,aggregate save%,CVRMSE week day,CVRMSE week night,CVRMSE weekend day,CVRMSE weekend night']
    # bs_pair = bs_pair[:1]
    print len(bs_pair)
    for b, s in bs_pair:
        print b, s
        try:
            area = df_area.ix[b, 'Gross_Sq.Ft']
        except KeyError:
            print 'No area found'
            continue
        offset = df_tz.loc[b, 'rawOffset']
        df = join_interval(offset, b, s, area, col, m, measure_type, conn, year, season)
        if len(df) == 0:
            continue
        df.to_csv(homedir + 'temp/{0}.csv'.format(b))
        df = df[df[col] >= 0]
        points = df[col]
        # outliers = show_outlier(points, b, 'upper', measure_type, 5)
        outliers = show_outlier(points, b, 'upper', measure_type, 1.5)
        df['outlier'] = outliers
        df = df[~np.array(outliers)]
        df['status_week_day_night'] = \
            df.apply(lambda r: util.get_status(r['hour'], r['day']), axis=1)
        min_time = df['Timestamp'].min()
        max_time = df['Timestamp'].max()
        sns.set_style("whitegrid")
        colors = sns.color_palette('Paired', 16)
        colors_rgb = [util.float2hex(x) for x in colors]
        sns.set_context("talk", font_scale=1)
        gr = df.groupby('status_week_day_night')
        f, axarr = plt.subplots(2, 2, sharex=True, sharey=True)
        d0 = plot_piece(gr, axarr[0, 0], 'week day', colors_rgb[0], measure_type, b, s)
        if d0 is not None:
            axarr[0, 0].set_title('{0} ({3})\nbreak point {1}F, CV(RMSE): {2:.3f}'.format('week day', d0['breakpoint'], d0['CV(RMSE)'], b))
        d1 = plot_piece(gr, axarr[0, 1], 'week night', colors_rgb[1], measure_type, b, s)
        x0 = d0['x_range'][0]
        x1 = d0['x_range'][1]
        if type(d0['breakpoint']) == tuple:
            b0 = d0['breakpoint'][0]
            b1 = d0['breakpoint'][1]
            x = np.array([x0, b0, b1, x1])
        else:
            x = np.array([x0, d0['breakpoint'], x1])
        y = d0['fun'](x, *d0['regression_par'])
        d2 = plot_piece(gr, axarr[1, 0], 'weekend day', colors_rgb[2], measure_type, b, s)
        d3 = plot_piece(gr, axarr[1, 1], 'weekend night', colors_rgb[3], measure_type, b, s)
        axarr[0, 1].plot(x, y, ls='--', c='red')
        axarr[1, 0].plot(x, y, ls='--', c='red')
        axarr[1, 1].plot(x, y, ls='--', c='red')
        plt.ylim((0, 0.02))
        plt.xlim((0, 100))
        save, err = compute_saving_all(b, d0, d1, d2, d3, axarr)
        # save, err = compute_saving_all_setback(d0, d1, d2, d3, axarr)
        plt.suptitle('{0} -- {1}'.format(min_time, max_time))
        f.text(0.5, 0.04, 'Temperature_F', ha='center', va='center')
        if year is None:
            if season is None:
                path = os.getcwd() + '/input/FY/interval/ion_0627/piecewise/{1}/{0}_{1}.png'.format(b, measure_type)
            else:
                path = os.getcwd() + '/input/FY/interval/ion_0627/piecewise/{1}/{0}_{1}_{2}.png'.format(b, measure_type, season)
        else:
            path = os.getcwd() + '/input/FY/interval/ion_0627/piecewise/{1}/{0}_{1}_{2}.png'.format(b, measure_type, int(year))
        P.savefig(path, dpi = my_dpi, figsize = (2000/my_dpi, 500/my_dpi), bbox_inches='tight')
        shutil.copy(path, path.replace('input/FY/interval/ion_0627/piecewise', 'plot_FY_weather/html/interval/lean'))
        plt.close()
        lines.append(','.join([b] + save + err))
    if year is None:
        if season is None:
            table_path = os.getcwd() + '/input/FY/interval/ion_0627/table/{0}_save.csv'.format(measure_type)
        else:
            table_path = os.getcwd() + '/input/FY/interval/ion_0627/table/{0}_save_{1}.csv'.format(measure_type, season)
    else:
        table_path = os.getcwd() + '/input/FY/interval/ion_0627/table/{0}_save_{1}.csv'.format(measure_type, int(year))
    with open(table_path, 'w+') as wt:
        wt.write('\n'.join(lines))
    return