Example #1
def snap_records(combined_seg,
                 segments_index,
                 infile,
                 record_type,
                 startyear=None,
                 endyear=None):

    records = util.read_records(infile, record_type, startyear, endyear)
    if not records:
        print("no " + record_type + " records found")
        return

    # Snap records to the nearest segments, with a tolerance of 30 units
    print("snapping " + record_type + " records to segments")
    util.find_nearest(records,
                      combined_seg,
                      segments_index,
                      30,
                      type_record=True)

    # Write out snapped records
    schema = records[0].schema
    shpfile = os.path.join(MAP_FP, record_type + '_joined.shp')
    util.records_to_shapefile(schema, shpfile, records)

    jsonfile = os.path.join(PROCESSED_DATA_FP, record_type + '_joined.json')

    print "output " + record_type + " data to " + jsonfile
    with open(jsonfile, 'w') as f:
        json.dump([r.properties for r in records], f)
Example #2
    def test_find_nearest_float32(self):

        Afloat32 = np.array([0.0, 4.5, 6.2, 3.7, 2.4, 5.4, 3000.0],
                            dtype=np.float32)

        idx = find_nearest(Afloat32, 5.0)
        self.assertEqual(idx, 5)

        idx = find_nearest(Afloat32, 4.8)
        self.assertEqual(idx, 1)
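
These tests pin down the scalar contract: given an array and a single query value, find_nearest returns the index of the closest element. A minimal sketch consistent with the tests above (an assumption; the project's actual util implementation may differ) is:

import numpy as np

def find_nearest(array, value):
    # Return the index of the element of `array` closest to `value`.
    return int(np.abs(array - value).argmin())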
Example #3
    def insert_zoom(self, base_spec, zoom_spec, zoom_level=1):
        x_start = find_nearest(base_spec.x_axis, zoom_spec.x_axis[0])
        x_end = find_nearest(base_spec.x_axis, zoom_spec.x_axis[-1])
        y_start = find_nearest(base_spec.y_axis, zoom_spec.y_axis[0])
        y_end = find_nearest(base_spec.y_axis, zoom_spec.y_axis[-1])

        # Normalize the zoomed spectrogram to the peak of the region it replaces
        norm_ref = np.max(base_spec.spec[y_start:y_end, x_start:x_end])
        zoom_spec.spec = zoom_spec.spec * (norm_ref / np.max(zoom_spec.spec))

        base_spec.spec[y_start:y_end, x_start:x_end] = zoom_spec.spec
        zoom_spec.parent = base_spec

        if zoom_level == 1:
            self.first_zoom = np.append(self.first_zoom, zoom_spec)
        elif zoom_level == 2:
            self.second_zoom = np.append(self.second_zoom, zoom_spec)
Example #4
    def train(self, all_appliance_data):
        """ 
        When training, we are computing P(Y),P(X|Y), where P(Y) is the probability of a certain state occurs
        
        all_appliance_data = {"appliance_name":appliance history power data}

        """
        self.PY = collections.defaultdict(float)
        self.PXY = collections.defaultdict(list)
        self.P = collections.defaultdict(list)
        N = len(all_appliance_data[next(iter(all_appliance_data))])
        mem = collections.defaultdict(list)
        for ind in range(N):
            state = [
                find_nearest(np.array(self.power_list[app]),
                             np.array([all_appliance_data[app][ind]]))[0][0]
                for app in self.power_list
            ]
            state = " ".join([str(i) for i in state])
            self.P[state].append(
                sum(all_appliance_data[app][ind] for app in self.power_list))
            self.PY[state] += 1
            mem[str(state)].append(
                sum([all_appliance_data[app][ind] for app in self.power_list]))
        # Normalize the state counts into probabilities P(Y)
        for key in self.PY:
            self.PY[key] /= N

        # Compute P(X|Y): assume the total power given a state is normally
        # distributed, and store its mean and (floored) standard deviation
        for state in mem:
            self.PXY[state].append(np.mean(mem[state]))
            self.PXY[state].append(max(np.std(mem[state]), 0.01))
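
A hedged usage sketch: the model object and the appliance names below are made up for illustration; only the shape of all_appliance_data comes from the docstring, and the keys are assumed to match the model's power_list.

    # Hypothetical per-appliance power histories of equal length
    all_appliance_data = {
        "fridge": [120.0, 118.5, 0.0, 0.0],
        "heater": [0.0, 1500.0, 1500.0, 0.0],
    }
    model.train(all_appliance_data)  # fills model.PY and model.PXY per state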
Example #5
    def insert_visualization(self, spec_img, zoom_spec):
        y_axis = np.linspace(0, 22050, 2049)

        x_start = find_nearest(self.base_spec.x_axis, zoom_spec.x_axis[0])
        x_end = find_nearest(self.base_spec.x_axis, zoom_spec.x_axis[-1])
        y_start = find_nearest(
            y_axis, zoom_spec.y_axis[0]
        ) - 1  # avoid a "discontinuity" at the kernel boundary
        y_end = find_nearest(y_axis, zoom_spec.y_axis[-1])

        zoom_img = PIL.Image.fromarray(
            MultiResSpectrogram.convert_to_visualization(
                zoom_spec.spec)).resize((x_end - x_start, y_end - y_start))
        box = (x_start, y_start, x_end, y_end)
        spec_img.paste(zoom_img, box)

        return spec_img
Example #6
    def power_disaggregate(self,
                           total_power_usage,
                           r_blur=30,
                           all_change_point=True):
        # total_power_usage is simply a list
        n_equipment_type = len(self.power_list)

        t = np.array([i + 1 for i in range(len(total_power_usage))])
        y = total_power_usage

        if not all_change_point:  # otherwise every time step is treated as a changepoint
            t_2, y_2 = bcp.rel_change_filter_0819_3(t, y)
            mu_list_list, sigma_list_list, prob_r_list_list, r_list_list = cp_detect.bayesian_change_point_4(
                y_2, r_blur=r_blur)
            changepoint, changepoint_p = cp_detect.get_change_point(
                prob_r_list_list)
            if len(changepoint) > 0 and changepoint[-1] != len(t_2) - 1:
                changepoint.append(len(t_2) - 1)
            cp_list = changepoint
        else:
            cp_list = [i for i in range(len(total_power_usage))]

        data_seg, n_seg, temp = self.segment_data(total_power_usage, cp_list)

        # compute the trace list
        trace_list, _ = find_nearest(
            np.array([sum(s) for s in self.state_combinations]),
            np.array([np.mean(np.array(seg)) for seg in data_seg]))
        # generate the predicted profile
        predicted_profile = [[] for _ in range(n_equipment_type + 1)]

        if cp_list[-1] == len(total_power_usage) - 1:
            cp_list = cp_list[:-1]

        for i_cp in range(len(trace_list)):
            t_start = cp_list[i_cp]
            if i_cp == len(cp_list) - 1:
                t_end = len(total_power_usage)
            else:
                t_end = cp_list[i_cp + 1]

            for i_equipment in range(n_equipment_type):
                temp = self.state_combinations[trace_list[i_cp]][i_equipment]
                predicted_profile[i_equipment].extend(
                    [temp for _ in range(t_end - t_start)])

        power_sum = np.sum(predicted_profile[:-1], axis=0)
        power_sum[power_sum == 0] = 1  # avoid division by zero below
        print(len(total_power_usage), len(power_sum))
        predicted_profile_2 = [
            np.multiply(np.array(total_power_usage), np.divide(p, power_sum))
            for p in predicted_profile[:-1]
        ]

        return predicted_profile_2
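
The "compute the trace list" step relies on a vectorized variant of find_nearest: for each segment's mean power it picks the index of the closest total-power state combination. One plausible implementation matching this call signature (an assumption; the repository's util may differ) is:

import numpy as np

def find_nearest(candidates, queries):
    # For each query, the index of the nearest candidate and its distance.
    diffs = np.abs(candidates[None, :] - queries[:, None])
    indices = diffs.argmin(axis=1)
    return indices, diffs[np.arange(len(queries)), indices]

Note that the examples do not share one convention: here the first return value is used as indices into state_combinations, while in Example #9 it is used as the nearest value itself, so the snippets evidently come from different util modules.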
Example #7
def add_signals(inter_data, filename):
    """
    Add traffic signal feature to inter_data, write it back out
    to inter_data.json
    Args:
        inter_data
        filename - shape file for the osm signals data
    """
    signals = fiona.open(filename)
    signals = util.reproject_records(signals)
    records = [{
        'point': Point(x['geometry']['coordinates']),
        'properties': x['properties']
    } for x in signals]

    seg, segments_index = util.read_segments(dirname=MAP_FP,
                                             get_non_inter=False)

    util.find_nearest(records, seg, segments_index, 20)

    matches = {}
    for record in records:
        near = record['properties']['near_id']
        if near:
            matches[near] = 1

    new_inter_data = {}
    for key, segments in inter_data.items():
        signal = '1' if key in matches else '0'
        updated_segments = []
        for segment in segments:
            segment.update({'signal': signal})
            updated_segments.append(segment)
        new_inter_data[key] = updated_segments

    with open(os.path.join(DATA_FP, 'inters_data.json'), 'w') as f:
        json.dump(new_inter_data, f)
Example #8
    def test_power_disaggregate(self, total_power_usage, r_blur=30):
        # total_power_usage is simply a list
        n_equipment_type = len(self.power_list)

        t = np.array([i + 1 for i in range(len(total_power_usage))])
        y = total_power_usage
        cp_list = [i for i in range(len(total_power_usage))]

        data_seg, n_seg, temp = self.segment_data(total_power_usage, cp_list)

        # compute the trace list
        trace_list, _ = find_nearest(
            np.array([sum(s) for s in self.state_combinations]),
            np.array([np.mean(np.array(seg)) for seg in data_seg]))
        # generate the predicted profile
        predicted_profile = [[] for _ in range(n_equipment_type + 1)]

        if cp_list[-1] == len(total_power_usage) - 1:
            cp_list = cp_list[:-1]

        for i_cp in range(len(trace_list)):
            t_start = cp_list[i_cp]
            if i_cp == len(cp_list) - 1:
                t_end = len(total_power_usage)
            else:
                t_end = cp_list[i_cp + 1]

            for i_equipment in range(n_equipment_type):
                temp = self.state_combinations[trace_list[i_cp]][i_equipment]
                predicted_profile[i_equipment].extend(
                    [temp for _ in range(t_end - t_start)])

        power_sum = np.sum(predicted_profile[:-1], axis=0)
        power_sum[power_sum == 0] = 1  # avoid division by zero below
        print(len(total_power_usage), len(power_sum))
        predicted_profile_2 = [
            np.multiply(np.array(total_power_usage), np.divide(p, power_sum))
            for p in predicted_profile[:-1]
        ]

        return predicted_profile_2
Example #9
    def update_state(self, action, dt):
        if self.is_at_goal or self.ran_out_of_time or self.in_collision:
            if self.is_at_goal: self.was_at_goal_already = True
            if self.in_collision: self.was_in_collision_already = True
            self.vel_global_frame = np.array([0.0, 0.0])
            return

        # self.past_actions = np.roll(self.past_actions,1,axis=0)
        # self.past_actions[0,:] = action

        if self.action_time_lag > 0:
            # Store current action in dictionary, then look up the past action that should be executed this step
            self.chosen_action_dict[self.t] = action
            # print "-------------"
            # print "Agent id: %i" %self.id
            # print "Current t:", self.t
            # print "Current action:", action
            timestamp_of_action_to_execute = self.t - self.action_time_lag
            # print "timestamp_of_action_to_execute:", timestamp_of_action_to_execute
            if timestamp_of_action_to_execute < 0:
                # print "storing up actions...."
                action_to_execute = np.array([0.0, 0.0])
            else:
                nearest_timestamp, _ = util.find_nearest(
                    np.array(list(self.chosen_action_dict.keys())),
                    timestamp_of_action_to_execute)
                # print "nearest_timestamp:", nearest_timestamp
                action_to_execute = self.chosen_action_dict[
                    nearest_timestamp[0]]
            # print "action_to_execute:", action_to_execute
        else:
            action_to_execute = action

        selected_speed = action_to_execute[0] * self.pref_speed
        selected_heading = util.wrap(
            action_to_execute[1] +
            self.heading_global_frame)  # in global frame

        dx = selected_speed * np.cos(selected_heading) * dt
        dy = selected_speed * np.sin(selected_heading) * dt
        self.pos_global_frame += np.array([dx, dy])
        self.vel_global_frame[0] = selected_speed * np.cos(selected_heading)
        self.vel_global_frame[1] = selected_speed * np.sin(selected_heading)
        self.speed_global_frame = selected_speed
        self.heading_global_frame = selected_heading

        # Compute heading w.r.t. ref_prll, ref_orthog coordinate axes
        self.ref_prll, self.ref_orth = self.get_ref()
        ref_prll_angle_global_frame = np.arctan2(self.ref_prll[1],
                                                 self.ref_prll[0])
        self.heading_ego_frame = util.wrap(self.heading_global_frame -
                                           ref_prll_angle_global_frame)

        # Compute velocity w.r.t. ref_prll, ref_orthog coordinate axes
        cur_speed = np.linalg.norm(self.vel_global_frame)
        v_prll = cur_speed * np.cos(self.heading_ego_frame)
        v_orthog = cur_speed * np.sin(self.heading_ego_frame)
        self.vel_ego_frame = np.array([v_prll, v_orthog])

        # Update time left so agent does not run around forever
        self.time_remaining_to_reach_goal -= dt
        self.t += dt
        if self.time_remaining_to_reach_goal <= 0.0 and not Config.ROBOT_MODE:
            self.ran_out_of_time = True

        self._update_state_history()

        self._check_if_at_goal()

        return
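
The action-lag branch above replays the action chosen action_time_lag seconds earlier, using find_nearest to tolerate non-uniform timestamps. A toy illustration of that lookup (all names and values here are made up):

import numpy as np

chosen_action_dict = {0.0: "a0", 0.1: "a1", 0.2: "a2"}  # timestamp -> action
t, action_time_lag = 0.38, 0.2
target = t - action_time_lag  # 0.18: the timestamp whose action should run now

timestamps = np.array(list(chosen_action_dict.keys()))
nearest = timestamps[np.abs(timestamps - target).argmin()]
print(chosen_action_dict[nearest])  # "a2", since 0.2 is the nearest timestamp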
Example #10
    def test_find_nearest_int16(self):

        Aint16 = np.array([0, 4, 6, 3, 2, 54, 3], dtype=np.int16)

        idx = find_nearest(Aint16, 6)
        self.assertEqual(idx, 2)
Example #11
        N = 2000
        WMAX = [50, 250, 600, 930, 2100]

        for wmax in WMAX:
            kappa, deltak = createKappa(minvel, wmax, N)
            self.assertAlmostEqual(kappa[N - 1], wmax / minvel, delta=sdelta)
            self.assertAlmostEqual(kappa[0], 0.0, delta=sdelta)
            self.assertEqual(N, len(kappa))
            self.assertAlmostEqual(deltak, kappa[1] - kappa[0], delta=sdelta)


#   ------------------------------------------

from util import find_nearest

find_nearest(np.array([1, 2, 3], dtype=np.int16), 2)  # module-level smoke call


class Test_find_nearest(unittest.TestCase):

    #test for integers
    def test_find_nearest_int16(self):

        Aint16 = np.array([0, 4, 6, 3, 2, 54, 3], dtype=np.int16)

        idx = find_nearest(Aint16, 6)
        self.assertEqual(idx, 2)

    #test for floats
    def test_find_nearest_float32(self):

        Afloat32 = np.array([0.0, 4.5, 6.2, 3.7, 2.4, 5.4, 3000.0],
                            dtype=np.float32)

        idx = find_nearest(Afloat32, 5.0)
        self.assertEqual(idx, 5)
Example #12
    # Have to use pandas read_csv because of unicode troubles
    concern_raw = pd.read_csv(RAW_DATA_FP + '/Vision_Zero_Entry.csv')
    concern_raw = concern_raw.to_dict('records')
    for r in concern_raw:
        concern.append(
            util.read_record(r,
                             r['X'],
                             r['Y'],
                             orig=pyproj.Proj(init='epsg:4326')))
    print "Read in data from {} concerns".format(len(concern))

    combined_seg, segments_index = util.read_segments()

    # Find nearest crashes - 30 tolerance
    print "snapping crashes to segments"
    util.find_nearest(crash, combined_seg, segments_index, 30)

    # Find nearest concerns - 20 tolerance
    print "snapping concerns to segments"
    util.find_nearest(concern, combined_seg, segments_index, 20)

    # Write concerns
    concern_schema = make_schema('Point', concern[0]['properties'])
    print "output concerns shp to ", MAP_FP
    util.write_shp(concern_schema, MAP_FP + '/concern_joined.shp', concern,
                   'point', 'properties')
    print "output concerns data to ", PROCESSED_DATA_FP
    with open(PROCESSED_DATA_FP + '/concern_joined.json', 'w') as f:
        json.dump([c['properties'] for c in concern], f)

    # Write crash