    def calculateLOS(self, max_long_link_ft=None, building_per_hash_bin=5):
        if max_long_link_ft is not None:
            self.max_long_link = max_long_link_ft
        self.building_per_hash_bin = building_per_hash_bin
        self.hashXYBuilding()

        total_fso_tx = len( self.fso_tx )
        stat_fso_tx_pair_count = 0
        stat_los_time = 0.0
        stat_total_los_pairs = self.getNPairsForLOSCalc()
        with open(self.input_bldg_file+'.dyn', 'w') as output_dyn_file:
            for i in range(total_fso_tx-1):
                p0 = self.fso_tx[i][ 0, :]
                if np.isnan(p0[0]): continue
                for j in range(i+1,total_fso_tx):
                    p1 = self.fso_tx[j][0, :]
                    if np.isnan(p1[0]): continue
                    if not self.isLink(p0, p1):
                        continue
                    start_t = mytimer()
                    stat_fso_tx_pair_count += 1
                    if self.isLOS(p0, p1):
                        self.fso_los.append((i,j))
                        output_dyn_file.write(str(i+1)+", "+str(j+1)+"\n")
                        output_dyn_file.flush()
                    stat_los_time += mytimer() - start_t
                if stat_fso_tx_pair_count > 0:
                    stat_cur_avg_los_calc_time = stat_los_time / stat_fso_tx_pair_count
                    stat_expected_remaining_time = (stat_total_los_pairs - stat_fso_tx_pair_count)*stat_cur_avg_los_calc_time
                    print "Progress: fso_tx: ", i+1, "/", total_fso_tx, \
                        " LOS-pairs: ", stat_fso_tx_pair_count, "/", stat_total_los_pairs,\
                        " Avg. time per LOS-pair: ", stat_cur_avg_los_calc_time, "sec "\
                        " Expected Remaining time: ",  stat_expected_remaining_time,"sec"
        return
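# A minimal reader sketch (not part of the original class; the name is illustrative):
# each line of the '<input_bldg_file>.dyn' file written above holds a 1-indexed
# "i, j" pair of FSO towers with line of sight, so we convert back to 0-indexed edges.
def read_los_pairs(dyn_path):
    edges = []
    with open(dyn_path) as dyn_file:
        for line in dyn_file:
            i_str, j_str = line.strip().split(',')
            edges.append((int(i_str) - 1, int(j_str) - 1))
    return edges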
def driverDynamicGraphGenerator():

    #-----params--------------------#
    input_file = 'world_trade_center'
    max_link_length_km = 1.0 # affects run time
    fso_tower_height_ft = 30.0
    building_perimeter_req_ft = 80.0
    building_per_bin = 5
    #-------end params-----------------#

    start_t = mytimer()
    dgg = DynamicGraphGenerator()
    dgg.load3DBuildingData(input_file)
    dgg.setMaxLinkLength(max_link_length_km*3280.84)
    dgg.addFSOTowers(tower_height_ft=fso_tower_height_ft,
                     min_bldg_perimeter_req_ft=building_perimeter_req_ft)
    dgg.calculateLOS(building_per_hash_bin=building_per_bin)
    print 'Execution time:', np.round((mytimer() - start_t), 3), "seconds"
    dgg.visualize3Dbuilding(showFSOLinks=True, showFSOTowers=True ) #comment this before final run
    return
    def find_M_alpha(self, max_duration=-1):
        '''
        Searches the candidate alphas from generate_list_of_alphas() for the
        (matching, alpha) pair that maximizes the matched weight, clipped to
        [0, alpha], per unit time, i.e. sum of the clipped weights over the
        matching divided by (alpha + delta).

        :param max_duration: if non-negative, only alphas <= max_duration are tried
            (falling back to [max_duration] if none remain and max_duration > 0)
        :return: (best_matching, best_alpha); (None, -1) if there is no traffic left
        '''
        start_t2 = mytimer()
        best_score = -1
        best_matching = None
        best_alpha = -1
        alpha_set = self.generate_list_of_alphas()
        if len(alpha_set) <= 0:  #no traffic left
            return best_matching, best_alpha

        if max_duration >= 0:
            alpha_set = [i for i in alpha_set if i <= max_duration]
            if len(alpha_set) == 0 and max_duration > 0:
                # no alpha survived the filter above; just use the remaining time, i.e. max_duration
                alpha_set = [max_duration]

        for alpha in alpha_set:
            if alpha <= 0:
                continue
            self.stat_matching_count += 1
            start_t = mytimer()

            start_t_w = mytimer()
            self.edge_weights = self.calculate_edge_weights(alpha)
            elapsed_t_w = mytimer() - start_t_w

            clipped_weights = np.clip(self.edge_weights, a_max=alpha, a_min=0)

            start_t_m = mytimer()
            row_indx, col_indx = linear_sum_assignment(-clipped_weights)
            elapsed_t_m = mytimer() - start_t_m

            matching_score = np.sum(clipped_weights[row_indx, col_indx]) / (
                1.0 * alpha + self.delta)

            if matching_score > best_score:
                best_score, best_alpha = matching_score, alpha
                best_matching = list(zip(row_indx, col_indx))
            self.stat_matching_time += (mytimer() - start_t)
            #print("debug: w_time, m_time:", 1000*elapsed_t_w, 1000*elapsed_t_m)
        elapsed_t = mytimer() - start_t2
        print("debug: elapsed time:", elapsed_t, " seconds ", " #-alphas: ",
              len(alpha_set))
        print("debug: list of alphas: ", alpha_set)
        return best_matching, best_alpha
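# Standalone sketch of the selection rule in find_M_alpha above (the weight matrix,
# alpha list, and delta below are illustrative assumptions, not the original data):
# for each candidate duration alpha, clip the edge weights to [0, alpha], solve a
# maximum-weight matching with scipy's linear_sum_assignment (negated weights), and
# keep the (matching, alpha) pair that maximizes total clipped weight / (alpha + delta).
import numpy as np
from scipy.optimize import linear_sum_assignment

def best_matching_over_alphas(weights, alpha_set, delta=0.1):
    best_score, best_alpha, best_matching = -1.0, -1, None
    for alpha in alpha_set:
        if alpha <= 0:
            continue
        clipped = np.clip(weights, a_min=0, a_max=alpha)
        row_indx, col_indx = linear_sum_assignment(-clipped)  # maximize total weight
        score = clipped[row_indx, col_indx].sum() / (alpha + delta)
        if score > best_score:
            best_score, best_alpha = score, alpha
            best_matching = list(zip(row_indx, col_indx))
    return best_matching, best_alpha

# Example call (commented out so the module stays import-safe):
# print(best_matching_over_alphas(np.random.rand(4, 4) * 5.0, [1.0, 2.0, 4.0]))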
Example #4
    def extractSmallerScenario(self,
                               max_x=float('inf'),
                               max_y=float('inf'),
                               min_x=-float('inf'),
                               min_y=-float('inf'),
                               ofname='data_small.txt'):
        '''
        Must call self.parsePreGeneratedPathlossData(..) first (it is called
        automatically if the data is not yet loaded).
        Extracts a smaller scenario from the 80k data file:
            -> find all the PU, SS and SU in the bounded region and list them
            -> find the PUs within the region along with their PRs (self.PU, self.PR)
            -> find all the SS within the region (self.SS and self.loss_PU_SS)
            -> find all the SU within the region (self.SU and self.loss_PR_SU for each PR)
            -> finally save all of the above in the output file ofname
        :return:
        '''
        start_t = mytimer()
        print "debug: extracting"
        if self.PU is None:
            self.parsePreGeneratedPathlossData()  #default for 80k data
        n_pu = None
        n_su = None
        n_ss = None

        x_indx, y_indx = 0, 1
        n_pu = self.findObjectsInBoundedRegion(self.PU,
                                               max_x=max_x,
                                               max_y=max_y,
                                               min_x=min_x,
                                               min_y=min_y,
                                               x_indx=x_indx,
                                               y_index=y_indx)
        print "Total PU:", n_pu.shape[0]

        x_indx, y_indx = 0, 1
        n_su = self.findObjectsInBoundedRegion(self.SU,
                                               max_x=max_x,
                                               max_y=max_y,
                                               min_x=min_x,
                                               min_y=min_y,
                                               x_indx=x_indx,
                                               y_index=y_indx)
        print "Total SU:", n_su.shape[0]

        x_indx, y_indx = 0, 1
        n_ss = self.findObjectsInBoundedRegion(self.SS,
                                               max_x=max_x,
                                               max_y=max_y,
                                               min_x=min_x,
                                               min_y=min_y,
                                               x_indx=x_indx,
                                               y_index=y_indx)
        print "Total SS: ", n_ss.shape[0]

        # plt.scatter( self.PU[n_pu, 0], self.PU[n_pu, 1], marker = 's', s=25, c='b' )
        # plt.scatter( self.SS[n_ss, 0], self.SS[n_ss, 1], marker = 'o', s=4,  c='r' )
        # plt.scatter( self.SU[n_su, 0], self.SU[n_su, 1], marker = '^', s=25, c='g' )
        # plt.show()

        #--RETHINK USING LOOP TO AVOID ERROR--!!
        with open(ofname, "w") as f:

            #---write the count-----#
            f_line = "COUNT" + " " + str(n_pu.shape[0]) + " " + str(
                n_su.shape[0]) + " " + str(n_ss.shape[0])
            f.write(f_line)
            # ------write the PU's----#
            #-------PU PU_ID X Y Z Transmit_power
            pu_counter = 0
            for i in n_pu:
                f_line = "\n"+"PU"+" "+str(pu_counter)+\
                                   " "+str( self.PU[i, 0] )+" "+str( self.PU[i, 1] )+\
                                   " "+str( self.PU[i, 2] )+" "+str( self.PU[i, 3] )
                f.write(f_line)
                pu_counter += 1

            #------write the SU's----#
            #-----SU SU_ID X Y Z
            su_counter = 0
            for i in n_su:
                f_line = "\n"+"SU"+" "+str(su_counter)+\
                                   " "+str( self.SU[i, 0] )+" "+str( self.SU[i, 1] )+\
                                   " "+str( self.SU[i, 2] )
                f.write(f_line)
                su_counter += 1

            #-------write the SS's----#
            #-----SS SS_ID X Y Z
            ss_counter = 0
            for i in n_ss:
                f_line = "\n"+"SS"+" "+str(ss_counter)+\
                                   " "+str( self.SS[i, 0] )+" "+str( self.SS[i, 1] )+\
                                   " "+str( self.SS[i, 2] )
                f.write(f_line)
                ss_counter += 1

            #---write the PR's-----#
            #---PR PU_ID PR_ID X Y Z Threshold
            pu_counter = 0
            for i in n_pu:
                for j in np.arange(0, self.pr_per_pu):
                    f_line = "\n"+"PR"+" "+str( pu_counter )+" "+str( j )+\
                                   " "+str( self.PR[i, j, 0] )+" "+str( self.PR[i, j, 1] )+\
                                   " "+str( self.PR[i, j, 2] )
                pu_counter += 1

            #-----write the A: PU-SS path-loss
            #-----A PU_ID SS_ID Path_loss---
            pu_counter = 0
            for i in n_pu:
                ss_counter = 0
                for j in n_ss:
                    f_line = "\n"+"A"+" "+str(pu_counter)+" "+str(ss_counter)+\
                                       " "+str( self.loss_PU_SS[i, j] )
                    f.write(f_line)
                    ss_counter += 1
                pu_counter += 1

            #-----write the B: PR-SU path-loss
            #-----B PU_ID PR_ID SU_ID Path_loss
            pu_counter = 0
            su_counter = 0
            for i in n_pu:
                su_counter = 0
                for j in n_su:
                    for k in np.arange(0, self.pr_per_pu):
                        f_line = "\n"+"B"+" "+str(pu_counter)+" "+str( k )+" "+str( su_counter )+\
                                           " "+str( self.loss_PR_SU[i, k, j] )
                        f.write(f_line)
                    su_counter += 1
                pu_counter += 1

        print 'Extraction time:', np.round((mytimer() - start_t), 3), "seconds"
        return
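# A minimal standalone sketch (hypothetical helper, not taken from the original source)
# of the bounding-box filter that findObjectsInBoundedRegion above is assumed to perform:
# it returns the row indices of the points whose x/y columns lie inside the region.
import numpy as np

def find_objects_in_bounded_region(points, max_x=float('inf'), max_y=float('inf'),
                                   min_x=-float('inf'), min_y=-float('inf'),
                                   x_indx=0, y_indx=1):
    in_box = ((points[:, x_indx] >= min_x) & (points[:, x_indx] <= max_x) &
              (points[:, y_indx] >= min_y) & (points[:, y_indx] <= max_y))
    return np.where(in_box)[0]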
Example #5
def runExperiment():
    '''
    Loads the pre-generated path-loss data for the small scenario and processes
    a single SU request via SUAllocation.
    :return:
    '''
    check_memory = False  #<---debug

    if check_memory:
        h = hpy()

    sua = SUAllocation()
    ifname = 'data_small.txt'
    sua.parsePreGeneratedPathlossData(ifname=ifname)
    #sua.extractSmallerScenario(max_x= 8500., max_y = 8500., min_x = 7500., min_y =  7500.)
    #sua.plotLocations()
    sua.processSURequest(suID=6)

    if check_memory:
        import pdb
        pdb.set_trace()
        print h.heap()
    return


if __name__ == '__main__':
    start_t = mytimer()

    profileCode = False  #<--debug
    if profileCode:
        cProfile.run('runExperiment()', 'expProfile.cprof')
    else:
        runExperiment()
    print 'Execution time:', np.round((mytimer() - start_t), 3), "seconds"
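    # Optional sketch (not in the original script): when profileCode is True, the saved
    # profile can be inspected with the standard-library pstats module.
    if profileCode:
        import pstats
        pstats.Stats('expProfile.cprof').strip_dirs().sort_stats('cumulative').print_stats(20)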
if len(sys.argv) != 3:
    print('Need two cmd line arguments: offset and num_images.')
    sys.exit(0)

offset = int(sys.argv[1])
num_images = int(sys.argv[2])

print('offset {},  num_images {}'.format(offset, num_images))

ti = tinyimages.TinyImages('/scratch/tinyimages')
cifar = cifar10.CIFAR10Data('/scratch/cifar10')

if num_images <= 0:
    num_images = ti.img_count

print('Reading {} images ...'.format(num_images))
start = mytimer()
imgs = ti.slice_to_numpy(offset, num_images)
imgs = imgs.reshape([num_images, 32 * 32 * 3])
stop = mytimer()
print('    done in {} seconds'.format(stop - start))


def normalize_data(data):
    n, dims = data.shape
    mean = np.sum(data, axis=0) / n
    data2 = data - mean.reshape((1, dims))
    norms = np.sqrt(np.sum(np.square(data2), axis=1))
    norms = np.maximum(norms, 0.1)
    data3 = data2 / norms.reshape((n, 1))
    data3 = data3.astype(np.float32)
    return data3, mean
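# Usage sketch (illustration only; the demo array is made up): normalize_data centers
# the rows around the column mean and scales each row to roughly unit L2 norm, with
# norms floored at 0.1 so near-zero rows are not blown up. In the script above it
# would typically be applied to `imgs`, e.g. imgs_norm, imgs_mean = normalize_data(imgs).
demo = np.random.RandomState(0).rand(4, 32 * 32 * 3)
demo_normalized, demo_mean = normalize_data(demo)
print('demo row norms after normalization:', np.linalg.norm(demo_normalized, axis=1))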