Example #1
import time

import numpy as np
from pf import pf  # assumed import; matches the import used in Example #5


def howlong1(matrixSize, matrixNum):
    """Time matrixNum Pfaffian evaluations of random 2*matrixSize antisymmetric matrices."""
    start = time.time()
    for _ in range(matrixNum):
        X = np.random.random((2 * matrixSize, 2 * matrixSize))
        S = X.transpose() - X  # antisymmetric, so the Pfaffian is defined
        pf(S)
    end = time.time()
    return end - start
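A quick way to exercise howlong1 is a small benchmark loop; the matrix sizes and repetition count below are illustrative choices, not values from the original code:

# Illustrative benchmark driver (sizes and repeat count are arbitrary).
if __name__ == '__main__':
    for size in (50, 100, 200):
        elapsed = howlong1(size, 10)
        print('2N = %4d: %.3f s for 10 Pfaffian evaluations' % (2 * size, elapsed))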
Example #2
    def track_object(self, obj_i, start_state):
        print "Tracking object %i of %i" % (obj_i + 1, len(self.start_states))
        print "Start state for object %i is %s." % (obj_i + 1, start_state)

        self.preresampled_particles[obj_i] = numpy.zeros((self.num_frames, self.num_particles, start_state.size))
        self.resampled_particles[obj_i] = numpy.zeros((self.num_frames, self.num_particles, start_state.size))
        self.highest_weight_particles.append([start_state])

        particles = numpy.array([start_state] * self.num_particles)

        track = numpy.zeros((self.num_frames, start_state.size))
        track[0, :] = start_state
        self.preresampled_particles[obj_i][0] = particles
        self.resampled_particles[obj_i][0] = particles

        for i, frame in enumerate(self.video[1:], 1):
            particles, intermediate_particles = pf(
                particles,
                self.preprocess_image(frame),
                self.goodness,
                sampling_function=self.sample,
                resampling_function=self.save_highest_weight_particle_and_resample,
            )
            track[i, :] = particles.mean(axis=0)
            self.resampled_particles[obj_i][i] = particles

            print "Tracked frame %i of %i" % (i + 1, self.num_frames)

        self.highest_weight_particles[obj_i] = numpy.array(self.highest_weight_particles[obj_i])

        print "Tracking complete."
        print

        self.tracks[obj_i] = track
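Judging by its arguments, pf advances the particle set by one frame: a propagation step (sampling_function), a weighting step against the preprocessed frame (goodness), and a resampling step (resampling_function), returning both the resampled and the pre-resampling particles. The sketch below is a generic bootstrap particle-filter step in that spirit; the callback signatures and the return convention are assumptions for illustration, not the repository's pf implementation.

import numpy as np

def bootstrap_pf_step(particles, observation, goodness, sampling_function,
                      resampling_function=None):
    # Illustrative only; the real pf's callback signatures may differ.
    # 1. Propagate: move every particle with the (assumed) transition model.
    particles = sampling_function(particles)
    # 2. Weight: score each propagated particle against the observation.
    weights = np.array([goodness(p, observation) for p in particles], dtype=float)
    weights /= weights.sum()
    # 3. Resample: draw an equally weighted set, favouring high-weight particles.
    if resampling_function is not None:
        resampled = resampling_function(particles, weights)
    else:
        idx = np.random.choice(len(particles), size=len(particles), p=weights)
        resampled = particles[idx]
    return resampled, particles  # (resampled set, pre-resampling set)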
Example #3
    def __init__(self, p1, p2, p, w0, ngrain,
                 filename='temp.txt', dist='g',
                 iplot=True, idot=False):
        path = os.getcwd()
        f = open(os.path.join(path, filename), 'w')
        f.write('Designed texture using probability distributions \n')
        f.write('Given Euler angles are as below \n')
        f.write('phi1,   phi2,   PHI = %s, %s, %s' % (p1, p2, p))

        f.write('\nB   ' + str(ngrain))
        for i in range(ngrain):
            txt = text(p1=p1, p2=p2, p=p, w0=w0, dist=dist)
            angle = txt.angle

            f.write('\n %9.2f %9.2f %9.2f %9.3f' % (angle[0], angle[1], angle[2], 0.1))
        f.close()
        if iplot:
            pf.pf(ftex=filename, idot=idot)
Example #4
    def test_pf(self):
        wconf = copy.copy(self.wconf)
        wconf.hexTiers = 0
        wconf.usersPerCell = 10
        wconf.mobileVelocity = 100
        world1 = world.World(wconf, self.phy)
        world1.associatePathlosses()
        world1.calculateSINRs()
        rate = 1
        avg_rate = np.ones(len(world1.mobiles))
        
        # when avg_rate is near zero for user 0, that user should receive all resource blocks (RBs)
        avg_rate[0] = 1e-20
        alloc = pf.pf(world1, world1.cells[0], world1.mobiles, rate, avg_rate)
        np.testing.assert_array_equal(alloc, np.zeros([50, 10]))

        rate = 1e6
        pSupplyPC = pf.pf_ba(world1, world1.cells[0], world1.mobiles, rate)
        pSupplyDTX = pf.pf_dtx(world1, world1.cells[0], world1.mobiles, rate)
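For context, proportional-fair (PF) scheduling assigns each resource block (RB) to the user with the largest ratio of instantaneous rate to long-term average rate, which is why the test above gives user 0 a near-zero average rate before checking the allocation. Below is a standalone sketch of that rule that returns one user index per RB; it is not pf.pf's interface or return format, and the smoothing constant is an arbitrary choice.

import numpy as np

def pf_schedule(inst_rate, avg_rate, num_rbs=50, beta=0.01):
    # Illustrative PF allocator, not the pf module's implementation.
    # inst_rate: (num_users,) achievable rate per user on every RB (assumed flat)
    # avg_rate:  (num_users,) exponentially smoothed past throughput
    avg = avg_rate.astype(float).copy()
    alloc = np.empty(num_rbs, dtype=int)
    for rb in range(num_rbs):
        metric = inst_rate / avg       # PF priority: rate / average rate
        user = int(np.argmax(metric))  # highest-priority user wins this RB
        alloc[rb] = user
        # Exponential smoothing of the average; only the scheduled user gains.
        avg = (1 - beta) * avg
        avg[user] += beta * inst_rate[user]
    return alloc

# Ten users with equal instantaneous rate and a near-zero average for user 0:
# pf_schedule(np.ones(10), np.array([1e-20] + [1.0] * 9)) -> every RB goes to user 0.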
Example #5
# -*- coding: utf-8 -*-
"""
Created on Wed Jun 20 14:35:56 2018

@author: jian
"""

from pf import pfaffian, pf

import numpy as np

N = 200
steps = 10
dd = np.zeros(steps)
for i in range(steps):
    X = np.random.random((2 * N, 2 * N))
    T = X.transpose() - X
    dd[i] = abs(pf(T) - pfaffian(T)) / abs(pf(T))  # relative error between the two routines
print(dd.max())  # worst-case relative discrepancy over all trials
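A further sanity check that does not require a second Pfaffian routine is the identity pf(T)**2 == det(T), which holds for every even-dimensional antisymmetric matrix. A short sketch, reusing the same from pf import pf as the script above:

import numpy as np
from pf import pf  # same import as in the script above

rng = np.random.default_rng(0)
N = 50
X = rng.random((2 * N, 2 * N))
T = X.T - X  # antisymmetric by construction

# For antisymmetric T, the Pfaffian squared equals the determinant.
rel_err = abs(pf(T) ** 2 - np.linalg.det(T)) / abs(np.linalg.det(T))
print(rel_err)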
Example #6
    def correlator_dynamics_AABB(self, i, t, j, AABB):
        S = self.aux_pfaffian_constructor_t_AABB(i, j, t, AABB)
        return pf(S)
Example #7
    def correlator_dynamics(self, i, t, j):
        S = self.aux_pfaffian_constructor_t(i, j, t)
        return pf(S)
Example #8
    def correlator_equal_time(self, i, j):
        S = self.aux_pfaffian_constructor(i, j)
        return np.real(pf(S))
Example #9
def vf(newname, newname_in, newname_out, station_file, final_name):
    """Split the trip records in `newname` into entries (money == 0) and exits,
    drop entry/exit pairs that look like a within-30-minute transfer at
    上海火车站 between lines 3/4 and line 1, write the remaining records to
    `newname_in`/`newname_out`, and hand the files to pf.pf."""
    df2 = pd.read_csv(
        newname,
        encoding='GBK',
        names=['card_id', 'time', 'money', 'line', 'station', 'M1'])
    df_in = df2.ix[df2.money == 0.0,
                   ['card_id', 'time', 'money', 'line', 'station', 'M1']]
    print(df_in)
    df_out = df2.ix[df2.money != 0.0,
                    ['card_id', 'time', 'money', 'line', 'station', 'M1']]
    print(df_out)
    df_out.columns = [
        'card_id', 'time_out', 'money_out', 'line_out', 'station_out', 'M1_out'
    ]
    df_in.columns = [
        'card_id', 'time_in', 'money_in', 'line_in', 'station_in', 'M1_in'
    ]

    df_outsh34 = df_out.ix[(df_out.station_out == u'上海火车站') & (
        (df_out.line_out == 4) | (df_out.line_out == 3)
    ), [
        'card_id', 'time_out', 'money_out', 'line_out', 'station_out', 'M1_out'
    ]]
    df_insh1 = df_in.ix[
        (df_in.station_in == u'上海火车站') & (df_in.line_in == 1),
        ['card_id', 'time_in', 'money_in', 'line_in', 'station_in', 'M1_in']]
    result34_1 = pd.merge(df_outsh34, df_insh1)
    print(df_insh1)
    print(df_outsh34)
    print(result34_1)
    fun = lambda x, y: t1(x) - t1(y)
    #result['D']='ColumnD'
    #t1(result['time_out'])-t1(result['time_in'])<=60*30
    result34_1['duration'] = list(
        map(fun, result34_1['time_in'], result34_1['time_out']))
    #result[map(lambda x:datetime.datetime(x.year,x.month,x.day,x.hours,x.minutes+30,x.seconds),result['time_in'])>=result['time_out']]
    #result.groupby('card_id')
    print(result34_1)
    result0 = result34_1.ix[
        (result34_1.duration <= 60 * 30) & (result34_1.duration > 0), [
            'card_id', 'time_out', 'money_out', 'line_out', 'station_out',
            'M1_out', 'time_in', 'money_in', 'line_in', 'station_in', 'M1_in',
            'duration'
        ]]
    print(result0)
    #print(result0.groupby(result0['card_id']).agg({'time_in':['time_in'].min(),'card_id':['card_id']}))
    print(result0.groupby(['M1_in', 'card_id'])[['duration']].min())
    result1 = result0.groupby(['M1_in', 'card_id'])[['duration']].min()
    print("huhu")
    #result1['card_id']=result1.index
    result1.reset_index('M1_in', inplace=True)
    result1.reset_index('card_id', inplace=True)
    result2 = pd.merge(result0,
                       result1,
                       right_on=['card_id', 'duration'],
                       left_on=['card_id', 'duration'])
    print(
        pd.merge(result0,
                 result1,
                 right_on=['card_id', 'duration'],
                 left_on=['card_id', 'duration']))

    #print(result0['time_in'].groupby(result0['card_id']).min())
    #result1=result0.ix[result0.time_in==result0['duration'].groupby(result0['card_id']).min(),['card_id','money_out','line_out','station_out','time_out','money_in','line_in','station_in','time_in','duration']]

    #frames =[result2.ix[:,['card_id','money_out','line_out','station_out','time_out']], df_out0]
    #ix[:,['card_id','money_out','line_out','station_out','time_out']
    #In [5]: result = pd.concat(frames)
    #df_out1=pd.concat(frames, axis=0)
    df_out0 = Complement(
        result2.ix[:, [
            'card_id', 'time_out', 'money_out', 'line_out', 'station_out',
            'M1_out'
        ]], df_out).drop_duplicates()
    print(df_out0)
    #df_in1=pd.concat([result1['card_id','money_in','line_in','station_in','time_in'],df_in0], axis=0)
    df_in0 = Complement(
        result2.ix[:, [
            'card_id', 'time_in', 'money_in', 'line_in', 'station_in', 'M1_in'
        ]], df_in).drop_duplicates()
    print(df_in0)

    ###

    df_outsh1 = df_out.ix[(df_out.station_out == u'上海火车站') &
                          (df_out.line_out == 1), [
                              'card_id', 'time_out', 'money_out', 'line_out',
                              'station_out', 'M1_out'
                          ]]
    df_insh34 = df_in.ix[
        (df_in.station_in == u'上海火车站') & ((df_in.line_in == 4) |
                                          (df_in.line_in == 3)),
        ['card_id', 'time_in', 'money_in', 'line_in', 'station_in', 'M1_in']]
    result1_34 = pd.merge(df_outsh1, df_insh34)
    print(df_insh34)
    print(df_outsh1)
    print(result1_34)
    fun = lambda x, y: t1(x) - t1(y)
    #result['D']='ColumnD'
    #t1(result['time_out'])-t1(result['time_in'])<=60*30
    result1_34['duration'] = list(
        map(fun, result1_34['time_in'], result1_34['time_out']))
    #result[map(lambda x:datetime.datetime(x.year,x.month,x.day,x.hours,x.minutes+30,x.seconds),result['time_in'])>=result['time_out']]
    #result.groupby('card_id')
    print(result1_34)
    result3 = result1_34.ix[(result1_34.duration <= 60 * 30) &
                            (result1_34.duration > 0), [
                                'card_id', 'time_out', 'money_out', 'line_out',
                                'station_out', 'M1_out', 'time_in', 'money_in',
                                'line_in', 'station_in', 'M1_in', 'duration'
                            ]]
    print(result3)
    #print(result0.groupby(result0['card_id']).agg({'time_in':['time_in'].min(),'card_id':['card_id']}))
    print(result3.groupby(['M1_in', 'card_id'])[['duration']].min())
    result4 = result3.groupby(['M1_in', 'card_id'])[['duration']].min()
    print("huhu")
    #result4['card_id']=result4.index
    result4.reset_index('M1_in', inplace=True)
    result4.reset_index('card_id', inplace=True)
    result5 = pd.merge(result3,
                       result4,
                       right_on=['card_id', 'duration'],
                       left_on=['card_id', 'duration'])
    print(
        pd.merge(result3,
                 result4,
                 right_on=['card_id', 'duration'],
                 left_on=['card_id', 'duration']))

    #print(result0['time_in'].groupby(result0['card_id']).min())
    #result1=result0.ix[result0.time_in==result0['duration'].groupby(result0['card_id']).min(),['card_id','money_out','line_out','station_out','time_out','money_in','line_in','station_in','time_in','duration']]

    print(result4)
    #frames =[result2.ix[:,['card_id','money_out','line_out','station_out','time_out']], df_out0]
    #ix[:,['card_id','money_out','line_out','station_out','time_out']
    #In [5]: result = pd.concat(frames)
    #df_out1=pd.concat(frames, axis=0)
    df_out1 = Complement(
        result5.ix[:, [
            'card_id', 'time_out', 'money_out', 'line_out', 'station_out',
            'M1_out'
        ]], df_out0).drop_duplicates()
    print("finaout")
    print(df_out1)
    #df_in1=pd.concat([result1['card_id','money_in','line_in','station_in','time_in'],df_in0], axis=0)
    df_in1 = Complement(
        result5.ix[:, [
            'card_id', 'time_in', 'money_in', 'line_in', 'station_in', 'M1_in'
        ]], df_in0).drop_duplicates()
    print("finain")
    print(df_in1)

    df_out1.to_csv(newname_out, encoding='GBK', index=False, header=False)
    df_in1.to_csv(newname_in, encoding='GBK', index=False, header=False)

    # call pf
    pf.pf(station_file, newname_in, newname_out, final_name)