Exemplo n.º 1
0
 def __init__(self, seed, dist=None):
     """Initialize the random generator and sampling distribution.

     Args:
         seed: Seed for the generator; non-positive values fall back to
             an unseeded (OS-entropy) state.
         dist: A ``Distribution`` instance, or ``None`` to use
             ``default_distribution``.

     Raises:
         error: If ``dist`` is not a ``Distribution`` instance.
     """
     if seed <= 0:
         self._rng = mt.RandomState()
     else:
         self._rng = mt.RandomState(seed)
     if dist is None:
         dist = default_distribution
     if not isinstance(dist, Distribution):
         # Py3-compatible call form (original used the Python-2-only
         # statement syntax ``raise error, "msg"``).
         raise error("Not a distribution object")
     self._dist = dist
Exemplo n.º 2
0
        def f():
            """Return three successive uniform draws from a RandomState seeded with 42."""
            state = mtrand.RandomState(seed=42)
            draws = tuple(state.rand() for _ in range(3))
            return draws
Exemplo n.º 3
0
    def forward(self, inputs):
        """Encode a bit tensor into three code streams (rate 1/3).

        A systematic branch and two parity branches (CNN + linear +
        activation each) are concatenated along the feature axis and
        passed through the power constraint.
        """
        if self.args.is_variable_block_len:
            block_len = inputs.shape[1]
            # Re-draw the interleaving permutation for this block length.
            if self.args.is_interleave != 0:  # fixed interleaver.
                seed = np.random.randint(0, self.args.is_interleave)
                rand_gen = mtrand.RandomState(seed)
                p_array = rand_gen.permutation(arange(block_len))
                self.set_interleaver(p_array)

        # Map bits {0, 1} to bipolar {-1, +1}.
        inputs = 2.0 * inputs - 1.0

        # Systematic branch.
        x_sys = self.enc_act(self.enc_linear_1(self.enc_cnn_1(inputs)))

        # First parity branch (non-interleaved input).
        x_p1 = self.enc_act(self.enc_linear_2(self.enc_cnn_2(inputs)))

        # Second parity branch operates on the interleaved input.
        # NOTE(review): the interleaved tensor is named x_sys_int but is
        # computed from the raw inputs — confirm against the reference model.
        x_sys_int = self.interleaver(inputs)
        x_p2 = self.enc_act(self.enc_linear_3(self.enc_cnn_3(x_sys_int)))

        x_tx = torch.cat([x_sys, x_p1, x_p2], dim=2)
        return self.power_constraint(x_tx)
Exemplo n.º 4
0
    def __init__(self, args, p_array):
        """Build the rate-1/3 CNN encoder with two interleavers.

        Args:
            args: Hyper-parameter namespace (layer counts, unit sizes, ...).
            p_array: Permutation used by the first interleaver.
        """
        # turbofy only for code rate 1/3
        super(ENC_interCNN2Int, self).__init__(args)
        self.args = args

        # Encoder: one CNN + linear head per output stream, all with the
        # same shape configuration.
        cnn_kwargs = dict(num_layer=args.enc_num_layer,
                          in_channels=args.code_rate_k,
                          out_channels=args.enc_num_unit,
                          kernel_size=args.dec_kernel_size)

        self.enc_cnn_1 = SameShapeConv1d(**cnn_kwargs)
        self.enc_linear_1 = torch.nn.Linear(args.enc_num_unit, 1)

        self.enc_cnn_2 = SameShapeConv1d(**cnn_kwargs)
        self.enc_linear_2 = torch.nn.Linear(args.enc_num_unit, 1)

        self.enc_cnn_3 = SameShapeConv1d(**cnn_kwargs)
        self.enc_linear_3 = torch.nn.Linear(args.enc_num_unit, 1)

        self.interleaver1 = Interleaver(args, p_array)

        # The second interleaver uses a fixed, reproducible permutation.
        seed2 = 1000
        rand_gen2 = mtrand.RandomState(seed2)
        p_array2 = rand_gen2.permutation(arange(args.block_len))

        print('p_array1', p_array)
        print('p_array2', p_array2)

        self.interleaver2 = Interleaver(args, p_array2)
Exemplo n.º 5
0
 def f():
     """Sort a million seeded uniform draws and verify the result is ordered.

     Returns:
         bool: True when every element is <= its successor.
     """
     rng = mtrand.RandomState(seed=250015)
     x = rng.uniform(size=1000000)
     res = sorted(x)
     # ``range`` replaces the Python-2-only ``xrange`` (NameError on Py3);
     # a generator expression avoids materializing the comparison list.
     return all(res[ix] <= res[ix + 1] for ix in range(len(res) - 1))
 def test_call_within_randomstate(self):
     # Check that custom RandomState does not call into global state
     m = random.RandomState()
     res = np.array([0, 8, 7, 2, 1, 9, 4, 7, 0, 3])
     for i in range(3):
         random.seed(i)
         m.seed(4321)
         # If m.state is not honored, the result will change
         assert_array_equal(m.choice(10, size=10, p=np.ones(10) / 10.), res)
Exemplo n.º 7
0
    def __init__(self, args, p_array):
        """Build the iterative CNN turbo decoder with two interleavers.

        Args:
            args: Hyper-parameter namespace.
            p_array: Permutation for the first (de)interleaver pair.
        """
        super(DEC_LargeCNN2Int, self).__init__()
        self.args = args

        use_cuda = not args.no_cuda and torch.cuda.is_available()
        self.this_device = torch.device("cuda" if use_cuda else "cpu")

        self.interleaver1 = Interleaver(args, p_array)
        self.deinterleaver1 = DeInterleaver(args, p_array)

        # Second (de)interleaver pair uses a fixed, reproducible permutation.
        seed2 = 1000
        rand_gen2 = mtrand.RandomState(seed2)
        p_array2 = rand_gen2.permutation(arange(args.block_len))

        print('p_array1 dec', p_array)
        print('p_array2 dec', p_array2)

        self.interleaver2 = Interleaver(args, p_array2)
        self.deinterleaver2 = DeInterleaver(args, p_array2)

        # One CNN + output head per decoder stage and iteration.
        self.dec1_cnns = torch.nn.ModuleList()
        self.dec2_cnns = torch.nn.ModuleList()
        self.dec1_outputs = torch.nn.ModuleList()
        self.dec2_outputs = torch.nn.ModuleList()

        cnn_kwargs = dict(num_layer=args.dec_num_layer,
                          in_channels=2 + args.num_iter_ft,
                          out_channels=args.dec_num_unit,
                          kernel_size=args.dec_kernel_size)
        last = args.num_iteration - 1
        for idx in range(args.num_iteration):
            self.dec1_cnns.append(SameShapeConv1d(**cnn_kwargs))
            self.dec2_cnns.append(SameShapeConv1d(**cnn_kwargs))
            self.dec1_outputs.append(
                torch.nn.Linear(args.dec_num_unit, args.num_iter_ft))
            # The final stage-2 head emits the bit estimate (1 unit);
            # earlier iterations emit extrinsic features instead.
            out_dim = 1 if idx == last else args.num_iter_ft
            self.dec2_outputs.append(
                torch.nn.Linear(args.dec_num_unit, out_dim))
Exemplo n.º 8
0
    # NOTE(review): fragment of a larger setup routine — the enclosing
    # definition starts before and continues after this excerpt
    # (the final ``else:`` body is not visible here).
    sys.stdout = Logger('./logs/ftae'+identity+'_log.txt', sys.stdout)

    args = get_args()
    print(args)

    use_cuda = not args.no_cuda and torch.cuda.is_available()
    device = torch.device("cuda" if use_cuda else "cpu")

    #################################################
    # Setup Channel AE: Encoder, Decoder, Channel
    #################################################
    # setup interleaver.

    if args.is_interleave == 1:
        # randint(0, 1) always yields 0, i.e. a fixed seed.
        seed = np.random.randint(0, 1)
        rand_gen = mtrand.RandomState(seed)
        p_array = rand_gen.permutation(arange(args.block_len))

    elif args.is_interleave == 0:
        # Identity "interleaver": positions kept in order.
        p_array = range(args.block_len)
    else:
        # Random permutation, seed drawn from [0, is_interleave).
        seed = np.random.randint(0, args.is_interleave)
        rand_gen = mtrand.RandomState(seed)
        p_array = rand_gen.permutation(arange(args.block_len))

    if args.codec not in ['deepcode_cnn', 'deepcode_rnn']:
        print('using random interleaver', p_array)

    if args.send_error_back:
        model = Channel_Active_Block_Feedback(args, p_array).to(device)
    else:
Exemplo n.º 9
0
 def __init__(self, length, seed):
     """Store a seeded random permutation of ``range(length)``."""
     generator = mtrand.RandomState(seed)
     self.p_array = generator.permutation(arange(length))
Exemplo n.º 10
0
    def forward(self, received):
        """Iteratively turbo-decode a received (batch, block_len, 3) tensor.

        Channel 0 carries the systematic stream, channels 1 and 2 the two
        parity streams.  Runs ``num_iteration`` rounds of the two-stage
        CNN decoder, exchanging extrinsic information through the
        (de)interleaver, and returns per-bit probabilities (sigmoid output).
        """

        if self.args.is_variable_block_len:
            block_len = received.shape[1]
            # reset interleaver
            if self.args.is_interleave != 0:           # fixed interleaver.
                seed = np.random.randint(0, self.args.is_interleave)
                rand_gen = mtrand.RandomState(seed)
                p_array = rand_gen.permutation(arange(block_len))
                self.set_interleaver(p_array)
        else:
            block_len = self.args.block_len

        received = received.type(torch.FloatTensor).to(self.this_device)
        # Turbo Decoder
        # Split the received tensor into systematic and parity views.
        r_sys     = received[:,:,0].view((self.args.batch_size, block_len, 1))
        r_sys_int = self.interleaver(r_sys)
        r_par1    = received[:,:,1].view((self.args.batch_size, block_len, 1))
        r_par2    = received[:,:,2].view((self.args.batch_size, block_len, 1))

        #num_iteration,
        # Extrinsic prior fed into the first decoder stage; starts at zero.
        prior = torch.zeros((self.args.batch_size, block_len, self.args.num_iter_ft)).to(self.this_device)

        for idx in range(self.args.num_iteration - 1):
            # Stage 1: decode on the non-interleaved streams.
            x_this_dec = torch.cat([r_sys, r_par1, prior], dim = 2)

            x_dec  = self.dec1_cnns[idx](x_this_dec)
            x_plr      = self.dec1_outputs[idx](x_dec)

            if self.args.extrinsic:
                # Subtract the incoming prior so only extrinsic info flows on.
                x_plr = x_plr - prior

            x_plr_int  = self.interleaver(x_plr)

            # Stage 2: decode on the interleaved streams.
            x_this_dec = torch.cat([r_sys_int, r_par2, x_plr_int ], dim = 2)

            x_dec  = self.dec2_cnns[idx](x_this_dec)

            x_plr      = self.dec2_outputs[idx](x_dec)

            if self.args.extrinsic:
                x_plr = x_plr - x_plr_int

            # De-interleave stage-2 output to become the next round's prior.
            prior      = self.deinterleaver(x_plr)

        # last round
        x_this_dec = torch.cat([r_sys,r_par1, prior], dim = 2)

        x_dec     = self.dec1_cnns[self.args.num_iteration - 1](x_this_dec)
        x_plr      = self.dec1_outputs[self.args.num_iteration - 1](x_dec)

        if self.args.extrinsic:
            x_plr = x_plr - prior

        x_plr_int  = self.interleaver(x_plr)

        x_this_dec = torch.cat([r_sys_int, r_par2, x_plr_int ], dim = 2)

        x_dec     = self.dec2_cnns[self.args.num_iteration - 1](x_this_dec)
        x_plr      = self.dec2_outputs[self.args.num_iteration - 1](x_dec)

        # Final head emits one logit per bit; sigmoid maps to probability.
        final      = torch.sigmoid(self.deinterleaver(x_plr))

        return final
Exemplo n.º 11
0
        def f():
            """Sum one seeded uniform scalar with the first of four earlier draws."""
            state = mtrand.RandomState(seed=42)
            vec = state.rand(4)
            scalar = state.rand()
            return scalar + vec[0]
Exemplo n.º 12
0
 def f():
     """Discard one seeded normal draw, then return the next nine."""
     state = mtrand.RandomState(seed=42)
     state.randn()  # advance the stream past the first draw
     return state.randn(9)
Exemplo n.º 13
0
 def f():
     """Return ten uniform draws from a RandomState seeded with 42."""
     state = mtrand.RandomState(seed=42)
     return state.rand(10)
Exemplo n.º 14
0
 def f():
     """Return nine seeded draws, uniform on [-1.0, 1.0)."""
     state = mtrand.RandomState(seed=42)
     return state.uniform(low=-1.0, high=1.0, size=9)
Exemplo n.º 15
0
 def f():
     """Return a single seeded draw, uniform on [-1, 1)."""
     state = mtrand.RandomState(seed=42)
     return state.uniform(-1, 1)