import os

import matplotlib.pyplot as plt
import numpy as np


def plot_moving_inputs(inputs, labels, opts):
    rc = (2, 3)
    fig, ax = plt.subplots(rc[0], rc[1])
    state = [x[:, :opts.state_size] for x in inputs[:rc[0]]]
    extra = [x[:, opts.state_size:opts.state_size + 2] for x in inputs[:rc[0]]]
    labels = labels[:rc[0]]

    i = 0
    for batch in zip(state, extra, labels):
        for d in batch:
            plot_ix = np.unravel_index(i, rc)
            cur_ax = ax[plot_ix]
            adjust(cur_ax)
            plt.sca(cur_ax)
            plt.imshow(d, cmap='RdBu_r', vmin=-.3, vmax=.3)
            if i % 3 != 1:
                plt.xticks([0, 19])
            else:
                plt.xticks([])
            plt.yticks([0, 49])
            cb = plt.colorbar()
            cb.set_ticks([0, .3])
            i += 1
    path = os.path.join('./lab_meeting/images', 'input_non_stationary')
    plt.savefig(path + '.png', dpi=300)
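# Note: adjust(cur_ax) above is a project-local styling helper that is not shown
# in this example. A minimal sketch of what such a helper typically does,
# assuming it only touches cosmetic axis properties (this is an assumption, not
# the project's actual implementation):
def adjust(ax):
    # Hide the top/right spines and keep ticks on the bottom/left edges only.
    ax.spines['top'].set_visible(False)
    ax.spines['right'].set_visible(False)
    ax.xaxis.set_ticks_position('bottom')
    ax.yaxis.set_ticks_position('left')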
Example #2
def test_booleans(test_case):
    print("Running", "test_booleans")
    test_case.runAsJava(adjust("""
        for i in range(100000):
            x = True
            y = False

            x = (not x and y)
            y = x != y

            x = True and True and True
            y = x or False

            x = not True or (not x) or True
            y = y == y

            x = y != (not x)
            y = False or True or (not y)

            x = (x != x) or (not False)
            y = y and y and (not x)

            x = False or False or False or False
            y = True and True and True and True

            x = 1 != 0
            y = (not (not (not (not (not (not (not x)))))))

            x = ((not y) and y) == ((not (not x)) or y or (not y))
            y = False or False or (not True) or (not True) or (not True)

            x = True and (not False) and True and (not False) and False
            y = (False != False) != False
        """),
                        timed=True)
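# Note: in these benchmark tests the snippet is written with the indentation of
# the enclosing Python function, so adjust() presumably normalizes the source
# before runAsJava() compiles it. A minimal sketch, assuming it only strips the
# common leading indentation (the real helper may do more):
import textwrap

def adjust(source):
    # Remove the shared leading indentation and surrounding blank lines so the
    # snippet becomes valid top-level Python source.
    return textwrap.dedent(source).strip() + "\n"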
Example #3
def test_cmp(test_case):
    print("Running", "test_cmp")
    test_case.runAsJava(adjust("""
        x = None
        s = "mary had a little lamb"
        t = "humpty dumpty sat on a wall"

        for i in range(1000):
            for j in range(1000):
                x = s < t
                x = s <= t
                x = s == t
                x = s != t
                x = s > t
                x = s >= t

                x = 3 < 5
                x = 3 <= 5
                x = 3 == 5
                x = 3 != 5
                x = 3 > 5
                x = 3 >= 5

                x = 3 < True
                x = 3.0 <= 5
                x = None == 5
                x = [3] != 5.0
                x = [3] > [5]
                x = [3.0] > [5.0]
    """),
                        timed=True)
Example #4
def test_code(test_case):
    print("Running", "test_code")
    test_case.runAsJava(adjust("""
        def main(n):
            def foo(n):
                return n + 1
            def bar(n):
                return n + 1
            def baz(n):
                return n + 1
            def buzz(n):
                return n + 1
            def bizz(n):
                return n + 1
            def bozz(n):
                return n + 1
            def yam(n):
                return n + 1
            def yarn(n):
                return n + 1
            return foo(bar(baz(buzz(bizz(bozz(yam(yarn(n))))))))
        for i in range(100000):
            main(i)
    """),
                        timed=True)
Example #5
def test_loops(test_case):
    print("Running", "test_loops")
    test_case.runAsJava(adjust("""
        for x in range(100):
            for y in range(100):
                for z in range(100):
                    for a in range(100):
                        pass
    """),
                        timed=True)
Example #6
def test_small_integers(test_case):
    print("Running", "test_small_integers")
    test_case.runAsJava(adjust("""
        for i in range(100):
            for j in range(100):
                x = -5
                for k in range(-5, 257):
                    x = x + 1
        """),
                        timed=True)
Example #7
def plot_stationary_inputs(inputs, labels, opts):
    rc = (2, 2)
    fig, ax = plt.subplots(rc[0], rc[1])
    state = inputs[:rc[0]]
    labels = labels[:rc[0]]
    i = 0
    for batch in zip(state, labels):
        for d in batch:
            plot_ix = np.unravel_index(i, rc)
            cur_ax = ax[plot_ix]
            adjust(cur_ax)
            plt.sca(cur_ax)
            plt.imshow(d, cmap='RdBu_r', vmin=-.3, vmax=.3)
            plt.xticks([0, 19])
            plt.yticks([0, 49])
            cb = plt.colorbar()
            cb.set_ticks([0, .3])
            i += 1
    path = os.path.join('./lab_meeting/images', 'input_stationary')
    plt.savefig(path + '.png', dpi=300)
Example #8
def test_dict_get(test_case):
    print("Running", "test_dictionary_get")
    test_case.runAsJava(adjust("""
        dict = {1 : 2, "a" : "b"}
        for i in range(1000000):
            dict.get(i)
            dict.get(1)
            dict.get("a")
            dict.get(i)
            dict.get(1)
            dict.get("a")
    """),
                        timed=True)
Example #9
def test_dict_set(test_case):
    print("Running", "test_dictionary_set")
    test_case.runAsJava(adjust("""
        dict = {}
        for i in range(1000000):
            dict["moo"] = 1
            dict["quack"] = 2
            dict["woof"] = 3
            dict["meow"] = 4
            dict["cockadoodledoo"] = 5
            dict["hiss"] = 6
    """),
                        timed=True)
Example #10
    def adjust(self, method, frames=None, inplace=True, **kwargs):
        """Apply the given adjustment method to frames (defaults to self.frames)."""
        if frames is None and len(self.frames) == 0:
            raise ValueError("You did not specify any frames")

        # Fall back to the frames stored on the instance.
        if frames is None:
            frames = self.frames

        adjusted = [utils.adjust(frame, method, **kwargs) for frame in frames]

        if inplace:
            self.frames = adjusted
        else:
            return adjusted
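# Hypothetical usage sketch: the container class name (FrameStack), the frame
# list (loaded_frames) and the "gamma" method are assumptions for illustration;
# only the adjust() signature above is given by the source.
stack = FrameStack(frames=loaded_frames)
stack.adjust("gamma", gamma=1.8)                        # adjust all stored frames in place
previews = stack.adjust("gamma", frames=loaded_frames[:3],
                        inplace=False, gamma=1.8)       # adjust a subset, return copies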
Example #11
def create_normals(normal_map):
    h, w, _ = normal_map.shape
    iterations = w * h
    step_size = np.ceil(iterations / 100).astype(int)
    normals = np.zeros((h, w, NORMAL_DIMENSIONS))
    counter = 0
    bar = Bar("Processing Normals...", max=100, suffix='%(percent)d%%')
    bar.check_tty = False
    for i in range(w):
        for j in range(h):
            normals[j][i] = utils.adjust(normal_map[j][i][:3])
            counter += 1
            if counter % step_size == 0:
                bar.next()
    bar.finish()
    np.save(NORMAL_VECTORS_FILENAME, normals)
    print(f"Normal vectors stored in {NORMAL_VECTORS_FILENAME}")
    return normals
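# utils.adjust() above turns one RGB-encoded pixel into a surface normal; a
# minimal sketch, assuming the common encoding that maps each channel from
# [0, 255] to [-1, 1] and re-normalizes the result to unit length (this is an
# assumption, not the project's actual utils.adjust):
import numpy as np

def adjust(rgb_pixel):
    # Hypothetical decoder: rescale the 8-bit channels, then normalize.
    v = np.asarray(rgb_pixel, dtype=np.float64) / 255.0 * 2.0 - 1.0
    norm = np.linalg.norm(v)
    return v / norm if norm > 0 else v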
Example #12
def test_global_var_load(test_case):
    print("Running", "test_global_var_load")
    test_case.runAsJava(adjust("""
        x = 1
        y = 2

        for i in range(100000):
            print(x)
            print(y)
            print(x+y)
            print(x-y)
            print(x)
            print(y)
            print(x*y)
            print(x/y)
            print(x)
            print(y)
    """),
                        timed=True)
Example #13
def test_function_var_load(test_case):
    print("Running", "test_function_var_load")
    test_case.runAsJava(adjust("""
        def foo():
            x = 1
            y = 2
            print(x)
            print(y)
            print(x+y)
            print(x-y)
            print(x)
            print(y)
            print(x*y)
            print(x/y)
            print(x)
            print(y)

        for i in range(100000):
            foo()
    """),
                        timed=True)
Example #14
def test_method(test_case):
    print("Running", "test_method")
    test_case.runAsJava(adjust("""
        class MyClass:

            def A(self):
                print("A!")

            def B(self):
                print("B!")

            def C(self):
                print("C!")

        obj = MyClass()

        for i in range(1000000):
            obj.A()
            obj.B()
            obj.C()
    """),
                        timed=True)
Example #15
def test_class_init(test_case):
    print("Running", "test_class_init")
    test_case.runAsJava(adjust("""
        class A: pass
        class B: pass
        class C: pass
        class D: pass
        class E: pass
        class F: pass
        class G: pass
        class H: pass
        class I: pass
        class J: pass
        class K: pass
        class L: pass
        class M: pass
        class N: pass
        class O: pass
        class P: pass
        class Q: pass
        class R: pass
        class S: pass
        class T: pass
        class U: pass
        class V: pass
        class W: pass
        class X: pass
        class Y: pass
        class Z: pass

        a = "a"
        b = 1
        c = [1]
        d = {1 : 1}
        e = list([1])
        f = True
        g = 3.0
    """),
                        timed=True)
Example #16
def adjust_normal_map(rgb_normal_map):
    """
    Return an adjusted normal map in which each element is a normalized vector
    decoded from an RGB normal map.

    Args:
        rgb_normal_map (numpy.array): The RGB normal map as a numpy array.

    Returns:
        numpy.array: Map of unit normal vectors.
    """
    print("Creating normal vectors from RGB map...")
    h, w, _ = rgb_normal_map.shape
    iterations = w * h
    step_size = np.ceil((iterations * PERCENTAGE_STEP) / 100).astype('int')
    normals = np.zeros((h, w, NORMAL_DIMENSIONS))
    counter = 0
    for i in range(w):
        for j in range(h):
            if counter % step_size == 0:
                percent_done = int((counter / float(iterations)) * 100)
                print("{}% of normal vectors created".format(percent_done))
            normals[j][i] = utils.adjust(rgb_normal_map[j][i][:3])
            counter += 1
    return normals
Example #17
def test_class_var_load(test_case):
    print("Running", "test_class_var_load")
    test_case.runAsJava(adjust("""
        class Animal:
            def __init__(self, name, sound):
                self.name = name
                self.sound = sound

            def speak(self):
                print(self.name)
                print(self.sound)
                print(self.name)
                print(self.sound)
                print(self.name)
                print(self.sound)

        lapo = Animal("Lapo", "Bow Wow")
        for i in range(100000):
            lapo.speak()
            lapo.speak()
            lapo.speak()

    """),
                        timed=True)
Example #18
    if args.cuda:
        net.load_state_dict(copyStateDict(torch.load(args.trained_model)))
    else:
        net.load_state_dict(copyStateDict(torch.load(args.trained_model, map_location='cpu')))

    if args.cuda:
        net = net.cuda()
        net = torch.nn.DataParallel(net)
        cudnn.benchmark = False

    net.eval()

    t = time.time()

    # load data
    for k, image_path in enumerate(image_list):
        print("Test image {:d}/{:d}: {:s}".format(k+1, len(image_list), image_path), 'end=\r')
        image = imgproc.loadImage(image_path)

        bboxes, polys, score_text = test_net(net, image, args.text_threshold, args.link_threshold, args.low_text, args.cuda, args.poly)

        # save score text
        filename, file_ext = os.path.splitext(os.path.basename(image_path))
        mask_file = result_folder + "/res_" + filename + '_mask.jpg'
        cv2.imwrite(mask_file, score_text)
        polys = adjust(polys)
        file_utils.saveResult(image_path, image[:, :, ::-1], polys, dirname=result_folder)

    print("elapsed time : {}s".format(time.time() - t))
Example #19
    def forward(self, x, std_val=0.01, constant_rep=True):

        #create the latent vector
        #-----------------------------------------------------------------------
        x = self.encoder(x)
        #-----------------------------------------------------------------------

        if constant_rep:
            N = self.num_points / self.nb_primitives
            ones = Variable(torch.ones(x.size(0), self.nb_primitives)).cuda()
            points_per_primitive = ones.contiguous() * N
            points_per_primitive = points_per_primitive.int()
            existance_prob = ones[1]

        else:
            #compute the means for the normal distributions
            #-------------------------------------------------------------------
            mean = self.existanceProbability(x.unsqueeze(2))
            mean = mean.view(x.size(0), self.nb_primitives).contiguous()
            #-------------------------------------------------------------------

            #fixed standard deviation for every normal distribution
            #-------------------------------------------------------------------
            ones = Variable(torch.ones(x.size(0), self.nb_primitives)).cuda()
            stddev = (std_val * ones).contiguous()
            #-------------------------------------------------------------------

            #sample the existence probabilities from the normal distributions
            #and make sure they are in [0, 1]
            #-------------------------------------------------------------------
            existance_prob = torch.normal(mean, stddev).contiguous()
            existance_prob[existance_prob < 0] = 0
            existance_prob[existance_prob > 1] = 1
            #-------------------------------------------------------------------

            # normalize the probabilities for every batch
            #-------------------------------------------------------------------
            batch_sum_prob = torch.sum(existance_prob, 1).contiguous()
            batch_sum_prob = batch_sum_prob.expand(self.nb_primitives,
                                                   x.size(0))
            batch_sum_prob = batch_sum_prob.permute(1, 0)
            batch_sum_prob = batch_sum_prob.contiguous()
            existance_prob = torch.div(existance_prob, batch_sum_prob)
            existance_prob = existance_prob.contiguous()
            #-------------------------------------------------------------------

            #compute the number of points per primitive wrt the batch
            #-------------------------------------------------------------------
            points_per_primitive = torch.round(existance_prob *
                                               self.num_points)
            points_per_primitive = points_per_primitive.int().contiguous()
            points_per_primitive = adjust(points_per_primitive,
                                          self.num_points)
            #-------------------------------------------------------------------

        #create one spatial transformation per primitive
        #-----------------------------------------------------------------------
        linear_list = []
        for primitive in range(self.nb_primitives):
            linear_list.append(self.rotBias[primitive](
                x.unsqueeze(2).contiguous()))
        linear_list = torch.cat(linear_list, 2)
        #-----------------------------------------------------------------------

        out_batch = []

        for batch in range(x.size(0)):

            out_primitive = []

            for primitive in range(self.nb_primitives):

                #find the number of points to generate wrt the batch and the
                #primitive number
                #---------------------------------------------------------------
                N = points_per_primitive[batch, primitive].data[0]
                #---------------------------------------------------------------

                #ignore the primitive if there is no point associated with it
                #---------------------------------------------------------------
                if (N == 0):
                    continue
                #---------------------------------------------------------------

                #generate the 2D primitive wrt the number of points
                #---------------------------------------------------------------
                rand_grid = Variable(torch.cuda.FloatTensor(1, 2, N))
                rand_grid.data.uniform_(0, 1)
                #---------------------------------------------------------------

                #allow the network to modify the plane before the spatial
                #transformation
                #---------------------------------------------------------------
                if self.D3_is_on:

                    #transform the 2D primitive into a 3D surface
                    #-----------------------------------------------------------
                    y = self.d2Tod3[primitive](rand_grid)
                    #-----------------------------------------------------------

                    #generate the rotation matrix & bias wrt the primitive
                    #-----------------------------------------------------------
                    linear = linear_list[batch, :, primitive]
                    q = linear[0:9]
                    q = q.view(3, 3)
                    q = q.contiguous()

                    t = linear[9:12]
                    t = t.view(1, 3).expand(y.size(2), -1, -1).permute(1, 2, 0)
                    t = t.contiguous()
                    #-----------------------------------------------------------
                #---------------------------------------------------------------

                #do not allow the network to modify the plane before the spatial
                #transformation (i.e. use the simple plane)
                #---------------------------------------------------------------
                else:

                    y = rand_grid

                    #generate the rotation matrix & bias wrt the primitive
                    #-----------------------------------------------------------
                    linear = linear_list[batch, :, primitive]
                    q = linear[0:6]
                    q = q.view(2, 3)
                    q = q.contiguous()

                    t = linear[6:9]
                    t = t.view(1, 3)
                    t = t.expand(y.size(2), -1, -1).permute(1, 2, 0)
                    t = t.contiguous()
                    #-----------------------------------------------------------
                #---------------------------------------------------------------

                #apply the transformation matrix and the bias
                #---------------------------------------------------------------
                yq = torch.matmul(y.permute(0, 2, 1).contiguous(), q)
                yq = yq.permute(0, 2, 1).contiguous()
                yqt = torch.add(yq, t)
                #---------------------------------------------------------------

                #combine all the predictions of the primitives of one sample
                #---------------------------------------------------------------
                out_primitive.append(yqt)
            out_batch.append(torch.cat(out_primitive, 2).contiguous())
            #-------------------------------------------------------------------

        #combine the predictions of all samples
        #-----------------------------------------------------------------------
        out_batch = torch.cat(out_batch, 0).contiguous()
        #-----------------------------------------------------------------------

        #configuration and its probability
        #-----------------------------------------------------------------------
        existance_prob[existance_prob == 0] = 1
        configuration = out_batch.transpose(2, 1).contiguous()

        if constant_rep:
            ones = Variable(torch.ones(x.size(0))).cuda().contiguous()
            configuration_probability = ones
        else:
            configuration_probability = torch.prod(existance_prob, 1)
        #-----------------------------------------------------------------------

        return configuration, configuration_probability, points_per_primitive
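# adjust(points_per_primitive, self.num_points) above is an external helper:
# rounding existance_prob * num_points per primitive rarely sums to exactly
# num_points, so the helper presumably redistributes the rounding error. A
# minimal sketch of that idea, assuming the correction is applied to the
# largest primitive of each batch row (the real helper may differ):
import torch

def adjust(points_per_primitive, num_points):
    counts = points_per_primitive.clone()
    deficit = num_points - counts.sum(dim=1)   # per-row rounding error
    largest = counts.argmax(dim=1)             # index of the biggest primitive
    for b in range(counts.size(0)):
        counts[b, largest[b]] += deficit[b]
    return counts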