Code example #1
0
 def __init__(self, dest, size=256, room_size=6.05):
     """Prepare the top-down renderer and the output directory.

     Args:
         dest: subdirectory (under the data root) to write rendered rooms to.
         size: output image resolution in pixels (square).
         room_size: maximum room side length, passed to the renderer as
             ``length_cap``.
     """
     self.dest = dest
     self.size = size
     self.count = 0  # running index used to name output files
     self.renderer = TopDownView(size=self.size, length_cap=room_size)
     data_dir = utils.get_data_root_dir()
     self.dest_dir = f"{data_dir}/{dest}"
     # exist_ok=True avoids the check-then-create race of
     # `if not os.path.exists(...): os.makedirs(...)`
     os.makedirs(self.dest_dir, exist_ok=True)
Code example #2
0
class DatasetRenderer(DatasetAction):
    """
    Pre-render top-down view of
    each room in the house (floor, walls and objects).

    For every room, two files are written into the destination directory:
      - ``<count>.pkl``: pickled ``(render_data, room)`` tuple
      - ``<count>.jpg``: the rendered top-down image
    """

    def __init__(self, dest, size=256, room_size=6.05):
        """Prepare the renderer and ensure the output directory exists.

        Args:
            dest: subdirectory (under the data root) for rendered output.
            size: output image resolution in pixels (square).
            room_size: maximum room side length, passed as ``length_cap``.
        """
        self.dest = dest
        self.size = size
        self.count = 0  # running index used to name output files
        self.renderer = TopDownView(size=self.size, length_cap=room_size)
        data_dir = utils.get_data_root_dir()
        self.dest_dir = f"{data_dir}/{dest}"
        # exist_ok=True avoids the check-then-create race of exists()+makedirs()
        os.makedirs(self.dest_dir, exist_ok=True)

    def step(self, houses, num_threads=1):
        """Render each room of each house, yielding houses as they complete.

        Args:
            houses: iterable of house objects exposing a ``rooms`` attribute.
            num_threads: only 1 is supported. Any other value raises
                NotImplementedError after all houses have been yielded
                (matching the original behavior, which raised before its
                now-removed ThreadPool code ever ran).

        Yields:
            Each house, unchanged, after its rooms were rendered.
        """
        def render_and_save(room, dest):
            # Render one room and persist both the raw data and the image.
            img, data = self.renderer.render(room)
            with open(f"{self.dest_dir}/{dest}.pkl", "wb") as f:
                pickle.dump((data, room), f, pickle.HIGHEST_PROTOCOL)

            img = m.toimage(img, cmin=0, cmax=1)
            img.save(f"{self.dest_dir}/{dest}.jpg")

        for house in houses:
            if house.rooms:
                for room in house.rooms:
                    if num_threads == 1:
                        render_and_save(room, self.count)
                        print(f"Rendering room {self.count}...", end="\r")
                        # Augmented variants (if present) get their own indices.
                        if hasattr(room, 'augmented'):
                            for (i, room_a) in enumerate(room.augmented):
                                self.count += 1
                                render_and_save(room_a, self.count)
                        self.count += 1
            yield house

        if num_threads > 1:
            # Multi-threaded rendering was never implemented; the unreachable
            # ThreadPool code that followed this raise has been removed.
            raise NotImplementedError
Code example #3
0
class DatasetRenderer(DatasetAction):
    """
    Pre-render top-down view of
    each room in the house (floor, walls and objects).

    Each room is saved as a pickled ``(render_data, room)`` pair plus a
    JPEG of the rendered view, both named by a running counter.
    """

    def __init__(self, dest, size=512):
        """Prepare the renderer and ensure the output directory exists.

        Args:
            dest: subdirectory (under the data root) for rendered output.
            size: output image resolution in pixels (square).
        """
        self.dest = dest
        self.size = size
        self.count = 0  # running index used to name output files
        self.renderer = TopDownView(size=self.size)
        data_dir = utils.get_data_root_dir()
        self.dest_dir = f"{data_dir}/{dest}"
        # exist_ok=True avoids the check-then-create race of exists()+makedirs()
        os.makedirs(self.dest_dir, exist_ok=True)

    def step(self, houses):
        """Render every room of every house, yielding each house afterwards."""
        for house in houses:
            if house.rooms:
                for room in house.rooms:
                    img, data = self.renderer.render(room)
                    with open(f"{self.dest_dir}/{self.count}.pkl", "wb") as f:
                        pickle.dump((data, room), f, pickle.HIGHEST_PROTOCOL)

                    img = m.toimage(img, cmin=0, cmax=1)
                    img.save(f"{self.dest_dir}/{self.count}.jpg")
                    print(f"Rendering room {self.count}...", end="\r")
                    self.count += 1
            yield house
        print()
Code example #4
0
File: dataset.py — Project: zebrajack/planit
class DatasetRenderer(DatasetAction):
    """
    Pre-render top-down view of
    each room in the house (floor, walls and objects).

    With ``debug_graphs`` enabled (edit the flag in ``__init__``), instead of
    the standard pkl/jpg output this renders one debug image per root node of
    the room's relationship graph, loaded from disk by room index.
    """

    def __init__(self, dest, size=256, room_size=6.05):
        """Prepare the renderer and ensure the output directory exists.

        Args:
            dest: subdirectory (under the data root) for rendered output.
            size: output image resolution in pixels (square).
            room_size: maximum room side length, passed as ``length_cap``.
        """
        # Flip to True to render relationship-graph debug images instead of
        # the standard dataset output.
        self.debug_graphs = False
        self.dest = dest
        self.size = size
        self.count = 0  # running index used to name output files
        self.renderer = TopDownView(size=self.size, length_cap=room_size)
        data_dir = utils.get_data_root_dir()
        self.dest_dir = f"{data_dir}/{dest}"
        # exist_ok=True avoids the check-then-create race of exists()+makedirs()
        os.makedirs(self.dest_dir, exist_ok=True)

    def step(self, houses, num_threads=1):
        """Render each room of each house, yielding houses as they complete.

        Args:
            houses: iterable of house objects exposing a ``rooms`` attribute.
            num_threads: only 1 is supported. Any other value raises
                NotImplementedError after all houses have been yielded
                (matching the original behavior, which raised before its
                now-removed ThreadPool code ever ran).

        Yields:
            Each house, unchanged, after its rooms were rendered.
        """
        def render_and_save(room, dest):
            # Render one room and persist both the raw data and the image.
            img, data = self.renderer.render(room)
            with open(f"{self.dest_dir}/{dest}.pkl", "wb") as f:
                pickle.dump((data, room), f, pickle.HIGHEST_PROTOCOL)

            img = m.toimage(img, cmin=0, cmax=1)
            img.save(f"{self.dest_dir}/{dest}.jpg")

        for house in houses:
            if house.rooms:
                for room in house.rooms:

                    # STANDARD dataset rendering
                    if not self.debug_graphs:
                        if num_threads == 1:
                            render_and_save(room, self.count)
                            print(f"Rendering room {self.count}...", end="\r")
                            # Augmented variants (if present) get their own indices.
                            if hasattr(room, 'augmented'):
                                for (i, room_a) in enumerate(room.augmented):
                                    self.count += 1
                                    render_and_save(room_a, self.count)
                            self.count += 1

                    # RENDERING DEBUG GRAPHS
                    else:
                        # Load the graph for this room, hoping that the indices
                        # match up (the graph path convention is fixed by
                        # RelationshipGraph.load).
                        roots_to_targets = {}
                        data_dir = utils.get_data_root_dir()
                        graph = RelationshipGraph().load(
                            self.count, self.dest, data_dir)
                        # Convert into the format renderer.render_graph expects:
                        # {root_category: [(target_category, relation), ...]}.
                        # NOTE(review): keyed by category_name, so two nodes of
                        # the same category would overwrite each other — confirm
                        # categories are unique per graph.
                        for node in graph.nodes:
                            root = node.category_name
                            roots_to_targets[root] = []
                            for edge in node.out_edges:
                                target = edge.end_node.category_name
                                relation = edge.edge_type.name
                                roots_to_targets[root].append(
                                    (target, relation))
                        for root in roots_to_targets:
                            img, data = self.renderer.render_graph(
                                room, root, roots_to_targets[root])
                            img = m.toimage(img, cmin=0, cmax=1)
                            out_dir = f"{self.dest_dir}/{self.count}"
                            if not os.path.exists(out_dir):
                                os.makedirs(out_dir)
                            img.save(f"{out_dir}/{self.count}_{root}.jpg")

                        print(f"Rendering room {self.count}...", end="\r")
                        self.count += 1
            yield house

        if num_threads > 1:
            # Multi-threaded rendering was never implemented; the unreachable
            # ThreadPool code that followed this raise has been removed.
            raise NotImplementedError
Code example #5
0
    def __getitem__(self, index):
        """Build one training example for a rotation-prediction task.

        Renders a scene composite with a random prefix of its objects, picks
        the next object, rotates it by a randomly sampled angle (or leaves it
        as-is), and composites the rotated object back in with an attention
        channel outlining it.

        Returns:
            (inputs, target): ``inputs`` is the composite tensor whose last
            channel is a binary mask of the rotated object; ``target`` is
            1 when the object keeps its original orientation, 0 when rotated.
        """
        # NOTE(review): when self.seed is set, every call replays the same
        # random stream — presumably for deterministic evaluation; confirm.
        if self.seed:
            random.seed(self.seed)

        # Map the dataset-local index to the global scene index.
        i = index + self.scene_indices[0]
        scene = RenderedScene(i, self.data_dir, self.data_root_dir)
        composite = scene.create_composite()  #get empty composite

        object_nodes = scene.object_nodes
        #Since we need to at least rotate one object, this differs from location dataset slightly:
        #num_objects is the index of the node to rotate; only nodes before it become context.
        num_objects = random.randint(0, len(object_nodes) - 1)
        # NOTE(review): num_categories appears unused in this method.
        num_categories = len(scene.categories)

        # Composite the context objects (those preceding the rotation target).
        for i in range(num_objects):
            node = object_nodes[i]
            composite.add_node(node)

        #Select the node we want to rotate
        node = object_nodes[num_objects]

        modelId = node["modelId"]
        #Just some made up distribution of different cases:
        #20% = 180 degrees, 20% = multiples of 90, 20% = multiples of 22.5,
        #40% = no rotation (target stays 1).
        ran = random.uniform(0, 1)
        if ran < 0.2:
            r = math.pi
            target = 0
        elif ran < 0.4:
            r = math.pi / 2 * random.randint(1, 3)
            target = 0
        elif ran < 0.6:
            r = math.pi / 8 * random.randint(1, 15)
            target = 0
        else:
            r = 0
            target = 1

        o = Obj(modelId)
        #Get the transformation matrix from object space to scene space
        t = RotationDataset.pgen.get_projection(scene.room).to_2d(
            np.asarray(node["transform"]).reshape(4, 4))
        #Since centered already in object space, rotating the object in object
        #space is the easier option. The matrix rotates in the plane of axes
        #0 and 2, leaving axis 1 fixed (presumably the up axis — confirm).
        sin, cos = math.sin(r), math.cos(r)
        t_rot = np.asarray([[cos, 0, -sin, 0], \
                            [0, 1, 0, 0], \
                            [sin, 0, cos, 0], \
                            [0, 0, 0, 1]])
        o.transform(np.dot(t_rot, t))
        #Render the rotated view of the object
        rotated = torch.from_numpy(
            TopDownView.render_object_full_size(o, composite.size))
        #Calculate the relevant info needed to composite it to the input:
        #combine the object's original orientation angle with the applied rotation.
        sin, cos = composite.get_transformation(node["transform"])
        original_r = math.atan2(sin, cos)
        sin = math.sin(original_r + r)
        cos = math.cos(original_r + r)
        composite.add_height_map(rotated, node["category"], sin, cos)

        inputs = composite.get_composite(ablation=self.ablation)
        #Add attention channel, which is just the outline of the targeted object
        rotated[rotated > 0] = 1
        inputs[-1] = rotated

        return inputs, target