Example 1
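# Example 1 reads a LAS file portion by portion, accumulates the points and
# returns them together with the scaled root AABB and its spacing.
# Assumed context, not shown in the snippet: 'reader' is a LAS-reading module
# exposing init()/runSingle(), 'prog' provides printProgressBar(), and
# compute_spacing comes from the surrounding py3dtiles-style project.
import numpy as np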
def initialisePointsFixed(filename):
    data = reader.init([filename], None, None, None, fraction=100)
    totalCoords = []

    print("TOTAL : {}".format(data['point_count']))
    print()
    rotation_matrix = None

    for portionData in data['portions']:
        (filename, portion) = portionData

        root_aabb = data['aabb'] - data['avg_min']

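        # Shrink large, sparse clouds so the scaled root spacing stays in a
        # workable range (roughly between 0.1 and 10 units).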
        base_spacing = compute_spacing(root_aabb)
        if base_spacing > 10:
            root_scale = np.array([0.01, 0.01, 0.01])
        elif base_spacing > 1:
            root_scale = np.array([0.1, 0.1, 0.1])
        else:
            root_scale = np.array([1, 1, 1])

        root_aabb = root_aabb * root_scale
        root_spacing = compute_spacing(root_aabb)

        offset_scale = (-data['avg_min'], root_scale,
                        rotation_matrix[:3, :3].T if rotation_matrix is not None else None,
                        data['color_scale'])

        print(offset_scale)

        coords, colors = reader.runSingle(filename, portion, offset_scale)

        for (i, p) in enumerate(coords):

            point = {
                'x': p[0],
                'y': p[1],
                'z': p[2],
                'r': colors[i][0],
                'g': colors[i][1],
                'b': colors[i][2],
            }
            totalCoords.append(point)
            prog.printProgressBar(i + 1, len(coords), 'Loading Data')

    # Return once every portion has been appended to totalCoords.
    return (totalCoords, root_aabb, root_spacing)
Example 2
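# Example 2 is the driver of a py3dtiles-style converter: it reprojects the
# input bounds, then schedules reading, octree splitting and .pnts writing
# across worker processes over ZeroMQ.
import multiprocessing
import os
import pickle
import shutil
import struct
import sys
import time

import numpy as np
import pyproj
import zmq

# Also assumed from the surrounding project: compute_spacing,
# make_rotation_matrix, SharedNodeStore, OctreeMetadata, State, las_reader,
# zmq_process, zmq_send_to_process, zmq_send_to_all_process,
# can_queue_more_jobs, can_pnts_be_written and write_tileset.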
def main(args):
    folder = args.out

    # create folder
    if os.path.isdir(folder):
        if args.overwrite:
            shutil.rmtree(folder)
        else:
            print('Error, folder \'{}\' already exists'.format(folder))
            sys.exit(1)

    os.makedirs(folder)
    working_dir = folder + '/tmp'
    os.makedirs(working_dir)

    node_store = SharedNodeStore(working_dir)

    # read all input files headers and determine the aabb/spacing
    infos = las_reader.init(args)

    avg_min = infos['avg_min']
    rotation_matrix = None
    # srs stuff
    projection = None
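    # Note: Proj(init='epsg:...') and pyproj.transform() come from the
    # pyproj 1.x API; pyproj 2+ deprecates both in favour of
    # pyproj.Transformer.from_crs().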
    if args.srs_out is not None:
        p2 = pyproj.Proj(init='epsg:{}'.format(args.srs_out))
        if args.srs_in is not None:
            p1 = pyproj.Proj(init='epsg:{}'.format(args.srs_in))
        else:
            p1 = infos['srs_in']
        projection = [p1, p2]

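        # Reproject three AABB corners (bottom-left, top-right, bottom-right)
        # so both the box and its x axis are known in the output SRS.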
        bl = np.array(
            list(
                pyproj.transform(projection[0], projection[1],
                                 infos['aabb'][0][0], infos['aabb'][0][1],
                                 infos['aabb'][0][2])))
        tr = np.array(
            list(
                pyproj.transform(projection[0], projection[1],
                                 infos['aabb'][1][0], infos['aabb'][1][1],
                                 infos['aabb'][1][2])))
        br = np.array(
            list(
                pyproj.transform(projection[0], projection[1],
                                 infos['aabb'][1][0], infos['aabb'][0][1],
                                 infos['aabb'][0][2])))

        avg_min = np.array(
            list(
                pyproj.transform(projection[0], projection[1], avg_min[0],
                                 avg_min[1], avg_min[2])))

        x_axis = br - bl

        bl = bl - avg_min
        tr = tr - avg_min

        if args.srs_out == '4978':
            # Transform geocentric normal => (0, 0, 1)
            # and 4978-bbox x axis => (1, 0, 0),
            # to have a bbox in local coordinates that's nicely aligned with the data
            rotation_matrix = make_rotation_matrix(avg_min, np.array([0, 0, 1]))
            rotation_matrix = np.dot(
                make_rotation_matrix(x_axis, np.array([1, 0, 0])),
                rotation_matrix)

            bl = np.dot(bl, rotation_matrix[:3, :3].T)
            tr = np.dot(tr, rotation_matrix[:3, :3].T)

        root_aabb = np.array([np.minimum(bl, tr), np.maximum(bl, tr)])
    else:
        # offset
        root_aabb = infos['aabb'] - avg_min

    original_aabb = root_aabb

    base_spacing = compute_spacing(root_aabb)
    if base_spacing > 10:
        root_scale = np.array([0.01, 0.01, 0.01])
    elif base_spacing > 1:
        root_scale = np.array([0.1, 0.1, 0.1])
    else:
        root_scale = np.array([1.0, 1.0, 1.0])

    root_aabb = root_aabb * root_scale
    root_spacing = compute_spacing(root_aabb)

    octree_metadata = OctreeMetadata(aabb=root_aabb,
                                     spacing=root_spacing,
                                     scale=root_scale[0])

    if args.verbose >= 1:
        print('Summary:')
        print('  - points to process: {}'.format(infos['point_count']))
        print('  - offset to use: {}'.format(avg_min))
        print('  - root spacing: {}'.format(root_spacing / root_scale[0]))
        print('  - root aabb: {}'.format(root_aabb))
        print('  - original aabb: {}'.format(original_aabb))

    startup = time.time()

    initial_portion_count = len(infos['portions'])

    if args.graph:
        progression_log = open('progression.csv', 'w')

    def add_tasks_to_process(state, name, task, point_count):
        assert point_count > 0
        tasks_to_process = state.node_process.input
        if name not in tasks_to_process:
            tasks_to_process[name] = ([task], point_count)
        else:
            tasks, count = tasks_to_process[name]
            tasks.append(task)
            tasks_to_process[name] = (tasks, count + point_count)

    processed_points = 0
    points_in_progress = 0
    points_in_pnts = 0

    max_splitting_jobs_count = max(1, args.jobs // 2)

    # zmq setup
    context = zmq.Context()

    zmq_skt = context.socket(zmq.ROUTER)
    zmq_skt.bind('ipc:///tmp/py3dtiles1')
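    # ipc:// endpoints require a POSIX platform; substitute a tcp:// address
    # when running on Windows.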

    zmq_idle_clients = []

    state = State(infos['portions'])

    zmq_processes_killed = -1

    zmq_processes = [
        multiprocessing.Process(target=zmq_process,
                                args=(args.graph, projection, node_store,
                                      octree_metadata, folder, args.rgb,
                                      args.verbose)) for i in range(args.jobs)
    ]

    for p in zmq_processes:
        p.start()
    activities = [p.pid for p in zmq_processes]

    time_waiting_an_idle_process = 0

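    # Main scheduling loop: drain worker results, queue .pnts writes,
    # dispatch batched node tasks, feed new file portions, and exit once
    # every worker is idle.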
    while True:
        # state.print_debug()
        now = time.time() - startup
        at_least_one_job_ended = False

        all_processes_busy = not can_queue_more_jobs(zmq_idle_clients)
        while all_processes_busy or zmq_skt.poll(timeout=0, flags=zmq.POLLIN):
            # Blocking read but it's fine because either all our child processes are busy
            # or we know that there's something to read (zmq.POLLIN)
            start = time.time()
            result = zmq_skt.recv_multipart()

            client_id = result[0]
            result = result[1:]

            if len(result) == 1:
                if len(result[0]) == 0:
                    assert client_id not in zmq_idle_clients
                    zmq_idle_clients += [client_id]

                    if all_processes_busy:
                        time_waiting_an_idle_process += time.time() - start
                    all_processes_busy = False
                elif result[0] == b'halted':
                    zmq_processes_killed += 1
                    all_processes_busy = False
                else:
                    result = pickle.loads(result[0])
                    processed_points += result['total']
                    points_in_progress -= result['total']

                    if 'save' in result and len(result['save']) > 0:
                        node_store.put(result['name'], result['save'])

                    if result['name'][0:4] == b'root':
                        state.las_reader.active.remove(result['name'])
                    else:
                        del state.node_process.active[result['name']]

                        if len(result['name']) > 0:
                            state.node_process.inactive.append(result['name'])

                            if not state.las_reader.input and not state.las_reader.active:
                                if state.node_process.active or state.node_process.input:
                                    finished_node = result['name']
                                    if can_pnts_be_written(
                                            finished_node, finished_node,
                                            state.node_process.input,
                                            state.node_process.active):
                                        state.node_process.inactive.pop(-1)
                                        state.to_pnts.input.append(finished_node)

                                        for i in range(len(state.node_process.inactive) - 1, -1, -1):
                                            candidate = state.node_process.inactive[i]

                                            if can_pnts_be_written(
                                                    candidate, finished_node,
                                                    state.node_process.input,
                                                    state.node_process.active):
                                                state.node_process.inactive.pop(i)
                                                state.to_pnts.input.append(candidate)

                                else:
                                    for c in state.node_process.inactive:
                                        state.to_pnts.input.append(c)
                                    state.node_process.inactive.clear()

                    at_least_one_job_ended = True
            elif result[0] == b'pnts':
                points_in_pnts += struct.unpack('>I', result[1])[0]
                state.to_pnts.active.remove(result[2])
            else:
                count = struct.unpack('>I', result[2])[0]
                add_tasks_to_process(state, result[0], result[1], count)

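        # Send nodes that are ready to be written out as .pnts files.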
        while state.to_pnts.input and can_queue_more_jobs(zmq_idle_clients):
            node_name = state.to_pnts.input.pop()
            datas = node_store.get(node_name)
            assert len(datas) > 0, '{} has no data??'.format(node_name)
            zmq_send_to_process(zmq_idle_clients, zmq_skt,
                                [b'pnts', node_name, datas])
            node_store.remove(node_name)
            state.to_pnts.active.append(node_name)

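        # Batch pending node tasks into jobs of roughly 100 000 points,
        # taking nodes with the shortest names (closest to the root) first.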
        if can_queue_more_jobs(zmq_idle_clients):
            potential = sorted([(k, v)
                                for k, v in state.node_process.input.items()
                                if k not in state.node_process.active],
                               key=lambda f: -len(f[0]))

            while can_queue_more_jobs(zmq_idle_clients) and potential:
                target_count = 100000
                job_list = []
                count = 0
                idx = len(potential) - 1
                while count < target_count and potential and idx >= 0:
                    name, (tasks, point_count) = potential[idx]
                    if name not in state.node_process.active:
                        count += point_count
                        job_list += [name]
                        job_list += [node_store.get(name)]
                        job_list += [struct.pack('>I', len(tasks))]
                        job_list += tasks
                        del potential[idx]
                        del state.node_process.input[name]
                        state.node_process.active[name] = (len(tasks),
                                                           point_count, now)

                        if name in state.node_process.inactive:
                            state.node_process.inactive.remove(name)
                    idx -= 1

                if job_list:
                    zmq_send_to_process(zmq_idle_clients, zmq_skt, job_list)

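        # Feed workers new file portions, bounded by the in-flight point
        # budget and the splitting-job cap.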
        while (state.las_reader.input and
               (points_in_progress < 60000000 or not state.las_reader.active)
               and len(state.las_reader.active) < max_splitting_jobs_count
               and can_queue_more_jobs(zmq_idle_clients)):
            if args.verbose >= 1:
                print('Submit next portion {}'.format(
                    state.las_reader.input[-1]))
            _id = 'root_{}'.format(len(state.las_reader.input)).encode('ascii')
            file, portion = state.las_reader.input.pop()
            points_in_progress += portion[1] - portion[0]

            zmq_send_to_process(zmq_idle_clients, zmq_skt, [
                pickle.dumps({
                    'filename': file,
                    'offset_scale': (
                        -avg_min, root_scale,
                        rotation_matrix[:3, :3].T if rotation_matrix is not None else None,
                        infos['color_scale']),
                    'portion': portion,
                    'id': _id,
                })
            ])

            state.las_reader.active.append(_id)

        # if at this point we have no work in progress => we're done
        if len(zmq_idle_clients) == args.jobs or zmq_processes_killed == args.jobs:
            if zmq_processes_killed < 0:
                zmq_send_to_all_process(zmq_idle_clients, zmq_skt,
                                        [pickle.dumps(b'shutdown')])
                zmq_processes_killed = 0
            else:
                assert points_in_pnts == infos['point_count'], \
                    '!!! Invalid point count in the written .pnts ' \
                    '(expected: {}, was: {})'.format(infos['point_count'], points_in_pnts)
                if args.verbose >= 1:
                    print('Writing 3dtiles {}'.format(infos['avg_min']))
                write_tileset(working_dir, folder, octree_metadata, avg_min,
                              root_scale, projection, rotation_matrix,
                              args.rgb)
                shutil.rmtree(working_dir)
                if args.verbose >= 1:
                    print('Done')

                if args.benchmark is not None:
                    print('{},{},{},{}'.format(
                        args.benchmark,
                        ','.join([os.path.basename(f) for f in args.files]),
                        points_in_pnts, round(time.time() - startup, 1)))

                for p in zmq_processes:
                    p.terminate()
                break

        if at_least_one_job_ended:
            if args.verbose >= 3:
                print('{:^16}|{:^8}|{:^8}'.format('Name', 'Points', 'Seconds'))
                for name, v in state.node_process.active.items():
                    print('{:^16}|{:^8}|{:^8}'.format(
                        '{} ({})'.format(name.decode('ascii'), v[0]), v[1],
                        round(now - v[2], 1)))
                print('')
                print('Pending:')
                print('  - root: {} / {}'.format(len(state.las_reader.input),
                                                 initial_portion_count))
                print('  - other: {} files for {} nodes'.format(
                    sum([len(f[0])
                         for f in state.node_process.input.values()]),
                    len(state.node_process.input)))
                print('')
            elif args.verbose >= 2:
                state.print_debug()
            if args.verbose >= 1:
                print('{} % points in {} sec [{} tasks, {} nodes, {} wip]'.format(
                    round(100 * processed_points / infos['point_count'], 2),
                    round(now, 1),
                    args.jobs - len(zmq_idle_clients),
                    len(state.node_process.active),
                    points_in_progress))
            elif args.verbose >= 0:
                percent = round(100 * processed_points / infos['point_count'],
                                2)
                time_left = (100 - percent) * now / (percent + 0.001)
                print('\r{:>6} % in {} sec [est. time left: {} sec]'.format(
                    percent, round(now), round(time_left)),
                      end='',
                      flush=True)

            if args.graph:
                percent = round(100 * processed_points / infos['point_count'],
                                3)
                print('{}, {}'.format(time.time() - startup, percent),
                      file=progression_log)

        node_store.control_memory_usage(args.cache_size, args.verbose)

    if args.verbose >= 1:
        print('Time spent waiting for an idle process: {} sec'.format(
            round(time_waiting_an_idle_process, 2)))

    if args.graph:
        progression_log.close()

    # pygal chart
    if args.graph:
        import pygal
        from datetime import timedelta

        dateline = pygal.XY(x_label_rotation=25,
                            secondary_range=(0, 100))  #, show_y_guides=False)
        for pid in activities:
            activity = []
            filename = 'activity.{}.csv'.format(pid)
            # Stack one horizontal activity band per worker, bottom to top.
            i = len(activities) - activities.index(pid) - 1
            with open(filename, 'r') as f:
                content = f.read().split('\n')
                for line in content[1:]:
                    line = line.split(',')
                    if line[0]:
                        ts = float(line[0])
                        value = int(line[1]) / 3.0
                        activity.append((ts, i + value * 0.9))

            os.remove(filename)
            if activity:
                activity.append((activity[-1][0], activity[0][1]))
                activity.append(activity[0])
                dateline.add(str(pid), activity, show_dots=False, fill=True)

        with open('progression.csv', 'r') as f:
            values = []
            for line in f.read().split('\n'):
                if line:
                    line = line.split(',')
                    values += [(float(line[0]), float(line[1]))]
        os.remove('progression.csv')
        dateline.add('progression',
                     values,
                     show_dots=False,
                     secondary=True,
                     stroke_style={
                         'width': 2,
                         'color': 'black'
                     })

        dateline.render_to_file('activity.svg')

    context.destroy()
    # NOTE: assuming root_aabb is the intended box here; 'aabb' alone is not
    # defined in this scope.
    return float(np.linalg.norm(root_aabb[1] - root_aabb[0]) / 125)


def exportToXYZ(points):
    with open("output/root.xyz", "w") as writer:
        for p in points:
            # The truthiness test also skips points with a coordinate equal
            # to 0; use explicit None checks if such points must be kept.
            if p['x'] and p['y'] and p['z']:
                writer.write('{} {} {}\n'.format(p['x'], p['y'], p['z']))


data = reader.init(["C:/Users/Ajay/Desktop/ripple.las"],
                   None,
                   None,
                   None,
                   fraction=100)

totalCoords = []

rotation_matrix = None

for portionData in data['portions']:
    (filename, portion) = portionData

    root_aabb = data['aabb'] - data['avg_min']

    base_spacing = compute_spacing(root_aabb)
    if base_spacing > 10:
        root_scale = np.array([0.01, 0.01, 0.01])