Exemple #1
0
    def test_arnold_b85_encoding_real_world_data(self):
        """Round-trip encode/decode checks against captured production data
        for the b85UINT, b85POINT2 and b85POINT channel types.
        """
        def round_trip(values, type_code, expected):
            # packing + encoding has to reproduce the captured string ...
            fmt = '%s%s' % (len(values), type_code)
            self.assertEqual(
                expected,
                base85.arnold_b85_encode(struct.pack(fmt, *values)))
            # ... and decoding + unpacking has to give the values back
            self.assertEqual(
                values,
                list(struct.unpack(fmt, base85.arnold_b85_decode(expected))))

        # b85UINT
        round_trip(
            [0, 1, 9, 8, 1, 2, 10, 9, 2, 3, 11, 10, 3, 4, 12, 11, 4, 5, 13,
             12, 5, 6, 14, 13, 6, 7, 15, 14],
            'B',
            "&UOP6&psb:'7Bt>'Rg1B'n6CF(4ZUJ(P)gN")

        # b85POINT2
        round_trip(
            [0, 0.75, 0.0625, 0.75, 0.125, 0.75, 0.1875, 0.75, 0.25, 0.75,
             0.3125, 0.75, 0.375, 0.75, 0.4375, 0.75, 0, 1, 0.0625, 1, 0.125,
             1, 0.1875, 1, 0.25, 1, 0.3125, 1, 0.375, 1, 0.4375, 1],
            'f',
            "z8?r5N7e-P78?r5N7reTb8?r5N8$W,M8?r5N8+HY88?r5N8.koX8"
            "?r5N82:0x8?r5N85]GC8?r5Nzy7e-P7y7reTby8$W,My8+HY8y8."
            "koXy82:0xy85]GCy")

        # b85POINT (the raw values are simply 0..47)
        round_trip(
            list(range(48)),
            'f',
            "zy8TFfd8[8>O8b)k:8eM,Z8hpC%8l>YE8oaoe8qI%u8s0108tl<@"
            "8vSGP8x:R`9$v]p9&]i+9(Dt;9)8OC9*,*K9*tZS9+h5[9,[ec9-"
            "[email protected]/6L&90*'.90rW691f2>92YbF93M=N94@mV954H^96'x"
            "f96L;j96pSn97?kr97d.v983G%98W_)99&w-99K:199oR59:>j99"
            ":c-=9;2EA9;V]E9<%uI9<J8M")
Exemple #2
0
    def test_arnold_b85_encoding_real_world_data(self):
        """Exercise the encoder and decoder with real production payloads
        (b85UINT, b85POINT2 and b85POINT data).
        """
        cases = (
            ('B',
             [0, 1, 9, 8, 1, 2, 10, 9, 2, 3, 11, 10, 3, 4, 12, 11, 4, 5,
              13, 12, 5, 6, 14, 13, 6, 7, 15, 14],
             "&UOP6&psb:'7Bt>'Rg1B'n6CF(4ZUJ(P)gN"),
            ('f',
             [0, 0.75, 0.0625, 0.75, 0.125, 0.75, 0.1875, 0.75, 0.25,
              0.75, 0.3125, 0.75, 0.375, 0.75, 0.4375, 0.75, 0, 1,
              0.0625, 1, 0.125, 1, 0.1875, 1, 0.25, 1, 0.3125, 1, 0.375,
              1, 0.4375, 1],
             "z8?r5N7e-P78?r5N7reTb8?r5N8$W,M8?r5N8+HY88?r5N8.koX8"
             "?r5N82:0x8?r5N85]GC8?r5Nzy7e-P7y7reTby8$W,My8+HY8y8."
             "koXy82:0xy85]GCy"),
            ('f',
             list(range(48)),
             "zy8TFfd8[8>O8b)k:8eM,Z8hpC%8l>YE8oaoe8qI%u8s0108tl<@"
             "8vSGP8x:R`9$v]p9&]i+9(Dt;9)8OC9*,*K9*tZS9+h5[9,[ec9-"
             "[email protected]/6L&90*'.90rW691f2>92YbF93M=N94@mV954H^96'x"
             "f96L;j96pSn97?kr97d.v983G%98W_)99&w-99K:199oR59:>j99"
             ":c-=9;2EA9;V]E9<%uI9<J8M"),
        )
        for type_code, values, encoded in cases:
            fmt = '%s%s' % (len(values), type_code)
            packed = struct.pack(fmt, *values)
            # the packed bytes must encode to the captured string
            self.assertEqual(encoded, base85.arnold_b85_encode(packed))
            # decoding the captured string must restore the values
            decoded = base85.arnold_b85_decode(encoded)
            self.assertEqual(values, list(struct.unpack(fmt, decoded)))
Exemple #3
0
    def test_arnold_b85_encode_packs_ones_properly(self):
        """Floats equal to 1.0 must collapse to the one-character 'y'
        shortcut in the encoded output.
        """
        cases = [
            (struct.pack('f', 1.0), 'y'),
            (struct.pack('ffff', 1.0, 1.0, 3.484236717224121, 1.0),
             'yy8^RH(y'),
        ]
        for packed, expected in cases:
            self.assertEqual(expected, base85.arnold_b85_encode(packed))
Exemple #4
0
    def test_arnold_b85_encode_packs_zeros_properly(self):
        """Floats equal to 0.0 must collapse to the one-character 'z'
        shortcut in the encoded output.
        """
        cases = [
            (struct.pack('f', 0.0), 'z'),
            (struct.pack('ffff', 0.0, 0.0, 3.484236717224121, 0.0),
             'zz8^RH(z'),
        ]
        for packed, expected in cases:
            self.assertEqual(expected, base85.arnold_b85_encode(packed))
Exemple #5
0
    def test_arnold_b85_encode_is_working_properly(self):
        """Single packed floats must encode to their known five-character
        base85 groups.
        """
        expectations = [
            (2, '8TFfd'),
            (3.484236717224121, '8^RH('),
        ]
        for value, expected in expectations:
            self.assertEqual(
                expected,
                base85.arnold_b85_encode(struct.pack('f', value)))
Exemple #6
0
    def test_arnold_b85_encode_packs_ones_properly(self):
        """1.0 floats are emitted as the single shortcut character 'y'."""
        packed_inputs = [
            struct.pack('f', 1.0),
            struct.pack('ffff', 1.0, 1.0, 3.484236717224121, 1.0),
        ]
        expected_outputs = ['y', 'yy8^RH(y']
        for packed, expected in zip(packed_inputs, expected_outputs):
            self.assertEqual(
                expected,
                base85.arnold_b85_encode(packed)
            )
Exemple #7
0
    def test_arnold_b85_encode_packs_zeros_properly(self):
        """0.0 floats are emitted as the single shortcut character 'z'."""
        packed_inputs = [
            struct.pack('f', 0.0),
            struct.pack('ffff', 0.0, 0.0, 3.484236717224121, 0.0),
        ]
        expected_outputs = ['z', 'zz8^RH(z']
        for packed, expected in zip(packed_inputs, expected_outputs):
            self.assertEqual(
                expected,
                base85.arnold_b85_encode(packed)
            )
Exemple #8
0
    def test_arnold_b85_encode_is_working_properly(self):
        """Known packed floats encode to their expected five-character
        base85 groups.
        """
        for value, encoded in ((2, '8TFfd'), (3.484236717224121, '8^RH(')):
            self.assertEqual(
                encoded,
                base85.arnold_b85_encode(struct.pack('f', value))
            )
Exemple #9
0
def particle2ass(node,
                 name,
                 export_motion=False,
                 export_color=False,
                 render_type=0):
    """Export particle geometry from a Houdini node as an Arnold ``points``
    .ass block.

    Cleanups vs. the previous revision: removed the no-op
    ``render_type = render_type`` self-assignment, removed the dead
    ``skip_*`` flags that were set but never read, and replaced the
    if/elif render-mode chain with a dict lookup.

    :param node: Houdini node; ``node.geometry()`` supplies the particles.
    :param name: value written to the ``name`` field of the block.
    :param export_motion: when True a second motion sample is appended from
        the ``pprime`` point attribute.
    :param export_color: when True a ``rgbPP`` channel is emitted from the
        ``particle_color`` point attribute (empty payload if the attribute
        is missing).
    :param render_type: particle mode: 0 -> "disk", 1 -> "sphere",
        2 -> "quad".
    :return: the filled-in ``points`` template as a string.
    """
    sample_count = 2 if export_motion else 1

    geo = node.geometry()
    base_template = """
points
{
 name %(name)s
 points %(point_count)s %(sample_count)s b85POINT
%(point_positions)s
 radius %(point_count)s 1 b85FLOAT
%(point_radius)s
 mode %(render_as)s
 min_pixel_width 0
 step_size 0
 visibility 243
 receive_shadows on
 self_shadows on
 shader "initialParticleSE"
 opaque on
 matte off
 id -838484804
%(color_template)s
}"""

    point_count = geo.intrinsicValueDict()['pointcount']

    #
    # Point Positions
    #
    point_positions = geo.pointFloatAttribValuesAsString('P')
    if export_motion:
        # append the motion-sample positions right after the rest positions
        point_prime_positions = geo.pointFloatAttribValuesAsString('pprime')
        point_positions = '%s%s' % (point_positions, point_prime_positions)
        del point_prime_positions

    encode_start = time.time()
    encoded_point_positions = base85.arnold_b85_encode(point_positions)
    encode_end = time.time()
    print('Encoding Point Position    : %3.3f' % (encode_end - encode_start))
    # drop the raw buffer early; these strings can be very large
    del point_positions

    split_start = time.time()
    splitted_point_positions = split_data(encoded_point_positions, 500)
    split_end = time.time()
    print('Splitting Point Positions : %3.3f' % (split_end - split_start))
    del encoded_point_positions

    #
    # Point Radius
    #
    try:
        point_radius = geo.pointFloatAttribValuesAsString('pscale')
    except hou.OperationFailed:
        # no radius attribute; encode an empty payload instead
        point_radius = ''

    encode_start = time.time()
    encoded_point_radius = base85.arnold_b85_encode(point_radius)
    encode_end = time.time()
    print('Encoding Point Radius    : %3.3f' % (encode_end - encode_start))
    del point_radius

    split_start = time.time()
    splitted_point_radius = split_data(encoded_point_radius, 500)
    split_end = time.time()
    print('Splitting Point Radius : %3.3f' % (split_end - split_start))
    del encoded_point_radius

    # map the numeric render_type to Arnold's mode keyword ("disk" default)
    render_as = {1: "sphere", 2: "quad"}.get(render_type, "disk")

    #
    # Point Colors (optional rgbPP channel)
    #
    color_template = ''
    if export_color:
        try:
            point_colors = geo.pointFloatAttribValuesAsString('particle_color')
        except hou.OperationFailed:
            # no color attribute; encode an empty payload instead
            point_colors = ''

        encode_start = time.time()
        encoded_point_colors = base85.arnold_b85_encode(point_colors)
        encode_end = time.time()
        print('Encoding Point colors     : %3.3f' %
              (encode_end - encode_start))
        del point_colors

        split_start = time.time()
        splitted_point_colors = split_data(encoded_point_colors, 100)
        split_end = time.time()
        print('Splitting Point Colors : %3.3f' % (split_end - split_start))
        del encoded_point_colors

        color_template = """
            declare rgbPP uniform RGB
            rgbPP %(point_count)s 1 b85RGB
            %(splitted_point_colors)s
        """

        color_template = color_template % {
            'point_count': point_count,
            'splitted_point_colors': splitted_point_colors
        }

    data = base_template % {
        'name': name,
        'point_count': point_count,
        'sample_count': sample_count,
        'render_as': render_as,
        'point_radius': splitted_point_radius,
        'point_positions': splitted_point_positions,
        'color_template': color_template,
    }
    del splitted_point_radius
    del splitted_point_positions
    return data
Exemple #10
0
def polygon2ass(node,
                name,
                export_motion=False,
                export_color=False,
                double_sided=True,
                invert_normals=False):
    """Export polygon geometry from a Houdini node as an Arnold ``polymesh``
    .ass block.

    Bug fix vs. the previous revision: the loop body used the Python 2
    backtick repr syntax (`` `x` ``), which is a SyntaxError on Python 3;
    ``str()`` produces identical text for ints. Dead locals
    (``skip_normals``, ``skip_uvs``, ``vertex_colors``) and large blocks
    of commented-out code were removed.

    :param node: Houdini node; ``node.geometry()`` supplies the mesh.
    :param name: value written to the ``name`` field of the block.
    :param export_motion: when True a second motion sample is appended from
        the ``pprime`` point attribute and the matrix is doubled.
    :param export_color: when True a ``colorSet1`` channel is emitted from
        the ``color`` point attribute (empty payload if missing).
    :param double_sided: sidedness is 255 when True, 0 otherwise.
    :param invert_normals: writes ``invert_normals on`` when True.
    :return: the filled-in ``polymesh`` template as a string.
    """
    sample_count = 2 if export_motion else 1

    # visibility flags
    # a binary value of
    # 00000000
    # ||||||||
    # |||||||+-> primary_visibility
    # ||||||+--> cast_shadows
    # |||||+---> visible_in_reflections
    # ||||+----> visible_in_refractions
    # |||+-----> (unknown)
    # ||+------> visible_in_diffuse
    # |+-------> visible_in_glossy
    # +--------> (unknown)

    geo = node.geometry()
    base_template = """
polymesh
{
 name %(name)s
 nsides %(primitive_count)i 1 UINT
%(number_of_points_per_primitive)s
 vidxs %(vertex_count)s 1 UINT
%(vertex_ids)s
 vlist %(point_count)s %(sample_count)s b85POINT
%(point_positions)s
 smoothing on
 visibility 255
 sidedness %(sidedness)s
 invert_normals %(invert_normals)s
 receive_shadows on
 self_shadows on
 opaque on
 matrix
%(matrix)s
 id 683108022
%(color_template)s
}"""

    intrinsic_values = geo.intrinsicValueDict()
    primitive_count = intrinsic_values['primitivecount']
    point_count = intrinsic_values['pointcount']
    vertex_count = intrinsic_values['vertexcount']

    # Collect per-primitive vertex counts and the flattened vertex->point id
    # list, flushing into space-joined chunks every 500 entries so the final
    # '\n'.join produces reasonably short output lines.
    number_of_points_per_primitive = []
    vertex_ids = []
    combined_number_of_points_per_primitive = []
    combined_vertex_ids = []
    i = 0
    j = 0
    for prim in geo.iterPrims():
        # str() replaces the original Python 2 backtick repr syntax
        number_of_points_per_primitive.append(str(prim.numVertices()))
        i += 1
        if i > 500:
            i = 0
            combined_number_of_points_per_primitive.append(
                ' '.join(number_of_points_per_primitive))
            number_of_points_per_primitive = []
        for vertex in prim.vertices():
            vertex_ids.append(str(vertex.point().number()))
            j += 1
            if j > 500:
                j = 0
                combined_vertex_ids.append(' '.join(vertex_ids))
                vertex_ids = []

    # flush the remainders of both accumulators
    if number_of_points_per_primitive:
        combined_number_of_points_per_primitive.append(
            ' '.join(number_of_points_per_primitive))
    if vertex_ids:
        combined_vertex_ids.append(' '.join(vertex_ids))

    point_positions = geo.pointFloatAttribValuesAsString('P')
    if export_motion:
        # append the motion-sample positions right after the rest positions
        point_prime_positions = geo.pointFloatAttribValuesAsString('pprime')
        point_positions = '%s%s' % (point_positions, point_prime_positions)

    try:
        point_colors = geo.pointFloatAttribValuesAsString('color')
    except hou.OperationFailed:
        # no color attribute; encode an empty payload instead
        point_colors = ''

    #
    # Number Of Points Per Primitive
    #
    encode_start = time.time()
    encoded_number_of_points_per_primitive = '\n'.join(
        combined_number_of_points_per_primitive)
    encode_end = time.time()
    print('Encoding Number of Points  : %3.3f' % (encode_end - encode_start))

    split_start = time.time()
    # already chunked to ~500 entries per line while gathering
    splitted_number_of_points_per_primitive = \
        encoded_number_of_points_per_primitive
    split_end = time.time()
    print('Splitting Number of Points : %3.3f' % (split_end - split_start))

    #
    # Point Positions
    #
    encode_start = time.time()
    encoded_point_positions = base85.arnold_b85_encode(point_positions)
    encode_end = time.time()
    print('Encoding Point Position    : %3.3f' % (encode_end - encode_start))

    split_start = time.time()
    splitted_point_positions = split_data(encoded_point_positions, 500)
    split_end = time.time()
    print('Splitting Point Positions : %3.3f' % (split_end - split_start))

    #
    # Point Colors
    #
    encode_start = time.time()
    encoded_point_colors = base85.arnold_b85_encode(point_colors)
    encode_end = time.time()
    print('Encoding Point colors     : %3.3f' % (encode_end - encode_start))

    split_start = time.time()
    splitted_point_colors = split_data(encoded_point_colors, 100)
    split_end = time.time()
    print('Splitting Vertex Colors    : %3.3f' % (split_end - split_start))

    #
    # Vertex Ids
    #
    encode_start = time.time()
    encoded_vertex_ids = '\n'.join(combined_vertex_ids)
    encode_end = time.time()
    print('Encoding Vertex Ids        : %3.3f' % (encode_end - encode_start))

    split_start = time.time()
    # already chunked to ~500 entries per line while gathering
    splitted_vertex_ids = encoded_vertex_ids
    split_end = time.time()
    print('Splitting Vertex Ids       : %3.3f' % (split_end - split_start))

    # identity transform; doubled when a second motion sample is present
    matrix = """1 0 0 0
0 1 0 0
0 0 1 0
0 0 0 1
"""
    if export_motion:
        matrix += matrix

    color_template = ''
    if export_color:
        color_template = """
            declare colorSet1 varying RGBA
            colorSet1 %(point_count)s 1 b85RGBA
            %(splitted_point_colors)s
        """

        color_template = color_template % {
            'point_count': point_count,
            'splitted_point_colors': splitted_point_colors
        }

    data = base_template % {
        'name': name,
        'point_count': point_count,
        'vertex_count': vertex_count,
        'primitive_count': primitive_count,
        'sample_count': sample_count,
        'number_of_points_per_primitive':
        splitted_number_of_points_per_primitive,
        'vertex_ids': splitted_vertex_ids,
        'point_positions': splitted_point_positions,
        'matrix': matrix,
        'color_template': color_template,
        'sidedness': 255 if double_sided else 0,
        'invert_normals': 'on' if invert_normals else 'off',
    }

    return data
Exemple #11
0
    # repeat every first and last point coordinates
    # (3 value each 3 * 4 = 12 characters) of every curve
    # NOTE(review): excerpt of a larger curve-export routine;
    # ``point_positions``, ``radius``, ``real_number_of_points_in_one_curve``
    # and ``encode_start`` are defined before this view starts.
    zip_start = time.time()
    # Chunk the packed float string into one run per curve (each point is
    # 3 floats * 4 bytes = 12 characters), then duplicate the first 12 and
    # last 12 characters of every run so each curve's end points repeat.
    point_positions = ''.join(
        map(
            lambda x: '%s%s%s' % (x[:12], x, x[-12:]),
            map(
                ''.join,
                zip(*[iter(point_positions)] *
                    (real_number_of_points_in_one_curve * 4 * 3)))))
    zip_end = time.time()
    print('Zipping Point Position       : %3.3f' % (zip_end - zip_start))

    # encoded_point_positions = base85.arnold_b85_encode_multithreaded(point_positions)
    encoded_point_positions = base85.arnold_b85_encode(point_positions)
    encode_end = time.time()
    # NOTE(review): ``encode_start`` is set outside this excerpt
    print('Encoding Point Position      : %3.3f' % (encode_end - encode_start))

    split_start = time.time()
    # hard-wrap the encoded stream at 500 characters per line
    splitted_point_positions = split_data(encoded_point_positions, 500)
    split_end = time.time()
    print('Splitting Point Positions    : %3.3f' % (split_end - split_start))

    # radius
    encode_start = time.time()
    encoded_radius = base85.arnold_b85_encode(radius)
    encode_end = time.time()
    print('Radius encode                : %3.3f' % (encode_end - encode_start))

    split_start = time.time()
Exemple #12
0
    # NOTE(review): benchmark excerpt -- ``num_of_data``, ``repeat``,
    # ``pack``, ``unpack`` and ``base85`` are defined before this view
    # starts.  The ``''.join`` over ``pack`` results implies Python 2
    # (``pack`` returns bytes on Python 3) -- TODO confirm target version.
    start = time.time()
    # build a packed little-endian uint32 blob for 0..num_of_data-1
    data = ''.join([pack('<I', i) for i in range(num_of_data)])
    end = time.time()
    generating_data = end - start
    print('Generating data         : %.3f seconds' % generating_data)

    start = time.time()
    # sanity pass: unpack everything back (4 bytes per uint32)
    unpacked_data = unpack('<%sI' % (len(data) // 4), data)
    end = time.time()
    unpacking_data = end - start
    print('length of unpacked data : %s' % len(unpacked_data))
    print('Unpacking data          : %.3f seconds' % unpacking_data)

    # time the single-threaded encoder
    print('******** NORMAL ********')
    start = time.time()
    normal_encoded_data = base85.arnold_b85_encode(data)
    end = time.time()
    encode_duration = end - start
    print('Encoding %3i times took : %.3f seconds' % (repeat, encode_duration))
    print('Averaging               : %.3f seconds' %
          (encode_duration / repeat))

    # time the multi-threaded encoder on the same payload
    print('**** MULTI-THREADED ****')
    start = time.time()
    thread_encoded_data = base85.arnold_b85_encode_multithreaded(data)
    end = time.time()
    encode_duration = end - start
    print('Encoding %3i times took : %.3f seconds' % (repeat, encode_duration))
    print('Averaging               : %.3f seconds' %
          (encode_duration / repeat))
Exemple #13
0
    # NOTE(review): benchmark excerpt -- ``num_of_data``, ``repeat``,
    # ``pack``, ``unpack`` and ``base85`` are defined before this view
    # starts.  The ``''.join`` over ``pack`` results implies Python 2
    # (``pack`` returns bytes on Python 3) -- TODO confirm target version.
    start = time.time()
    # build a packed little-endian uint32 blob for 0..num_of_data-1
    data = ''.join([pack('<I', i) for i in range(num_of_data)])
    end = time.time()
    generating_data = end - start
    print('Generating data         : %.3f seconds' % generating_data)

    start = time.time()
    # sanity pass: unpack everything back (4 bytes per uint32)
    unpacked_data = unpack('<%sI' % (len(data) // 4), data)
    end = time.time()
    unpacking_data = end - start
    print('length of unpacked data : %s' % len(unpacked_data))
    print('Unpacking data          : %.3f seconds' % unpacking_data)

    # time the single-threaded encoder
    print('******** NORMAL ********')
    start = time.time()
    normal_encoded_data = base85.arnold_b85_encode(data)
    end = time.time()
    encode_duration = end - start
    print('Encoding %3i times took : %.3f seconds' % (repeat, encode_duration))
    print('Averaging               : %.3f seconds' % (encode_duration / repeat))

    # time the multi-threaded encoder on the same payload
    print('**** MULTI-THREADED ****')
    start = time.time()
    thread_encoded_data = base85.arnold_b85_encode_multithreaded(data)
    end = time.time()
    encode_duration = end - start
    print('Encoding %3i times took : %.3f seconds' % (repeat, encode_duration))
    print('Averaging               : %.3f seconds' % (encode_duration / repeat))

    # both encoders must produce identical output
    assert normal_encoded_data == thread_encoded_data
Exemple #14
0
def particle2ass(node, name, export_motion=False, export_color=False, render_type=0):
    """Export particle geometry from a Houdini node as an Arnold ``points``
    .ass block.

    Cleanups vs. the previous revision: removed the no-op
    ``render_type = render_type`` self-assignment, removed the dead
    ``skip_*`` flags that were set but never read, and replaced the
    if/elif render-mode chain with a dict lookup.

    :param node: Houdini node; ``node.geometry()`` supplies the particles.
    :param name: value written to the ``name`` field of the block.
    :param export_motion: when True a second motion sample is appended from
        the ``pprime`` point attribute.
    :param export_color: when True a ``rgbPP`` channel is emitted from the
        ``particle_color`` point attribute (empty payload if the attribute
        is missing).
    :param render_type: particle mode: 0 -> "disk", 1 -> "sphere",
        2 -> "quad".
    :return: the filled-in ``points`` template as a string.
    """
    sample_count = 2 if export_motion else 1

    geo = node.geometry()
    base_template = """
points
{
 name %(name)s
 points %(point_count)s %(sample_count)s b85POINT
%(point_positions)s
 radius %(point_count)s 1 b85FLOAT
%(point_radius)s
 mode %(render_as)s
 min_pixel_width 0
 step_size 0
 visibility 243
 receive_shadows on
 self_shadows on
 shader "initialParticleSE"
 opaque on
 matte off
 id -838484804
%(color_template)s
}"""

    point_count = geo.intrinsicValueDict()['pointcount']

    #
    # Point Positions
    #
    point_positions = geo.pointFloatAttribValuesAsString('P')
    if export_motion:
        # append the motion-sample positions right after the rest positions
        point_prime_positions = geo.pointFloatAttribValuesAsString('pprime')
        point_positions = '%s%s' % (point_positions, point_prime_positions)
        del point_prime_positions

    encode_start = time.time()
    encoded_point_positions = base85.arnold_b85_encode(point_positions)
    encode_end = time.time()
    print('Encoding Point Position    : %3.3f' % (encode_end - encode_start))
    # drop the raw buffer early; these strings can be very large
    del point_positions

    split_start = time.time()
    splitted_point_positions = split_data(encoded_point_positions, 500)
    split_end = time.time()
    print('Splitting Point Positions : %3.3f' % (split_end - split_start))
    del encoded_point_positions

    #
    # Point Radius
    #
    try:
        point_radius = geo.pointFloatAttribValuesAsString('pscale')
    except hou.OperationFailed:
        # no radius attribute; encode an empty payload instead
        point_radius = ''

    encode_start = time.time()
    encoded_point_radius = base85.arnold_b85_encode(point_radius)
    encode_end = time.time()
    print('Encoding Point Radius    : %3.3f' % (encode_end - encode_start))
    del point_radius

    split_start = time.time()
    splitted_point_radius = split_data(encoded_point_radius, 500)
    split_end = time.time()
    print('Splitting Point Radius : %3.3f' % (split_end - split_start))
    del encoded_point_radius

    # map the numeric render_type to Arnold's mode keyword ("disk" default)
    render_as = {1: "sphere", 2: "quad"}.get(render_type, "disk")

    #
    # Point Colors (optional rgbPP channel)
    #
    color_template = ''
    if export_color:
        try:
            point_colors = geo.pointFloatAttribValuesAsString('particle_color')
        except hou.OperationFailed:
            # no color attribute; encode an empty payload instead
            point_colors = ''

        encode_start = time.time()
        encoded_point_colors = base85.arnold_b85_encode(point_colors)
        encode_end = time.time()
        print('Encoding Point colors     : %3.3f' % (encode_end - encode_start))
        del point_colors

        split_start = time.time()
        splitted_point_colors = split_data(encoded_point_colors, 100)
        split_end = time.time()
        print('Splitting Point Colors : %3.3f' % (split_end - split_start))
        del encoded_point_colors

        color_template = """
            declare rgbPP uniform RGB
            rgbPP %(point_count)s 1 b85RGB
            %(splitted_point_colors)s
        """

        color_template = color_template % {
            'point_count': point_count,
            'splitted_point_colors': splitted_point_colors
        }

    data = base_template % {
        'name': name,
        'point_count': point_count,
        'sample_count': sample_count,
        'render_as': render_as,
        'point_radius': splitted_point_radius,
        'point_positions': splitted_point_positions,
        'color_template': color_template,
    }
    del splitted_point_radius
    del splitted_point_positions
    return data
Exemple #15
0
def polygon2ass(
        node, name, export_motion=False, export_color=False, double_sided=True,
        invert_normals=False
):
    """Exports polygon geometry to Arnold .ass format.

    :param node: a Houdini SOP node; its geometry is exported as one
        Arnold ``polymesh`` node.
    :param str name: the name of the generated ``polymesh`` node.
    :param bool export_motion: when True the ``pprime`` point attribute is
        appended after the rest positions and the sample count is doubled,
        producing deformation motion blur data.
    :param bool export_color: when True the ``color`` point attribute is
        exported as a ``colorSet1`` varying RGBA user parameter.
    :param bool double_sided: when True ``sidedness`` is 255, otherwise 0.
    :param bool invert_normals: sets the ``invert_normals`` flag to
        ``on``/``off``.
    :return: the .ass file snippet for this geometry as a string.
    """
    sample_count = 2 if export_motion else 1

    # visibility flags
    # a binary value of
    # 00000000
    # ||||||||
    # |||||||+-> primary_visibility
    # ||||||+--> cast_shadows
    # |||||+---> visible_in_reflections
    # ||||+----> visible_in_refractions
    # |||+-----> (unknown)
    # ||+------> visible_in_diffuse
    # |+-------> visible_in_glossy
    # +--------> (unknown)

    geo = node.geometry()
    base_template = """
polymesh
{
 name %(name)s
 nsides %(primitive_count)i 1 UINT
%(number_of_points_per_primitive)s
 vidxs %(vertex_count)s 1 UINT
%(vertex_ids)s
 vlist %(point_count)s %(sample_count)s b85POINT
%(point_positions)s
 smoothing on
 visibility 255
 sidedness %(sidedness)s
 invert_normals %(invert_normals)s
 receive_shadows on
 self_shadows on
 opaque on
 matrix
%(matrix)s
 id 683108022
%(color_template)s
}"""

    intrinsic_values = geo.intrinsicValueDict()

    primitive_count = intrinsic_values['primitivecount']
    point_count = intrinsic_values['pointcount']
    vertex_count = intrinsic_values['vertexcount']

    # Collect per-primitive vertex counts and the vertex point-ids.  Both are
    # accumulated in ~500 item chunks that get joined with spaces as we go,
    # so the final '\n'.join already yields line-wrapped data and no
    # re-splitting of one huge string is needed afterwards.
    number_of_points_per_primitive = []
    vertex_ids = []
    combined_number_of_points_per_primitive = []
    combined_vertex_ids = []

    i = 0
    j = 0
    for prim in geo.iterPrims():
        # str() instead of the Python 2 only backtick-repr syntax;
        # identical output for ints, and valid on Python 3
        number_of_points_per_primitive.append(str(prim.numVertices()))
        i += 1
        if i > 500:
            i = 0
            combined_number_of_points_per_primitive.append(
                ' '.join(number_of_points_per_primitive)
            )
            number_of_points_per_primitive = []
        for vertex in prim.vertices():
            vertex_ids.append(str(vertex.point().number()))
            j += 1
            if j > 500:
                j = 0
                combined_vertex_ids.append(' '.join(vertex_ids))
                vertex_ids = []

    # join the leftovers of the last (incomplete) chunks
    if number_of_points_per_primitive:
        combined_number_of_points_per_primitive.append(
            ' '.join(number_of_points_per_primitive)
        )
    if vertex_ids:
        combined_vertex_ids.append(' '.join(vertex_ids))

    point_positions = geo.pointFloatAttribValuesAsString('P')
    if export_motion:
        # motion sample positions are appended right after the rest positions
        point_prime_positions = geo.pointFloatAttribValuesAsString('pprime')
        point_positions = '%s%s' % (point_positions, point_prime_positions)

    #
    # Number Of Points Per Primitive
    #
    encode_start = time.time()
    encoded_number_of_points_per_primitive = \
        '\n'.join(combined_number_of_points_per_primitive)
    encode_end = time.time()
    print('Encoding Number of Points  : %3.3f' % (encode_end - encode_start))

    split_start = time.time()
    # already chunked during collection, nothing more to split
    splitted_number_of_points_per_primitive = \
        encoded_number_of_points_per_primitive
    split_end = time.time()
    print('Splitting Number of Points : %3.3f' % (split_end - split_start))

    #
    # Point Positions
    #
    encode_start = time.time()
    encoded_point_positions = base85.arnold_b85_encode(point_positions)
    encode_end = time.time()
    print('Encoding Point Position    : %3.3f' % (encode_end - encode_start))

    split_start = time.time()
    splitted_point_positions = split_data(encoded_point_positions, 500)
    split_end = time.time()
    print('Splitting Point Positions : %3.3f' % (split_end - split_start))

    #
    # Vertex Colors
    #
    # Only read and encode the color data when it is actually exported,
    # consistent with the particle exporter above.
    color_template = ''
    if export_color:
        try:
            point_colors = geo.pointFloatAttribValuesAsString('color')
        except hou.OperationFailed:
            # no color attribute on the points, export an empty data block
            point_colors = ''

        encode_start = time.time()
        encoded_point_colors = base85.arnold_b85_encode(point_colors)
        encode_end = time.time()
        print('Encoding Point colors     : %3.3f' % (encode_end - encode_start))

        split_start = time.time()
        splitted_point_colors = split_data(encoded_point_colors, 100)
        split_end = time.time()
        print('Splitting Vertex Colors    : %3.3f' % (split_end - split_start))
        del encoded_point_colors

        color_template = """
            declare colorSet1 varying RGBA
            colorSet1 %(point_count)s 1 b85RGBA
            %(splitted_point_colors)s
        """ % {
            'point_count': point_count,
            'splitted_point_colors': splitted_point_colors
        }

    #
    # Vertex Ids
    #
    encode_start = time.time()
    encoded_vertex_ids = '\n'.join(combined_vertex_ids)
    encode_end = time.time()
    print('Encoding Vertex Ids        : %3.3f' % (encode_end - encode_start))

    split_start = time.time()
    # already chunked during collection, nothing more to split
    splitted_vertex_ids = encoded_vertex_ids
    split_end = time.time()
    print('Splitting Vertex Ids       : %3.3f' % (split_end - split_start))

    # identity transform; with motion blur there is one matrix per sample
    matrix = """1 0 0 0
0 1 0 0
0 0 1 0
0 0 0 1
"""
    if export_motion:
        matrix += matrix

    data = base_template % {
        'name': name,
        'point_count': point_count,
        'vertex_count': vertex_count,
        'primitive_count': primitive_count,
        'sample_count': sample_count,
        'number_of_points_per_primitive': splitted_number_of_points_per_primitive,
        'vertex_ids': splitted_vertex_ids,
        'point_positions': splitted_point_positions,
        'matrix': matrix,
        'color_template': color_template,
        'sidedness': 255 if double_sided else 0,
        'invert_normals': 'on' if invert_normals else 'off',
    }

    return data
Exemple #16
0
    # repeat every first and last point coordinates
    # (3 value each 3 * 4 = 12 characters) of every curve
    zip_start = time.time()
    point_positions = ''.join(
        map(
            lambda x: '%s%s%s' % (x[:12], x, x[-12:]),
            map(
                ''.join,
                zip(*[iter(point_positions)] * (real_number_of_points_in_one_curve*4*3)))
        )
    )
    zip_end = time.time()
    print('Zipping Point Position       : %3.3f' % (zip_end - zip_start))

    # encoded_point_positions = base85.arnold_b85_encode_multithreaded(point_positions)
    encoded_point_positions = base85.arnold_b85_encode(point_positions)
    encode_end = time.time()
    print('Encoding Point Position      : %3.3f' % (encode_end - encode_start))

    split_start = time.time()
    splitted_point_positions = split_data(encoded_point_positions, 500)
    split_end = time.time()
    print('Splitting Point Positions    : %3.3f' % (split_end - split_start))

    # radius
    encode_start = time.time()
    encoded_radius = base85.arnold_b85_encode(radius)
    encode_end = time.time()
    print('Radius encode                : %3.3f' % (encode_end - encode_start))

    split_start = time.time()