Example no. 1
0
def profile():
    """Run every flatten implementation over the largest generated dataset.

    Intended to be executed under a profiler: each flatten call is fully
    consumed via list(), with a short sleep between runs so the individual
    calls separate cleanly in the profile timeline.
    """
    import time

    from unpacking_flatten_lists.data import (generate_data,
                                              create_data_increasing_depth,
                                              create_data_decreasing_depth)

    # Parameters of the last (presumably largest) generated sample.
    curr_data = generate_data()[-1][1]

    data_builders = [
        create_data_increasing_depth,
        create_data_decreasing_depth,
    ]

    flatten_funcs = [
        outer_flatten_1, outer_flatten_2, niccolum_flatten, tishka_flatten,
        zart_flatten, recursive_flatten_generator, recursive_flatten_iterator,
        tishka_flatten_with_stack
    ]

    for build_data in data_builders:
        nested = build_data(**curr_data)
        for flatten in flatten_funcs:
            # list() forces generator-based implementations to run to completion.
            list(flatten(nested))
            time.sleep(0.3)
Example no. 2
0
def common_part(*, data_create_func: Callable, mode: str):
    """Time every flatten function against each generated data sample.

    For each function in `funcs` and each sample from generate_data(),
    records the mean of timeit.repeat() runs into
    result[func_name][mode][data_struct_name]. Stops iterating samples for a
    function once a single sample exceeds TOO_LONG seconds.
    """
    for func in funcs:
        func_name = func.__name__

        print('\n', func_name, '\n')

        for sample in generate_data():
            payload = data_create_func(**sample[1])
            # Serialize to JSON: timeit fails with
            # "s_push: parser stack overflow" on a literal list 100 levels deep.
            payload = json.dumps(payload)
            data_struct_name = sample[0]

            setup_code = SETUP_IMPORT_TEMPLATE.format(func_name=func_name,
                                                      data=payload)
            with time_time(data_struct_name) as start_time:
                timings = timeit.repeat(RUNNING_TEMPLATE,
                                        setup=setup_code,
                                        number=RETRY_NUM)
                result[func_name][mode][data_struct_name] = mean(timings)
            # Samples presumably grow in size; once one is too slow,
            # skip the rest for this function.
            if time.monotonic() - start_time > TOO_LONG:
                break
Example no. 3
0
 def get_decrease_list() -> Union[List, Iterator]:
     """Yield (nested_result, expected_flat_list) pairs for each data sample."""
     for sample in generate_data():
         kwargs = sample[1]
         # Shallow-copy before the builder call — presumably
         # create_data_decreasing_depth mutates or consumes kwargs['data'].
         expected = copy.copy(kwargs['data'])
         nested = create_data_decreasing_depth(**kwargs)
         yield nested, list(expected)