# --- Example 1 ---
    results = list(query.dicts())
    df = pd.DataFrame(results)
    df.drop(ignore_list + ['recipe', 'networks'], inplace=True, axis='columns')
    df['combo_id'] = df['combo_id'].apply(lambda el: 'combo_' + str(el))
    df.rename(columns={'combo_id': 'network'}, inplace=True)
    df.set_index('network', inplace=True)
    for compound_stat in compound_stats:
        df[compound_stat] = df[compound_stat].apply(
            lambda el: el[0] if el[1:] == el[:-1] else None)
    stats = pd.concat([stats, df])

query = (MultiNetwork.select(
    MultiNetwork.id, PostprocessSlice, Postprocess.rms).join(
        PostprocessSlice,
        on=(MultiNetwork.id == PostprocessSlice.multi_network_id)).join(
            Postprocess,
            on=(MultiNetwork.id == Postprocess.multi_network_id)).where(
                MultiNetwork.target_names == target_names).where(
                    (PostprocessSlice.dual_thresh_mismatch_median == 0)
                    | PostprocessSlice.dual_thresh_mismatch_median.is_null()))
for compound_stat in compound_stats:
    subquery = MultiNetwork.calc_op(compound_stat).alias(compound_stat)
    query = query.select(SQL('*')).join(
        subquery, on=(MultiNetwork.id == subquery.c.multi_id))

if query.count() > 0:
    results = list(query.dicts())
    df = pd.DataFrame(results)
    df.drop(ignore_list + ['network_partners', 'combo_network_partners'],
            inplace=True,
            axis='columns')
# --- Example 2 ---
                except Exception as e:
                    print(e)
                    sys.exit()

                average_elapsed_time = np.sum(elapsed_times) / (TEST_TRIAL *
                                                                batch_num)
                result[lr][node] = average_elapsed_time

                print(
                    '[Resolution: Size ({}x{}), OutputNode: {}] / Inference time per frame(sec) {} / Max-Min(sec) {}'
                    .format(
                        t_w, t_h, node, round(average_elapsed_time, 4),
                        round(
                            np.max(elapsed_times) - np.min(elapsed_times), 4)))

        for node in self.output_nodes:
            log = ''
            log += '{}\t'.format(node)
            for lr in self.node2res[node]:
                log += '{}\t'.format(round(result[lr][node], 4))
            summary_logger.info(log)


if __name__ == "__main__":
    model = MultiNetwork(template.get_nas_config(opt.quality))
    dataset = DatasetForDASH(opt)
    evaluator = Tester(opt, model, dataset)
    #evaluator.evaluate_quality()
    evaluator.evaluate_runtime()
# --- Example 3 ---
            print('decode [prepare_frames-{}]: {}sec'.format(frame_count, time.time() - start_time))
            #5. wait super-resolution, encode processes to be end
            data_queue.join()
            print('decode [super-resolution]: {}sec'.format(time.time() - start_time))
            encode_queue.join()
            encode_queue.put(('end', output_input))
            encode_queue.join()
            print('decode [encode] : {}sec'.format(time.time() - start_time))

        except (KeyboardInterrupt, SystemExit):
            print('exiting...')
            break

#Share following variables between multi-threads
inference_idx = 0
model = MultiNetwork(template.get_nas_config(opt.quality))
model = model.to(torch.device('cuda'))
model = model.half().to('cuda') #TODO: save the final model as half-precision

def load_dnn_chunk(dnn_queue):
    global inference_idx
    global model
    while True:
        try:
            input = dnn_queue.get()

            #load a pretrained model of which path is given
            if input[0] == 'load_model':
                start_time = time.time()

                pretrained_path = input[1]