Example #1
 def __init__(
     self,
     args=utility.parse_args(),
     det_model_dir=r"D:\python_work\Ar_project\PaddleOCR\inference\ch_det_mv3_db",
     rec_model_dir=r"D:\python_work\Ar_project\PaddleOCR\inference\ch_rec_mv3_crnn",
     rec_char_dict_path=r"D:\python_work\Ar_project\PaddleOCR\ppocr\utils\ppocr_keys_v1.txt"
 ):
     print("init ocrsystem")
     args.det_model_dir = det_model_dir
     args.rec_model_dir = rec_model_dir
     args.rec_char_dict_path = rec_char_dict_path
     # print(args,111)
     # image_file_list = get_image_file_list(args.image_dir)
     self.text_sys = TextSystem(args)
     print("ocrsystem ready!")
Example #2
def main():
    args = util.parse_args()
    session = init_oculus()
    
    port = args['port']
    baud = args['baud']
    dryrun = args['dryrun']

    gp = inputs.devices.gamepads
    if len(gp) == 0 or 'microsoft' not in str(gp[0]).lower():
        print("Xbox controller not detected")
        exit(session)
        return
    
    # Read from the gamepad in a different thread since the inputs library
    # blocks program execution
    try:
        gamepad_thread = Thread(target=gamepad_loop)
        gamepad_thread.daemon = True
        gamepad_thread.start()
    
        # If we are developing, we don't worry about the serial port
        if dryrun:
            stream_loop(session, -1, True)
        
        num_tries = 0
        while True:
            try:
                with serial.Serial(port, baud, timeout=0) as ser:
                    print("Connected to embedded")
                    tx = Transmitter(ser)
                    stream_loop(session, tx)
            
            except serial.serialutil.SerialException as e:
                if num_tries % 100 == 0:
                    # str.find() returns -1 (which is truthy) when the substring is
                    # absent, so test membership instead of relying on the return value
                    if "FileNotFoundError" in str(e):
                        print("Port not found. Retrying...(attempt {0})".format(num_tries))
                    else:
                        print("Serial exception. Retrying...(attempt {0})".format(num_tries))
                
                time.sleep(0.01)
                num_tries += 1
    except (KeyboardInterrupt, SystemExit) as e:
        print("Interrupted: {0}".format(e))

    exit(session)
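Unlike the attribute-style access in Example #1 (`args.det_model_dir`), this example indexes the parse result like a dict (`args['port']`). A minimal sketch of what such a `util.parse_args` could look like, assuming it wraps argparse and converts the Namespace with `vars()`; the defaults below are placeholders, not taken from the original project:

import argparse

def parse_args():
    # hypothetical dict-returning parser matching args['port'], args['baud'], args['dryrun']
    parser = argparse.ArgumentParser()
    parser.add_argument('--port', default='/dev/ttyACM0', help='serial port (placeholder default)')
    parser.add_argument('--baud', type=int, default=115200, help='baud rate (placeholder default)')
    parser.add_argument('--dryrun', action='store_true', help='skip the serial connection')
    return vars(parser.parse_args())  # vars() turns the Namespace into a plain dict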
Example #3
            if (iter_id + 1) % cfg.snapshot_iter == 0:
                save_model("model_iter{}".format(iter_id))
                print("Snapshot {} saved, average loss: {}, \
                      average time: {}".format(
                    iter_id + 1, snapshot_loss / float(cfg.snapshot_iter),
                    snapshot_time / float(cfg.snapshot_iter)))
                if args.enable_ce and iter_id == cfg.max_iter - 1:
                    if devices_num == 1:
                        print("kpis\ttrain_cost_1card\t%f" %
                              (snapshot_loss / float(cfg.snapshot_iter)))
                        print("kpis\ttrain_duration_1card\t%f" %
                              (snapshot_time / float(cfg.snapshot_iter)))
                    else:
                        print("kpis\ttrain_cost_8card\t%f" %
                              (snapshot_loss / float(cfg.snapshot_iter)))
                        print("kpis\ttrain_duration_8card\t%f" %
                              (snapshot_time / float(cfg.snapshot_iter)))

                snapshot_loss = 0
                snapshot_time = 0
    except fluid.core.EOFException:
        py_reader.reset()

    save_model('model_final')


if __name__ == '__main__':
    args = parse_args()
    print_arguments(args)
    train()
Example #4
                    kmer_sample_file=kmer_sample_file_ref,
                    kmer_pheno_file=kmer_pheno_file)

        process_file(create_kmer_sample_map, unique_kmers_file, q=q, lock=lock, **kwargs)
       
        sample_matrix = np.zeros((n_samples, n_samples))
        num_kmers = 0
        # write all chunks to output files sequentially
        while not q.empty():
            q_num_kmers, q_sample_matrix = q.get()
            num_kmers += q_num_kmers
            sample_matrix += q_sample_matrix
        
        # create sample similarity file if the similarities tsv does not exist
        if not file_exists(similar_sample_file) or not file_exists(dissimilar_sample_file):
            similar_sample(sample_matrix, num_kmers, similarities_tsv,
                hist_orig_file, hist_sim_scaled_file, hist_dissim_scaled_file,
                similar_sample_file, dissimilar_sample_file)
    if (not file_exists(similar_sample_file) or not file_exists(dissimilar_sample_file)) and file_exists(similarities_tsv):
        similar_sample(None, None, similarities_tsv, hist_orig_file,
            hist_sim_scaled_file, hist_dissim_scaled_file,
            similar_sample_file, dissimilar_sample_file)
    # create kmer int map
    if not file_exists(uim_file):
        int_maps.create_kmer_int_map(kmer_sample_file, uim_file)

if __name__ == '__main__':
    parse_args()
    main()


Example #5
def visualize_read_write(model, criterion, optimizer, config_obj):
    T = 10
    config_obj.config_dict['num_batches'] = 20
    config_obj.config_dict['batch_size'] = 1
    seqs_loader = utility.load_dataset(config_obj, max=T, min=T)

    for batch_num, X, Y, act in seqs_loader:
        result = evaluate_single_batch(model, criterion, X, Y)
        plot_visualization(X, result, model.N)


if __name__ == '__main__':
    # pdb.set_trace()
    args = utility.parse_args()
    config_type = args['configtype']
    config_file = args['configfile']
    load_checkpoint = args['load_checkpoint']
    plot_all_average_flag = args['plot_all_average']
    visualize_read_write_flag = args['visualize_read_write']
    if plot_all_average_flag:
        plot_all_average_costs()
    else:
        config_obj = config.Configuration(config_type, config_file)
        config = config_obj.config_dict
        model, criterion, optimizer = models.build_model(config)
        seqs_loader = utility.load_dataset(config_obj)
        if visualize_read_write_flag:
            model, list_seq_num, list_loss, list_cost = loadCheckpoint(
                path=config['filename'])
Example #6
                        auc_metric.accumulate(),
                        100 * args.batch_size / (time.time() - batch_begin)))
                batch_begin = time.time()
                total_loss = 0.0

            batch_id += 1
        logger.info("epoch %d is finished and takes %f s" %
                    (epoch, time.time() - begin))
        # save model and optimizer
        logger.info(
            "going to save epoch {} model and optimizer.".format(epoch))
        paddle.save(deepfm.state_dict(),
                    path=os.path.join(args.model_output_dir,
                                      "epoch_" + str(epoch), ".pdparams"))
        paddle.save(optimizer.state_dict(),
                    path=os.path.join(args.model_output_dir,
                                      "epoch_" + str(epoch), ".pdopt"))
        logger.info("save epoch {} finished.".format(epoch))
        # eval model
        deepfm.eval()
        eval(epoch)
        deepfm.train()
    paddle.enable_static()


if __name__ == '__main__':
    args = utils.parse_args()
    utils.print_arguments(args)

    train(args)
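A note on the save paths above: `os.path.join(args.model_output_dir, "epoch_" + str(epoch), ".pdparams")` treats `.pdparams` as its own path component, so the parameters end up in a hidden file named `.pdparams` inside an `epoch_<n>` directory. If a flat `epoch_<n>.pdparams` file is what is wanted, the suffix has to be appended to the file name instead; a minimal sketch:

# writes <model_output_dir>/epoch_<n>.pdparams (and .pdopt) rather than <model_output_dir>/epoch_<n>/.pdparams
paddle.save(deepfm.state_dict(),
            path=os.path.join(args.model_output_dir, "epoch_{}.pdparams".format(epoch)))
paddle.save(optimizer.state_dict(),
            path=os.path.join(args.model_output_dir, "epoch_{}.pdopt".format(epoch)))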
Example #7
    for image_file in image_file_list:
        img, flag = check_and_read_gif(image_file)
        if not flag:
            img = cv2.imread(image_file)
        if img is None:
            logger.info("error in loading image:{}".format(image_file))
            continue
        valid_image_file_list.append(image_file)
        img_list.append(img)

    try:
        rec_res, predict_time = text_recognizer(img_list)
    except Exception as e:
        print(e)
        logger.info(
            "ERROR!!!! \n"
            "Please read the FAQ: https://github.com/PaddlePaddle/PaddleOCR#faq \n"
            "If your model has tps module:  "
            "TPS does not support variable shape.\n"
            "Please set --rec_image_shape='3,32,100' and --rec_char_type='en' "
        )
        exit()
    for ino in range(len(img_list)):
        print("Predicts of %s:%s" % (valid_image_file_list[ino], rec_res[ino]))
    print("Total predict time for %d images:%.3f" %
          (len(img_list), predict_time))


if __name__ == "__main__":
    main(utility.parse_args())
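The TPS hint in the error message can also be applied before calling `main`, mirroring Example #1's pattern of overriding the parsed Namespace. A sketch, with the attribute names taken from the message above:

args = utility.parse_args()
# settings suggested by the error message for recognition models with a TPS module
args.rec_image_shape = "3,32,100"
args.rec_char_type = "en"
main(args)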
Example #8
#!/usr/bin/env python

from bhmm import BHMM
from utility import parse_args

if __name__ == "__main__":
    args = parse_args()
    bhmm = BHMM(args)
    bhmm.run()
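`parse_args` itself is imported from `utility` here but never shown. A minimal sketch of what such a helper commonly looks like, returning an argparse Namespace for `BHMM(args)` to read as attributes; the flag names below are purely illustrative and not taken from the original project:

import argparse

def parse_args():
    # illustrative flags only; the real utility.parse_args may differ
    parser = argparse.ArgumentParser(description="Run a Bayesian HMM")
    parser.add_argument("--input", help="path to the training data")
    parser.add_argument("--iterations", type=int, default=1000,
                        help="number of sampling iterations")
    return parser.parse_args()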