Code example #1
File: set1.py Project: Foryah/cryptopals
def recover_key_based_on_length(encripted_smart, key_length):
    # 1. Group the ciphertext characters by their index modulo the key length
    encripted_str = encripted_smart.get_str()
    letters = []
    for i in range(key_length):
        sub_string = encripted_str[i::key_length]
        letters.append(sub_string)

    # 2. Decrypt each substring with the best-scoring single-byte XOR key
    decrypted_keys = []
    for letter_substring in letters:
        smart_str = Solver()
        smart_str.load_str(letter_substring)

        best_dict = single_byte_xor_cipher_str(smart_str)
        print(best_dict)
        key = best_dict['key']

        decrypted_keys.append(key)

    # 3. Construct the key
    final_key = ""
    for key in decrypted_keys:
        final_key += chr(key)

    # 4. Return the key
    return final_key
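Note: `single_byte_xor_cipher_str` is called throughout these excerpts but its body is not among them. Below is a minimal sketch of what such a scorer could look like, assuming the `Solver` object exposes the same `get_bytes()` interface used elsewhere; the function name and the frequency heuristic are illustrative, not the project's actual implementation.

def single_byte_xor_cipher_str_sketch(smart_string, best_dict=None):
    # Try every possible one-byte key and keep the highest-scoring plaintext.
    if best_dict is None:
        best_dict = {"value": 0}

    message_bytes = smart_string.get_bytes()
    for key_int in range(256):
        plain = bytes(b ^ key_int for b in message_bytes)
        # Crude English-likeness score: count common letters and spaces.
        score = sum(plain.count(c) for c in b"etaoinshrdlu ETAOINSHRDLU")
        if score > best_dict["value"]:
            best_dict["value"] = score
            best_dict["key"] = key_int
            best_dict["string"] = plain.decode("latin-1")

    return best_dict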
Code example #2
def main():
    '''Main demo function'''
    # Save prediction into a file named 'prediction.obj' or the given argument
    pred_file_name = sys.argv[1] if len(sys.argv) > 1 else 'prediction.obj'

    # load images
    demo_imgs = load_demo_images()

    # Download and load pretrained weights
    download_model(DEFAULT_WEIGHTS)

    # Use the default network model
    NetClass = load_model('ResidualGRUNet')

    # Define a network and a solver. Solver provides a wrapper for the test function.
    net = NetClass(compute_grad=False)  # instantiate a network
    net.load(DEFAULT_WEIGHTS)  # load downloaded weights
    solver = Solver(net)  # instantiate a solver

    # Run the network
    voxel_prediction, _ = solver.test_output(demo_imgs)

    # Save the prediction to an OBJ file (mesh file).
    voxel2obj(pred_file_name,
              voxel_prediction[0, :, 1, :, :] > cfg.TEST.VOXEL_THRESH)

    # Use meshlab or other mesh viewers to visualize the prediction.
    # For Ubuntu>=14.04, you can install meshlab using
    # `sudo apt-get install meshlab`
    if cmd_exists('meshlab'):
        call(['meshlab', pred_file_name])
    else:
        print(
            'Meshlab not found: please use visualization of your choice to view %s'
            % pred_file_name)
Code example #3
def test_net():
    ''' Evaluate the network '''
    # Make result directory and the result file.
    result_dir = os.path.join(cfg.DIR.OUT_PATH, cfg.TEST.EXP_NAME)
    if not os.path.exists(result_dir):
        os.makedirs(result_dir)
    result_fn = os.path.join(result_dir, 'result.mat')

    print("Exp file will be written to: " + result_fn)

    # Make a network and load weights
    NetworkClass = load_model(cfg.CONST.NETWORK_CLASS)
    print('Network definition: \n')
    print(inspect.getsource(NetworkClass.network_definition))
    net = NetworkClass(compute_grad=False)
    net.load(cfg.CONST.WEIGHTS)
    solver = Solver(net)

    # set constants
    batch_size = cfg.CONST.BATCH_SIZE

    # set up testing data process. We make only one prefetching process. The
    # process will return one batch at a time.
    queue = Queue(cfg.QUEUE_SIZE)
    data_pair = category_model_id_pair(
        dataset_portion=cfg.TEST.DATASET_PORTION)
    processes = make_data_processes(queue,
                                    data_pair,
                                    1,
                                    repeat=False,
                                    train=False)
    num_data = len(processes[0].data_paths)
    num_batch = int(num_data / batch_size)

    # prepare result container
    results = {'cost': np.zeros(num_batch)}
    for thresh in cfg.TEST.VOXEL_THRESH:
        results[str(thresh)] = np.zeros((num_batch, batch_size, 5))

    # Get all test data
    batch_idx = 0
    for batch_img, batch_voxel in get_while_running(processes[0], queue):
        if batch_idx == num_batch:
            break

        pred, loss, activations = solver.test_output(batch_img, batch_voxel)
        print('%d/%d, cost is: %f' % (batch_idx, num_batch, loss))

        for i, thresh in enumerate(cfg.TEST.VOXEL_THRESH):
            for j in range(batch_size):
                r = evaluate_voxel_prediction(pred[j, ...],
                                              batch_voxel[j, ...], thresh)
                results[str(thresh)][batch_idx, j, :] = r

        # record result for the batch
        results['cost'][batch_idx] = float(loss)
        batch_idx += 1

    print('Total loss: %f' % np.mean(results['cost']))
    sio.savemat(result_fn, results)
Code example #4
def main():
    # Save prediction into a file named 'prediction.obj' or the given argument
    pred_file_name = sys.argv[1] if len(sys.argv) > 1 else 'prediction.obj'

    num_imgs = int(sys.argv[2]) if len(sys.argv) > 2 else 1
    img_file = sys.argv[3]

    # load images
    demo_imgs = load_demo_images(num_imgs, img_file)

    # Define a network and a solver. Solver provides a wrapper for the test function.
    my_net = My_ResidualGRUNet(batch=1)  # instantiate a network
    my_net.load('my_3DR2N2/weights.npy')  # load downloaded weights
    solver = Solver(my_net)  # instantiate a solver

    # Run the network
    voxel_prediction, _ = solver.test_output(demo_imgs)

    # Save the prediction to an OBJ (mesh) file.
    voxel2obj(pred_file_name, voxel_prediction[0, :, 1, :, :] > 0.4)

    if shutil.which('meshlab') is not None:
        call(['meshlab', pred_file_name])
    else:
        print(
            'Meshlab not found: please use visualization of your choice to view %s'
            % pred_file_name)
Code example #5
File: test_set1.py Project: Foryah/cryptopals
    def _test_challange3(self):
        s = Solver()
        input_str = "1b37373331363f78151b7f2b783431333d78397828372d363c78373e783a393b3736"
        s.load_hex(input_str)

        result_dict = set1.single_byte_xor_cipher_str(s)
        result_str = result_dict["string"]
        self.assertEqual(result_str, "Cooking MC's like a pound of bacon")
Code example #7
File: test_set1.py Project: Foryah/cryptopals
    def _test_challange1(self):
        s = Solver()
        s.load_hex("49276d206b696c6c696e6720796f757220627261696e206c696b65206120706f69736f6e6f7573206d757368726f6f6d")

        bin_str = s.get_str()
        self.assertEqual(bin_str, "I'm killing your brain like a poisonous mushroom")

        b64_str = s.get_b64()
        self.assertEqual(b64_str, "SSdtIGtpbGxpbmcgeW91ciBicmFpbiBsaWtlIGEgcG9pc29ub3VzIG11c2hyb29t")
Code example #8
File: test_set1.py Project: Foryah/cryptopals
    def _test_challange5(self):
        smart_input = Solver()
        input_str = "Burning 'em, if you ain't quick and nimble\nI go crazy when I hear a cymbal"
        smart_input.load_str(input_str)

        key = "ICE"
        result = set1.repeating_key_xor_cipher_str(smart_input, key)
        result_str = result.get_hex()

        self.assertEqual(result_str, "0b3637272a2b2e63622c2e69692a23693a2a3c6324202d623d63343c2a26226324272765272a282b2f20430a652e2c652a3124333a653e2b2027630c692b20283165286326302e27282f")
Code example #9
File: set1.py Project: Foryah/cryptopals
def repeating_key_xor_cipher_str(smart_string, key):
    string_bytes = smart_string.get_bytes()
    len_str_bytes = len(string_bytes)

    key_bytes = padded_key_bytes(key, len_str_bytes)

    smart_key = Solver()
    smart_key.load_bytes(key_bytes)

    result = heXor(smart_string, smart_key)
    return result
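`padded_key_bytes` itself is not shown in these excerpts. Judging from how it is used above (and from the repeating-key behaviour exercised in the Challenge 5 test), a minimal sketch might look like the following; the `_sketch` suffix and the ASCII encoding choice are assumptions, not the project's code.

def padded_key_bytes_sketch(key, length):
    # Repeat the key until it covers at least `length` bytes, then truncate.
    key_bytes = key.encode("ascii")
    repeats = length // len(key_bytes) + 1
    return bytearray((key_bytes * repeats)[:length])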
Code example #10
def train_net():
    '''Main training function'''
    # Set up the model and the solver
    NetClass = load_model(cfg.CONST.NETWORK_CLASS)

    # print('Network definition: \n')
    # print(inspect.getsource(NetClass.network_definition))
    net = NetClass()

    # Check that single view reconstruction net is not used for multi view
    # reconstruction.
    if net.is_x_tensor4 and cfg.CONST.N_VIEWS > 1:
        raise ValueError('Do not set the config.CONST.N_VIEWS > 1 when using '
                         'a single-view reconstruction network')

    # Prefetching data processes
    #
    # Create worker and data queue for data processing. For training data, use
    # multiple processes to speed up the loading. For validation data, use 1
    # since the queue will be popped every TRAIN.NUM_VALIDATION_ITERATIONS.
    global train_queue, val_queue, train_processes, val_processes
    train_queue = Queue(cfg.QUEUE_SIZE)
    val_queue = Queue(cfg.QUEUE_SIZE)

    train_processes = make_data_processes(
        train_queue,
        category_model_id_pair(dataset_portion=cfg.TRAIN.DATASET_PORTION),
        cfg.TRAIN.NUM_WORKER,
        repeat=True)
    val_processes = make_data_processes(
        val_queue,
        category_model_id_pair(dataset_portion=cfg.TEST.DATASET_PORTION),
        1,
        repeat=True,
        train=False)

    import torch.cuda
    if torch.cuda.is_available():
        net.cuda()

    # print the queue
    # print(train_queue)
    # print(val_queue)

    # Generate the solver
    solver = Solver(net)

    # Train the network
    solver.train(train_queue, val_queue)

    # Cleanup the processes and the queue.
    kill_processes(train_queue, train_processes)
    kill_processes(val_queue, val_processes)
Code example #11
File: test_set1.py Project: Foryah/cryptopals
    def _test_challange5(self):
        smart_input = Solver()
        input_str = "Burning 'em, if you ain't quick and nimble\nI go crazy when I hear a cymbal"
        smart_input.load_str(input_str)

        key = "ICE"
        result = set1.repeating_key_xor_cipher_str(smart_input, key)
        result_str = result.get_hex()

        self.assertEqual(
            result_str,
            "0b3637272a2b2e63622c2e69692a23693a2a3c6324202d623d63343c2a26226324272765272a282b2f20430a652e2c652a3124333a653e2b2027630c692b20283165286326302e27282f"
        )
Code example #12
File: set1.py Project: Foryah/cryptopals
def single_byte_xor_cipher_file(file_name):
    best_dict = {"value": 0}

    with open(file_name, "r") as f:
        for line in f.readlines():
            clean_line = line.strip()

            smart_string = Solver()
            smart_string.load_hex(clean_line)

            single_byte_xor_cipher_str(smart_string, best_dict)

    return best_dict
Code example #13
File: set1.py Project: Foryah/cryptopals
def decrypt_single_byte(smart_string, key_int):
    message_bytes = smart_string.get_bytes()
    len_message = len(message_bytes)

    key_bytes = bytearray()
    for i in range(len_message):
        key_bytes.append(key_int)

    key_smart_string = Solver()
    key_smart_string.load_bytes(key_bytes)

    result = heXor(smart_string, key_smart_string)

    return result.get_str()
Code example #14
File: train_net.py Project: star-cold/3d_project
def train_net():
    '''Main training function'''
    # Set up the model and the solver
    NetClass = load_model(cfg.CONST.NETWORK_CLASS)

    net = NetClass()
    print('\nNetwork definition: ')
    print(net)

    # Check that single view reconstruction net is not used for multi view
    # reconstruction.
    if net.is_x_tensor4 and cfg.CONST.N_VIEWS > 1:
        raise ValueError('Do not set the config.CONST.N_VIEWS > 1 when using '
                         'a single-view reconstruction network')

    # Prefetching data processes
    #
    # Create worker and data queue for data processing. For training data, use
    # multiple processes to speed up the loading. For validation data, use 1
    # since the queue will be popped every TRAIN.NUM_VALIDATION_ITERATIONS.

    train_dataset = ShapeNetDataset(cfg.TRAIN.DATASET_PORTION)
    train_collate_fn = ShapeNetCollateFn()
    train_loader = DataLoader(
        dataset=train_dataset,
        batch_size=cfg.CONST.BATCH_SIZE,
        shuffle=True,
        num_workers=cfg.TRAIN.NUM_WORKER,
        collate_fn=train_collate_fn,
        pin_memory=True
    )

    val_dataset = ShapeNetDataset(cfg.TEST.DATASET_PORTION)
    val_collate_fn = ShapeNetCollateFn(train=False)
    val_loader = DataLoader(
        dataset=val_dataset,
        batch_size=cfg.CONST.BATCH_SIZE,
        shuffle=True,
        num_workers=1,
        collate_fn=val_collate_fn,
        pin_memory=True
    )

    net.cuda()

    # Generate the solver
    solver = Solver(net)

    # Train the network
    solver.train(train_loader, val_loader)
Code example #15
File: set1.py Project: Foryah/cryptopals
def heXor(smart_string_1, smart_string_2):
    bytes_1 = smart_string_1.get_bytes()
    bytes_2 = smart_string_2.get_bytes()

    if len(bytes_1) != len(bytes_2):
        raise ValueError("The two strings have different length")

    result = bytearray()

    for i in range(0, len(bytes_1)):
        result.append(bytes_1[i] ^ bytes_2[i])

    rs = Solver()
    rs.load_bytes(result)

    return rs
Code example #16
        def analyze_fun():
            app_state.calibrating = True
            calibration_data = Solver().analyze_image(
                filepath,
                timeout,
                run_callback=lambda: app_state.calibrating,
            )
            app_state.calibrating = False

            timestamp = int(datetime.datetime.now().timestamp())

            if calibration_data is None:
                log_event('Calibration failed')
            else:
                rotation_angle = calibration_data.rotation_angle
                position = calibration_data.center_deg

                log_event(
                    f'Image center: {position} Rotation: {rotation_angle}')

                if app_state.target is not None:
                    target_distance = Coordinates(
                        app_state.target.ra - position.ra,
                        app_state.target.dec - position.dec)
                    log_event(f'Distance to target: {target_distance}')

                app_state.last_known_position = {
                    'timestamp': timestamp,
                    'position': position,
                }
Code example #17
File: train.py Project: yuhaonankaka/adl
def get_solver(args, dataloader, stamp):
    model = get_model(args)
    optimizer = optim.Adam(model.parameters(), lr=args.lr, weight_decay=args.wd)
    lang_cls_flag = not args.no_lang_cls
    solver = Solver(model, DC, dataloader, optimizer, stamp, args.val_step, lang_cls_flag, (not args.no_max_iou), args)
    num_params = get_num_params(model)

    return solver, num_params
Code example #18
def demo(args):
    ''' Evaluate the network '''

    # Make a network and load weights
    NetworkClass = load_model(cfg.CONST.NETWORK_CLASS)
    print('Network definition: \n')
    print(inspect.getsource(NetworkClass.network_definition))
    net = NetworkClass(compute_grad=False)
    net.load(cfg.CONST.WEIGHTS)
    solver = Solver(net)

    # set up testing data process. We make only one prefetching process. The
    # process will return one batch at a time.
    queue = Queue(cfg.QUEUE_SIZE)
    data_pair = category_model_id_pair(dataset_portion=cfg.TEST.DATASET_PORTION)
    processes = make_data_processes(queue, data_pair, 1, repeat=False, train=False)
    num_data = len(processes[0].data_paths)
    num_batch = int(num_data / args.batch_size)

    # Get all test data
    batch_idx = 0
    for batch_img, batch_voxel in get_while_running(processes[0], queue):
        if batch_idx == num_batch:
            break

        pred, loss, activations = solver.test_output(batch_img, batch_voxel)

        if (batch_idx < args.exportNum):
            # Save the prediction to an OBJ file (mesh file).
            print('saving {}/{}'.format(batch_idx, args.exportNum - 1))
            voxel2obj('out/prediction_{}b_{}.obj'.format(args.batch_size, batch_idx),
                      pred[0, :, 1, :, :] > cfg.TEST.VOXEL_THRESH)
        else:
            break

        batch_idx += 1

    if args.file:
        # Use meshlab or other mesh viewers to visualize the prediction.
        # For Ubuntu>=14.04, you can install meshlab using
        # `sudo apt-get install meshlab`
        if cmd_exists('meshlab'):
            call(['meshlab', 'obj/{}.obj'.format(args.file)])
        else:
            print('Meshlab not found: please use visualization of your choice to view %s' %
                  args.file)
Code example #19
File: test_set1.py Project: Foryah/cryptopals
    def test_break_known_key_length(self):
        smart_input = Solver()
        input_str = "SPEAKSOFTLYANDCARRYABIGSTICKYOUWILLGOFAR"
        smart_input.load_str(input_str)

        key = "SECRET"
        key_length = len(key)

        encripted_b64_str = "ABUGEw4HHAMXHhwVHQEAExcGCgQBGwIHBwwAGRwbBhIKHgkTHAMCAA=="

        encripted_smart_str = set1.repeating_key_xor_cipher_str(smart_input, key)
        encripted_result = encripted_smart_str.get_b64()

        self.assertEqual(encripted_result, encripted_b64_str)

        recovered_key = set1.recover_key_based_on_length(encripted_smart_str, key_length)

        self.assertEqual(recovered_key, key)
Code example #20
File: test_set1.py Project: Foryah/cryptopals
    def _test_hamming(self):
        smart_1 = Solver()
        str1 = "this is a test"
        smart_1.load_str(str1)

        smart_2 = Solver()
        str2 = "wokka wokka!!!"
        smart_2.load_str(str2)

        distance = set1.hamming_distance(smart_1, smart_2)
        self.assertEqual(distance, 37)
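The test above expects `set1.hamming_distance` to return 37 for these two strings, but the function itself is not included in these excerpts. A sketch consistent with that expectation, assuming the usual bit-level Hamming distance over the `Solver.get_bytes()` interface used elsewhere, could be:

def hamming_distance_sketch(smart_1, smart_2):
    # Count differing bits between two equal-length byte sequences.
    bytes_1 = smart_1.get_bytes()
    bytes_2 = smart_2.get_bytes()
    if len(bytes_1) != len(bytes_2):
        raise ValueError("The two strings have different length")
    return sum(bin(b1 ^ b2).count("1") for b1, b2 in zip(bytes_1, bytes_2))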
Code example #21
File: test_set1.py Project: Foryah/cryptopals
    def _test_challange2(self):
        s1 = Solver()
        str1 = "1c0111001f010100061a024b53535009181c"
        s1.load_hex(str1)

        s2 = Solver()
        str2 = "686974207468652062756c6c277320657965"
        s2.load_hex(str2)

        result = set1.heXor(s1, s2)
        hex_str = result.get_hex()
        self.assertEqual(hex_str, "746865206b696420646f6e277420706c6179")
Code example #22
def render():
    # Save prediction into a file named 'prediction.obj' or the given argument
    pred_file_name = sys.argv[1] if len(
        sys.argv) > 1 else 'voxel/prediction.obj'

    demo_imgs, files = load_demo_images()
    NetClass = load_model('ResidualGRUNet')

    net = NetClass(compute_grad=False)  # instantiate a network
    net.load(DEFAULT_WEIGHTS)  # load downloaded weights
    solver = Solver(net)  # instantiate a solver

    voxel_prediction, _ = solver.test_output(demo_imgs)

    # Save the prediction to an OBJ file (mesh file).
    voxel2obj(pred_file_name,
              voxel_prediction[0, :, 1, :, :] > cfg.TEST.VOXEL_THRESH)

    return files, pred_file_name
Code example #23
File: inference.py Project: admshumar/3D_insight
def main():
    '''Main inference function'''
    # Download the input data
    input = get_input_data()

    # Download and load pretrained weights
    download_model(DEFAULT_WEIGHTS)

    # Use the default network model
    NetClass = load_model('ResidualGRUNet')

    # Define a network and a solver. Solver provides a wrapper for the test function.
    net = NetClass(compute_grad=False)  # instantiate a network
    net.load(DEFAULT_WEIGHTS)  # load downloaded weights
    solver = Solver(net)  # instantiate a solver

    for key, value in input.items():
        voxel_prediction, _ = solver.test_output(value[0:3])
        np.save(key, voxel_prediction)
Code example #24
def train_net():

    # Set up the model and the solver
    my_net = My_ResidualGRUNet()

    # Generate the solver
    solver = Solver(my_net)

    # Load the global variables
    global train_queue, validation_queue, train_processes, val_processes

    # Initialize the queues
    train_queue = Queue(
        15)  # maximum number of minibatches that can be put in a data queue
    validation_queue = Queue(15)

    # Train on 80 percent of the data
    train_dataset_portion = [0, 0.8]

    # Validate on 20 percent of the data
    test_dataset_portion = [0.8, 1]

    # Establish the training processes
    train_processes = make_data_processes(
        train_queue,
        category_model_id_pair(dataset_portion=train_dataset_portion),
        1,
        repeat=True)

    # Establish the validation processes
    val_processes = make_data_processes(
        validation_queue,
        category_model_id_pair(dataset_portion=test_dataset_portion),
        1,
        repeat=True,
        train=False)

    # Train the network
    solver.train(train_queue, validation_queue)

    # Cleanup the processes and the queue.
    kill_processes(train_queue, train_processes)
    kill_processes(validation_queue, val_processes)
Code example #25
File: demo.py Project: ymk1933230135/3D-R2N2-pytorch
def main():
    '''Main demo function'''
    # Save prediction into a file named 'prediction.obj' or the given argument
    global pred_file_name, demo_imgs
    if not cfg.TEST.MULTITEST or pred_file_name == '':
        pred_file_name = sys.argv[1] if len(sys.argv) > 1 else 'prediction.obj'

    # Download and load pretrained weights
    download_model(DEFAULT_WEIGHTS)

    # Use the default network model
    NetClass = load_model(MODEL_NAME)

    # print(NetClass)

    # Define a network and a solver. Solver provides a wrapper for the test function.
    net = NetClass()  # instantiate a network
    solver = Solver(net)  # instantiate a solver

    solver.load(DEFAULT_WEIGHTS)  # load pretrained weights
    # solver.graph_view(demo_imgs)
    # return
    # Run the network
    voxel_prediction, _ = solver.test_output(demo_imgs)
    # Save the prediction to an OBJ file (mesh file).
    # (self.batch_size, 2, n_vox, n_vox, n_vox)
    # print(type(voxel_prediction[0, :, 1, :, :].data.numpy()))
    # print(cfg.TEST.VOXEL_THRESH)
    voxel2obj(pred_file_name, voxel_prediction[0, 1, :, :, :].data.numpy() >
              cfg.TEST.VOXEL_THRESH)  # modified

    # Use meshlab or other mesh viewers to visualize the prediction.
    # For Ubuntu>=14.04, you can install meshlab using
    # `sudo apt-get install meshlab`
    print('writing voxel to %s' % (pred_file_name))
    if cfg.TEST.CALL_MESHLAB:  # need to open meshlab
        if cmd_exists('meshlab'):
            call(['meshlab', pred_file_name])
        else:
            print(
                'Meshlab not found: please use visualization of your choice to view %s'
                % pred_file_name)
Code example #26
File: test_set1.py Project: Foryah/cryptopals
    def test_break_known_key_length(self):
        smart_input = Solver()
        input_str = "SPEAKSOFTLYANDCARRYABIGSTICKYOUWILLGOFAR"
        smart_input.load_str(input_str)

        key = "SECRET"
        key_length = len(key)

        encripted_b64_str = "ABUGEw4HHAMXHhwVHQEAExcGCgQBGwIHBwwAGRwbBhIKHgkTHAMCAA=="

        encripted_smart_str = set1.repeating_key_xor_cipher_str(
            smart_input, key)
        encripted_result = encripted_smart_str.get_b64()

        self.assertEqual(encripted_result, encripted_b64_str)

        recovered_key = set1.recover_key_based_on_length(
            encripted_smart_str, key_length)

        self.assertEqual(recovered_key, key)
Code example #27
def get_solver(args, dataloader, stamp, weight, is_wholescene):
    sys.path.insert(0, os.path.join(os.path.dirname(os.path.abspath(__file__)), 'pointnet2/'))
    Pointnet = importlib.import_module("pointnet2_msg_semseg")

    model = Pointnet.get_model(num_classes=21).cuda()
    num_params = get_num_params(model)
    criterion = WeightedCrossEntropyLoss()
    optimizer = optim.Adam(model.parameters(), lr=args.lr, weight_decay=args.wd)
    solver = Solver(model, dataloader, criterion, optimizer, args.batch_size, stamp, is_wholescene)

    return solver, num_params
Code example #28
def demo(args):
    ''' Evaluate the network '''

    # Make a network and load weights
    NetworkClass = load_model(cfg.CONST.NETWORK_CLASS)
    print('Network definition: \n')
    print(inspect.getsource(NetworkClass.network_definition))
    net = NetworkClass(compute_grad=False)
    net.load(cfg.CONST.WEIGHTS)
    solver = Solver(net)

    # set up testing data process. We make only one prefetching process. The
    # process will return one batch at a time.
    queue = Queue(cfg.QUEUE_SIZE)
    data_pair = category_model_id_pair(
        dataset_portion=cfg.TEST.DATASET_PORTION)
    processes = make_data_processes(queue,
                                    data_pair,
                                    1,
                                    repeat=False,
                                    train=False)
    num_data = len(processes[0].data_paths)
    num_batch = int(num_data / args.batch_size)

    # Get all test data
    batch_idx = 0
    for batch_img, batch_voxel in get_while_running(processes[0], queue):
        if batch_idx == num_batch:
            break

        pred, loss, activations = solver.test_output(batch_img, batch_voxel)

        if (batch_idx < args.exportNum):
            # Save the prediction to an OBJ file (mesh file).
            print('saving {}/{}'.format(batch_idx, args.exportNum - 1))
            # voxel2obj('out/prediction_{}b_{}.obj'.format(args.batch_size, batch_idx),
            #           pred[0, :, 1, :, :] > cfg.TEST.VOXEL_THRESH)
        else:
            break

        batch_idx += 1
Code example #29
def main():
    '''Main demo function'''

    # load images
    input_folder_name = sys.argv[1] if len(sys.argv) > 1 else 'in_demo'
    imgs = load_input_images(input_folder_name)

    # get output file name from command line argument
    output_file_name = sys.argv[2] if len(sys.argv) > 2 else 'demo.obj'

    # use the default network model
    NetClass = load_model('ResidualGRUNet')
    net = NetClass(compute_grad=False)
    net.load('output/ResidualGRUNet/default_model/weights.npy')
    solver = Solver(net)

    # run the network
    voxel_prediction, _ = solver.test_output(imgs)

    # save the prediction to an OBJ (mesh) file
    voxel2obj(output_file_name,
              voxel_prediction[0, :, 1, :, :] > cfg.TEST.VOXEL_THRESH)
Code example #30
def main():
    '''Main demo function'''
    # Save prediction into a file named 'prediction.obj' or the given argument
    pred_file_name = sys.argv[1] if len(sys.argv) > 1 else 'prediction.obj'

    # load images
    demo_imgs = load_demo_images()

    # Use the default network model
    NetClass = load_model('ResidualGRUNet')

    # Define a network and a solver. Solver provides a wrapper for the test function.
    net = NetClass()  # instantiate a network
    if torch.cuda.is_available():
        net.cuda()

    net.eval()

    solver = Solver(net)  # instantiate a solver
    solver.load(DEFAULT_WEIGHTS)

    # Run the network
    voxel_prediction, _ = solver.test_output(demo_imgs)
    voxel_prediction = voxel_prediction.detach().cpu().numpy()

    # Save the prediction to an OBJ file (mesh file).
    voxel2obj(pred_file_name, voxel_prediction[0, 1] > cfg.TEST.VOXEL_THRESH)

    # Use meshlab or other mesh viewers to visualize the prediction.
    # For Ubuntu>=14.04, you can install meshlab using
    # `sudo apt-get install meshlab`
    if cmd_exists('meshlab'):
        call(['meshlab', pred_file_name])
    else:
        print(
            'Meshlab not found: please use visualization of your choice to view %s'
            % pred_file_name)
Code example #31
def get_solver(args, dataset, dataloader):
    device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
    model = get_model(args, dataset["train"], device)
    optimizer = optim.Adam(model.parameters(), lr=args.lr, weight_decay=args.wd)

    if args.use_checkpoint:
        print("loading checkpoint {}...".format(args.use_checkpoint))
        stamp = args.use_checkpoint
        root = os.path.join(CONF.PATH.OUTPUT, stamp)
        checkpoint = torch.load(os.path.join(CONF.PATH.OUTPUT, args.use_checkpoint, "checkpoint.tar"))
        model.load_state_dict(checkpoint["model_state_dict"])
        optimizer.load_state_dict(checkpoint["optimizer_state_dict"])
    else:
        stamp = datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
        if args.tag: stamp += "_"+args.tag.upper()
        root = os.path.join(CONF.PATH.OUTPUT, stamp)
        os.makedirs(root, exist_ok=True)

    # scheduler parameters for training solely the detection pipeline
    LR_DECAY_STEP = [80, 120, 160] if args.no_caption else None
    LR_DECAY_RATE = 0.1 if args.no_caption else None
    BN_DECAY_STEP = 20 if args.no_caption else None
    BN_DECAY_RATE = 0.5 if args.no_caption else None

    solver = Solver(
        model=model, 
        device=device,
        config=DC, 
        dataset=dataset,
        dataloader=dataloader, 
        optimizer=optimizer, 
        stamp=stamp, 
        val_step=args.val_step,
        detection=not args.no_detection,
        caption=not args.no_caption, 
        orientation=args.use_orientation,
        distance=args.use_distance,
        use_tf=args.use_tf,
        report_ap=args.report_ap,
        lr_decay_step=LR_DECAY_STEP,
        lr_decay_rate=LR_DECAY_RATE,
        bn_decay_step=BN_DECAY_STEP,
        bn_decay_rate=BN_DECAY_RATE,
        criterion=args.criterion
    )
    num_params = get_num_params(model)

    return solver, num_params, root
Code example #34
def get_solver(args, dataloader):
    model = get_model(args)
    optimizer = optim.Adam(model.parameters(),
                           lr=args.lr,
                           weight_decay=args.wd)

    if args.use_checkpoint:
        print("loading checkpoint {}...".format(args.use_checkpoint))
        stamp = args.use_checkpoint
        root = os.path.join(CONF.PATH.OUTPUT, stamp)
        checkpoint = torch.load(
            os.path.join(CONF.PATH.OUTPUT, args.use_checkpoint,
                         "checkpoint.tar"))
        model.load_state_dict(checkpoint["model_state_dict"])
        optimizer.load_state_dict(checkpoint["optimizer_state_dict"])
    else:
        stamp = datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
        if args.tag: stamp += "_" + args.tag.upper()
        root = os.path.join(CONF.PATH.OUTPUT, stamp)
        os.makedirs(root, exist_ok=True)

    # scheduler parameters for training solely the detection pipeline
    LR_DECAY_STEP = [80, 120, 160] if args.no_reference else None
    LR_DECAY_RATE = 0.1 if args.no_reference else None
    BN_DECAY_STEP = 20 if args.no_reference else None
    BN_DECAY_RATE = 0.5 if args.no_reference else None

    solver = Solver(  #TODO: Solver(
        model=model,
        config=DC,
        dataloader=dataloader,
        optimizer=optimizer,
        stamp=stamp,
        val_step=args.val_step,
        detection=not args.no_detection,
        reference=not args.no_reference,
        use_lang_classifier=not args.no_lang_cls,
        lr_decay_step=LR_DECAY_STEP,
        lr_decay_rate=LR_DECAY_RATE,
        bn_decay_step=BN_DECAY_STEP,
        bn_decay_rate=BN_DECAY_RATE,
        prepare_epochs=args.prepare_epochs)
    num_params = get_num_params(model)

    return solver, num_params, root
Code example #35
File: sparse_l0_hals.py Project: svandenhaute/nmf
    def _project_to_l0(self, H):
        '''
        projects matrix H to given l0 norm
        '''
        shape = H.shape
        n = np.prod(H.shape)
        # number of entries that must be zeroed out to reach the target l0 fraction
        to_zero = int(np.ceil((Solver.get_nonzeros(H) - self.l0) * n))
        if to_zero > 0:
            vec = H.flatten()
            vec_argsort = vec.argsort()
            vec1 = vec.copy()
            vec.sort()
            # position of the smallest numerically nonzero entry in the sorted vector
            nonzero_ = np.nonzero(vec > 1e-12)[0][0]
            first_nonzero = vec_argsort[np.nonzero(vec > 1e-12)[0][0]]
            # zero out the `to_zero` smallest nonzero entries
            indices = vec_argsort[nonzero_:nonzero_ + to_zero]
            vec1[indices] = 0
            H = np.reshape(vec1, shape)
        return H
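`Solver.get_nonzeros` is not part of this excerpt. From the arithmetic above, where its result minus `self.l0` is multiplied by the total element count `n`, it appears to return the fraction of numerically nonzero entries; a sketch consistent with that reading (using the same 1e-12 threshold) would be:

import numpy as np

def get_nonzeros_sketch(H):
    # Fraction of entries in H above the numerical zero threshold.
    return np.count_nonzero(H > 1e-12) / H.size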
Code example #36
File: test_set1.py Project: Foryah/cryptopals
    def _test_challange1(self):
        s = Solver()
        s.load_hex(
            "49276d206b696c6c696e6720796f757220627261696e206c696b65206120706f69736f6e6f7573206d757368726f6f6d"
        )

        bin_str = s.get_str()
        self.assertEqual(bin_str,
                         "I'm killing your brain like a poisonous mushroom")

        b64_str = s.get_b64()
        self.assertEqual(
            b64_str,
            "SSdtIGtpbGxpbmcgeW91ciBicmFpbiBsaWtlIGEgcG9pc29ub3VzIG11c2hyb29t")
Code example #37
File: set1.py Project: Foryah/cryptopals
    # 2. Decrypt each substring with the best-scoring single-byte XOR key
    decrypted_keys = []
    for letter_substring in letters:
        smart_str = Solver()
        smart_str.load_str(letter_substring)

        best_dict = single_byte_xor_cipher_str(smart_str)
        print(best_dict)
        key = best_dict['key']

        decrypted_keys.append(key)

    # 3. Construct the key
    final_key = ""
    for key in decrypted_keys:
        final_key += chr(key)

    # 4. Return the key
    return final_key

initial_str = "This string is to short to be good"
initial_smart = Solver()
initial_smart.load_str(initial_str)

key = "Boy"

encrypted_smart = repeating_key_xor_cipher_str(initial_smart, key)

recovered_key = recover_key_based_on_length(encrypted_smart, len(key))
print(recovered_key)