def __init__(self, image_feature_datasets):
    """Load mean-pooled image features for every dataset and concatenate them.

    Each dataset maps (via ``paths.mean_pooled_feature_store_paths``) to a TSV
    feature store; per-viewpoint features from all stores are concatenated
    along the feature axis, so ``feature_dim`` scales with the dataset count.

    NOTE(review): assumes every store lists the same set of
    (scanId, viewpointId) pairs -- the length assert below enforces this.
    """
    image_feature_datasets = sorted(image_feature_datasets)
    self.image_feature_datasets = image_feature_datasets
    self.mean_pooled_feature_stores = [
        paths.mean_pooled_feature_store_paths[dataset]
        for dataset in image_feature_datasets
    ]
    self.feature_dim = MeanPooledImageFeatures.MEAN_POOLED_DIM * len(image_feature_datasets)
    print('Loading image features from %s' % ', '.join(self.mean_pooled_feature_stores))
    tsv_fieldnames = ['scanId', 'viewpointId', 'image_w', 'image_h', 'vfov', 'features']
    self.features = defaultdict(list)
    for store_path in self.mean_pooled_feature_stores:
        with open(store_path, "rt") as tsv_in_file:
            reader = csv.DictReader(
                tsv_in_file, delimiter='\t', fieldnames=tsv_fieldnames)
            for record in reader:
                # Sanity-check that the store was built with the expected
                # camera parameters.
                assert int(record['image_h']) == ImageFeatures.IMAGE_H
                assert int(record['image_w']) == ImageFeatures.IMAGE_W
                assert int(record['vfov']) == ImageFeatures.VFOV
                long_id = self._make_id(record['scanId'], record['viewpointId'])
                view_features = np.frombuffer(
                    decode_base64(record['features']),
                    dtype=np.float32).reshape(
                        (ImageFeatures.NUM_VIEWS, ImageFeatures.MEAN_POOLED_DIM))
                self.features[long_id].append(view_features)
    # Every viewpoint must have appeared in every store before concatenating.
    assert all(
        len(feats) == len(self.mean_pooled_feature_stores)
        for feats in self.features.values())
    self.features = {
        long_id: np.concatenate(feats, axis=1)
        for long_id, feats in self.features.items()
    }
def __init__(self, image_feature_file, device):
    """Load mean-pooled image features from a single TSV feature store.

    Each stored viewpoint gets one extra all-zero "no look" row appended,
    giving NUM_VIEWS + 1 rows per entry. Lookups for viewpoints missing
    from the store fall back to a zero block via the defaultdict factory.
    """
    print('Loading image features from %s' % image_feature_file)
    tsv_fieldnames = ['scanId', 'viewpointId', 'image_w', 'image_h', 'vfov', 'features']
    self.device = device
    # Single shared zero block returned for any unknown long_id.
    # NOTE(review): all misses alias this one array -- assumed read-only.
    not_found_features = np.zeros((NUM_VIEWS + 1, MEAN_POOLED_DIM), dtype=np.float32)
    self.features = defaultdict(lambda: not_found_features)
    with open(image_feature_file, "rt") as tsv_in_file:
        reader = csv.DictReader(tsv_in_file, delimiter='\t', fieldnames=tsv_fieldnames)
        for record in reader:
            # The store must have been built with the expected camera setup.
            assert int(record['image_h']) == IMAGE_H
            assert int(record['image_w']) == IMAGE_W
            assert int(record['vfov']) == VFOV
            long_id = self._make_id(record['scanId'], record['viewpointId'])
            view_features = np.frombuffer(
                utils.decode_base64(record['features']),
                dtype=np.float32).reshape((NUM_VIEWS, MEAN_POOLED_DIM))
            # Append the zero "no look" row after the real views.
            no_look_feature = np.zeros((1, MEAN_POOLED_DIM), dtype=np.float32)
            self.features[long_id] = np.concatenate(
                (view_features, no_look_feature), axis=0)
def handle_frame(data):
    """Detect hands in a base64-encoded frame and emit their boxes to the client.

    ``data`` is expected to carry a base64 'frame' and a detection 'threshold'
    (presumably set by the client -- confirm against the sender).
    """
    print("Got Frame")
    started_at = timeit.default_timer()
    frame = decode_base64(data['frame'])
    frame = substract_background(img=frame)
    boxes, scores = hand_detector.get_boxes(frame, data["threshold"])
    if len(boxes) > 0:
        # Drop detections whose boxes are below the relative size floor.
        boxes, scores = filter_small_boxes(boxes, scores, 0.2)
    print(f"Found {len(boxes)} hands, with max score of {max(scores or [0])}")
    # Push the detections back to the client so it can draw them.
    emit("box", {'boxes': boxes, 'scores': scores})
    print(f"Finished processing frame in {timeit.default_timer() - started_at}sec")
def get_allowed_service(token):
    """
    Parses the authorization token, returning the service to be used when
    configuring the FIWARE backend

    :param token: JWT token to be parsed
    :returns: Fiware-service to be used on API calls
    :raises ValueError: for invalid token received
    """
    # `not token` already covers both None and the empty string.
    if not token:
        raise ValueError("Invalid authentication token")
    try:
        # JWT layout is header.payload.signature; the payload (index 1)
        # carries the claims. The split must live inside the try block:
        # a token with no '.' previously escaped as IndexError instead of
        # the documented ValueError.
        payload = token.split('.')[1]
        data = json.loads(decode_base64(payload))
        return data['service']
    except Exception as ex:
        raise ValueError(
            "Invalid authentication token payload - not json object", ex)
from utils import decode_base64, hex_to_bytes, byte_byte_ecb_break

# Base64-encoded secret suffix used by the byte-at-a-time ECB oracle.
unknown_string = b'Um9sbGluJyBpbiBteSA1LjAKV2l0aCBteSByYWctdG9wIGRvd24gc28gbXkgaGFpciBjYW4gYmxvdwpUaGUgZ2lybGllcy' \
    b'BvbiBzdGFuZGJ5IHdhdmluZyBqdXN0IHRvIHNheSBoaQpEaWQgeW91IHN0b3A/IE5vLCBJIGp1c3QgZHJvdmUgYnkK'

hex_decoded = hex_to_bytes(decode_base64(unknown_string))
# Run the (expensive) hard-mode break ONCE; the original invoked it twice,
# once for the print and again for the assert.
recovered = byte_byte_ecb_break(unknown_str=hex_decoded, hard=True)
print(recovered)
assert recovered == hex_decoded
from utils import repeated_key_xor_breaker, decode_base64
from resources import RESOURCES_PATH

# Challenge 6: break repeating-key XOR on the base64-encoded ciphertext.
with open(RESOURCES_PATH + '6.txt', 'r') as f:
    lines = f.readlines()

# Strip newlines and concatenate before decoding the base64 payload.
encrypted_data = decode_base64(
    b''.join(line.strip().encode('utf-8') for line in lines))

decrypted_text, key = repeated_key_xor_breaker(encrypted_data)
print(decrypted_text.decode('utf-8'), key)
assert (key == b'Terminator X: Bring the noise')
def test_decode64(self):
    """decode_base64 turns a known base64 string back into its plaintext."""
    encoded = 'dGhpcyBpcyBhIHRlc3Q='
    decoded = utils.decode_base64(encoded)
    self.assertEqual('this is a test', decoded)
from utils import AesCbc, decode_base64, hex_to_bytes
from resources import RESOURCES_PATH

# Challenge 10: decrypt AES-CBC ciphertext with a known key and zero IV.
with open(RESOURCES_PATH + '10.txt', 'r') as f:
    lines = f.readlines()

key = b'YELLOW SUBMARINE'
iv = b'\x00' * 16
encrypted_data = hex_to_bytes(
    decode_base64(b''.join(line.strip().encode('utf-8') for line in lines)))

aes = AesCbc(key=key, iv=iv)
# Decrypt ONCE; the original ran the full decryption twice (print + assert).
plain_text = aes.decrypt(cipher_text=encrypted_data).decode('utf-8')
print(plain_text)
assert plain_text.startswith('I\'m back and')
async def b64decode(ctx, *text):
    """Bot command: base64-decode the space-joined message words and reply."""
    joined = ' '.join(text)
    decoded = utils.decode_base64(joined)
    await bot.say(decoded)