Example #1
    def loop(self):
        topic = self.socket.recv()
        if topic == b"ping":
            self.socket.send(b"Hello, World!")
        elif topic == b"login":
            args = unpack(self.socket.recv())
            res = self.login(**args)
            self.socket.send(pack(res))
        elif topic == b"logout":
            self.logout()
            self.socket.send(b"ok")
        elif topic == b"shutdown":
            self.done = True
        elif topic == b"query":
            tr, args = unpack(self.socket.recv())
            try:
                res = Query(tr).send(**args)
                self.socket.send(b"ok", zmq.SNDMORE)
                self.socket.send(pack(res))
            except Exception as e:
                log.critical(e.args)
                self.socket.send(b"error", zmq.SNDMORE)
                self.socket.send(pack(e.args))
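The pack/unpack helpers used by this server loop are not shown. A minimal sketch, assuming they are thin wrappers over u-msgpack-python's packb/unpackb (an assumption, not part of the original project):

import umsgpack

def pack(obj):
    # Serialize a Python object to msgpack bytes for socket.send().
    return umsgpack.packb(obj)

def unpack(data):
    # Deserialize msgpack bytes received from socket.recv().
    return umsgpack.unpackb(data)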
Example #3
def verify_attached(message):
    input = io.BytesIO(message)
    output = io.BytesIO()
    public_key, header_hash, major_version = read_header(input)

    packetnum = 0
    while True:
        payload_packet = umsgpack.unpack(input)
        debug("payload packet:", json_repr(payload_packet))
        final_flag = False
        if major_version == 1:
            [detached_payload_sig, chunk, *_] = payload_packet
        else:
            [final_flag, detached_payload_sig, chunk, *_] = payload_packet
        packetnum_64 = packetnum.to_bytes(8, 'big')
        debug("packet number:", packetnum_64)
        if major_version == 1:
            final_flag_byte = b""
        else:
            final_flag_byte = b"\x01" if final_flag else b"\x00"
        payload_digest = hashlib.sha512(header_hash + packetnum_64 +
                                        final_flag_byte + chunk).digest()
        debug("digest:", payload_digest)
        payload_sig_text = b"saltpack attached signature\0" + payload_digest
        payload_sig = detached_payload_sig + payload_sig_text
        nacl.bindings.crypto_sign_open(payload_sig, public_key)
        output.write(chunk)
        if chunk == b"" or final_flag:
            break
        packetnum += 1

    verified_message = output.getvalue()
    return verified_message
Example #4
def loadEofs(fin):
  """ Loads flattened Breeze eofs stored by msgpack from a given file handle.
  Note that Breeze and Numpy use different major order for storing matrices """
  numcols = umsgpack.unpack(fin)
  numrows = umsgpack.unpack(fin)
  listofvals = umsgpack.unpack(fin)
  u = np.array(listofvals).reshape(numcols, len(listofvals) // numcols)
  U = np.matrix(u.T)

  listofvals = umsgpack.unpack(fin)
  v = np.array(listofvals).reshape(numcols, len(listofvals) // numcols)
  V = np.matrix(v.T)

  s = np.array(umsgpack.unpack(fin))

  return [U, s, V]
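As the docstring notes, Breeze flattens matrices column-major while NumPy reshapes row-major by default, which is why the code reshapes to the transposed shape and then transposes. A self-contained illustration with a hypothetical 2x3 matrix:

import numpy as np

vals = [1, 2, 3, 4, 5, 6]  # column-major flattening of [[1, 3, 5], [2, 4, 6]]
a = np.array(vals).reshape(3, 2).T           # reshape transposed, then transpose back
b = np.array(vals).reshape(2, 3, order="F")  # or reshape column-major directly
assert (a == b).all()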
Example #5
def main(args):
    prediction_files = []
    gt_files = []
    seq_ids = []
    seq = {}
    count_seq = 0
    for _file in listdir(args.gt_dir):
        if 'metadata' in _file:
            continue
        elif _file.split('_')[0] not in seq:
            seq[_file.split('_')[0]] = count_seq * 1000000
            count_seq += 1

        gt_files.append(path.join(args.gt_dir, _file))
        seq_ids.append(_file.split('_')[0])
        if path.exists(path.join(args.prediction_dir, _file)):
            prediction_files.append(path.join(args.prediction_dir, _file))
        else:
            print('prediction doesn\'t exist for {}'.format(
                path.join(args.prediction_dir, _file)))

    with open(path.join(args.gt_dir, "metadata.bin"), "rb") as fid:
        meta = umsgpack.unpack(fid, encoding="utf-8")

    num_stuff = meta['num_stuff']
    num_classes = meta['num_stuff'] + meta['num_thing']
    panoptic_buffer = torch.zeros(4, num_classes, dtype=torch.double)
    seq_trajectories = defaultdict(list)
    class_trajectories = defaultdict(list)
    iou_trajectories = defaultdict(list)

    # Accumulation
    count = 0
    for gt, prediction, seq_id in zip(gt_files, prediction_files, seq_ids):
        gt_i = np.load(gt)
        prediction_i = np.load(prediction)
        msk_gt, cat_gt, track_gt = get_processing_format(
            gt_i, num_stuff, seq[seq_id])
        msk_pred, cat_pred, track_pred = get_processing_format(
            prediction_i, num_stuff, seq[seq_id])
        iou, tp, fp, fn, seq_trajectories, class_trajectories, iou_trajectories = panoptic_compute(
            msk_gt, cat_gt, track_gt, seq_trajectories, class_trajectories,
            iou_trajectories, msk_pred, cat_pred, track_pred, num_classes,
            num_stuff)
        panoptic_buffer += torch.stack((iou, tp, fp, fn), dim=0)
        count += 1
        print("\rFiles Processed: {}".format(count), end=' ')
        sys.stdout.flush()
    print()
    MOTSA, sMOTSA, MOTSP, PTQ, sPTQ = get_MOTSP_metrics(
        panoptic_buffer, num_stuff, seq_trajectories, class_trajectories,
        iou_trajectories)
    print_results(
        {
            'MOTSA': MOTSA,
            'sMOTSA': sMOTSA,
            'MOTSP': MOTSP,
            'PTQ': PTQ,
            'sPTQ': sPTQ
        }, meta)
Example #6
def verify_attached(message):
    input = io.BytesIO(message)
    output = io.BytesIO()
    public_key, header_hash = read_header(input)

    packetnum = 0
    while True:
        payload_packet = umsgpack.unpack(input)
        debug("payload packet:", json_repr(payload_packet))
        [detached_payload_sig, chunk, *_] = payload_packet
        packetnum_64 = packetnum.to_bytes(8, 'big')
        debug("packet number:", packetnum_64)
        payload_digest = hashlib.sha512(
            header_hash + packetnum_64 + chunk).digest()
        debug("digest:", payload_digest)
        payload_sig_text = b"saltpack attached signature\0" + payload_digest
        payload_sig = detached_payload_sig + payload_sig_text
        nacl.bindings.crypto_sign_open(payload_sig, public_key)
        if chunk == b"":
            break
        output.write(chunk)
        packetnum += 1

    verified_message = output.getvalue()
    return verified_message
Example #7
    def test_objects_committed(self):
        """Do a backup and then assert the objects actually get committed to
        the backing store"""
        self.create_file("dir/file1", "file contents")
        self.backathon.scan()
        self.backathon.backup()

        for obj in self.object.all():
            # Assert that an object file exists in the backing store and is
            # named properly. In particular, make sure we're naming them with
            # the hex representation of the objid
            obj_filepath = pathlib.Path(
                self.datadir,
                "objects",
                obj.objid.hex()[:3],
                obj.objid.hex(),
            )
            self.assertTrue(obj_filepath.is_file())

            remote_payload = self.repo.get_object(obj.objid)

            self.assertEqual(
                obj.type,
                umsgpack.unpack(util.BytesReader(remote_payload))
            )
Example #8
    def test_objects_committed(self):
        """Do a backup and then assert the objects actually get committed to
        the backing store"""
        self.create_file("dir/file1", "file contents")
        scan.scan()
        backup.backup()
        ds = datastore.default_datastore

        for obj in models.Object.objects.all():
            # Assert that an object file exists in the backing store and is
            # named properly. In particular, make sure we're naming them with
            # the hex representation of the objid
            obj_filepath = pathlib.Path(
                self.datadir,
                "objects",
                obj.objid.hex()[:3],
                obj.objid.hex(),
            )
            self.assertTrue(obj_filepath.is_file())

            cached_payload = obj.payload
            remote_payload = ds.get_object(obj.objid)

            if cached_payload is None:
                # Only blob type objects don't have a cached payload
                self.assertEqual(
                    "blob", umsgpack.unpack(util.BytesReader(remote_payload)))
            else:
                self.assertEqual(
                    cached_payload,
                    remote_payload,
                )
Example #9
    def _parse_legacy_file(self, fp):
        state = umsgpack.unpack(fp)

        if PY3 and b'py_version' in state:
            for key in list(state.keys()):
                state[key.decode("ASCII")] = state[key]
                del state[key]

        version = state["version"]
        py_version = state["py_version"]

        fails = set()

        if version >= 0x0200:
            self.config.warn("W1",
                             "test expectation file in unsupported version")
        elif version >= 0x0100:
            xfail = state["expect_xfail"]
            for s in xfail:
                fails.add(s)
                if PY3 and py_version == 2 and isinstance(s, binary_type):
                    fails.add(s.decode('latin1'))
                elif PY2 and py_version == 3 and isinstance(s, text_type):
                    try:
                        fails.add(s.encode("latin1"))
                    except UnicodeEncodeError:
                        pass
        else:
            self.config.warn("W1",
                             "test expectation file in unsupported version")
        return fails
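For context, a version-0x0100 file that this parser accepts could be produced along these lines (a sketch inferred only from the keys the parser reads; the filename is hypothetical):

import umsgpack

state = {
    "version": 0x0100,
    "py_version": 3,                           # Python major version of the producer
    "expect_xfail": ["test_foo", "test_bar"],  # tests expected to fail
}
with open("expectations.bin", "wb") as fp:
    umsgpack.pack(state, fp)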
Example #10
def main(stdscr):
    dev = sys.argv[1] if len(sys.argv) > 1 else '/dev/ttyUSB0'

    stdscr = curses.initscr()
    curses.curs_set(0)
    y = 0
    for l in COREY.split('\n'):
        stdscr.addstr(y, 0, l)
        y += 1
    stdscr.box(0, 0)
    stdscr.refresh()
    win = curses.newwin(curses.LINES // 2 - 2, curses.COLS - 2,
                        curses.LINES // 2 + 1, 1)
    win.scrollok(True)
    win.addstr(1, 10, '')
    win.refresh()

    found_cores = []
    with serial.Serial(dev, 57600) as ser:
        while True:
            u = umsgpack.unpack(ser)
            if isinstance(u, str):
                (y, x) = win.getyx()
                win.addstr(curses.LINES // 2 - 3, 6, u[0:-1])
                win.refresh()
                n = int(u[5:10])
                if n not in found_cores:
                    found_cores.append(n)
                    stdscr.addstr(10, 35,
                                  "Found {} cores".format(len(found_cores)))
                    stdscr.refresh()
Example #11
    def from_file(self, infile):
        """Load data from given file into the instance

        :returns: None
        """
        hashed_blocks = umsgpack.unpack(infile)
        self._from_hashed_blocks(hashed_blocks)
Example #12
def register_data(path, prefix='yeast_cells_'):
    """Register all data sets as {prefix}_{setname}, i.e. yeast_cells_train"""
    assert (
        os.path.exists(f'{path}/labels.umsgpack')
        or os.path.exists(f'{path}/labels.json')), (
            "Labels not found, ensure either labels.umsgpack or labels.json "
            f"exists at {path}.")

    if os.path.exists(f'{path}/labels.umsgpack'):
        with open(f'{path}/labels.umsgpack', 'rb') as f:
            labels = umsgpack.unpack(f, encoding="utf-8")
    else:
        with open(f'{path}/labels.json', 'r') as f:
            labels = json.load(f)

    labels = validate_labels(labels, path)

    DatasetCatalog.clear()
    for label in labels:
        DatasetCatalog.register(f"{prefix}{label}",
                                lambda label_=label: labels[label_])
        MetadataCatalog.get(f"{prefix}{label}").set(
            thing_classes=["yeast_cell"])

    # yeast_cells_metadata = MetadataCatalog.get(f"{prefix}train")
    return labels
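A usage sketch, assuming a detectron2 environment and a hypothetical dataset directory containing a "train" split:

from detectron2.data import DatasetCatalog

labels = register_data("/data/yeast")                  # hypothetical path
train_dicts = DatasetCatalog.get("yeast_cells_train")  # invokes the registered lambda
print(len(train_dicts))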
Example #13
def main():
    args = get_args()
    with open(args['input'], 'rb') as f:
        payload = umsgpack.unpack(f)

    keys = ['lobby', 'rule', 'map', 'weapon', 'result', 'kill', 'death',
            'rank', 'rank_exp', 'rank_after', 'rank_exp_after', 'link_url']
    for key in keys:
        value = args.get(key)
        if not value:
            continue
        if value == 'DELETE':
            prev_value = payload.pop(key)
        else:
            prev_value = payload.get(key, '')
            payload[key] = args[key]
        print('Modified %s : %s -> %s' % (key, str(prev_value), str(value)))

    # Gears. Primary ability is only supported.
    gear_keys = ['clothing', 'headgear', 'shoes']
    for key in gear_keys:
        value = args.get(key)
        if not value:
            continue
        if value == 'DELETE':
            prev_value = payload['gears'].pop(key)
        else:
            prev_value = payload['gears'][key].get('primary_ability', '')
            payload['gears'][key]['primary_ability'] = args[key]
        print('Modified %s : %s -> %s' % (key, str(prev_value), str(value)))

    output = args.get('output') or args['input']
    with open(output, 'wb') as f:
        umsgpack.pack(payload, f)
Example #14
def deserialize_repo(filename: str) -> Repository:
    with open(filename, 'rb') as fp:
        repo_data = umsgpack.unpack(fp)

    func_callees = dict(
        map(lambda x: (x[b'id'], x[b'callees']), repo_data[b'funcs'].values()))
    funcs = dict(
        map(
            lambda x:
            (x[b'id'], Function(x[b'id'], x[b'name'], np.array(x[b'vec']))),
            repo_data[b'funcs'].values()))
    for fn in funcs.values():
        for callee_id in func_callees[fn.id()]:
            fn.add_callee(funcs[callee_id])

    repo = Repository()
    for program_data in repo_data[b'programs']:
        program = Program(program_data[b'name'],
                          ProgramTag(program_data[b'label']))
        for fid in program_data[b'entries']:
            program.add_entry(funcs[fid])

        repo.add_program(program)

    return repo
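The byte keys (b'funcs', b'id', ...) show the file was unpacked without string-key decoding. A sketch of the on-disk structure this loader expects, inferred from the accesses above (values are illustrative):

repo_data = {
    b"funcs": {
        b"f1": {b"id": 1, b"name": b"main", b"vec": [0.1, 0.2], b"callees": [2]},
        b"f2": {b"id": 2, b"name": b"helper", b"vec": [0.3, 0.4], b"callees": []},
    },
    b"programs": [
        {b"name": b"prog", b"label": 0, b"entries": [1]},
    ],
}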
Example #15
def main():
    args = get_args()
    with open(args['payload'], 'rb') as f:
        payload = umsgpack.unpack(f)
    url = 'https://stat.ink/api/v1/battle'
    api_key = (args['api_key'] or IkaConfig.OUTPUT_ARGS['StatInk']['api_key'])
    error, response = UploadToStatInk(payload, api_key, url, args['video_id'])
    print(response.get('url'))
Example #16
    def _recv_pack(self):
        try:
            self._pack = msgpack.unpack(self._io)
            return True
        except msgpack.UnpackException as e:
            print("Unpack failure", e)
            return False
        except Exception as e:
            print("Receive package failure", e)
            return False
Example #17
    def login(self, **kwargs):
        assert all(key in kwargs for key in ("id", "passwd"))
        assert all(isinstance(val, str) for val in kwargs.values())
        self.socket.send(b"login", zmq.SNDMORE)
        self.socket.send(pack(kwargs))
        res = unpack(self.socket.recv())
        if not res:
            log.info("login failed")
        else:
            log.info("login ok")
        return res
Example #18
def init_asm2vec(model_file_name: str) -> None:
    logging.info('Initializing asm2vec from file "%s"', model_file_name)

    memento = asm2vec.model.Asm2VecMemento()
    with open(model_file_name, 'rb') as fp:
        memento_data = umsgpack.unpack(fp)
    memento.populate(memento_data)

    global _model
    _model = asm2vec.model.Asm2Vec()
    _model.set_memento(memento)
Example #19
    def query(self, tr, **kwargs):
        if not kwargs:
            kwargs = {}
        self.socket.send(b"query", zmq.SNDMORE)
        self.socket.send(pack([tr, kwargs]))
        res = self.socket.recv()
        msg = unpack(self.socket.recv())
        if res == b"ok":
            return msg
        else:
            return None
Example #20
    def _load_split(self):
        with open(path.join(self.root_dir, ISSDataset._METADATA_FILE), "rb") as fid:
            metadata = umsgpack.unpack(fid, encoding="utf-8")

        with open(path.join(self._lst_dir, self.split_name + ".txt"), "r") as fid:
            lst = fid.readlines()
        lst = set(line.strip() for line in lst)

        meta = metadata["meta"]
        images = [img_desc for img_desc in metadata["images"] if img_desc["id"] in lst]

        return meta, images
Example #21
def verify_detached(message, signature):
    input = io.BytesIO(signature)
    public_key, header_hash = read_header(input)

    detached_message_sig = umsgpack.unpack(input)
    debug("sig:", detached_message_sig)
    message_digest = hashlib.sha512(header_hash + message).digest()
    debug("digest:", message_digest)
    message_sig_text = b"saltpack detached signature\0" + message_digest
    message_sig = detached_message_sig + message_sig_text
    nacl.bindings.crypto_sign_open(message_sig, public_key)
    return message
Example #22
def extract_data():
    """Get data dict from database.

    :return: data dict.
    """
    data = {}
    try:
        with open(database, 'rb') as fh:
            data = umsgpack.unpack(fh)
    except IOError:
        pass
    return data
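A hypothetical write-side counterpart, assuming the same module-level database path (this helper is not part of the original code):

def store_data(data):
    """Persist the data dict to the database file (hypothetical helper)."""
    with open(database, 'wb') as fh:
        umsgpack.pack(data, fh)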
Example #23
    def load(self, path, area=np.empty(0)):
        serialized_weights = {}
        with open(path, 'rb') as f:
            serialized_weights = umsgpack.unpack(f)
        for worm_weights in serialized_weights:
            worm_position = worm_weights.pop('_position')
            if area.size:
                pos_id = npr.randint(len(area[0]))
                worm_position = (area[1][pos_id], area[0][pos_id],
                                 npr.randint(4))
            for weight_name in worm_weights:
                worm_weights[weight_name] = torch.Tensor(
                    worm_weights[weight_name])
            self.emplace_worm(worm_position[0], worm_position[1],
                              worm_position[2], worm_weights)
Example #24
    def unpack_payload(payload):
        """Returns an iterator over a payload, iterating over the msgpacked
        objects within

        This exists as a static method since callers may need to call it
        without wanting to load it into an Object instance
        """
        buf = util.BytesReader(payload)
        try:
            while True:
                try:
                    yield umsgpack.unpack(buf)
                except umsgpack.InsufficientDataException:
                    return
        finally:
            buf.close()
Example #25
def unpack_payload(payload):
    """Returns an iterator over a payload, iterating over the msgpacked
    objects within

    :param payload: A byte-like object

    """
    buf = util.BytesReader(payload)
    try:
        while True:
            try:
                yield umsgpack.unpack(buf)
            except umsgpack.InsufficientDataException:
                return
    finally:
        buf.close()
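A self-contained sketch of the same drain-until-exhausted pattern, using io.BytesIO in place of the project's util.BytesReader:

import io
import umsgpack

# Pack several objects back to back into one byte string.
payload = umsgpack.packb({"a": 1}) + umsgpack.packb([2, 3]) + umsgpack.packb("end")

buf = io.BytesIO(payload)
while True:
    try:
        print(umsgpack.unpack(buf))  # {'a': 1}, then [2, 3], then 'end'
    except umsgpack.InsufficientDataException:
        break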
Example #26
def print_statink(filepath):
    with open(filepath, 'rb') as data:
        payload = umsgpack.unpack(data)

    if 'image_result' in payload:
        payload['image_result'] = '(PNG Data)'

    if 'image_judge' in payload:
        payload['image_judge'] = '(PNG Data)'

    if 'image_gear' in payload:
        payload['image_gear'] = '(PNG Data)'

    if 'events' in payload:
        payload['events'] = '(Events)'

    pprint.pprint(payload)
Example #27
def read_header(stream):
    header_bytes = umsgpack.unpack(stream)
    header_hash = hashlib.sha512(header_bytes).digest()
    header = umsgpack.unpackb(header_bytes)
    debug("header packet:", json_repr(header))
    debug("header hash:", json_repr(header_hash))
    [
        format_name,
        [major_version, minor_version],
        mode,
        public_key,
        nonce,
        *_,  # ignore additional elements
    ] = header
    if format_name != "saltpack":
        raise error.BadFormatError(
            "Unrecognized format name: '{}'".format(format_name))
    if major_version != 1:
        raise error.BadVersionError(
            "Incompatible major version: {}".format(major_version))
    return public_key, header_hash
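Note the double decode: the outer unpack yields the raw header bytes as a msgpack bin object, which are hashed before being decoded again with unpackb. A minimal sketch of producing such a doubly-encoded header (field values are illustrative, not a valid saltpack header):

import umsgpack

header = ["saltpack", [1, 0], 1, b"\x00" * 32, b"\x00" * 20]
header_bytes = umsgpack.packb(header)  # inner encoding: the header list
outer = umsgpack.packb(header_bytes)   # outer encoding: a bin wrapping those bytes
# read_header would unpack() the outer bin, hash header_bytes, then unpackb() them.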
Example #28
def main():
    args = get_args()
    with open(args['input'], 'rb') as f:
        payload = umsgpack.unpack(f)

    keys = ['lobby', 'rule', 'map', 'weapon', 'result', 'kill', 'death',
            'rank', 'rank_exp', 'rank_after', 'rank_exp_after', 'link_url']
    for key in keys:
        value = args.get(key)
        if not value:
            continue
        if value == 'DELETE':
            prev_value = payload.pop(key)
        else:
            prev_value = payload.get(key, '')
            payload[key] = args[key]
        print('Modified %s : %s -> %s' % (key, str(prev_value), str(value)))

    output = args.get('output') or args['input']
    with open(output, 'wb') as f:
        umsgpack.pack(payload, f)
Example #29
    def _replay(cls, fp):
        fp.seek(0, os.SEEK_END)
        end = fp.tell()
        fp.seek(0, os.SEEK_SET)

        magic = fp.read(4)
        if magic != b'BUK0':
            raise BucketFileError('magic mismatch', 0)
        while True:
            position = fp.tell()
            if position >= end:
                break
            try:
                flag, key, _ = umsgpack.unpack(fp)
                if flag not in [cls.OP_DELETE, cls.OP_SET]:
                    raise BucketFileError('data corrupted', position)
            except umsgpack.InsufficientDataException:
                raise BucketFileError('data corrupted', position)
            else:
                yield flag, key, position
Example #30
def print_statink(filepath, tsv_format=None):
    with open(filepath, 'rb') as data:
        payload = umsgpack.unpack(data)

    if 'image_result' in payload:
        payload['image_result'] = '(PNG Data)'

    if 'image_judge' in payload:
        payload['image_judge'] = '(PNG Data)'

    if 'image_gear' in payload:
        payload['image_gear'] = '(PNG Data)'

    if 'events' in payload:
        payload['events'] = '(Events)'

    if tsv_format:
        payload_summary = get_statink_summary(payload)
        print_tsv(payload_summary, tsv_format)
    else:
        pprint.pprint(payload)
Example #31
def decode(data, tree):
    def decode_btree(obj):
        if hasattr(obj, '__iter__') and '__class__' in obj:
            cls_name = obj['__class__']
            data = obj['data']
            if cls_name == 'Leaf':
                obj = Leaf(tree, bucket=bucket_to_lazynodes(data, tree))
            elif cls_name == 'Node':
                bucket = bucket_to_lazynodes(data['bucket'], tree)
                obj = Node(tree,
                           bucket=bucket,
                           rest=LazyNode(offset=data['rest'], tree=tree))
            else:
                tree.max_size = data['max_size']
                tree.root = LazyNode(offset=data['root'], tree=tree)
                obj = tree
        return obj

    # Decompress the first data group that can be found in the data stream
    data = decompress(unpack(data))

    data = decode_btree(unpackb(check_integrity(data)))
    return data
Example #32
def main():
    args = get_args()
    with open(args['input'], 'rb') as f:
        payload = umsgpack.unpack(f)

    keys = [
        'lobby', 'rule', 'map', 'weapon', 'result', 'kill', 'death', 'rank',
        'rank_exp', 'rank_after', 'rank_exp_after', 'link_url'
    ]
    for key in keys:
        value = args.get(key)
        if not value:
            continue
        if value == 'DELETE':
            prev_value = payload.pop(key)
        else:
            prev_value = payload.get(key, '')
            payload[key] = args[key]
        print('Modified %s : %s -> %s' % (key, str(prev_value), str(value)))

    output = args.get('output') or args['input']
    with open(output, 'wb') as f:
        umsgpack.pack(payload, f)
Example #33
    def read(self, socket):
        """ Read JSON from TCP data from the Unreal Server. """
        rbufsize = 0
        rfile = socket.makefile('rb', rbufsize)
        raw_payload_size = rfile.read(8)
        data = struct.unpack('!II', raw_payload_size)
        magic = data[0]
        length = data[1]

        if magic == RESPONSE_HEADER and length > 8:
            if sys.version_info[0] == 3:
                data = umsgpack.unpack(rfile)
            else:
                packed = rfile.read(length - 8)
                data = umsgpack.unpackb(packed)

            self.message_class = data['class']
            self.status = data['status']
            self.messages = data['messages']
            try:
                self.data = json.loads(data['data'])
            except (ValueError, TypeError):
                self.data = data['data']
        rfile.close()
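Since the check above requires length > 8 and the Python 2 branch reads length - 8 bytes, the length field evidently counts the 8-byte header itself. A hypothetical sender-side sketch of the same framing (the real RESPONSE_HEADER value is project-specific):

import struct
import umsgpack

def write_response(sock, obj, magic=0x12345678):  # placeholder magic value
    body = umsgpack.packb(obj)
    sock.sendall(struct.pack('!II', magic, len(body) + 8) + body)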
Example #34
def main():
    args = get_args()
    with open(args['input'], 'rb') as f:
        payload = umsgpack.unpack(f)

    keys = [
        'lobby', 'rule', 'map', 'weapon', 'result', 'kill', 'death', 'rank',
        'rank_exp', 'rank_after', 'rank_exp_after', 'link_url'
    ]
    for key in keys:
        value = args.get(key)
        if not value:
            continue
        if value == 'DELETE':
            prev_value = payload.pop(key)
        else:
            prev_value = payload.get(key, '')
            payload[key] = args[key]
        print('Modified %s : %s -> %s' % (key, str(prev_value), str(value)))

    # Gears. Primary ability is only supported.
    gear_keys = ['clothing', 'headgear', 'shoes']
    for key in gear_keys:
        value = args.get(key)
        if not value:
            continue
        if value == 'DELETE':
            prev_value = payload['gears'].pop(key)
        else:
            prev_value = payload['gears'][key].get('primary_ability', '')
            payload['gears'][key]['primary_ability'] = args[key]
        print('Modified %s : %s -> %s' % (key, str(prev_value), str(value)))

    output = args.get('output') or args['input']
    with open(output, 'wb') as f:
        umsgpack.pack(payload, f)
Example #35
def _get_database():
    # type: () -> Dict[str, Dict[bytes, List[Union[str, int]]]]
    with open(_datafile, 'rb') as database:
        database.seek(0)
        return unpack(database)
Example #36
#!/usr/bin/env python

import umsgpack

packed = umsgpack.packb({u'compact': True, u'schema': 0})
unpacked = umsgpack.unpackb(packed)
print(packed)
print(unpacked)


with open('test.bin', 'wb') as f:
    umsgpack.pack({u'compact': True, u'schema': 0}, f)
    umsgpack.pack([1, 2, 3], f)

with open('test.bin', 'rb') as f:
    print(umsgpack.unpack(f))
    print(umsgpack.unpack(f))
Example #37
            # e.g. "Weapon: Splattershot Pro, result: 999p 0k/1d."
            # "Special charged 3 times, used 3 times, never died holding it."
            msg = "\n%sをつかって %dp %dk/%ddでした。" % \
                (get_reason_name(data['weapon']), data['my_point'], data['kill'], data['death'])
            if sp_dead == 0:
                sp_dead_msg = "ありません。"
            else:
                sp_dead_msg = "%d回です。" % sp_dead
            msg2 = "\nスペシャルを%d回溜めて%d回発動し 抱え落ちは%s" % (sp_charge, sp_used, sp_dead_msg)
            msg = "%s %s" % (msg, msg2)
            at_sec = -1  # do not prefix the text with a timestamp
            msg_write(fw, at_sec, msg)

if __name__ == "__main__":
    files = glob.glob('/tmp/*.msgpack')
    for file in files:
        f = open(file, 'rb')
        data = umsgpack.unpack(f)
        msgfn = f.name
        f.close()

        # features that are not needed are handled by commenting them out
        ADD_AT_SEC = 10     # offset in seconds to align timeline entries with the start of the video
        event_write(data)   # save events to a text file in timeline format

        imgpng_write(data)  # write the three kinds of image data; must run before json_write
        json_write(data)    # save as a JSON file

        mvfn = '.%s' % msgfn        # move the processed file out of the way
        shutil.move(msgfn, mvfn)
Example #38
def decrypt(input, recipient_private):
    stream = io.BytesIO(input)
    # Parse the header.
    header_bytes = umsgpack.unpack(stream)
    header_hash = nacl.bindings.crypto_hash(header_bytes)
    header = umsgpack.unpackb(header_bytes)
    debug('header:', json_repr(header))
    debug('header hash:', header_hash)
    [
        format_name,
        [major_version, minor_version],
        mode,
        ephemeral_public,
        sender_secretbox,
        recipient_pairs,
        *_,  # ignore additional elements
    ] = header
    ephemeral_beforenm = nacl.bindings.crypto_box_beforenm(
        pk=ephemeral_public,
        sk=recipient_private)

    if format_name != "saltpack":
        raise error.BadFormatError(
            "Unrecognized format name: '{}'".format(format_name))
    if major_version != 1:
        raise error.BadVersionError(
            "Incompatible major version: {}".format(major_version))

    # Try decrypting each sender box, until we find the one that works.
    for recipient_index, pair in enumerate(recipient_pairs):
        [_, payload_key_box, *_] = pair
        try:
            payload_key = nacl.bindings.crypto_box_open_afternm(
                ciphertext=payload_key_box,
                nonce=PAYLOAD_KEY_BOX_NONCE,
                k=ephemeral_beforenm)
            break
        except CryptoError:
            continue
    else:
        raise RuntimeError('Failed to find matching recipient.')

    sender_public = nacl.bindings.crypto_secretbox_open(
        ciphertext=sender_secretbox,
        nonce=SENDER_KEY_SECRETBOX_NONCE,
        key=payload_key)

    mac_key_nonce = header_hash[:24]
    mac_key_box = nacl.bindings.crypto_box(
        message=b'\0'*32,
        nonce=mac_key_nonce,
        pk=sender_public,
        sk=recipient_private)
    mac_key = mac_key_box[16:48]

    debug('recipient index:', recipient_index)
    debug('sender key:', sender_public)
    debug('payload key:', payload_key)
    debug('mac key nonce:', mac_key_nonce)
    debug('mac key:', mac_key)

    # Decrypt each of the packets.
    output = io.BytesIO()
    chunknum = 0
    while True:
        packet = umsgpack.unpack(stream)
        debug('packet:', json_repr(packet))
        [hash_authenticators, payload_secretbox, *_] = packet
        hash_authenticator = hash_authenticators[recipient_index]

        # Verify the secretbox hash.
        payload_nonce = PAYLOAD_NONCE_PREFIX + chunknum.to_bytes(8, "big")
        debug('payload nonce:', payload_nonce)
        payload_hash = nacl.bindings.crypto_hash(
            header_hash + payload_nonce + payload_secretbox)
        debug('hash to authenticate:', payload_hash)
        hmac_digest = hmac.new(mac_key, digestmod=hashlib.sha512)
        hmac_digest.update(payload_hash)
        our_authenticator = hmac_digest.digest()[:32]
        if not hmac.compare_digest(hash_authenticator, our_authenticator):
            raise error.HMACError("HMAC failed to verify.")

        # Open the payload secretbox.
        chunk = nacl.bindings.crypto_secretbox_open(
            ciphertext=payload_secretbox,
            nonce=payload_nonce,
            key=payload_key)
        output.write(chunk)

        debug('chunk:', repr(chunk))

        # The empty chunk signifies the end of the message.
        if chunk == b'':
            break

        chunknum += 1

    return output.getvalue()
Example #39
import umsgpack
import numpy as np
import gzip

f = open("testout.msgpack", "r")
res = umsgpack.unpack(f)
f.close()

numeofs = res[0]
numobs = res[1]
numgridpts = res[2]

# Breeze's toDenseVector flattens matrices in Fortran/column-major order, so specify this explicitly since NumPy's default is C/row-major order
U = np.array(res[3]).reshape((numgridpts, numeofs), order="F")
V = np.array(res[4]).reshape((numeofs, numobs), order="F")
S = np.array(res[5])
mean = np.array(res[6])


f = gzip.open("testout.msgpack.gz", "r")
res = umsgpack.unpack(f)
f.close()

gnumeofs = res[0]
gnumobs = res[1]
gnumgridpts = res[2]

gU = np.array(res[3]).reshape((gnumgridpts, gnumeofs), order="F")
gV = np.array(res[4]).reshape((gnumeofs, gnumobs), order="F")
gS = np.array(res[5])
gmean = np.array(res[6])
Example #40
    def test_streaming_reader(self):
        # Try first composite test vector
        (name, obj, data) = composite_test_vectors[0]
        reader = io.BytesIO(data)
        self.assertEqual(umsgpack.unpack(reader), obj)