Example 1
 def test_decompress_buffer_size(self):
     out = compress(SHORT_INPUT)
     with self.assertRaises(TypeError):
         decompress(out, buffer_size="1")
     with self.assertRaises(ValueError):
         decompress(out, buffer_size=0)
     out = compress(LONG_INPUT)
     for buffer_size in range(1, 1025, 128):
         self.assertEqual(LONG_INPUT, decompress(out, buffer_size=buffer_size))
Example 2
 def test_compress_checksum(self):
     with self.assertRaises(TypeError):
         compress(SHORT_INPUT, checksum=None)
     self.check_compress_short(checksum=True)
     self.check_compress_short(checksum=False)
     for data in (SHORT_INPUT, LONG_INPUT):
         with self.assertRaisesLz4FramedError(LZ4F_ERROR_contentChecksum_invalid):
             # invalid checksum
             decompress(compress(data, checksum=True)[:-1] + b"0")
Example 3
 def test_decompress_buffer_size(self):
     out = compress(SHORT_INPUT)
     with self.assertRaises(TypeError):
         decompress(out, buffer_size='1')
     with self.assertRaises(ValueError):
         decompress(out, buffer_size=0)
     out = compress(LONG_INPUT)
     for buffer_size in range(1, 1025, 128):
         self.assertEqual(LONG_INPUT,
                          decompress(out, buffer_size=buffer_size))
Example 4
 def test_compress_checksum(self):
     with self.assertRaises(TypeError):
         compress(SHORT_INPUT, checksum=None)
     self.check_compress_short(checksum=True)
     self.check_compress_short(checksum=False)
     for data in (SHORT_INPUT, LONG_INPUT):
         with self.assertRaisesLz4FramedError(
                 LZ4F_ERROR_contentChecksum_invalid):
             # invalid checksum
             decompress(compress(data, checksum=True)[:-1] + b'0')
Example 5
    def test_compress_end(self):
        with self.assertRaises(TypeError):
            compress_end()
        with self.assertRaises(ValueError):
            compress_end(create_decompression_context())

        ctx, header = self.__compress_begin()
        self.assertEqual(b'', decompress(header + compress_end(ctx)))

        ctx, header = self.__compress_begin()
        data = compress_update(ctx, SHORT_INPUT)
        self.assertEqual(decompress(header + data + compress_end(ctx)),
                         SHORT_INPUT)
Example 6
    def test_compress_end(self):
        with self.assertRaises(TypeError):
            compress_end()
        with self.assertRaises(ValueError):
            compress_end(create_decompression_context())

        ctx, header = self.__compress_begin()
        # without any compress_update calls frame is invalid
        with self.assertRaisesLz4FramedError(LZ4F_ERROR_frameHeader_incomplete):
            decompress(header + compress_end(ctx))

        ctx, header = self.__compress_begin()
        data = compress_update(ctx, SHORT_INPUT)
        self.assertEqual(decompress(header + data + compress_end(ctx)), SHORT_INPUT)
Example 7
def receive(sock):
    # `sock` must be a readable file-like object (e.g. socket.makefile('rb')):
    # pickle.Unpickler needs read()/readline(), which raw sockets lack.
    unpickler = pickle.Unpickler(sock)
    result = OrderedDict()
    keylist = unpickler.load()

    for col in keylist:
        dt = unpickler.load()
        if dt == object:
            # object columns are pickled, then LZ4-compressed
            result[col] = pickle.loads(lz4framed.decompress(unpickler.load()))
        else:
            # numeric columns are raw buffers of the given dtype
            result[col] = np.frombuffer(lz4framed.decompress(unpickler.load()),
                                        dtype=dt)

    return result
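
The receive() excerpt above implies a matching sender that writes the key list, then a dtype and an LZ4-compressed payload per column. A minimal sketch of that counterpart (hypothetical: the name send and the wfile/frame parameters are assumptions, not part of the original project):

import pickle

import lz4framed
import numpy as np

def send(wfile, frame):
    # wfile: writable file-like object (e.g. sock.makefile('wb'))
    # frame: mapping of column name -> sequence of values
    pickler = pickle.Pickler(wfile)
    pickler.dump(list(frame.keys()))  # key list, read first by receive()
    for col in frame:
        arr = np.asarray(frame[col])
        pickler.dump(arr.dtype)  # dtype, read before each payload
        if arr.dtype == object:
            # object columns: pickle, then LZ4-compress
            payload = pickle.dumps(list(arr))
        else:
            # numeric columns: raw buffer of the given dtype
            payload = arr.tobytes()
        pickler.dump(lz4framed.compress(payload))
    wfile.flush()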
Example 8
def _fetch_autoclaved_measurement_body_from_s3(
    autoclaved_fn: str, frame_off: int, frame_size: int, intra_off: int, intra_size: int
) -> bytes:
    """Fetch autoclaved byte range from S3, decompress it"""
    log = current_app.logger
    REQID_HDR = "X-Request-ID"
    # This is the legacy / autoclaved S3 bucket
    BASEURL = "https://ooni-data.s3.amazonaws.com/autoclaved/jsonl.tar.lz4/"
    # Typical LZ4 frames hold ~256 KB of decompressed text.
    # The largest frame seen was ~55 MB compressed and ~56 MB decompressed.
    url = urljoin(BASEURL, autoclaved_fn)
    range_header = "bytes={}-{}".format(frame_off, frame_off + frame_size - 1)
    hdr = {"Range": range_header}
    log.info(f"Fetching {url} {range_header}")
    r = requests.get(url, headers=hdr)
    r.raise_for_status()
    blob = r.content
    if len(blob) != frame_size:
        raise RuntimeError("Failed to fetch LZ4 frame", len(blob), frame_size)

    blob = lz4framed.decompress(blob)[intra_off : intra_off + intra_size]
    if len(blob) != intra_size or blob[:1] != b"{" or blob[-1:] != b"}":
        raise RuntimeError(
            "Failed to decompress LZ4 frame to measurement.json",
            len(blob),
            intra_size,
            blob[:1],
            blob[-1:],
        )

    return blob
Example 9
 def load(self, fname):
     self.logger.info("loading compressed pickle stream from '{0}'".format(fname))
     import cPickle as pickle  # Python 2-only module
     import lz4framed as LZ4
     with open(fname, 'rb') as f:
         self.data = pickle.loads(LZ4.decompress(f.read()))
     self.L = len(self.data)
     self.logger.info("done.")
Example 10
def iter_rows(channel_ids, messages_xz_path, verbose):
    for guild in os.listdir(config.log_dir):
        guild_path = path.join(config.log_dir, guild)
        if not path.isdir(guild_path):
            continue
        for channel in os.listdir(guild_path):
            if verbose:
                print('processing', guild, '-', channel)
            channel_id = channel_ids[guild][channel]
            channel_path = path.join(guild_path, channel)
            for day_file in os.listdir(channel_path):
                with open(path.join(channel_path, day_file), 'rb') as f:
                    compressed = f.read()
                contents = lz4framed.decompress(compressed)
                lines = contents.split(b'\0')
                for line in lines:
                    if line == b'':
                        continue
                    message_id, _time, user_id, content = line.split(b'|', 3)
                    yield channel_id, int(user_id), int(
                        message_id), content.decode('utf-8')

    with lzma.open(messages_xz_path, 'rt', encoding='utf-8') as f:
        reader = csv.DictReader(f)
        for i, row in enumerate(reader):
            channel_id = row['channel_id']
            int_user_id = row['int_user_id']
            message_id = row['message_id']
            yield int(channel_id), int(int_user_id), int(
                message_id), row['content']
            if verbose and (i + 1) % 100000 == 0:
                print('processed', i + 1, 'messages')
Example 11
def get_one_fastpath_measurement(measurement_id, download):
    """Get one measurement from the fastpath table by measurement_id,
    fetching the file from the fastpath host
    """
    log = current_app.logger
    tid = measurement_id[len(FASTPATH_MSM_ID_PREFIX) :]

    path = "/measurements/{}.json.lz4".format(tid)
    log.info(
        "Incoming fastpath query %r. Fetching %s:%d%s",
        measurement_id,
        FASTPATH_SERVER,
        FASTPATH_PORT,
        path,
    )
    conn = http.client.HTTPConnection(FASTPATH_SERVER, FASTPATH_PORT)
    log.debug("Fetching %s:%d %r", FASTPATH_SERVER, FASTPATH_PORT, path)
    conn.request("GET", path)
    r = conn.getresponse()
    log.debug("Response status: %d", r.status)
    try:
        assert r.status == 200
        blob = r.read()
        conn.close()
        log.debug("Decompressing LZ4 data")
        blob = lz4framed.decompress(blob)
        response = make_response(blob)
        response.headers.set("Content-Type", "application/json")
        log.debug("Sending JSON response")
        return response
    except Exception:
        raise BadRequest("No measurement found")
Example 12
 def load(self, fname):
     self.logger.info(
         "loading compressed pickle stream from '{0}'".format(fname))
     import cPickle as pickle  # Python 2-only module
     import lz4framed as LZ4
     with open(fname, 'rb') as f:
         self.data = pickle.loads(LZ4.decompress(f.read()))
     self.L = len(self.data)
     self.logger.info("done.")
Example 13
 def get_geo_data(self):
     if self._geo_data is None:
         geo_file = self.get_file_data('geo')
         buffer = lz4framed.decompress(geo_file)
         arr = np.frombuffer(buffer, dtype=np.float32)
         arr = arr.reshape(-1, 5)
         self._geo_data = [arr[:, :3], arr[:, 3:]]
     return self._geo_data
Example 14
def _parse(abspath):
	with open(abspath, 'rb') as f:
		compressed = f.read()
	contents = lz4framed.decompress(compressed)
	if contents[-1] != 0:
		raise Exception('corrupt log file', abspath)
	last_message = contents[contents.rfind(b'\0', 0, -1) + 1:-1]
	lmi = last_message.split(b'|', 1)[0].decode()
	return lmi, contents
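
_parse() expects an LZ4 frame whose payload is a run of NUL-terminated records, each a pipe-delimited line starting with the message id (the same layout iter_rows splits apart). A hypothetical writer for that format (the name write_log and its parameters are assumptions):

import lz4framed

def write_log(abspath, records):
    # records: iterable of (message_id, time, user_id, content) bytes tuples;
    # each record becomes b"id|time|user_id|content\0" inside one LZ4 frame
    payload = b''.join(b'|'.join(rec) + b'\0' for rec in records)
    with open(abspath, 'wb') as f:
        f.write(lz4framed.compress(payload))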
Example 15
def decompress(data):
    if len(data) > 0:
        try:
            data = lz4framed.decompress(data)
        except lz4framed.Lz4FramedError:
            print('error:', data)
    else:
        data = None
    return data
Example 16
    def _get_label(self, index):
        target = None
        env = self.env
        with env.begin(write=False) as txn:
            byteflow = txn.get(self.keys[index])

        unpacked = pa.deserialize(lz4framed.decompress(byteflow))
        target = unpacked[1]

        return target
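
The LMDB readers in these examples decode values with pa.deserialize(lz4framed.decompress(...)), which implies the store was written with the matching legacy pyarrow serializer. A minimal sketch of that write path (assumed, not shown in any excerpt; pyarrow.serialize is deprecated in recent pyarrow releases):

import lz4framed
import pyarrow as pa

def put_value(txn, key, obj):
    # serialize with pyarrow, then wrap in a single LZ4 frame
    blob = pa.serialize(obj).to_buffer().to_pybytes()
    txn.put(key, lz4framed.compress(blob))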
Example 17
def get_measurement(measurement_id,
                    download=None) -> Response:  # pragma: no cover
    """Get one measurement by measurement_id,
    fetching the file from S3 or the fastpath host as needed
    Returns only the measurement without extra data from the database
    fetching the file from the fastpath host
    ---
    parameters:
      - name: measurement_id
        in: path
        required: true
        type: string
        description: The measurement_id to retrieve the measurement for
      - name: download
        in: query
        type: boolean
        description: If we should be triggering a file download
    responses:
      '200':
        description: Returns the JSON blob for the specified measurement
        schema:
          $ref: "#/definitions/MeasurementBlob"
    """
    if not measurement_id.startswith(FASTPATH_MSM_ID_PREFIX):
        raise BadRequest("No measurement found")

    log = current_app.logger
    tid = measurement_id[len(FASTPATH_MSM_ID_PREFIX):]
    path = "/measurements/{}.json.lz4".format(tid)
    log.info(
        "Incoming fastpath query %r. Fetching %s:%d%s",
        measurement_id,
        FASTPATH_SERVER,
        FASTPATH_PORT,
        path,
    )
    # FIXME
    conn = http.client.HTTPConnection(FASTPATH_SERVER, FASTPATH_PORT)
    conn.request("GET", path)
    r = conn.getresponse()
    log.debug("Response status: %d", r.status)
    try:
        assert r.status == 200
        blob = r.read()
        conn.close()
        log.debug("Decompressing LZ4 data")
        blob = lz4framed.decompress(blob)
        response = make_response(blob)
        response.headers.set("Content-Type", "application/json")
        log.debug("Sending JSON response")
        return response
    except Exception:
        raise BadRequest("No measurement found")
Example 18
    def __fp_test(self, in_raw=LONG_INPUT, **kwargs):
        in_bytes = BytesIO(in_raw)
        out_bytes = BytesIO()

        with Compressor(out_bytes, **kwargs) as compressor:
            try:
                while True:
                    compressor.update(in_bytes.read(1024))
            # raised by compressor.update() on empty data argument
            except Lz4FramedNoDataError:
                pass
        self.assertEqual(decompress(out_bytes.getvalue()), in_raw)
Example 19
 def __compress_with_data_and_args(self, data, **kwargs):
     ctx, header = self.__compress_begin(**kwargs)
     in_raw = BytesIO(data)
     out = BytesIO(header)
     out.seek(0, SEEK_END)
     try:
         while True:
             out.write(compress_update(ctx, in_raw.read(1024)))
     except Lz4FramedNoDataError:
         pass
     out.write(compress_end(ctx))
     self.assertEqual(decompress(out.getvalue()), data)
Example 20
    def load(self):
        # if the cache is not None, the frame is already loaded
        if self._geo_cache is not None:
            return True

        # open file
        load_path = f'{setting.submit_job_path}{self._job_id}/export/'

        file_path = f'{load_path}{self._frame:06d}'
        old_format_path = file_path + '.4dr'
        new_format_path = file_path + '.4df'

        # old format
        if os.path.isfile(old_format_path):
            with open(old_format_path, 'rb') as f:
                data = f.read()

            geo_size, tex_size = struct.unpack('II', data[:8])
            seek = 8

            # load geo
            buffer = lz4framed.decompress(data[seek:seek + geo_size])
            arr = np.frombuffer(buffer, dtype=np.float32)
            seek += geo_size
            arr = arr.reshape(-1, 5)

            geo_data = [arr[:, :3], arr[:, 3:]]

            # load texture
            tex_data = jpeg_coder.decode(data[seek:seek + tex_size])

            self._cache_buffer(geo_data, tex_data)
            return True
        # new format
        elif os.path.isfile(new_format_path):
            fourd_frame = FourdFrameManager.load(new_format_path)
            geo_data = fourd_frame.get_geo_data()
            tex_data = fourd_frame.get_texture_data()
            self._resolution = fourd_frame.get_texture_resolution()

            # resize for better playback performance
            if self._resolution > setting.max_display_resolution:
                tex_data = cv2.resize(tex_data,
                                      dsize=(setting.max_display_resolution,
                                             setting.max_display_resolution),
                                      interpolation=cv2.INTER_CUBIC)
                self._resolution = setting.max_display_resolution

            self._cache_buffer(geo_data, tex_data)
            return True
        return None
Example 21
    def test_compressor__no_fp(self):
        in_bytes = BytesIO(LONG_INPUT)
        out_bytes = BytesIO()

        compressor = Compressor()
        try:
            while True:
                out_bytes.write(compressor.update(in_bytes.read(1024)))
        # raised by compressor.update() on empty data argument
        except Lz4FramedNoDataError:
            pass
        out_bytes.write(compressor.end())

        self.assertEqual(decompress(out_bytes.getvalue()), LONG_INPUT)
Example 22
    def __init__(self, lmdb_store_path, transform=None):
        super().__init__()
        assert os.path.isfile(lmdb_store_path), f"LMDB store '{lmdb_store_path}' does not exist"
        assert not os.path.isdir(lmdb_store_path), f"LMDB store name should be a file, found directory: {lmdb_store_path}"

        self.lmdb_store_path = lmdb_store_path
        self.lmdb_connection = lmdb.open(lmdb_store_path,
                                         subdir=False, readonly=True, lock=False, readahead=False, meminit=False)

        with self.lmdb_connection.begin(write=False) as lmdb_txn:
            self.length = lmdb_txn.stat()['entries'] - 1
            self.keys = pyarrow.deserialize(lz4framed.decompress(lmdb_txn.get(b'__keys__')))
            print(f"Total records: {len(self.keys), self.length}")
        self.transform = transform
Example 23
    def __init__(self,
                 db_path,
                 transform=None,
                 target_transform=None,
                 train=True):
        self.db_path = db_path
        self.env = lmdb.open(db_path,
                             subdir=path.isdir(db_path),
                             readonly=True,
                             lock=False,
                             readahead=False,
                             meminit=False)

        with self.env.begin(write=False) as txn:
            # self.length = txn.stat()['entries'] - 1
            self.length = pa.deserialize(
                lz4framed.decompress(txn.get(b'__len__')))
            self.keys = pa.deserialize(
                lz4framed.decompress(txn.get(b'__keys__')))
            self.classnum = pa.deserialize(
                lz4framed.decompress(txn.get(b'__classnum__')))

        self.transform = transform
        self.target_transform = target_transform
Example 24
    def __init__(self,
                 datatype,
                 lmdb_store_path,
                 mask_path,
                 hr_shape=(192, 192),
                 gen_only=True):
        super().__init__()
        assert os.path.isfile(
            lmdb_store_path), f"LMDB store '{lmdb_store_path}' does not exist"
        assert not os.path.isdir(
            lmdb_store_path
        ), f"LMDB store name should be a file, found directory: {lmdb_store_path}"

        self.lmdb_store_path = lmdb_store_path
        self.lmdb_connection = lmdb.open(lmdb_store_path,
                                         subdir=False,
                                         readonly=True,
                                         lock=False,
                                         readahead=False,
                                         meminit=False)

        with self.lmdb_connection.begin(write=False) as lmdb_txn:
            self.length = lmdb_txn.stat()['entries'] - 1
            self.keys = pyarrow.deserialize(
                lz4framed.decompress(lmdb_txn.get(b'__keys__')))
            print(f"Total records: {len(self.keys), self.length}")
        self.transform = None  # this __init__ takes no transform argument
        self.datatype = datatype
        self.mask_path = mask_path / datatype

        self.hr_height, self.hr_width = hr_shape
        self.mask_generator = MaskGenerator(height=self.hr_height,
                                            width=self.hr_width,
                                            channels=3,
                                            filepath=self.mask_path)

        self.gen_only = gen_only
        self.mean = np.array([0.485, 0.456, 0.406])
        self.std = np.array([0.229, 0.224, 0.225])
        self.image_transform = torchvision.transforms.Compose([
            transforms.Resize((self.hr_height, self.hr_height), Image.LANCZOS),
            transforms.ToTensor(),
            transforms.Normalize(self.mean, self.std)
        ])
        self.mask_transform = transforms.Compose([
            transforms.ToTensor()
            # transforms.Normalize(self.mean, self.std)
        ])
Example 25
 def test_decompress_invalid_input(self):
     with self.assertRaisesLz4FramedError(LZ4F_ERROR_frameType_unknown):
         decompress(b'invalidheader')
     with self.assertRaisesRegex(ValueError, 'frame incomplete'):
         decompress(compress(SHORT_INPUT)[:-5])
     # incomplete data (length not specified in header)
     with BytesIO() as out:
         with Compressor(out) as compressor:
             compressor.update(SHORT_INPUT)
         output = out.getvalue()
         with self.assertRaisesRegex(ValueError, 'frame incomplete'):
             decompress(output[:-20])
Example 26
 def test_decompress_invalid_input(self):
     with self.assertRaisesLz4FramedError(LZ4F_ERROR_frameHeader_incomplete):
         decompress(b"invalidheader")
     with self.assertRaisesRegex(ValueError, "frame incomplete"):
         decompress(compress(SHORT_INPUT)[:-5])
     # incomplete data (length not specified in header)
     with BytesIO() as out:
         with Compressor(out) as compressor:
             compressor.update(SHORT_INPUT)
         output = out.getvalue()
         with self.assertRaisesRegex(ValueError, "frame incomplete"):
             decompress(output[:-20])
Example 27
def unpack_4fr(file_path):
    jpeg_encoder = TurboJPEG('turbojpeg.dll')

    with open(file_path, 'rb') as f:
        data = f.read()

    header = data[:8]
    geo_size, texture_size = struct.unpack('II', header)

    geo_buffer = data[8:8 + geo_size]
    texture_buffer = data[8 + geo_size:]

    # obj
    geo_buffer = lz4framed.decompress(geo_buffer)
    point_list = np.frombuffer(geo_buffer, dtype=np.float32)
    point_list = point_list.reshape(-1, 5)

    pos_list = point_list[:, 0:3]
    uv_list = point_list[:, 3:5]

    uv_list = np.array(uv_list, np.float32)
    uv_list -= [0, 1.0]
    uv_list *= [1, -1]

    pos_strings = [f'v {x} {y} {z}' for x, y, z in pos_list]
    uv_strings = [f'vt {u} {v}' for u, v in uv_list]
    face_strings = [
        f'f {f}/{f} {f + 1}/{f + 1} {f + 2}/{f + 2}'
        for f in range(1, point_list.shape[0], 3)
    ]

    obj_data = ['g'] + pos_strings + uv_strings + ['g'] + face_strings
    obj_data = '\n'.join(obj_data)

    with open(file_path.replace('4dr', 'obj'), 'w') as f:
        f.write(obj_data)

    # jpg
    with open(file_path.replace('4dr', 'jpg'), 'wb') as f:
        im = jpeg_encoder.decode(texture_buffer, TJPF_RGB)
        f.write(jpeg_encoder.encode(im))

    return file_path
Example 28
def iter_rows(channel_ids, verbose):
    for guild in os.listdir(config.log_dir):
        guild_path = path.join(config.log_dir, guild)
        if not path.isdir(guild_path):
            continue
        for channel in os.listdir(guild_path):
            if verbose:
                print('processing', guild, '-', channel)
            channel_id = channel_ids[guild][channel]
            channel_path = path.join(guild_path, channel)
            for day_file in os.listdir(channel_path):
                with open(path.join(channel_path, day_file), 'rb') as f:
                    compressed = f.read()
                contents = lz4framed.decompress(compressed)
                lines = contents.split(b'\0')
                for line in lines:
                    if line == b'':
                        continue
                    message_id, _time, user_id, content = line.split(b'|', 3)
                    yield channel_id, int(user_id), int(
                        message_id), content.decode('utf-8')
Example 29
def main(host='127.0.0.1', port=5000):
    pygame.init()
    FPS_FONT = pygame.font.SysFont("Verdana", 20)
    GOLDENROD = pygame.Color("goldenrod")

    def show_fps(window, clock):
        pygame.draw.rect(window, (0, 0, 0), Rect((0, 0), (60, 30)))
        fps_overlay = FPS_FONT.render("{0:.2f}".format(clock.get_fps()), True,
                                      GOLDENROD)
        window.blit(fps_overlay, (0, 0))

    screen = pygame.display.set_mode((WIDTH, HEIGHT))
    clock = pygame.time.Clock()
    watching = True

    sock = socket()
    sock.connect((host, port))
    try:
        while watching:
            for event in pygame.event.get():
                if event.type == pygame.QUIT:
                    watching = False
                    break

            # Retrieve the size of the pixels length,
            # then the pixels length and the compressed pixels
            size_len = int.from_bytes(sock.recv(1), byteorder='big')
            size = int.from_bytes(sock.recv(size_len), byteorder='big')
            pixels = lz4framed.decompress(recvall(sock, size))

            # Create the Surface from raw pixels
            img = pygame.image.fromstring(pixels, (WIDTH, HEIGHT), 'RGB')

            # Display the picture
            screen.blit(img, (0, 0))
            show_fps(screen, clock)
            pygame.display.flip()
            clock.tick(60)
    finally:
        sock.close()
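
The watcher above relies on a recvall() helper that is not shown in the excerpt. A minimal sketch of what it presumably does, reading exactly size bytes from the socket:

def recvall(sock, size):
    # sock.recv() may return fewer bytes than requested,
    # so loop until the full compressed payload has arrived
    buf = bytearray()
    while len(buf) < size:
        chunk = sock.recv(size - len(buf))
        if not chunk:
            raise ConnectionError('socket closed mid-frame')
        buf.extend(chunk)
    return bytes(buf)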
Example 30
    def _read_entries(self, reader):
        while reader.remaining() > 0:
            # Stop once the internal header is found, if only interested in it
            if (self._options.print_header
                    or self._options.integrity) and self.internal_header:
                break

            try:
                entry_id = reader.read_u32()
                entry_len = reader.read_u32()
                entry_data = reader.read_data(entry_len)
            except LogError as ex:
                # Raise error and stop reading
                raise LogError("Truncated entry: %s" % str(ex))

            if self.version >= 3 and entry_id == LOGGERD_ID_AES_DESC:
                self._read_aes_desc(LogDataReader(entry_data))
            elif self.version >= 3 and entry_id == LOGGERD_ID_AES:
                new_data = self._aes_decrypt(entry_data)
                if not self.decrypt_only:
                    self._read_entries(LogDataReader(new_data))
                else:
                    self._out.write(new_data)
            elif self.decrypt_only:
                self._out.write(struct.pack("<II", entry_id, entry_len))
                self._out.write(entry_data)
            elif entry_id == LOGGERD_ID_SOURCE_DESC:
                desc = LogSourceDesc(LogDataReader(entry_data))
                self._add_source(desc)
            elif entry_id == LOGGERD_ID_LZ4:
                try:
                    new_data = lz4framed.decompress(entry_data)
                except lz4framed.Lz4FramedError as ex:
                    # Print message and try continue reading
                    LOG.warning("Truncated lz4 entry: %s", str(ex))
                    continue
                self._read_entries(LogDataReader(new_data))
            else:
                source = self._get_source(entry_id)
                source.add_entry(LogDataReader(entry_data))
Example 31
    def convert_to_pixmap(self, focus=False, save=False):
        """Run a series of image conversions to a pixmap

        binary JPEG > cv2 > QImage > QPixmap

        """
        if self._buf is None:
            if not self.is_cache():
                return None
            buf = lz4framed.decompress(self._cache)
            buf = np.frombuffer(buf, self._type)
            buf.shape = self._shape
        else:
            buf = np.copy(self._buf)

        _height, _width, _ = buf.shape

        if (self.is_live_view() and _width == self._ow):
            if focus:
                sim = cv2.resize(buf, (int(_width / 2), int(_height / 2)))
                edges = cv2.Canny(sim, 280, 380)
                edges = cv2.dilate(edges, self._kernel)
                edges = cv2.cvtColor(edges, cv2.COLOR_GRAY2BGR)
                edges *= np.array((1, 0, 0), np.uint8)
                edges = cv2.resize(edges, (_width, _height))
                buf = np.bitwise_or(buf, edges)
            tm = cv2.resize(buf, (int(_width / 2), int(_height / 2)))
            tm = cv2.cvtColor(tm, cv2.COLOR_RGB2BGR)
            server.set_buffer(tm)

        # convert to QImage
        q_image = QImage(buf.data, _width, _height, 3 * _width,
                         QImage.Format_RGB888)

        # convert to QPixmap
        if not save:
            self._buf = None
        return QPixmap.fromImage(q_image)
Example 32
    def __getitem__(self, index):
        img, target = None, None
        env = self.env
        with env.begin(write=False) as txn:
            byteflow = txn.get(self.keys[index])
        unpacked = pa.deserialize(lz4framed.decompress(byteflow))

        # load image
        imgbuf = unpacked[0]
        buf = six.BytesIO()
        buf.write(imgbuf)
        buf.seek(0)
        img = Image.open(buf).convert('RGB')

        # load label
        target = unpacked[1]

        if self.transform is not None:
            img = self.transform(img)

        if self.target_transform is not None:
            target = self.target_transform(target, self.classnum)

        return img, target
Example 33
 def check_compress_long(self, *args, **kwargs):
     self.assertEqual(LONG_INPUT,
                      decompress(compress(LONG_INPUT, *args, **kwargs)))
Example 34
 def test_decompress_minimal(self):
     with self.assertRaises(TypeError):
         decompress()
     with self.assertRaises(Lz4FramedNoDataError):
         decompress(b"")
     self.check_compress_short()
Example 35
 def check_compress_short(self, *args, **kwargs):
     self.assertEqual(SHORT_INPUT,
                      decompress(compress(SHORT_INPUT, *args, **kwargs)))
Example 36
 def check_compress_short(self, *args, **kwargs):
     self.assertEqual(SHORT_INPUT, decompress(compress(SHORT_INPUT, *args, **kwargs)))
Example 37
 def check_compress_long(self, *args, **kwargs):
     self.assertEqual(LONG_INPUT, decompress(compress(LONG_INPUT, *args, **kwargs)))
Example 38
    def processData(data):
        try:
            dep = DataExchangeProtocol()
            dep.ParseFromString(data)

        except Exception as e:
            logger.error("处理DEP错误,PB解析数据发生错误")
            logger.error(e, exc_info=True)
            logger.error("处理DEP错误,PB解析数据发生错误,原始数据:%s", data)
            return

        sourceNodeId = dep.sourceNodeId
        targetNodeId = dep.targetNodeId

        if targetNodeId != Config.nodeId:
            logger.error("处理DEP错误,目标节点ID不匹配当前节点ID:%s,目标节点ID:%s", Config.nodeId,
                         targetNodeId)
            return

        rpcId = dep.rpcId
        timestamp = dep.timestamp
        contentType = dep.contentType
        rpcType = dep.rpcType
        reqId = dep.reqId
        logger.info(
            "处理DEP记录,来源节点ID:%s,RPC类型:%s,RPC ID:%s,请求ID:%s内容类型:%s,时间戳:%s",
            sourceNodeId, rpcType, rpcId, reqId, contentType, timestamp)

        if contentType == DataExchangeProtocol.ContentType.COMPRESSED_LZ4:
            try:
                contentByteString = lz4framed.decompress(dep.contentBytes)
            except Exception:
                logger.error(
                    "处理DEP异常,来源节点ID:%s,RPC类型:%s,RPC ID:%s,请求ID:%s时间戳:%s,无法使用LZ4正确解析报文内容",
                    sourceNodeId,
                    rpcType,
                    rpcId,
                    reqId,
                    timestamp,
                    exc_info=True)
                RpcClientProcessService.sendExceptionRsp(
                    sourceNodeId, rpcId, reqId, timestamp, "failed to decompress message content with LZ4")
                return
        elif contentType == DataExchangeProtocol.ContentType.ROUTINE:
            contentByteString = dep.contentBytes
        else:
            logger.error(
                "处理DEP错误,来源节点ID:%s,RPC类型:%s,RPC ID:%s,请求ID:%s内容类型:%s,时间戳:%s,不支持的报文类型",
                sourceNodeId, rpcType, rpcId, reqId, contentType, timestamp)
            RpcClientProcessService.sendExceptionRsp(sourceNodeId, rpcId,
                                                     reqId, timestamp,
                                                     "unsupported message type")
            return

        if not contentByteString or len(contentByteString) <= 0:
            logger.error(
                "处理DEP错误,来源节点ID:%s,RPC类型:%s,RPC ID:%s,请求ID:%s内容类型:%s,时间戳:%s,报文内容长度错误",
                sourceNodeId, rpcType, rpcId, contentType, timestamp)
            RpcClientProcessService.sendExceptionRsp(sourceNodeId, rpcId,
                                                     reqId, timestamp,
                                                     "invalid message content length")
            return

        if rpcType != DataExchangeProtocol.RpcType.CORE_RPC:
            logger.error(
                "处理DEP错误,来源节点ID:%s,RPC类型:%s,RPC ID:%s,请求ID:%s内容类型:%s,时间戳:%s,未能识别的RPC类型",
                sourceNodeId, rpcType, rpcId, reqId, contentType, timestamp)
            return

        RpcClientProcessService.doCoreRpc(sourceNodeId, rpcId, reqId,
                                          contentByteString, timestamp)
Example 39
 def get_chunk(self, i):
     # Z is presumably the lz4framed module (e.g. `import lz4framed as Z`);
     # each chunk must have been compressed as an independent LZ4 frame
     self.lz_file.seek(self.chunk_starts[i])
     comp = self.lz_file.read(self.chunk_starts[i + 1] - self.chunk_starts[i])
     return Z.decompress(comp)
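
get_chunk() seeks into a file of concatenated LZ4 frames using a precomputed chunk_starts offset table, which only works if every chunk was compressed as an independent frame. A hypothetical writer producing that layout (write_chunks is an assumed name):

import lz4framed

def write_chunks(fp, chunks):
    # compress each chunk as a standalone LZ4 frame and record its offset;
    # the trailing offset marks the end of the last frame
    starts = []
    for chunk in chunks:
        starts.append(fp.tell())
        fp.write(lz4framed.compress(chunk))
    starts.append(fp.tell())
    return starts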
Example 40
 def test_decompress_minimal(self):
     with self.assertRaises(TypeError):
         decompress()
     with self.assertRaises(Lz4FramedNoDataError):
         decompress(b'')
     self.check_compress_short()
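
For reference, the round-trip that most of the test excerpts above exercise reduces to a few lines with the module-level py-lz4framed API (a minimal sketch; the payload is arbitrary):

import lz4framed

data = b'an example payload, repeated ' * 64
frame = lz4framed.compress(data, checksum=True)  # one complete LZ4 frame
assert lz4framed.decompress(frame) == data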