def pack(self, data: list, fl: RawIOBase):
    byteorder = "little" if self.hl_lsb_first else "big"
    ln = len(data) - self.head_num_off
    assert ln >= 0
    fl.write(ln.to_bytes(self.head_len, byteorder, signed=False))
    for x in data:
        self.sub_dt.pack(x, fl)
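For orientation, a matching read side might look like the sketch below; the `unpack` method on the element codec and the meaning of `head_num_off` are assumptions mirrored from the pack method above, not code from the source.

def unpack(self, fl: RawIOBase) -> list:
    # Hypothetical counterpart: read the length header, undo the offset,
    # then decode that many elements with the element codec.
    byteorder = "little" if self.hl_lsb_first else "big"
    ln = int.from_bytes(fl.read(self.head_len), byteorder, signed=False)
    return [self.sub_dt.unpack(fl) for _ in range(ln + self.head_num_off)]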
def writeToFile(self, file: io.RawIOBase) -> None:
    """Write this texture to SFA-format file."""
    header = self._makeHeader()
    imageData, paletteData, colors = encode_image(
        self.image, self.format, None, mipmap_count=self.numMipMaps)
    file.write(header)
    file.write(imageData.getbuffer())
Example #3
def copy_from_websocket(
    f: io.RawIOBase,
    ws: lomond.WebSocket,
    ready_sem: threading.Semaphore,
    cert_file: Optional[str],
    cert_name: Optional[str],
) -> None:
    try:
        for event in ws.connect(
            ping_rate=0,
            session_class=lambda socket: CustomSSLWebsocketSession(socket, cert_file, cert_name),
        ):
            if isinstance(event, lomond.events.Binary):
                f.write(event.data)
            elif isinstance(event, lomond.events.Ready):
                ready_sem.release()
            elif isinstance(
                event,
                (lomond.events.ConnectFail, lomond.events.Rejected, lomond.events.ProtocolError),
            ):
                raise Exception("Connection failed: {}".format(event))
            elif isinstance(event, (lomond.events.Closing, lomond.events.Disconnected)):
                break
    finally:
        f.close()
Example #4
    def dump_detection_output(self, idx: Union[int, tuple],
                              detections: Target3DArray,
                              fout: RawIOBase) -> None:
        '''
        :param idx: auxiliary information for output, contains the context name and timestamp
        :param detections: detection result
        :param fout: output file-like object
        '''
        try:
            from waymo_open_dataset import label_pb2
            from waymo_open_dataset.protos import metrics_pb2
        except ImportError:
            _logger.error(
                "Cannot find waymo_open_dataset, install the package at "
                "https://github.com/waymo-research/waymo-open-dataset, output will be skipped now."
            )
            return

        label_map = {
            WaymoObjectClass.Unknown: label_pb2.Label.TYPE_UNKNOWN,
            WaymoObjectClass.Vehicle: label_pb2.Label.TYPE_VEHICLE,
            WaymoObjectClass.Pedestrian: label_pb2.Label.TYPE_PEDESTRIAN,
            WaymoObjectClass.Sign: label_pb2.Label.TYPE_SIGN,
            WaymoObjectClass.Cyclist: label_pb2.Label.TYPE_CYCLIST
        }

        waymo_array = metrics_pb2.Objects()
        for target in detections:
            waymo_target = metrics_pb2.Object()

            # convert box parameters
            box = label_pb2.Label.Box()
            box.center_x = target.position[0]
            box.center_y = target.position[1]
            box.center_z = target.position[2]
            box.length = target.dimension[0]
            box.width = target.dimension[1]
            box.height = target.dimension[2]
            box.heading = target.yaw
            waymo_target.object.box.CopyFrom(box)

            # convert label
            waymo_target.object.type = label_map[target.tag_top]
            waymo_target.score = target.tag_top_score

            # the name of the sequence is the context
            waymo_target.context_name = idx[0]
            waymo_target.frame_timestamp_micros = int(
                self.timestamp(idx) * 1e6)
            waymo_array.objects.append(waymo_target)

        bindata = waymo_array.SerializeToString()
        if isinstance(fout, (str, Path)):
            Path(fout).write_bytes(bindata)
        else:
            fout.write(bindata)
Example #5
def write_header(outp: io.RawIOBase, header: Dict) -> None:
    """ Writes a header to the file.  The header must be a python dict
        that is convertable to JSON.

        After calling this, you can just write the events to the file.
    """
    dumped = json.dumps(header)
    encoded = dumped.encode('utf-8')
    outp.write(encoded)
    outp.write(b'\x00')
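A minimal usage sketch for write_header; the newline-delimited JSON events written after the NUL terminator are an assumption for illustration, since the event format is not shown above.

import io
import json

buf = io.BytesIO()
write_header(buf, {"version": 1, "source": "demo"})
# Anything written after the NUL terminator is the event stream;
# the exact event encoding is assumed here.
buf.write(json.dumps({"event": "started"}).encode('utf-8'))
buf.write(b"\n")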
def pack(self, data: dict, fl: RawIOBase):
    byteorder = "little" if self.hl_lsb_first else "big"
    ln = len(data) - self.head_num_off
    assert ln >= 0
    fl.write(ln.to_bytes(self.head_len, byteorder, signed=False))
    key_t = self.key_t
    val_t = self.val_t
    for key in data:
        key_t.pack(key, fl)
        val_t.pack(data[key], fl)
Example #7
    def _read_block_into(in_stream: io.RawIOBase, out_stream: io.RawIOBase):
        MAGIC = 0x00000010

        HEADER_LENGTH = 0x10
        MAGIC_OFFSET = 0x00
        SOURCE_SIZE_OFFSET = 0x08
        RAW_SIZE_OFFSET = 0x0C

        BLOCK_PADDING = 0x80

        COMPRESSION_THRESHOLD = 0x7D00

        # Block:
        # 10h   Header
        # *     Data
        #
        # Header:
        # 4h    Magic
        # 4h    Unknown / Zero
        # 4h    Size in source
        # 4h    Raw size
        # -> If size in source >= 7D00h then data is uncompressed

        header = in_stream.read(HEADER_LENGTH)
        if len(header) != HEADER_LENGTH:
            raise EOFError

        magic_check, = struct.unpack_from('<l', header, MAGIC_OFFSET)
        source_size, = struct.unpack_from('<l', header, SOURCE_SIZE_OFFSET)
        raw_size, = struct.unpack_from('<l', header, RAW_SIZE_OFFSET)

        if magic_check != MAGIC:
            raise NotImplementedError("Magic number not present")

        is_compressed = source_size < COMPRESSION_THRESHOLD

        block_size = source_size if is_compressed else raw_size

        if is_compressed and (block_size + HEADER_LENGTH) % BLOCK_PADDING != 0:
            block_size += BLOCK_PADDING - (block_size + HEADER_LENGTH) % BLOCK_PADDING

        buffer = in_stream.read(block_size)
        if len(buffer) != block_size:
            raise EOFError

        if is_compressed:
            current_position = out_stream.tell()
            if raw_size != out_stream.write(zlib.decompress(buffer, -15)):
                raise RuntimeError(
                    "Inflated block does not match indicated size")
        else:
            out_stream.write(buffer)
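Because _read_block_into raises EOFError once no complete header remains, a caller could drain a whole stream of blocks with a loop like this sketch (written here as if the helper were module-level, which is an assumption):

def _read_all_blocks(in_stream: io.RawIOBase, out_stream: io.RawIOBase) -> None:
    # Keep decoding blocks until the input runs out of complete headers,
    # which _read_block_into signals by raising EOFError.
    while True:
        try:
            _read_block_into(in_stream, out_stream)
        except EOFError:
            break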
Example #8
def download(self, remote_path: str, writable: io.RawIOBase) -> int:
    encoded = self.exploit('base64 {}'.format(remote_path))
    contents = base64.b64decode(encoded)
    try:
        return writable.write(contents)
    finally:
        writable.flush()
Example #9
def wait_until_ready(self, channel: RawIOBase, timeout=60):
    """
    sends ' ' (space) and waits for the corresponding ACK message. Once we have 10 of these in a row we can be fairly
    certain the device is ready for ymodem.
    :param channel:
    :param timeout:
    :return:
    """
    success_count = 0
    while success_count < 10:
        while channel.readline():       # flush any existing data
            success_count = 0
        channel.write(b' ')
        result = channel.read()
        if result and result[0] == LightYModemProtocol.ack:
            success_count += 1
Example #10
    def wait_until_ready(self, channel: RawIOBase, timeout=60):
        """
        sends ' ' (space) and waits for the corresponding ACK message. Once we have 2 of these in a row we can be fairly
        certain the device is ready for ymodem.
        :param channel:
        :param timeout:
        :return:
        """
        success_count = 0
        while channel.readline():  # flush any existing data
            success_count = 0

        while success_count < 2:
            channel.write(b' ')
            result = channel.read()
            if result and result[0] == LightYModemProtocol.ack:
                success_count += 1
Example #11
    def compute(self, stream_in: RawIOBase, stream_out: RawIOBase = None):
        """Compute and return the hash for the given stream.

        The data is read from stream_in until EOF, given to the hasher, and
        written unmodified to the stream_out (unless it is None).

        :param stream_in: input stream.
        :type stream_in: io.RawIOBase

        :param stream_out: output stream.
        :type stream_out: io.RawIOBase
        """
        self._init_hashers()
        read_bytes = self.chunk_size
        while read_bytes == self.chunk_size:
            data = stream_in.read(self.chunk_size)
            read_bytes = len(data)
            for hasher in self._hashers.values():
                hasher.update(data)
            if stream_out is not None:
                stream_out.write(data)
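The same read-until-short-chunk loop can be reproduced standalone with hashlib; this sketch only illustrates the pattern and is not the class above:

import hashlib
import io

def sha256_of_stream(stream_in: io.RawIOBase, chunk_size: int = 65536) -> str:
    # Same loop shape as compute() above: keep reading full chunks until a
    # short (or empty) read signals the end of the input.
    hasher = hashlib.sha256()
    read_bytes = chunk_size
    while read_bytes == chunk_size:
        data = stream_in.read(chunk_size)
        read_bytes = len(data)
        hasher.update(data)
    return hasher.hexdigest()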
def pack_str_len_fl(fl: RawIOBase, s: bytes, head_len: int):
    fl.write(len(s).to_bytes(head_len, "little"))
    fl.write(s)
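A hypothetical reader for the same length-prefixed layout, assuming the little-endian header written by pack_str_len_fl:

def unpack_str_len_fl(fl: RawIOBase, head_len: int) -> bytes:
    # Read the little-endian length header, then exactly that many bytes.
    ln = int.from_bytes(fl.read(head_len), "little")
    return fl.read(ln)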
def write_string_to_stream(stream: io.RawIOBase, s: str) -> None:
    # A raw binary stream expects bytes, so encode the text before writing.
    stream.write(s.encode('utf-8'))
    stream.flush()
Example #14
def __call__(self, source: RawIOBase, destination: RawIOBase):
    for element in source:
        destination.write(self._clearer(element))
Example #15
def bulk_copy(read_from: io.RawIOBase, write_to: io.RawIOBase):
    while True:
        chunk = read_from.read(BUFFER_SIZE)
        if not chunk:
            break
        write_to.write(chunk)
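A quick in-memory check of bulk_copy; BUFFER_SIZE is assumed to be a module-level constant defined elsewhere in the original source:

import io

BUFFER_SIZE = 64 * 1024  # assumed value; the real constant lives elsewhere in the module

src = io.BytesIO(b"example payload " * 64)
dst = io.BytesIO()
bulk_copy(src, dst)
assert dst.getvalue() == src.getvalue()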
def pack_list_str_fl(fl: RawIOBase, lst_str: List[bytes], head_len: int,
                     str_head_len: int):
    fl.write(len(lst_str).to_bytes(head_len, "little"))
    for s in lst_str:
        fl.write(len(s).to_bytes(str_head_len, "little"))
        fl.write(s)
def pack(self, data: int, fl: RawIOBase):
    byteorder = "little" if self.lsb_first else "big"
    byts = (data - self.num_off).to_bytes(
        self.n_bytes, byteorder, signed=self.signed)
    fl.write(byts)
Example #18
def serialize(stream: io.RawIOBase, message: Message):
    stream.write(message.SerializeToString())
Example #19
def cook(
    ctx,
    config_file: str,
    swhid: CoreSWHID,
    outfile: io.RawIOBase,
    bundle_type: Optional[str],
):
    """
    Runs a vault cooker for a single object (identified by a SWHID),
    and outputs it to the given file.
    """
    from swh.core import config
    from swh.model.swhids import ObjectType
    from swh.objstorage.exc import ObjNotFoundError
    from swh.objstorage.factory import get_objstorage
    from swh.storage import get_storage

    from .cookers import get_cooker_cls
    from .in_memory_backend import InMemoryVaultBackend

    conf = config.read(config_file)

    try:
        from swh.graph.client import RemoteGraphClient  # optional dependency

        graph = RemoteGraphClient(**conf["graph"]) if conf.get("graph") else None
    except ModuleNotFoundError:
        if conf.get("graph"):
            raise EnvironmentError(
                "Graph configuration required but module is not installed."
            )
        else:
            graph = None

    backend = InMemoryVaultBackend()

    if bundle_type is None:
        if swhid.object_type in (
            ObjectType.RELEASE,
            ObjectType.SNAPSHOT,
        ):
            bundle_type = "git_bare"
        elif swhid.object_type in (ObjectType.DIRECTORY,):
            bundle_type = "flat"
        else:
            raise click.ClickException(
                "No default bundle type for this kind of object, "
                "use --bundle-type to choose one"
            )

    try:
        cooker_cls = get_cooker_cls(bundle_type, swhid.object_type)
    except ValueError as e:
        raise click.ClickException(*e.args)

    storage = get_storage(**conf["storage"])
    objstorage = get_objstorage(**conf["objstorage"]) if "objstorage" in conf else None
    cooker = cooker_cls(
        swhid=swhid,
        backend=backend,
        storage=storage,
        graph=graph,
        objstorage=objstorage,
        max_bundle_size=None,  # No need for a size limit, we are running locally
    )
    cooker.cook()

    try:
        bundle = backend.fetch(cooker_cls.BUNDLE_TYPE, swhid)
    except ObjNotFoundError:
        bundle = None
    if bundle is None:
        import pdb

        pdb.set_trace()
        raise click.ClickException("Cooker did not write a bundle to the backend.")
    outfile.write(bundle)