Example no. 1
 def execute(self, env: dict, stdin: IOBase, stdout: IOBase, stderr: IOBase) -> None:
     if not self.args:
         stdout.write(self._count(stdin))
         return
     for arg in self.args:
         with open(arg, "r") as f:
             stdout.write(self._count(f))
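The `_count` helper called above is not part of the snippet. A minimal sketch of what it might look like, assuming the class mimics `wc` and reports line, word, and character counts as a single formatted string (name and output format are assumptions):

 def _count(self, f: IOBase) -> str:
     # Hypothetical helper: count lines, words and characters in a text
     # stream and return them as one formatted line, similar to `wc`.
     text = f.read()
     lines = text.count("\n")
     words = len(text.split())
     return f"{lines} {words} {len(text)}\n"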
Example no. 2
def dump(obj: Any, fp: io.IOBase, cls=None, **kwargs):
    if cls is None:
        cls = JsonlEncoder
    text = cls(**kwargs).encode(obj)
    if isinstance(fp, (io.RawIOBase, io.BufferedIOBase)):
        fp.write(text.encode())
    else:
        fp.write(text)
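A hedged usage sketch for the `dump` above: the isinstance check lets the same call work for both text and binary sinks (the sample object and file name are placeholders, and `JsonlEncoder` is assumed to come from the snippet's module):

import io

text_buf = io.StringIO()
dump({"a": 1}, text_buf)                 # text stream: the str is written as-is

with open("out.jsonl", "wb") as fp:      # binary stream: the str is encoded first
    dump({"a": 1}, fp)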
Example no. 3
def write_binary128(value: bytes, writer: io.IOBase) -> None:
    """
    Writes a binary128 floating-point value to the writer. The value is
    encoded as an IEEE 754-2008 binary128 in big-endian byte order.
    """
    if len(value) != 16:
        raise ValueError('Invalid binary128.')
    writer.write(value)
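Usage sketch: Python has no native binary128 type, so the 16-byte payload is assumed to be produced elsewhere. The constant below is the big-endian binary128 encoding of 1.0 (sign 0, biased exponent 0x3FFF, zero fraction):

import io

buf = io.BytesIO()
write_binary128(b"\x3f\xff" + b"\x00" * 14, buf)  # 1.0 as IEEE 754 binary128, big endian
assert buf.getvalue() == b"\x3f\xff" + b"\x00" * 14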
Example no. 4
    def dumpf(self, obj, fp: IOBase, options: dict):
        'Dump an object into the file-like object.'

        assert fp.writable()
        if isinstance(fp, _TextIOBase):
            fp.write(self.dumps(obj, options))
        else:
            fp.write(self.dumpb(obj, options))
Example no. 5
 def dump(self, f: io.IOBase):
     """
     Dump data to json lines format. Each json object is formatted as `{key: [value1, value2, ...]}`.
     
     Args:
         f (io.IOBase): IO handler.
     """
     for k, ss in self:
         obj = {k: list(ss)}
         f.write(json.dumps(obj) + '\n')
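Reading such a file back is symmetric; a short sketch, assuming one `{key: [values...]}` object per line and a placeholder file name:

import json

with open("data.jsonl", "r") as f:
    for line in f:
        obj = json.loads(line)
        (key, values), = obj.items()   # exactly one key per line
        print(key, values)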
Example no. 6
 def retrieve(self, key: str, stream: IOBase):
     try:
         blob_client = self.client.get_blob_client(key)
         download_stream = blob_client.download_blob()
         stream.write(download_stream.readall())
     except Exception:
         logging.error(
             "ERROR: Error occurred while trying to perform retrieve operation for key %s",
             key)
         raise  # re-raise with the original traceback
Example no. 7
 def execute(self, env: dict, stdin: IOBase, stdout: IOBase,
             stderr: IOBase) -> None:
     if not self.args:
         for line in stdin:
             stdout.write(line)
         return
     for arg in self.args:
         with open(arg, "r") as f:
             for line in f:
                 stdout.write(line)
Example no. 8
def generate_code(atcoder_client: AtCoderClient,
                  problem_url: str,
                  config: Config,
                  output_file: IOBase):
    problem = get_problem_from_url(problem_url)
    template_code_path = config.code_style_config.template_file
    lang = config.code_style_config.lang

    def emit_error(text):
        logging.error(with_color(text, Fore.RED))

    def emit_warning(text):
        logging.warning(text)

    def emit_info(text):
        logging.info(text)

    emit_info('{} is used for template'.format(template_code_path))

    # Fetch problem data from the statement
    try:
        content = atcoder_client.download_problem_content(problem)
    except InputFormatDetectionError as e:
        emit_error("Failed to download input format.")
        raise e
    except SampleDetectionError as e:
        emit_error("Failed to download samples.")
        raise e

    try:
        prediction_result = predict_format(content)
        emit_info(
            with_color("Format prediction succeeded", Fore.LIGHTGREEN_EX))
    except (NoPredictionResultError, MultiplePredictionResultsError) as e:
        prediction_result = FormatPredictionResult.empty_result()
        if isinstance(e, NoPredictionResultError):
            msg = "No prediction -- Failed to understand the input format"
        else:
            msg = "Too many prediction -- Failed to understand the input format"
        emit_warning(with_color(msg, Fore.LIGHTRED_EX))

    constants = predict_constants(content.original_html)
    code_generator = config.code_style_config.code_generator
    with open(template_code_path, "r") as f:
        template = f.read()

    output_splitter()

    output_file.write(code_generator(
        CodeGenArgs(
            template,
            prediction_result.format,
            constants,
            config.code_style_config
        )))
Example no. 9
    def _write_xml_output_to_file(self, file_io: io.IOBase):
        """Write the dump from self._dump() to file_io.

        Args:
          file_io: File to write to.

        """
        output_str = self._dump()
        ExtendedCommonRoadFileWriter.check_validity_of_commonroad_file(
            output_str)
        file_io.write(output_str)
Example no. 10
def write_cooccurrences(cooccurrences: List[ProductsCoOccurrence],
                        output_file: io.IOBase):
    '''
    Write transactions to file in the format:
    <item set size (N)>, <co-occurrence frequency>, <item 1 id>, <item 2 id>, ..., <item N id>
    '''
    for cooccurrence in cooccurrences:
        output_file.write('{:d}, {:d}, {:s}\n'.format(
            len(cooccurrence.products_set),
            cooccurrence.co_occurrence_frequency,
            ', '.join(cooccurrence.products_set)))
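A hedged usage sketch; the real ProductsCoOccurrence type is not shown, so a minimal stand-in with the two attributes used above is assumed:

import io
from typing import NamedTuple, Tuple

class ProductsCoOccurrence(NamedTuple):
    # Hypothetical minimal shape: only the fields the writer touches.
    products_set: Tuple[str, ...]
    co_occurrence_frequency: int

buf = io.StringIO()
write_cooccurrences([ProductsCoOccurrence(("A12", "B34"), 7)], buf)
print(buf.getvalue())   # "2, 7, A12, B34\n"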
Example no. 11
 def report(
     self,
     fout: IOBase,
     ct_list: List["CellType"],
     space: int = 4,
 ) -> None:
     """ Write putative cell type reports to fout.
     """
     for ct in ct_list:
         fout.write(" " * space + str(ct) + "\n")
         if ct.subtypes is not None:
             self.report(fout, ct.subtypes, space + 4)
Example no. 12
def generate_code(atcoder_client: AtCoderClient, problem_url: str,
                  config: Config, output_file: IOBase):
    problem = get_problem_from_url(problem_url)
    template_code_path = config.code_style_config.template_file
    lang = config.code_style_config.lang

    def emit_error(text):
        logging.error(with_color(text, Fore.RED))

    def emit_warning(text):
        logging.warning(text)

    def emit_info(text):
        logging.info(text)

    emit_info('{} is used for template'.format(template_code_path))

    # Fetch problem data from the statement
    try:
        content = atcoder_client.download_problem_content(problem)
    except InputFormatDetectionError as e:
        emit_error("Failed to download input format.")
        raise e
    except SampleDetectionError as e:
        emit_error("Failed to download samples.")
        raise e

    try:
        prediction_result = predict_format(content)
        emit_info(with_color("Format prediction succeeded",
                             Fore.LIGHTGREEN_EX))
    except (NoPredictionResultError, MultiplePredictionResultsError) as e:
        prediction_result = FormatPredictionResult.empty_result()
        if isinstance(e, NoPredictionResultError):
            msg = "No prediction -- Failed to understand the input format"
        else:
            msg = "Too many prediction -- Failed to understand the input format"
        emit_warning(with_color(msg, Fore.LIGHTRED_EX))

    constants = predict_constants(content.original_html)
    code_generator = config.code_style_config.code_generator
    with open(template_code_path, "r") as f:
        template = f.read()

    output_splitter()

    output_file.write(
        code_generator(
            CodeGenArgs(template, prediction_result.format, constants,
                        config.code_style_config)))
Example no. 13
 def _log_fit_params(self, f: IOBase):
     f.write("Fit parameters\n")
     f.write("Epochs: {}\n".format(self.epochs))
     f.write("Batch size: {}\n".format(self.batch_size))
     f.write(
         "_________________________________________________________________\n\n"
     )
Example no. 14
 def _log_directories(self, f: IOBase):
     f.write("Dataset directory: {}\n".format(self.dataset_dir))
     f.write("dtype: {}\n".format(self.dtype))
     f.write("Checkpoints directory: {}\n".format(self.checkpoints_dir))
     f.write(
         "_________________________________________________________________\n\n"
     )
Example no. 15
    def generate(self, result: scanner.ScannerResult,
                 output: io.IOBase) -> None:
        for decl in filter(
                lambda decl: decl.type.args is not None and any(
                    map(
                        lambda param:
                        (not isinstance(param, EllipsisParam) and utils.
                         is_function_pointer_type(param.type)),
                        decl.type.args.params,
                    )),
                result.declarations,
        ):
            output.write(self._generateTypeDefForDecl(decl))
        for defin in filter(
                lambda defin: defin.decl.type.args is not None and any(
                    map(
                        lambda param:
                        (not isinstance(param, EllipsisParam) and utils.
                         is_function_pointer_type(param.type)),
                        defin.decl.type.args.params,
                    )),
                result.definitions,
        ):
            output.write(self._generateTypeDefForDecl(defin.decl))

        for decl in result.declarations:
            output.write(self._generateFakeForDecl(decl))

        for definition in result.definitions:
            output.write(self._generateBypassForFuncDef(definition))
            output.write(self._generateFakeForDecl(definition.decl))
Example no. 16
def download(url: str, stream: io.IOBase, timeout: int, verify: bool):
    """
    Downloads the content of URL into stream.
    This method only supports HTTP and HTTPS URLs.
    The implementation is safe to use for large contents.
    :param url: URL to download.
    :param stream: stream to write content (e.g. file or I/O buffer).
    :param timeout: timeout until server sends data (not the overall download time).
    :param verify: verify server's SSL certificate.
    """
    with requests.get(url, timeout=timeout, verify=verify, stream=True) as r:
        r.raise_for_status()
        for chunk in r.iter_content(chunk_size=1024 * 1024):
            stream.write(chunk)
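Usage sketch for `download`: the target stream should be opened in binary mode, since iter_content() yields bytes (URL and file name are placeholders):

with open("archive.tar.gz", "wb") as f:
    download("https://example.com/archive.tar.gz", f, timeout=30, verify=True)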
Example no. 17
def write_int(value: int, size: int, signed: bool, writer: io.IOBase):
    """
    Writes an integer into the writer. It is always encoded as a big endian
    value.

    Parameters:
    - `value`: The value to be written;
    - `size`: The size of the value in bytes;
    - `signed`: A flag that determines if the encoding is signed or not;
    - `writer`: The writer;

    It may raise `OverflowError` if the `value` cannot be represented using
    the specified size.
    """
    writer.write(value.to_bytes(size, byteorder='big', signed=signed))
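A short usage sketch with an in-memory writer:

import io

buf = io.BytesIO()
write_int(0x1234, size=4, signed=False, writer=buf)
assert buf.getvalue() == b"\x00\x00\x12\x34"   # big endian, 4 bytes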
Example no. 18
    def _serialize(self, out_stream: io.IOBase, data: list):
        if not data:
            return

        encoding = "utf-8"
        delimeter = '\t'

        out_stream.write("value{d}signer{d}sign\n".format(d=delimeter).encode(encoding))

        for row in data:
            out_stream.write("{value}{d}{signer}{d}{sign}\n".format(
                d=delimeter,
                value=json.dumps(row.data),
                signer=str(base64.b64encode(row.signer), encoding='utf-8') if row.signer else "",
                sign=str(base64.b64encode(row.sign), encoding='utf-8') if row.sign else ""
            ).encode(encoding))
Example no. 19
File: bsv.py, Project: slham/bsv
def dump(f: io.IOBase, xss: List[List[Union[bytes, int, float]]]) -> None:
    buffer = io.BytesIO()
    for xs in xss:
        # write max index
        assert _sizeof[_uint16] == buffer.write(
            struct.pack(_uint16,
                        len(xs) - 1))
        # write types
        for x in xs:
            if isinstance(x, bytes):
                x = _BSV_CHAR
            elif isinstance(x, int):
                x = _BSV_INT
            elif isinstance(x, float):
                x = _BSV_FLOAT
            else:
                assert False
            assert _sizeof[_uint8] == buffer.write(struct.pack(_uint8, x))
        # write sizes
        for x in xs:
            if isinstance(x, bytes):
                x = len(x)
            elif isinstance(x, int):
                x = _sizeof[_bsv_int]
            elif isinstance(x, float):
                x = _sizeof[_bsv_float]
            else:
                assert False
            assert _sizeof[_uint16] == buffer.write(struct.pack(_uint16, x))
        # write vals
        for x in xs:
            if isinstance(x, bytes):
                pass
            elif isinstance(x, int):
                x = struct.pack(_bsv_int, x)
            elif isinstance(x, float):
                x = struct.pack(_bsv_float, x)
            else:
                assert False
            assert len(x) == buffer.write(x)
            assert 1 == buffer.write(b'\0')
    assert _sizeof[_int32] == f.write(
        struct.pack(_int32, len(buffer.getvalue())))
    assert len(
        buffer.getvalue()
    ) < _buffer_size, f"you can't dump more than {_buffer_size} bytes at a time"
    assert len(buffer.getvalue()) == f.write(buffer.getvalue())
Example no. 20
def write_chunk(bs: io.IOBase, magic: bytes, body: bytes):
    body_size = len(body)
    body_size_padding = get_padding_size(body_size)
    body_size += body_size_padding

    bs.write(int.to_bytes(body_size, length=4, byteorder='little'))
    bs.write(magic)
    bs.write(body)
    for _ in range(body_size_padding):
        bs.write(b' ')
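The `get_padding_size` helper is not shown. A plausible sketch, assuming the chunk body is padded to a 4-byte boundary (the real alignment may differ), followed by a hypothetical call:

import io

def get_padding_size(size: int, alignment: int = 4) -> int:
    # Bytes needed to round `size` up to the next multiple of `alignment`.
    return (alignment - size % alignment) % alignment

bs = io.BytesIO()
write_chunk(bs, b"JSON", b"{}")   # 2-byte body is padded with two spaces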
Example no. 21
def _write(f: io.IOBase, param: str):
    try:
        write_str = param.encode()
        written_bytes_len = f.write(write_str)
    except OSError:
        print("cloud not seek file.")
        return os.EX_IOERR
    else:
        print(f"wrote {written_bytes_len} bytes.")
Example no. 22
def streamcopy(from_: IOBase, to_: IOBase, size=-1, chunk_size=163840):
    '''Copies content from one buffer to the other, chunk by chunk.

    NOTE: Neither from_ nor to_ has to be an IOBase object; any object that provides read() / write() will do.
    
    Args:
        from_ (IOBase): Stream to copy from
        to_ (IOBase): Stream to copy to
        size (int, optional): Length to be copied. Defaults to -1.
        chunk_size (int, optional): Size of chunk. Defaults to 163840.

    Returns:
        int : copied length
    '''
    if not size: return 0
    size, copied = int(size), 0
    if size < 0:
        # read until EOF
        def copychunk():
            chunk = from_.read(chunk_size)
            if not chunk: return 0
            to_.write(chunk)
            return len(chunk)

        while (True):
            copied_ = copychunk()
            if not copied_: break
            copied += copied_
    else:
        # read `size` of bytes
        for offset in range(0, size, chunk_size):
            remaining = size - offset
            chunk = from_.read(
                remaining if remaining < chunk_size else chunk_size)
            if not chunk: break
            copied += len(chunk)
            to_.write(chunk)
    return copied
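Usage sketch for `streamcopy` with in-memory buffers:

import io

src = io.BytesIO(b"x" * 500_000)
dst = io.BytesIO()
copied = streamcopy(src, dst, size=-1, chunk_size=64 * 1024)   # copy until EOF
assert copied == 500_000 and dst.getvalue() == src.getvalue()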
Example no. 23
def pretty_print(
    xml: Union[str, List[Union[Element, str]], Element],
    file: IOBase = sys.stdout,
):
    """Prints beautiful XML-Code

    This function gets a string containing the xml, an object of
    List[lxml.etree.Element] or directly a lxml element.

    Print it with good readable format.

    Arguments:
        xml (str, List[lxml.etree.Element] or lxml.etree.Element):
            xml as string,
            List[lxml.etree.Element] or directly a lxml element.
        file:
            A IOBase type. Can be a File, StringIO, ...

    """
    if not isinstance(file, IOBase):
        raise TypeError(
            f"Type needs to be from IOBase, not {type(file)}.") from None

    if isinstance(xml, list):
        for item in xml:
            if isxmlelement(item):
                file.write(
                    xmltostring(item, pretty_print=True).decode(
                        sys.getdefaultencoding()) + "\n")
            else:
                file.write(item + "\n")
    elif isxmlelement(xml):
        file.write(
            xmltostring(xml, pretty_print=True).decode(
                sys.getdefaultencoding()) + "\n")
    elif isinstance(xml, str):
        tree = secET.fromstring(xml)
        file.write(
            xmltostring(tree, pretty_print=True).decode(
                sys.getdefaultencoding()) + "\n")
    else:
        raise InvalidArgumentType(function=pretty_print.__name__,
                                  argument="xml",
                                  arg_type="")
Example no. 24
    def _log_optimizer(self, f: IOBase):
        opt_name = self.optimizer.__class__.__name__
        f.writelines("Optimizer: {}\n".format(opt_name))
        if opt_name == "Adam":
            f.writelines([
                "lr: {}\n".format(K.eval(self.optimizer.lr)),
                "beta_1: {}\n".format(K.eval(self.optimizer.beta_1)),
                "beta_2: {}\n".format(K.eval(self.optimizer.beta_2)),
                "epsilon: {}\n".format(self.optimizer.epsilon),
                "decay: {}\n".format(K.eval(self.optimizer.decay))
            ])
        elif opt_name == "SGD":
            f.writelines([
                "lr: {}\n".format(K.eval(self.optimizer.lr)),
                "momentum: {}\n".format(K.eval(self.optimizer.momentum)),
                "decay: {}\n".format(K.eval(self.optimizer.decay)),
                "initial_decay: {}\n".format(self.optimizer.initial_decay),
                "nesterov: {}\n".format(self.optimizer.nesterov)
            ])

        f.write(
            "_________________________________________________________________\n\n"
        )
Example no. 25
    def _log_noise(self, f: IOBase):
        if isinstance(self.sigma, dict):
            # Attribute sigma should not be a dict unless using the FuelCell dataset
            assert list(self.sigma.keys()) == ['A', 'B', 'C', 'D', 'E']

        f.write("Noise2Noise: {}\n".format(self.noise2noise))
        f.write("Noise level: {}\n".format(self.sigma))
        f.write(
            "_________________________________________________________________\n\n"
        )
Example no. 26
def collapse(barcode_distance: int,
             barcode_mask: str,
             verbose: bool = False,
             inStream: io.IOBase = stdin,
             outStream: io.IOBase = stdout,
             logStream: io.IOBase = stderr):
    """
    Collapse reads sharing the same start position, forward/reverse, and barcode into a single family record.
    :param barcode_distance: The maximum number of differences allowed to be considered the same family
    :param barcode_mask: String mask to denote the positions within a barcode to include in the family name
    :param verbose: Provide verbose output while processing
    :param inStream: A file or stream handle to read input data, defaults to stdin
    :param outStream: A file or stream handle to write output data, defaults to stdout
    :param logStream: The stream to write log output to, defaults to stderr
    :return: None
    """
    if verbose:
        logStream.write("\n")  # This will be deleted by the next write

    inFile = AlignmentFile(inStream)
    outFile = AlignmentFile(
        outStream,
        "wbu" if hasattr(outStream, 'name') else "wb",
        template=inFile)  # compress the data if it is a file
    families = Families(
        barcode_mask * 2, barcode_distance
    )  # reads have mate barcode concatenated so double up mask # TODO multiply distance by 2?
    count = 0
    fcount = 0
    lastFCount = 0
    lastFamilyPos = 0
    for family in CoordinateSortedInputFamilyIterator(
            inFile, families):  #type: FamilyRecord
        outFile.write(family.toPysam())
        count += len(family)
        fcount += 1
        if verbose and lastFamilyPos != family.pos:
            logStream.write(
                "\x1b[F\x1b[2K\r{file}\tRecords processed: {count}\t"
                "Positions buffered: {pos}\tFamily records: {fcount}\t"
                "Families at last position: {lastFCount}\n".format(
                    file=inStream.name
                    if hasattr(inStream, 'name') else 'Streaming',
                    count=count,
                    pos=len(families),
                    fcount=fcount,
                    lastFCount=fcount - lastFCount))
            lastFamilyPos = family.pos
            lastFCount = fcount

    if verbose:
        logStream.write("Completed.\n")
    inFile.close()
    outFile.close()
    outStream.close()
Example no. 27
def pretty_print(xml, file: IOBase = sys.stdout):
    """Prints beautiful XML-Code

    This function gets a string containing the xml, an object of
    List[lxml.etree.Element] or directly a lxml element.

    Print it with good readable format.

    Arguments:
        xml (str, List[lxml.etree.Element] or lxml.etree.Element):
            xml as string,
            List[lxml.etree.Element] or directly a lxml element.
        file:
            A IOBase type. Can be a File, StringIO, ...

    """
    if not isinstance(file, IOBase):
        raise TypeError(
            'Type needs to be from IOBase, not {}.'.format(type(file))
        ) from None

    if isinstance(xml, list):
        for item in xml:
            if etree.iselement(item):
                file.write(
                    etree.tostring(item, pretty_print=True).decode(
                        sys.getdefaultencoding()
                    ) + '\n'
                )
            else:
                file.write(item + '\n')
    elif etree.iselement(xml):
        file.write(
            etree.tostring(xml, pretty_print=True).decode(
                sys.getdefaultencoding()
            ) + '\n'
        )
    elif isinstance(xml, str):
        tree = secET.fromstring(xml)
        file.write(
            etree.tostring(tree, pretty_print=True).decode(
                sys.getdefaultencoding()
            ) + '\n'
        )
Example no. 28
 def dump(self, obj: any, stream: IOBase):
     if not isinstance(obj, str):
         raise UnserializableTypeError('Only str type is supported '
                                       'in StringSerializer!')
     return stream.write(obj.encode('utf-8'))
Example no. 29
 def dump(self, obj: any, stream: IOBase):
     if not isinstance(obj, bytes):
         raise UnserializableTypeError('Only bytes type is supported '
                                       'in BinarySerializer!')
     return stream.write(obj)
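A hypothetical usage of the two serializers above; the class names are taken from the error messages and both are assumed to accept any writable binary stream:

import io

buf = io.BytesIO()
StringSerializer().dump("hello", buf)       # writes b"hello"
BinarySerializer().dump(b"\x00\x01", buf)   # appends the raw bytes
print(buf.getvalue())                       # b"hello\x00\x01"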
Example no. 30
    def bench_accuracy(
        self,
        file_output: IOBase,
        tester: AccuracyRussianWord,
        image: np.ndarray,
        depth: np.ndarray,
        filename: str,
    ) -> bool:
        """
        Runs all text detection accuracy benchmarks on an image. Outputs results to csv file.

        Parameters
        ----------
        file_output: IO
            File stream to output results to.
        tester: AccuracyRussianWord
            Benchmark class to use. Initialized elsewhere to be static between image runs.
        image: np.ndarray
            The color image from the frame.
        depth: np.ndarray
            The depth image from the frame.
        filename: str
            The name of the image file.

        Returns
        -------
        bool - True if test failed at some point. False if test did not crash and ran to completion.
        """
        crash = False

        ## Run tests on the image ##

        bboxes = []
        # detect_russian_word
        if not crash:
            try:
                bboxes = tester.accuracy_detector(color_image=image,
                                                  depth_image=depth)

                # output BoundingBoxes to text file
                bbstr = [repr(b) + "\n" for b in bboxes]
                bbstr = "".join(bbstr)

                output_file_name = os.path.splitext(filename)[0] + ".txt"
                output_file_path = os.path.join(self.OUTPUT_RESULTS_DIR,
                                                output_file_name)

                if not bbstr:
                    bbstr = None

                with open(output_file_path, mode="w") as text_data_output:
                    text_data_output.write("BoundingBoxes: " + (
                        str(bbstr) if bbstr else str(None)) + "\n")
                    text_data_output.write("Tesseract Data:" + "\n")
                    for key, val in tester.text_detector.tessdata.items():
                        text_data_output.write("\t" + str(key) + ": " +
                                               str(val) + "\n")

                file_output.write("Found")
            except Exception:
                file_output.write("Crash")
                crash = True
        file_output.write(",")

        if not crash and self.plot_text:
            plot_box(
                boxes=bboxes,
                image=image,
                waittime=0,
                save_img=self.plot_text,
                path=os.path.join(self.OUTPUT_IMGS_DIR, filename),
                quiet_output=True,
            )

        return crash
Example no. 31
def write(stream: IOBase, blob: bytearray) -> None:
    stream.write(blob)
Example no. 32
def compileRun(input: IOBase, output: IOBase = None, logger=logging.getLogger()):
    source = _compile(input, logger)
    if output is not None:
        output.write(source.decode("UTF-8"))
    return llvm.run(source).decode("UTF-8")
Example no. 33
def compile(input: IOBase, output: IOBase, logger=logging.getLogger()):
    output.write(_compile(input, logger).decode("UTF-8"))