Example #1
0
            def stream():
                """Yield CSV text chunks pivoting the tall ``rows`` result set.

                A wide row is built per (row[4], row[1]) group: three fixed
                columns followed by one column per entry in ``varlist``.
                The header plus the first completed row form the first
                chunk; each further completed row is its own chunk.
                """
                buffer_ = StringIO()
                writer = csv.writer(buffer_)

                def emit():
                    # Drain the one-chunk buffer and reset it for reuse.
                    buffer_.seek(0)
                    data = buffer_.read()
                    buffer_.seek(0)
                    buffer_.truncate()
                    return data

                writer.writerow(headerlist)
                current_row = None

                for row in rows:
                    # A new (row[4], row[1]) group starts: flush the finished
                    # row (if any) and start an empty wide row.
                    if not current_row or current_row[0] != row[4] or current_row[1] != row[1]:
                        if current_row:
                            writer.writerow(current_row)
                            yield emit()
                        current_row = [row[4], row[1], row[5]] + [""] * len(varlist)

                    # Column 3+ holds the value for the variable named row[2].
                    current_row[3 + varlist.index(row[2])] = row[0]

                # Fixed: original called writer.writerow(None) and crashed
                # when ``rows`` was empty; now only the header is emitted.
                if current_row is not None:
                    writer.writerow(current_row)
                yield emit()
Example #2
0
class UnicodeWriter(object):
    """CSV writer that targets a byte stream ``f`` in a chosen encoding.

    Each row is first rendered through :mod:`csv` into an in-memory text
    queue, then re-encoded and copied to the target stream.
    """

    def __init__(self, f, encoding="utf-8", **kwds):
        # Rows are staged here as text before being encoded onto ``f``.
        self.queue = StringIO()
        self.writer = csv.writer(self.queue, **kwds)
        self.stream = f
        self.encoder = codecs.getencoder(encoding)

    def writerow(self, row):
        """Render one row, encode it, and append it to the target stream."""
        self.writer.writerow(row)
        text = self.queue.getvalue()
        # Re-encode the csv text into the target encoding.
        encoded, _length = self.encoder(text)
        self.stream.write(encoded)
        # Reset the staging queue: rewind, then drop the old content.
        self.queue.seek(0)
        self.queue.truncate(0)

    def writerows(self, rows):
        """Write every row in ``rows``."""
        for row in rows:
            self.writerow(row)

    def close(self):
        """Close the underlying target stream."""
        self.stream.close()
Example #3
0
def write_profiling_results(profiler: cProfile.Profile, target: str) -> None:
    """ Write profiling results to file in human readable form and as a binary
        blob for external tool use (with the extra extension '.bin').

        If the file cannot be opened or written to, a shortened form will be
        written to stdout to avoid losing the data.

        Arguments:
            profiler: the profiler instance to log results of
            target: the path of the file to store results in

        Returns:
            None
    """
    stream = StringIO()
    sortby = 'tottime'
    stats = pstats.Stats(profiler, stream=stream).sort_stats(sortby)
    stats.dump_stats(target + ".bin")
    stats.print_stats(.25)  # limit to the more meaningful first 25%
    stats.print_callers(.25)
    try:
        # Strip the absolute source directory prefix to keep the report short.
        path_to_remove = os.path.dirname(os.path.realpath(__file__)) + os.path.sep
        # Fixed: use a context manager so the handle is always closed
        # (the original leaked an open file object).
        with open(target, "w") as report:
            report.write(stream.getvalue().replace(path_to_remove, ""))
        logging.info("Profiling report written to %s", target)
    except IOError:
        # if can't save to file, print to terminal, but only the head
        logging.debug("Couldn't open file to store profiling output")
        # Fixed: rewind before truncating; truncate(0) alone leaves the
        # stream position at the old end and the next print_stats would
        # NUL-pad the gap (Python 3 io.StringIO semantics).
        stream.seek(0)
        stream.truncate(0)
        stats.print_stats(20)  # first 20 lines only
        print(stream.getvalue())
class CsvDictsAdapter(object):
    """Provide a DataChange generator and it provides a file-like object which returns csv data"""

    def __init__(self, source_generator):
        self.source = source_generator
        self.buffer = StringIO()
        self.csv = None          # DictWriter, created lazily from the first row
        self.add_header = False  # when True, prefix the next chunk with a header

    def __iter__(self):
        return self

    def write_header(self):
        """Request that the next chunk be preceded by a header line."""
        self.add_header = True

    def __next__(self):
        row = next(self.source)

        # One shared buffer per adapter: clear it before rendering this row.
        self.buffer.truncate(0)
        self.buffer.seek(0)

        if not self.csv:
            # Field order is taken from the first row seen.
            self.csv = csv.DictWriter(self.buffer,
                                      list(row.keys()),
                                      quoting=csv.QUOTE_NONNUMERIC)
            self.add_header = True

        if self.add_header:
            if hasattr(self.csv, 'writeheader'):
                self.csv.writeheader()
            else:
                # Very old csv modules lack writeheader(): fake one by hand.
                self.csv.writerow(dict((fn, fn) for fn in self.csv.fieldnames))
            self.add_header = False

        self.csv.writerow(row)
        self.buffer.seek(0)
        return self.buffer.read()
Example #5
0
def need_to_install_distro(remote, role_config):
    """
    Installing kernels on rpm won't setup grub/boot into them.  This installs
    the newest kernel package and checks its version and compares against
    the running kernel (uname -r).  Similar check for deb.

    :returns: False if running the newest distro kernel. Returns the version of
              the newest if it is not running.
    """
    dist_release = remote.os.name
    package_type = remote.os.package_type
    current = get_version_of_running_kernel(remote)
    log.info("Running kernel on {node}: {version}".format(
        node=remote.shortname, version=current))
    installed_version = None
    if package_type == 'rpm':
        if dist_release in ['opensuse', 'sle']:
            install_stdout = remote.sh(
                'sudo zypper --non-interactive install kernel-default'
                )
        else:
            install_stdout = remote.sh(
                'sudo yum install -y kernel'
                )
            # yum reports an already-installed kernel on stdout; capture the
            # exact package name so it can be compared to the running one.
            match = re.search(
                "Package (.*) already installed",
                install_stdout, flags=re.MULTILINE)
            if 'Nothing to do' in install_stdout:
                installed_version = match.groups()[0] if match else ''
                # NOTE: the truncate(0) on a freshly created StringIO is a
                # no-op; the buffer is already empty.
                err_mess = StringIO()
                err_mess.truncate(0)
                # Force a "no" answer so the reinstall only probes yum's
                # opinion (stderr) without actually reinstalling anything.
                remote.run(args=['echo', 'no', run.Raw('|'), 'sudo', 'yum',
                                 'reinstall', 'kernel', run.Raw('||'), 'true'],
                           stderr=err_mess)
                reinstall_stderr = err_mess.getvalue()
                err_mess.close()
                # "Skipping the running kernel" means yum refused to touch the
                # currently booted kernel; extract that version for comparison.
                if 'Skipping the running kernel' in reinstall_stderr:
                    running_version = re.search(
                        "Skipping the running kernel: (.*)",
                        reinstall_stderr, flags=re.MULTILINE).groups()[0]
                    if installed_version == running_version:
                        log.info(
                            'Newest distro kernel already installed and running')
                        return False
                else:
                    remote.run(args=['sudo', 'yum', 'reinstall', '-y', 'kernel',
                                     run.Raw('||'), 'true'])
        newest = get_latest_image_version_rpm(remote)

    if package_type == 'deb':
        newest = get_latest_image_version_deb(remote, dist_release, role_config)

    # NOTE(review): ``newest`` is unbound if package_type is neither 'rpm'
    # nor 'deb' — presumably callers guarantee one of the two; confirm.
    if current in newest or current.replace('-', '_') in newest:
        log.info('Newest distro kernel installed and running')
        return False
    log.info(
        'Not newest distro kernel. Current: {cur} Expected: {new}'.format(
            cur=current, new=newest))
    return newest
Example #6
0
def log_chk(hdr, level):
    """ Utility method to check header checking / logging

    Asserts that log entry appears during ``hdr.check_fix`` for logging level
    below `level`.

    Parameters
    ----------
    hdr : instance
        Instance of header class, with methods ``copy`` and ``check_fix``.  The
        header has some minor error (defect) which can be detected with
        ``check_fix``.
    level : int
        Level (severity) of defect present in `hdr`.  When logging threshold is
        at or below `level`, a message appears in the default log (we test that
        happens).

    Returns
    -------
    hdrc : instance
        Header, with defect corrected.
    message : str
        Message generated in log when defect was detected.
    raiser : tuple
        Tuple of error type, callable, arguments that will raise an exception
        when then defect is detected.  Can be empty.  Check with ``if raiser !=
        (): assert_raises(*raiser)``.
    """
    # Capture log output in an in-memory buffer via a dedicated handler.
    str_io = StringIO()
    logger = logging.getLogger('test.logger')
    handler = logging.StreamHandler(str_io)
    logger.addHandler(handler)
    # The buffer is empty here, so truncating without a seek is harmless.
    str_io.truncate(0)
    hdrc = hdr.copy()
    if level == 0:  # Should never log or raise error
        logger.setLevel(0)
        hdrc.check_fix(logger=logger, error_level=0)
        assert str_io.getvalue() == ''
        logger.removeHandler(handler)
        return hdrc, '', ()
    # Non zero defect level, test above and below threshold.
    # Set error level above defect level to prevent exception when defect
    # detected.
    e_lev = level + 1
    # Logging level above threshold, no log.
    logger.setLevel(level + 1)
    hdrc.check_fix(logger=logger, error_level=e_lev)
    assert str_io.getvalue() == ''
    # Logging level below threshold, log appears, store logged message
    logger.setLevel(level - 1)
    hdrc = hdr.copy()
    hdrc.check_fix(logger=logger, error_level=e_lev)
    assert str_io.getvalue() != ''
    message = str_io.getvalue().strip()
    logger.removeHandler(handler)
    # When error level == level, check_fix should raise an error
    hdrc2 = hdr.copy()
    raiser = (HeaderDataError, hdrc2.check_fix, logger, level)
    return hdrc, message, raiser
Example #7
0
class IterDXFWriter:
    # Streams DXF entities copied from a source IterDXF loader into a new
    # binary DXF export file, one entity at a time.

    def __init__(self, name: Filename, loader: IterDXF):
        self.name = str(name)
        # Export target is opened in binary mode; entity text is encoded
        # with the loader's encoding before each write.
        self.file: BinaryIO = open(name, mode="wb")
        # Scratch text buffer reused for every entity.
        self.text = StringIO()
        self.entity_writer = TagWriter(self.text, loader.dxfversion)
        self.loader = loader

    def write_data(self, data: bytes):
        """Write raw bytes straight through to the export file."""
        self.file.write(data)

    def write(self, entity: DXFGraphic):
        """Write a DXF entity from the source DXF file to the export file.

        Don't write entities from different documents than the source DXF file,
        dependencies and resources will not match, maybe it will work once, but
        not in a reliable way for different DXF documents.

        """
        # Not necessary to remove this dependencies by copying
        # them into the same document frame
        # ---------------------------------
        # remove all possible dependencies
        # entity.xdata = None
        # entity.appdata = None
        # entity.extension_dict = None
        # entity.reactors = None
        # reset text stream
        self.text.seek(0)
        self.text.truncate()

        if entity.dxf.handle is None:  # DXF R12 without handles
            self.entity_writer.write_handles = False

        entity.export_dxf(self.entity_writer)
        # POLYLINE and INSERT own linked sub-entities that must follow the
        # main entity, terminated by their SEQEND marker.
        if entity.dxftype() == "POLYLINE":
            polyline = cast(Polyline, entity)
            for vertex in polyline.vertices:
                vertex.export_dxf(self.entity_writer)
            polyline.seqend.export_dxf(self.entity_writer)  # type: ignore
        elif entity.dxftype() == "INSERT":
            insert = cast(Insert, entity)
            if insert.attribs_follow:
                for attrib in insert.attribs:
                    attrib.export_dxf(self.entity_writer)
                insert.seqend.export_dxf(self.entity_writer)  # type: ignore
        data = self.text.getvalue().encode(self.loader.encoding)
        self.file.write(data)

    def close(self):
        """Safe closing of exported DXF file. Copying of OBJECTS section
        happens only at closing the file, without closing the new DXF file is
        invalid.
        """
        self.file.write(b"  0\r\nENDSEC\r\n")  # for ENTITIES section
        # DXF versions newer than R12 (AC1009) require an OBJECTS section.
        if self.loader.dxfversion > "AC1009":
            self.loader.copy_objects_section(self.file)
        self.file.write(b"  0\r\nEOF\r\n")
        self.file.close()
def concat4(lst, num_times):
    """Rebuild the concatenation of ``lst`` in a reused StringIO buffer.

    The buffer is cleared and refilled ``num_times`` times; the StringIO
    object itself is returned (empty when ``num_times`` is 0).
    """
    out = StringIO()
    for _ in range(num_times):
        # Clear the buffer, then write every word again from scratch.
        out.truncate(0)
        out.seek(0)
        for word in lst:
            out.write(word)
    return out
Example #9
0
 def generate():
     """Stream ``csv_list`` as CSV text, yielding one encoded row at a time."""
     buf = StringIO()
     writer = csv.writer(buf)
     for record in csv_list:
         writer.writerow(record)
         yield buf.getvalue()
         # Reset the shared buffer before rendering the next row.
         buf.seek(0)
         buf.truncate(0)
Example #10
0
class PyConsole:
    """Interactive Python console window: compiles and executes entered
    code, showing the captured output in a GTK text view."""

    def __init__(self, dict, exaile):
        self.dict = dict
        # Captures stdout of executed code.  Kept for the object's lifetime
        # because help() stores and reuses whatever stdout it first saw.
        self.buffer = StringIO()

        ui = Gtk.Builder()
        ui.add_from_file(
            os.path.join(
                os.path.dirname(os.path.realpath(__file__)), 'console_window.ui'
            )
        )

        self.window = ui.get_object('simple_console_window')
        self.close_handler = self.window.connect(
            'delete-event', console_destroyed, exaile
        )

        self.text_view = tv = ui.get_object('console_output')

        self.text_buffer = buff = tv.get_buffer()
        # Mark at the end of the buffer used for auto-scrolling after insert.
        self.end_mark = buff.create_mark(None, buff.get_end_iter(), False)

        self.entry = entry = ui.get_object('console_input')
        entry.connect('activate', self.entry_activated)
        entry.grab_focus()

    def entry_activated(self, entry, user_data=None):
        """
            Called when the user presses Return on the GtkEntry.
        """
        self.execute(entry.get_text())
        entry.select_region(0, -1)

    def execute(self, code):
        """
            Executes some Python code.
        """
        stdout = sys.stdout
        try:
            pycode = compile(code, '<console>', 'single')
            sys.stdout = self.buffer
            exec(pycode, self.dict)
        except Exception:
            sys.stdout = stdout
            exc = traceback.format_exception(*sys.exc_info())
            del exc[1]  # Remove our function.
            result = ''.join(exc)
        else:
            sys.stdout = stdout
            result = self.buffer.getvalue()
            # Can't simply close and recreate later because help() stores and
            # reuses stdout.
            # Fixed: also rewind.  truncate(0) alone leaves the stream
            # position at the old end, so the next captured write would
            # NUL-pad the gap (Python 3 io.StringIO semantics).
            self.buffer.truncate(0)
            self.buffer.seek(0)
        result = '>>> %s\n%s' % (code, result)
        self.text_buffer.insert(self.text_buffer.get_end_iter(), result)
        # Can't use iter; won't scroll correctly.
        self.text_view.scroll_to_mark(self.end_mark, 0, False, 0.5, 0.5)
        self.entry.grab_focus()
Example #11
0
class StdErrWrapper(object):
    """
    Fake file-like stream object that redirects stderr to a logger instance.
    """
    def __init__(self, logger, log_level=logging.ERROR):
        self.__logger = logger
        self.__log_level = log_level
        # Text accumulates here until flush() forwards it to the logger.
        self.__buffer = StringIO()

        # ``write`` is chosen at construction time based on the interpreter:
        # Python 3 receives str and only buffers; Python 2 receives bytes,
        # decodes them, and flushes on each bare newline.
        if sys.version_info[0] >= 3:

            def __write(_buffer):
                """
                Write the given buffer to the temporary buffer.
                """
                self.__buffer.write(_buffer)
        else:

            def __write(_buffer):
                """
                Write the given buffer to log.
                """
                _buffer = _buffer.decode('UTF-8')
                self.__buffer.write(_buffer)

                if _buffer == '\n':
                    self.flush()

        self.write = __write

    def update_log_level(self, log_level=logging.ERROR):
        """
        Update the logging level of this stream.
        """
        self.__log_level = log_level

    @staticmethod
    def __filter_record(record):
        # Promote the last "...: detail" fragment of the message to the
        # front, keeping the full original text after a newline.
        msg = record.msg.strip()
        msg = msg.splitlines()[-1]
        msg = msg.split(': ')[1:]
        record.msg = ''.join(msg) + '\n' + record.msg
        return record

    def flush(self):
        """
        Flush the buffer, if applicable.
        """
        if self.__buffer.tell() > 0:
            # Write the buffer to log
            # noinspection PyProtectedMember
            # NOTE(review): ``record_filter`` is not a stdlib logging _log
            # argument — presumably a project-patched logger; confirm.
            self.__logger._log(level=self.__log_level,
                               msg=self.__buffer.getvalue().strip(),
                               record_filter=StdErrWrapper.__filter_record)
            # Remove the old buffer
            self.__buffer.truncate(0)
            self.__buffer.seek(0)
class prefix(object):
    """Interactive exercise: build words from stdin commands, store them,
    then count/list stored words matching a given prefix."""

    def __init__(self):
        self.word_list = []
        self.m = 0  # number of search queries to read
        self.n = 0  # number of build commands to read
        self.buffer = StringIO()  # the word currently being built

    def get_wordlist(self):
        """Read ``n`` build commands from stdin.

        Command '1 <word>' appends text to the buffer, '2' deletes the last
        character, '3' stores the buffer into word_list; anything else stops.
        """
        sys.stdout.write('Input:\n')
        line = input()
        self.n, self.m = (int(x) for x in line.split())
        for i in range(self.n):
            raw_word = input()
            if raw_word == '\n':
                break
            if len(raw_word.split()) == 2:
                act, word = (x for x in raw_word.split())
            if len(raw_word.split()) == 1:
                act = raw_word.split()[0]
                print(act)

            if act == '1':
                # Append at the end of the current word.
                self.buffer.seek(0, 2)
                self.buffer.write(word)
                print(self.buffer.getvalue())
            elif act == '2':
                # Drop the last character by rewriting all but the tail.
                temp = self.buffer.getvalue()
                # Fixed: rewind as well as truncate.  truncate(0) alone left
                # the stream position at the old end, so the write below
                # NUL-padded the gap (Python 3 io.StringIO semantics).
                self.buffer.seek(0)
                self.buffer.truncate(0)
                self.buffer.write(temp[:-1])
                print(self.buffer.getvalue())
            elif act == '3':
                self.word_list.append(self.buffer.getvalue())
                print(self.word_list)
            else:
                break
        self.buffer.close()

    def find_prefix(self, prefix):
        """Return (count, matches) of stored words starting with ``prefix``."""
        frequency = 0
        sub_list = []
        for candidate in self.word_list:
            if candidate.startswith(prefix):
                sub_list.append(candidate)
                frequency = frequency + 1
        return frequency, sub_list

    def search_prefix(self):
        """Read ``m`` prefixes from stdin and print matches for each."""
        sys.stdout.write('Search:\n')
        for i in range(self.m):
            prefix = input()
            if prefix == '\n':
                break
            frequency, sub_list = self.find_prefix(prefix)
            print("{", end="")
            print(*sub_list, sep=",", end="")
            print("}", "total:", frequency)
Example #13
0
    def parse_attr(self, vcard):
        """Yield (key, sattrs, kwattrs, value) tuples parsed from self.vcard.

        Multi-line quoted-printable and base64 values are reassembled into a
        single value before being yielded.  'item' keys are stashed in
        self.buff for later parsing instead of being yielded.

        NOTE(review): the ``vcard`` parameter is never used; lines come from
        ``self.vcard`` — confirm whether the parameter is vestigial.
        """
        # Scratch buffer reused to reassemble multi-line values.
        buf = StringIO()
        for line in self.vcard:
            key, sattrs, kwattrs, value = self.splitattrs(line.strip())

            # There is probably a better place to put this, but w/e
            # Basically, if we come across an "item", we will catch it
            # and store it
            # for later parsing, after the file is fully parsed
            if key.startswith('item'):
                item, key = key.split('.')
                self.buff[item].append({key: value})
                # Continue on to the next line
                continue

            # When the encoding type is quoted-printable, this means
            # That there will be a line cont. sequence in the line
            # So here, we check for the encoding type, and merge any
            # Line that ends in "=0D=0A=", with the next one, until
            # The sequence is not found. "OD" and "OA" are the carriage return
            # and line feed character in hexadecimal.

            if kwattrs.get('encoding') == 'quoted-printable':
                buf.write(value)
                # NOTE(review): mixing iteration over self.vcard with
                # readline() relies on the file object's buffering; confirm
                # it behaves as intended for this input.
                while True:

                    next_line = self.vcard.readline().strip()
                    buf.write(next_line)
                    if not next_line.endswith("=0D=0A="):
                        value = buf.getvalue().replace("=0D=0A=", "")
                        buf.seek(0)
                        buf.truncate()
                        break
            # yield key, sattrs, kwattrs, value

            # Check if its a b64 encoded value, "b" in vcard 3
            if kwattrs.get('encoding') in ('base64', 'b'):
                # Yes? write the line to a buffer,
                # since its probably more than one line
                buf.write(value)
                # Keep iterating through each line until we get what we want
                while True:
                    next_line = self.vcard.readline().replace("\n", "")

                    # Check if the line starts with a space, or an indention.
                    if next_line.startswith(" "):
                        buf.write(next_line)

                    else:
                        # set the value of the field to the b64
                        value = buf.getvalue()
                        # Return the file to its original state before we
                        # Started f*****g around with it
                        buf.seek(0)
                        buf.truncate()
                        break
            yield key, sattrs, kwattrs, value
Example #14
0
    def write_a3m_fasta(self,
                        fp,
                        queryseq,
                        remove_gap=False,
                        combine_hsps=False):
        '''Write A3M fasta strings into fp.

        Note that remove_gap is forced to True when combine_hsps is True.
        This limitation is due to save some computation rather than
        biologically motivated.
        '''

        if combine_hsps:
            remove_gap = True

        if verbose:
            print("#################writing a3m fasta##############")
            print(self.protein_name)

        # Render every HSP's fasta records into a temporary buffer so they
        # can optionally be combined below.
        outstream = StringIO()
        for hsp in self.hsps:
            hsp.write_a3m_fasta(outstream,
                                self.protein_name,
                                queryseq,
                                remove_gap=remove_gap)

        if verbose:
            print("written fastas:")
            print(outstream.getvalue())

        # rewind before the records are read back
        outstream.seek(0)

        if combine_hsps and len(self.hsps) > 1:
            # read fasta records in self.hsps
            fastas = parse_a3m(outstream)
            # prepare to reuse outstream for the combined sequence
            outstream.seek(0)
            outstream.truncate()
            # Combine column-wise: take the first HSP with a residue at each
            # position, '-' when none has one (assumes remove_gap is True).
            combined_fasta = FASTA()
            combined_fasta.header = fastas[0].header
            for i in range(len(fastas[0].sequence)):
                for j in range(len(self.hsps)):
                    if fastas[j].sequence[i].isalpha():
                        outstream.write(fastas[j].sequence[i])
                        break
                else:
                    outstream.write('-')
            # Fixed: the original read the misspelled name ``outsteam``,
            # which raised NameError whenever this branch ran.
            combined_fasta.sequence = outstream.getvalue()
            # NOTE(review): combined_fasta is built but never written to
            # ``fp`` — looks like a latent bug in the original; confirm the
            # intended output before changing externally visible behavior.
        else:
            fp.write(outstream.getvalue())
Example #15
0
 def generate():
     """Yield the CSV header row for the worker-bonus download."""
     _form = request.form.to_dict()
     for assignment in _form:
         print(assignment)
     output = []  # kept for parity with the original (unused)
     buf = StringIO()
     writer = csv.writer(buf)
     writer.writerow(('WorkerId', 'Bonus'))
     yield buf.getvalue()
     # Reset the buffer after the header has been yielded.
     buf.seek(0)
     buf.truncate(0)
Example #16
0
 def __str__(self):
     """Return the items of this iterable joined as "a, b, c"."""
     # Fixed: the original relied on ``sb.len``, an attribute of Python 2's
     # cStringIO that does not exist on Python 3's io.StringIO
     # (AttributeError).  ``str.join`` produces the identical
     # comma-separated text with no trailing separator.
     return ", ".join(self)
Example #17
0
 def test_run_3(self):
     """Run a full scripted game and compare the transcript to sample2.txt."""
     # Two player names followed by their alternating moves.
     input_stream = StringIO(
         'John Doe\nMichael Doe\n2\n6\n2\n8\n5\n4\n7\n3\n1\n9\n')
     output_stream = StringIO()
     game = Game(input_stream=input_stream, output_stream=output_stream)
     # Discard anything the Game constructor may have written so only
     # run() output is compared.
     output_stream.truncate(0)
     output_stream.seek(0)
     game.run()
     with open(join(dirname(__file__), 'sample2.txt')) as file:
         sample = file.read()
     self.assertEqual(output_stream.getvalue(), sample)
Example #18
0
def test_ini_print_action():
    """ParameterizedIniPrintAction must print the INI form of parameters
    to out_stream and then exit (SystemExit)."""
    parser = ArgumentParser()
    ss = StringIO()
    parser.add_argument(
        "--print",
        action=pargparse.ParameterizedIniPrintAction,
        out_stream=ss,
        type=ParamsA,
    )
    # Without --print nothing is emitted.
    parser.parse_args([])
    # The action prints and raises SystemExit.
    with pytest.raises(SystemExit):
        parser.parse_args(["--print"])
    ss.seek(0)
    assert (ss.read().strip() == """\
# == Help ==
# [ParamsA]
# bongo: A JSON object


[ParamsA]
bango
bingo
bongo""")
    # Reset the shared stream for the second scenario.
    ss.seek(0)
    ss.truncate()
    parser = ArgumentParser()
    parameterized = OrderedDict()
    parameterized["params_a"] = ParamsA(bongo=["I", "am", "a", "bongo"])
    parameterized["params_b"] = ParamsB(list_=["I", "am", "comma-d"],
                                        object_selector="2")
    # Pre-populated parameterized objects, restricted fields, no help text,
    # and a custom serializer for params_b.list_.
    parser.add_argument(
        "--print",
        action=pargparse.ParameterizedIniPrintAction,
        out_stream=ss,
        parameterized=parameterized,
        only={
            "params_a": {"bongo"},
            "params_b": {"list_"}
        },
        include_help=False,
        serializer_name_dict={"params_b": {
            "list_": CommaListSerializer()
        }},
    )
    with pytest.raises(SystemExit):
        parser.parse_args(["--print"])
    ss.seek(0)
    assert (ss.read().strip() == """\
[params_a]
bongo = ["I", "am", "a", "bongo"]

[params_b]
list_ = I,am,comma-d""")
Example #19
0
def play(board, players, withUI):
    """Referee loop: alternate the two players on ``board`` until the game
    is over, optionally updating a UI, and return per-player wall time.

    Returns:
        totalTime: [seconds_player_0, seconds_player_1]
    """
    totalTime = [0, 0]  # total real time for each player
    nextplayer = 0
    nextplayercolor = board._BLACK
    nbmoves = 1

    # Per-player captured stdout; capture itself is currently disabled
    # (the sys.stdout swaps below are commented out), so stringio stays empty.
    outputs = ["", ""]
    sysstdout = sys.stdout
    stringio = StringIO()

    # # print(b.legal_moves())
    while not board.is_game_over():

        # # print("Referee Board:")
        # # print(board)
        # # print("Before move", nbmoves)
        # # print("Legal Moves: ", b.legal_moves())

        nbmoves += 1
        otherplayer = (nextplayer + 1) % 2
        othercolor = board._BLACK if nextplayercolor == board._WHITE else board._WHITE

        currentTime = time.time()
        # sys.stdout = stringio
        move = players[nextplayer].getPlayerMove()
        # sys.stdout = sysstdout
        playeroutput = "\r" + stringio.getvalue()
        # NOTE(review): truncate(0) without seek(0) leaves the position at
        # the old end; harmless while capture is disabled, but would NUL-pad
        # future writes if the stdout swap above is re-enabled — confirm.
        stringio.truncate(0)

        # # print(("[Player "+str(nextplayer) + "] ").join(playeroutput.splitlines(True)))
        outputs[nextplayer] += playeroutput
        totalTime[nextplayer] += time.time() - currentTime
        # # print("Player ", nextplayercolor, players[nextplayer].getPlayerName(), "plays" + str(move))
        (x, y) = move

        if not board.is_valid_move(nextplayercolor, x, y):
            # print(otherplayer, nextplayer, nextplayercolor)
            # print("Problem: illegal move")
            break

        board.push([nextplayercolor, x, y])

        if withUI:
            update(board, board.get_board_size())

        players[otherplayer].playOpponentMove(x, y)

        nextplayer = otherplayer
        nextplayercolor = othercolor

        # # print(board)

    return totalTime
Example #20
0
def digitize_pdf(file_path):
    """Parse a PDF into per-page content dicts via Tika.

    Returns:
        list of ``{'id': <page number as str>, 'content': <page text>}``.
    """
    file_data = []
    _buffer = StringIO()
    data = parser.from_file(file_path, xmlContent=True)
    xhtml_data = BeautifulSoup(data['content'], features="lxml")
    for page, content in enumerate(xhtml_data.find_all('div', attrs={'class': 'page'})):
        #print('Parsing page {} of pdf file...'.format(page+1))
        _buffer.write(str(content))
        parsed_content = parser.from_buffer(_buffer.getvalue())
        # Fixed: rewind before truncating.  The original called truncate()
        # with the position at the end of the stream, which is a no-op, so
        # every page's buffer accumulated all previous pages' markup.
        _buffer.seek(0)
        _buffer.truncate()
        file_data.append({'id': str(page+1), 'content': parsed_content['content']})
    return file_data
Example #21
0
class ArgsToCsv:
    """Render positional arguments as a single CSV-formatted line."""

    def __init__(self, separator=','):
        self.separator = separator
        self.buffer = StringIO()
        self.writer = csv.writer(self.buffer)

    def stringify(self, *args):
        """Return ``args`` as one CSV line, without the trailing newline."""
        self.writer.writerow(args)
        line = self.buffer.getvalue().strip('\r\n')
        # Clear the shared buffer so the next call starts from scratch.
        self.buffer.truncate(0)
        self.buffer.seek(0)
        return line
Example #22
0
 def csv_iter(iterable):
     """
     csv module can't be used as a generator, so we have to write each
     row to a temp buffer then yield it.
     """
     scratch = StringIO()
     writer = csv.writer(scratch)
     for record in iterable:
         writer.writerow(record)
         yield scratch.getvalue()
         # Empty the scratch buffer and rewind for the next row.
         scratch.truncate(0)
         scratch.seek(0)
Example #23
0
class BufferHandler:
    """Accumulate message text in memory and hand it over in batches."""

    def __init__(self):
        self._mem = StringIO()

    def handle_message(self, data: str) -> None:
        """Append ``data`` to the in-memory buffer."""
        self._mem.write(data)

    def pop(self) -> str:
        """Return everything buffered so far and empty the buffer."""
        pending = self._mem.getvalue()
        # Drop the content, then rewind so future writes start at 0.
        self._mem.truncate(0)
        self._mem.seek(0)
        return pending
Example #24
0
 def iter_csv(data):
     """ Stream CSV file to user for download """
     buf = StringIO()
     writer = csv.writer(buf)
     # The header row stays in the buffer and is yielded together with
     # the first data row.
     writer.writerow([col_1, col_2])
     for record in data:
         timestamp = str(record[0][:-4]).replace('T', ' ')
         writer.writerow([timestamp, record[1]])
         buf.seek(0)
         yield buf.read()
         # Clear the buffer before the next row.
         buf.truncate(0)
         buf.seek(0)
Example #25
0
class IterDXFWriter:
    # Streams DXF entities copied from a source IterDXF loader into a new
    # binary DXF export file, one entity at a time.

    def __init__(self, name: str, loader: IterDXF):
        self.name = str(name)
        # Export target is opened in binary mode; entity text is encoded
        # with the loader's encoding on each write.
        self.file: BinaryIO = open(name, mode='wb')
        # Scratch text buffer reused for every entity.
        self.text = StringIO()
        self.entity_writer = TagWriter(self.text, loader.dxfversion)
        self.loader = loader

    def write_data(self, data: bytes):
        """Write raw bytes straight through to the export file."""
        self.file.write(data)

    def write(self, entity: DXFGraphic):
        """ Write a DXF entity from the source DXF file to the export file.

        Don't write entities from different documents than the source DXF file, dependencies and resources will not
        match, maybe it will work once, but not in a reliable way for different DXF documents.

        """
        # remove all possible dependencies
        entity.xdata = None
        entity.appdata = None
        entity.extension_dict = None
        entity.reactors = None
        # reset text stream
        self.text.seek(0)
        self.text.truncate()

        entity.export_dxf(self.entity_writer)
        # POLYLINE and INSERT own linked sub-entities that must follow the
        # main entity, terminated by their SEQEND marker.
        if entity.dxftype() == 'POLYLINE':
            polyline = cast('Polyline', entity)
            for vertex in polyline.vertices:
                vertex.export_dxf(self.entity_writer)
            polyline.seqend.export_dxf(self.entity_writer)
        elif entity.dxftype() == 'INSERT':
            insert = cast('Insert', entity)
            if insert.attribs_follow:
                for attrib in insert.attribs:
                    attrib.export_dxf(self.entity_writer)
                insert.seqend.export_dxf(self.entity_writer)
        data = self.text.getvalue().encode(self.loader.encoding)
        self.file.write(data)

    def close(self):
        """
        Safe closing of exported DXF file. Copying of OBJECTS section happens only at closing the file,
        without closing the new DXF file is invalid.
        """
        self.file.write(b'  0\r\nENDSEC\r\n')  # for ENTITIES section
        # DXF versions newer than R12 (AC1009) require an OBJECTS section.
        if self.loader.dxfversion > 'AC1009':
            self.loader.copy_objects_section(self.file)
        self.file.write(b'  0\r\nEOF\r\n')
        self.file.close()
Example #26
0
 def segment(col):
     """Segment column ``col`` of ``infile_df`` in place into character
     n-grams using subword-nmt's segment_char_ngrams."""
     subword_nmt_output = StringIO()
     # segment_char_ngrams expects an argparse-like namespace; its output
     # lands in the StringIO buffer, one line per input value.
     segment_char_ngrams(
         SimpleNamespace(input=infile_df[col].dropna().astype(str),
                         vocab={},
                         n=args.char_n_gram_mode,
                         output=subword_nmt_output,
                         separator=args.subword_separator))
     # Rewind so the buffer can be read back line by line.
     subword_nmt_output.seek(0)
     # Overwrite the non-null rows of the column with the segmented lines
     # (column vector shape via np.newaxis).
     infile_df.loc[infile_df[col].notnull(), [col]] = np.array([
         line.rstrip(' \t\n\r') for line in subword_nmt_output
     ])[:, np.newaxis]
     subword_nmt_output.truncate(0)
Example #27
0
def _decode_gen(data, bound, mapping, encoding):
    """Decode ``data`` via ``mapping``, yielding one string per newline
    signalled by ``_read_single`` (plus any trailing text at the end)."""
    stream = BytesIO(data)
    end = len(data)
    reader = codecs.getreader(encoding)(stream)
    out = StringIO()
    # Candidate key lengths, longest first, down to (but excluding) bound.
    sizes = range(max(len(key) for key in mapping), bound, -1)
    while stream.tell() < end:
        if _read_single(out, data, mapping, sizes, stream, reader):
            yield out.getvalue()
            # Reset the line accumulator for the next line.
            out.seek(0)
            out.truncate()
    yield out.getvalue()
Example #28
0
  def handle_syscall_parameter(
    syscall: SystemCall,
    param_buffer: StringIO
  ) -> None:
    """Consume the buffered parameter text and attach it to ``syscall``."""
    text = param_buffer.getvalue().strip()

    # Empty the buffer so the caller can accumulate the next parameter.
    param_buffer.truncate(0)
    param_buffer.seek(0)

    parsed = parse_parameter(text)
    if parsed is not None:
      syscall.params.append(parsed)
Example #29
0
def test_logging():
    """Report.log_raise should log only when problem_level reaches the
    logger's threshold."""
    rep = Report(ValueError, 20, 'msg', 'fix')
    str_io = StringIO()
    logger = logging.getLogger('test.logger')
    logger.setLevel(30)  # defaultish level
    logger.addHandler(logging.StreamHandler(str_io))
    # Below threshold (20 < 30): nothing is logged.
    rep.log_raise(logger)
    assert_equal(str_io.getvalue(), '')
    # At threshold: message and fix are logged.
    rep.problem_level = 30
    rep.log_raise(logger)
    assert_equal(str_io.getvalue(), 'msg; fix\n')
    # Clear the shared buffer (truncate, then rewind) for any later use.
    str_io.truncate(0)
    str_io.seek(0)
Example #30
0
 def built_text_literal(self, chunks):
     """Split ``chunks`` into (text, Term) pairs plus a trailing suffix.

     Consecutive strings are concatenated; each Term closes off the text
     collected so far.  Returns ``(pairs, trailing_text)``.
     """
     pending = StringIO()
     pairs = []
     for chunk in chunks:
         if isinstance(chunk, str):
             pending.write(chunk)
         elif isinstance(chunk, Term):
             pairs.append((pending.getvalue(), chunk))
             # Restart the accumulator for the next run of strings.
             pending.seek(0)
             pending.truncate()
         else:
             assert False  # pragma: no cover
     return pairs, pending.getvalue()