Example #1
def readlines(filename):
    """Read the source code."""
    try:
        with open(filename, 'rb') as f:
            (coding, lines) = tokenize.detect_encoding(f.readline)
            f = TextIOWrapper(f, coding, line_buffering=True)
            return [l.decode(coding) for l in lines] + f.readlines()
    except (LookupError, SyntaxError, UnicodeError):
        # Fall back if file encoding is improperly declared
        with open(filename, encoding='latin-1') as f:
            return f.readlines()
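
A quick way to exercise the helper above is a throwaway source file that declares a non-UTF-8 coding cookie; the file name and contents below are invented for the demo.

import os
import tempfile

src = b"# -*- coding: latin-1 -*-\nname = 'Jos\xe9'\n"
with tempfile.NamedTemporaryFile(suffix='.py', delete=False) as tmp:
    tmp.write(src)

print(readlines(tmp.name))  # -> ["# -*- coding: latin-1 -*-\n", "name = 'José'\n"]
os.remove(tmp.name)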
Example #2
def assemble(input_stream: io.TextIOWrapper,
             output_stream: io.TextIOWrapper) -> None:
    # Initializing label dictionary
    labels = dict()
    # Initializing instruction number
    instr_count = 0
    # Initializing binary instruction
    asm = []
    # First pass to detect labels
    for line in input_stream.readlines():
        # Sanitize the line
        line = line.split('#')[0].strip()
        # Skip empty lines
        if len(line) == 0:
            continue
        # Checking for labels
        line = line.split(':')
        if len(line) == 1:
            instr_count += 1
        elif len(line) == 2:
            labels[line[0]] = instr_count
            if len(line[1]) > 0:
                instr_count += 1
    # Resetting instruction count
    instr_count = 0
    # Second pass to assemble the code
    input_stream.seek(0, 0)
    for line in input_stream.readlines():
        # Sanitize the line
        line = line.split('#')[0].strip()
        # Skip empty lines
        if len(line) == 0:
            continue
        # Checking for labels
        line = line.split(':')
        # Check if there is no label
        if len(line) == 1:
            instr_count, asm = asm_to_bin(line[0], instr_count, labels)
        # Check if a label exists
        elif len(line) == 2:
            # Check if the line has an instruction
            if len(line[1]) > 0:
                instr_count, asm = asm_to_bin(line[1], instr_count, labels)
            else:
                continue
        # Writing to output stream
        for bin_instr in asm:
            output_stream.write(bin_instr + '\n')
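
The two-pass label resolution above can be tried in isolation by stubbing out asm_to_bin, which the listing does not show; the stub and the sample program below are purely illustrative.

import io

def asm_to_bin(instr, instr_count, labels):
    # Hypothetical stand-in for the real encoder: emit one pseudo-binary
    # line per instruction and advance the counter.
    return instr_count + 1, ['{:04d} {} {}'.format(instr_count, instr, labels)]

source = io.StringIO("start:\n  addi x1, x0, 1  # set up counter\n  beq x1, x0, start\n")
binary = io.StringIO()
assemble(source, binary)
print(binary.getvalue())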
Example #3
def generate_code(infile: io.TextIOWrapper) -> Iterator[str]:
    lines = [line.strip() for line in infile.readlines() if line]
    for line in lines:
        if (matched := PYTHON.match(line)):
            field = matched.group(1)
            yield (f"{field}=source.{field},")
        elif (matched := GO.match(line)):
            ...
Example #4
def main(true_file: TextIOWrapper, prediction_files: List[TextIOWrapper],
         task: Task):
    """Evaluate predictions against a reference file in the given evaluation scenario

    :param true_file: the reference file
    :param prediction_files: a list of predictions files to be evaluated
    :param task: the evaluation scenario
    """
    y_true = get_labels(true_file.readlines(), task)
    labels = list(set(y_true) - {'O', False})
    results = []
    for prediction_file in prediction_files:
        y_pred = get_labels(prediction_file.readlines(), task)
        if len(y_true) == len(y_pred):
            ps, rs, f1s, _ = precision_recall_fscore_support(y_true,
                                                             y_pred,
                                                             labels=labels,
                                                             average=None)
            results.append(
                list(num for p, r, f in zip(ps, rs, f1s) for num in (p, r, f)))
        else:
            results.append([float('NaN')] * len(labels) * 3)
    index = [prediction_file.name for prediction_file in prediction_files]
    columns = pd.MultiIndex.from_product([labels, ['P', 'R', 'F1']])
    df = pd.DataFrame(results, index=index, columns=columns)
    with pd.option_context('display.max_rows', None, 'display.max_columns',
                           None):
        print(df)
Example #5
def generate_domain_list(pattern_str: str, wordlist_file: TextIOWrapper,
                         skip_files: list, chunk: int, chunks: int, l: Logger):
    if pattern_str is not None:
        d = pattern.generate_candiates(pattern_str)
    else:
        d = [line.rstrip('\n') for line in wordlist_file.readlines()]
        wordlist_file.close()

    l.log('{} domains to test (initially)'.format(len(d)))

    if skip_files is not None:
        domains_to_skip = []
        for file in skip_files:
            domains_to_skip.extend(line.rstrip('\n') for line in file.readlines())
            file.close()
        d = sorted(set(d) - set(domains_to_skip))

    if chunk is not None and chunks is not None:
        # find the chunk of the list
        l.log("Take chunk {} of {}".format(chunk, chunks))
        d = chunking.get(d, chunk, chunks)

    length = len(d)

    l.log('{} domains to test (after skipping and chunking)'.format(length))

    return d, length
Example #6
    def post(self, simulationId):

        parse = reqparse.RequestParser()
        parse.add_argument('file',
                           type=FileStorage,
                           location='files',
                           required=True,
                           help='Arquivo de Entrada')

        args = parse.parse_args()
        output = list()

        stream = args['file'].stream
        bytesData = stream.read()

        self.simManager = get_simulation(simulationId)

        f = TextIOWrapper(BytesIO(bytesData), 'utf-8')
        for line in f.readlines():
            command = line.rstrip()
            print(command, flush=True, file=sys.stdout)
            output.append('>> Processando o comando: {}. <<'.format(command))
            output.append(self.processCommand(command))

        return {'output': output}, 201
Example #7
    def read_entries(file: io.TextIOWrapper) -> List[str]:
        """
        Returns the lines of a text file.

        :param file: the file.
        :return: the lines of the file.
        """
        return file.readlines()
Example #8
def cert_trust(input_file: TextIOWrapper) -> None:
    # todo require user to provide all acceptable uids as parameters
    for key in _parse_keys_from_list_of_lines(input_file.readlines()):
        if not key.is_public and key.is_protected:
            raise CustomException(
                "Encrypted/Protected keys are not supported. Remove protection before importing."
            )
        API.cert_import_certification_key(key)
Example #9
def calc_feature_vector_size(file: io.TextIOWrapper) -> int:
    """
    Calculate the feature vector size by reading the training data of RankLib
    :param file: RankLib training data
    :return: the size of the feature vector (number of features in a row)
    """
    lines = file.readlines()
    row = lines[0].split()
    return len(row[2:-1])
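
The slice row[2:-1] assumes each training row ends with a single '#comment' token; a hand-written line in that shape is enough to sanity-check the helper (io.StringIO stands in for the training file).

import io

sample = io.StringIO("2 qid:1 1:0.7 2:0.0 3:1.5 #doc-9\n")
print(calc_feature_vector_size(sample))  # -> 3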
Example #10
def parse_properties(reader):
    result = dict()
    reader.seek(0)
    text_reader = TextIOWrapper(reader)
    for line in text_reader.readlines():
        key, sep, value = (line.rstrip("\r\n").encode("utf-8").decode(
            "unicode_escape").partition("="))
        result[key] = value

    return result
Example #11
def get_lines_from_thing_file(thing_file: str):
    data_file = BytesIO()
    data_wrapper = TextIOWrapper(
        data_file,
        encoding="iso-8859-1",
        line_buffering=True,
    )
    file_path = "RETR " + THINGS_FOLDER_NAME_TITELIVE + "/" + thing_file
    connect_to_titelive_ftp().retrbinary(file_path, data_file.write)
    data_wrapper.seek(0, 0)
    return iter(data_wrapper.readlines())
Example #12
def enqueue_output(out: TextIOWrapper, queue: Queue[Any]) -> bool:
    try:
        for line in out.readlines():
            queue.put_nowait(line)
    finally:
        if out and not out.closed:
            try:
                out.close()
                return True
            except Exception:
                return False
        return False
Example #13
def get_reader(buffer, delimiter=settings.CSV_DELIMITER):
    """
    Tries different encodings before choosing the right reader.
    :param delimiter: the delimiter character used in the csv file
    :param buffer: the raw bytes of the csv file
    :return: a csv.reader over the decoded buffer, or None if no encoding worked
    """
    reader = None
    encodings = ['utf-8', 'latin-1', 'windows-1250', 'windows-1252']
    for e in encodings:
        try:
            data = BytesIO(buffer)
            fh = TextIOWrapper(data, encoding=e)
            fh.readlines()
            data.seek(0)
            reader = csv.reader(fh, delimiter=delimiter, quotechar='"')
        except UnicodeDecodeError:
            pass  # error, try another encoding
        else:
            break  # encoding was successful
    return reader
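
A minimal check, assuming the module-level imports of the original (csv, BytesIO, TextIOWrapper, settings) are in place; the delimiter is passed explicitly here, and the latin-1 bytes force the utf-8 attempt to fail first.

raw = 'name;city\nJosé;São Paulo\n'.encode('latin-1')
reader = get_reader(raw, delimiter=';')
for row in reader:
    print(row)  # -> ['name', 'city'], then ['José', 'São Paulo']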
Example #14
def file_to_square(input_file: io.TextIOWrapper) -> Square:
    if input_file:
        # Read all non-empty lines from the file
        _rows = [
            line.split() for line in input_file.readlines() if line.strip()
        ]
        # Stolen from here: https://stackoverflow.com/a/642169/893211
        rows: Rows = [list(map(int, x)) for x in _rows]
    else:
        # Some default value...
        rows = [[1, 2], [3, 4]]
    return Square(*rows)
Example #15
class LineList:
    def __init__(self, fileobj):

        self.io_wrapper = TextIOWrapper(fileobj, line_buffering=True)
        self.lines = []

    def __getitem__(self, item):
        # For now just assume scalar item
        while item >= len(self.lines):
            if not self._read_block():
                # EOF reached while the index is still out of range
                raise IndexError(item)
        return self.lines[item]

    def _read_block(self, blocksize=100000):
        new_lines = [x.strip() for x in self.io_wrapper.readlines(blocksize)]
        self.lines += new_lines
        return bool(new_lines)
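
A tiny check of the lazy indexing, assuming TextIOWrapper and BytesIO come from io as in the original module:

from io import BytesIO

lines = LineList(BytesIO(b'alpha\nbeta\ngamma\n'))
print(lines[1])  # -> 'beta' (fetched on demand by _read_block)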
Example #16
def generate_object(file: TextIOWrapper, filepath: str, typealgorithm: str):
    arr = file.readlines()
    root = ET.Element('root')
    map_ = ET.SubElement(root, 'map')
    height = ET.SubElement(map_, 'height')
    height.text = arr[1].split()[1]
    width = ET.SubElement(map_, 'width')
    width.text = arr[2].split()[1]
    startx = ET.SubElement(map_, 'startx')
    starty = ET.SubElement(map_, 'starty')
    finishx = ET.SubElement(map_, 'finishx')
    finishy = ET.SubElement(map_, 'finishy')
    grid = ET.SubElement(map_, 'grid')
    tree = ET.ElementTree(root)
    freeceils = []
    for i in range(4, len(arr)):
        tmp = []
        l = arr[i].strip()
        for j in range(len(l)):
            e = l[j]
            if e == '.':
                tmp.append('0')
                freeceils.append((j, i - 4))
            else:
                tmp.append('1')
        row = ET.SubElement(grid, 'row')
        row.text = ' '.join(tmp)
    randomsh(freeceils)
    startx.text, starty.text = str(freeceils[0][0]), str(freeceils[0][1])
    randomsh(freeceils)
    finishx.text, finishy.text = str(freeceils[0][0]), str(freeceils[0][1])
    algorithm = ET.SubElement(root, 'algorithm')
    searchtype = ET.SubElement(algorithm, 'searchtype')
    searchtype.text = typealgorithm
    numberofstates = ET.SubElement(algorithm, 'numberofstates')
    numberofstates.text = "30"
    radius = ET.SubElement(algorithm, 'radius')
    radius.text = "60"
    metrictype = ET.SubElement(algorithm, 'metrictype')
    metrictype.text = "chebyshev"
    allowdiagonal = ET.SubElement(algorithm, 'allowdiagonal')
    allowdiagonal.text = "true"
    cutcorners = ET.SubElement(algorithm, 'cutcorners')
    cutcorners.text = "true"
    allowsqueeze = ET.SubElement(algorithm, 'allowsqueeze')
    allowsqueeze.text = "true"
    hweight = ET.SubElement(algorithm, 'hweight')
    hweight.text = "5"
    tree.write(filepath, pretty_print=True)
Example #17
def load_instructions(self, in_stream: io.TextIOWrapper = sys.stdin) -> None:
  """ Loading instructions from an input stream
  :param in_stream: io.TextIOWrapper
    input stream
  :return: None
  """
  assert isinstance(in_stream, io.TextIOWrapper), \
    'Parameter type error: in_stream {}'.format(type(in_stream))
  self.instruction_memory.clear()
  for line in in_stream.readlines():
    # Sanitizing input
    line = line.split('#')[0].strip().replace(' ', '')
    if len(line) != 32:
      continue
    self.instruction_memory.append(line)
Example #18
def create_teams():
    file = TextIOWrapper(request.files["file"], encoding='utf-8')
    lis = file.readlines()

    def generate():
        try:
            from jobs import create_team
            for x in lis:
                create_team.queue(x.rsplit(","))
                yield "data: {}\n\n"
            yield "done"
        finally:
            print("CLOSED!!!!!")
            subprocess.run(["rq", "suspend"])

    return Response(generate(), status=201, mimetype='text/plain')
Example #19
def process_csv(f, user_id):
    # wrapper fn; takes a file and iterates through lines
    # calls process_line to turn line into li, then saves and updates category total

    from io import TextIOWrapper

    text_f = TextIOWrapper(f.file, encoding='utf-8')

    cols = get_cols()

    for line in text_f.readlines():

        li = process_line(line, cols, get_titles_dict(), user_id)
        if li is not None:
            li.save()
            update_category_price(li, li.category, None)
Example #20
    def parser(self, file, header_seperator=None):
        '''Load a document as a list of lines'''

        if not isinstance(file, TextIOWrapper):
            file = TextIOWrapper(file,
                                 encoding='ascii',
                                 errors='surrogateescape')
        document = file.readlines()
        document = ''.join(line.strip(' ') for line in document)
        self.rawdata = document
        self.__parseheader(header_seperator)
        self.__countlines()
        if self.lines_of_body == 0:
            print('Body text from %s is empty.' % (self.path))
            #warnings.warn(msg)
        return self
Example #21
def load_training_data(file: io.TextIOWrapper) -> Dict[int, RankLibRow]:
    """
    Create a list of ranklib objects where the feature vector is a sparse vector. It is up to
    the user to grok this sparse vector.
    :return: a dict mapping each query id to its list of RankLibRow objects
    """

    def marshall_ranklib(row: str) -> RankLibRow:
        """
        This nasty looking function just reads a feature file line and marshalls it into a 
        Ranklib object.
        :param row: 
        :return: 
        """
        target, qid, *rest = row.split()
        qid = int(qid.split(':')[-1])
        # extract the info, if one exists
        info = ''
        if rest[-1][0] == '#':
            info = rest[-1]
            info = info.replace('#', '').strip()
        # remove info item
        if info != '':
            rest = rest[:-2]
        # parse the features
        features = {}
        for pair in rest:
            feature_id, value = pair.split(':')
            v = float(value)
            if v > 0:
                features[int(feature_id)] = v
        return RankLibRow(target=target, qid=qid, features=features, info=info)

    ranklib_rows = {}

    # marshall the data into rank lib row objects
    bar = progressbar.ProgressBar()
    for line in bar(file.readlines()):
        ranklib = marshall_ranklib(line)
        if ranklib.qid not in ranklib_rows:
            ranklib_rows[ranklib.qid] = []
        ranklib_rows[ranklib.qid].append(ranklib)

    return ranklib_rows
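
A self-contained check, assuming the module's own imports (progressbar, typing) are in place; RankLibRow is stubbed with a namedtuple here since its real definition is not part of the listing.

import io
from collections import namedtuple

RankLibRow = namedtuple('RankLibRow', 'target qid features info')  # stand-in

sample = io.StringIO("1 qid:10 1:0.5 3:1.0\n0 qid:10 2:0.25\n")
rows = load_training_data(sample)
print(rows[10][0].features)  # -> {1: 0.5, 3: 1.0}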
Example #22
def fix_headerguard(filename):
    supposed = get_guard_name(filename)
    with open(filename, "r", encoding='utf-8', errors='ignore') as f:
        inlines = f.readlines()

    tmp = TextIOWrapper(BytesIO(), encoding="utf-8", errors="ignore")
    tmp.seek(0)

    guard_found = 0
    guard_name = ""
    ifstack = 0
    for line in inlines:
        if guard_found == 0:
            if line.startswith("#ifndef"):
                guard_found += 1
                guard_name = line[8:].rstrip()
                line = "#ifndef %s\n" % (supposed)
        elif guard_found == 1:
            if line.startswith("#define") and line[8:].rstrip() == guard_name:
                line = "#define %s\n" % (supposed)
                guard_found += 1
            else:
                break
        elif guard_found == 2:
            if line.startswith("#if"):
                ifstack += 1
            elif line.startswith("#endif"):
                if ifstack > 0:
                    ifstack -= 1
                else:
                    guard_found += 1
                    line = "#endif /* %s */\n" % supposed

        tmp.write(line)

    tmp.seek(0)
    if guard_found == 3:
        for line in difflib.unified_diff(inlines, tmp.readlines(),
                                         "%s" % filename, "%s" % filename):
            sys.stdout.write(line)
        return True
    else:
        print("%s: no / broken header guard" % filename, file=sys.stderr)
        return False
Example #23
def __get_json_from_file(fh: TextIOWrapper) -> dict:
    try:
        return json.load(fh)
    except Exception:
        try:
            fh.seek(0)
            contents = ""
            for line in fh.readlines():
                cleanedLine = line.split("//", 1)[0]
                if len(cleanedLine) > 0 and line.endswith(
                        "\n") and "\n" not in cleanedLine:
                    cleanedLine += "\n"
                contents += cleanedLine
            while "/*" in contents:
                preComment, postComment = contents.split("/*", 1)
                contents = preComment + postComment.split("*/", 1)[1]
            return json.loads(contents)
        except Exception as e:
            print(e)
            return {}
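
Because the helper takes no self, it can be called directly when pasted at module level (where the double-underscore name is not mangled); io.StringIO stands in for an open config file with // and /* */ comments, and json must be imported as in the original module.

import io
import json  # needed by the helper above

commented = io.StringIO(
    '{\n'
    '  "name": "demo", // inline comment\n'
    '  /* block\n'
    '     comment */\n'
    '  "port": 8080\n'
    '}\n'
)
print(__get_json_from_file(commented))  # -> {'name': 'demo', 'port': 8080}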
Example #24
    def create(self, validated_data):
        """
        1. Before creating, adds additional validation on existing Report.
        2. Adds DailyData instances from CSV file after report is created.

    (1) could also be done in the validate() method, but that would mean reading the file twice.

        :param validated_data:
        :return: Report
        """
        with validated_data['csv_file'].open() as csv_file:
            csv_file = TextIOWrapper(csv_file.file, encoding='utf-8')
            content = csv_file.readlines()
            last_line_n = len(content) - 1
            footer = content[last_line_n]
            report_id = footer.split('report id,')[1].split(',')[0]
            # Validate existing report id.
            if Report.objects.filter(id=report_id).exists():
                raise ValidationError({'csv_file': [validation_messages.REPORT_ALREADY_EXISTS]})

            validated_data['id'] = report_id
            # Preload job groups into dict so we have group_type:id pairs
            groups_by_type = {group['group_type']: group['id'] for group in JobGroup.objects.values('id', 'group_type')}
            with transaction.atomic():
                report = super().create(validated_data)
                # Parse the CSV and create related DailyData instances
                data_list = []
                csv_reader = csv.DictReader(content[:last_line_n], delimiter=',', quotechar='|')
                for row in csv_reader:
                    ready_data = {'report': report.id}
                    for key in row.keys():
                        # Transition csv column names to db field names
                        ready_data[DailyData.CSV_TO_DB_FIELDS[key]] = row[key]
                    ready_data['job_group'] = groups_by_type[ready_data['job_group']]
                    ready_data['employee'] = {'id': ready_data['employee'], 'job_group': ready_data['job_group']}

                    data_list.append(ready_data)
                dd_serializer = DailyDataSerializer(data=data_list, many=True)
                dd_serializer.is_valid(raise_exception=True)
                dd_serializer.save()
                return report
Example #25
def scanFile(f: TextIOWrapper):
    in_doc = False
    tabs = 0
    get_method = False
    buffer: List[str] = []
    total: Dict[str, List[str]] = {}
    for line in f.readlines():
        if get_method:
            if '@' in line:
                continue
            get_method = False
            name = getName(line)

            if name == '' and len(total) == 0:
                name = '-file-start'

            total[name] = buffer
            tabs = 0
            buffer = []
            continue

        if not in_doc and '"""doc' in line:
            in_doc = True
            # count the number of whitespaces to offset all lines in docs by
            tabs = len(re.match(r'\W*', line)[0]) - 3
            continue

        if not in_doc:
            continue

        if '"""' in line:
            in_doc = False
            get_method = True
            continue

        line = line[tabs:]
        buffer.append(line)

    return total
Example #26
def readlines(f: TextIOWrapper) -> pylist:
    return f.readlines()
Example #27
def readStripLines(f: TextIOWrapper):
    return map(stripTimestamp, f.readlines())
Example #28
def parse(f: io.TextIOWrapper) -> List[str]:
    "Parse a file with read permissions as list of words"
    return list(map(str.strip, f.readlines()))
Example #29
def cert_sign(uid: List[str], input_file: TextIOWrapper,
              output_file: TextIOWrapper) -> None:
    for key in API.cert_sign_multiple(
            _parse_keys_from_list_of_lines(input_file.readlines()), uid):
        output_file.write(str(key))
Example #30
def cert_import(input_file: TextIOWrapper) -> None:
    for key in _parse_keys_from_list_of_lines(input_file.readlines()):
        API.cert_import_single(key)
Example #31
class FileObjectPosix(object):
    """
    A file-like object that operates on non-blocking files but
    provides a synchronous, cooperative interface.

    .. caution::
         This object is most effective wrapping files that can be used appropriately
         with :func:`select.select` such as sockets and pipes.

         In general, on most platforms, operations on regular files
         (e.g., ``open('/etc/hosts')``) are considered non-blocking
         already, even though they can take some time to complete as
         data is copied to the kernel and flushed to disk (this time
         is relatively bounded compared to sockets or pipes, though).
         A :func:`~os.read` or :func:`~os.write` call on such a file
         will still effectively block for some small period of time.
         Therefore, wrapping this class around a regular file is
         unlikely to make IO gevent-friendly: reading or writing large
         amounts of data could still block the event loop.

         If you'll be working with regular files and doing IO in large
         chunks, you may consider using
         :class:`~gevent.fileobject.FileObjectThread` or
         :func:`~gevent.os.tp_read` and :func:`~gevent.os.tp_write` to bypass this
         concern.

    .. note::
         Random read/write (e.g., ``mode='rwb'``) is not supported.
         For that, use :class:`io.BufferedRWPair` around two instances of this
         class.

    .. tip::
         Although this object provides a :meth:`fileno` method and
         so can itself be passed to :func:`fcntl.fcntl`, setting the
         :data:`os.O_NONBLOCK` flag will have no effect; likewise, removing
         that flag will cause this object to no longer be cooperative.
    """

    #: platform specific default for the *bufsize* parameter
    default_bufsize = io.DEFAULT_BUFFER_SIZE

    def __init__(self, fobj, mode='rb', bufsize=-1, close=True):
        """
        :keyword fobj: Either an integer fileno, or an object supporting the
            usual :meth:`socket.fileno` method. The file *will* be
            put in non-blocking mode using :func:`gevent.os.make_nonblocking`.
        :keyword str mode: The manner of access to the file, one of "rb", "rU" or "wb"
            (where the "b" or "U" can be omitted).
            If "U" is part of the mode, IO will be done on text, otherwise bytes.
        :keyword int bufsize: If given, the size of the buffer to use. The default
            value means to use a platform-specific default, and a value of 0 is translated
            to a value of 1. Other values are interpreted as for the :mod:`io` package.
            Buffering is ignored in text mode.
        """
        if isinstance(fobj, int):
            fileno = fobj
            fobj = None
        else:
            fileno = fobj.fileno()
        if not isinstance(fileno, int):
            raise TypeError('fileno must be int: %r' % fileno)

        orig_mode = mode
        mode = (mode or 'rb').replace('b', '')
        if 'U' in mode:
            self._translate = True
            mode = mode.replace('U', '')
        else:
            self._translate = False

        if len(mode) != 1 and mode not in 'rw':  # pragma: no cover
            # Python 3 builtin `open` raises a ValueError for invalid modes;
            # Python 2 ignores it. In the past, we raised an AssertionError, if __debug__ was
            # enabled (which it usually was). Match Python 3 because it makes more sense
            # and because __debug__ may not be enabled.
            # NOTE: This is preventing a mode like 'rwb' for binary random access;
            # that code was never tested and was explicitly marked as "not used"
            raise ValueError('mode can only be [rb, rU, wb], not %r' %
                             (orig_mode, ))

        self._fobj = fobj
        self._closed = False
        self._close = close

        self.fileio = GreenFileDescriptorIO(fileno, mode, closefd=close)

        if bufsize < 0 or bufsize == 1:
            bufsize = self.default_bufsize
        elif bufsize == 0:
            bufsize = 1

        if mode == 'r':
            self.io = BufferedReader(self.fileio, bufsize)
        else:
            assert mode == 'w'
            self.io = BufferedWriter(self.fileio, bufsize)
        #else: # QQQ: not used, not reachable
        #
        #    self.io = BufferedRandom(self.fileio, bufsize)

        if self._translate:
            self.io = TextIOWrapper(self.io)

    @property
    def closed(self):
        """True if the file is closed"""
        return self._closed

    def close(self):
        if self._closed:
            # make sure close() is only run once when called concurrently
            return
        self._closed = True
        try:
            self.io.close()
            self.fileio.close()
        finally:
            self._fobj = None

    def flush(self):
        self.io.flush()

    def fileno(self):
        return self.io.fileno()

    def write(self, data):
        self.io.write(data)

    def writelines(self, lines):
        self.io.writelines(lines)

    def read(self, size=-1):
        return self.io.read(size)

    def readline(self, size=-1):
        return self.io.readline(size)

    def readlines(self, sizehint=0):
        return self.io.readlines(sizehint)

    def readable(self):
        return self.io.readable()

    def writable(self):
        return self.io.writable()

    def seek(self, *args, **kwargs):
        return self.io.seek(*args, **kwargs)

    def seekable(self):
        return self.io.seekable()

    def tell(self):
        return self.io.tell()

    def truncate(self, size=None):
        return self.io.truncate(size)

    def __iter__(self):
        return self.io

    def __getattr__(self, name):
        # XXX: Should this really be _fobj, or self.io?
        # _fobj can easily be None but io never is
        return getattr(self._fobj, name)
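
A minimal usage sketch on a POSIX pipe, assuming gevent is installed and exposes this class as gevent.fileobject.FileObjectPosix with the constructor shown above:

import os
from gevent.fileobject import FileObjectPosix

r, w = os.pipe()
reader = FileObjectPosix(r, 'rb')
writer = FileObjectPosix(w, 'wb')
writer.write(b'first line\nsecond line\n')
writer.close()              # flushes the buffer and closes the write end
print(reader.readlines())   # -> [b'first line\n', b'second line\n']
reader.close()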
Example #32
class FileObjectPosix:
    default_bufsize = io.DEFAULT_BUFFER_SIZE

    def __init__(self, fobj, mode='rb', bufsize=-1, close=True):
        if isinstance(fobj, int):
            fileno = fobj
            fobj = None
        else:
            fileno = fobj.fileno()
        if not isinstance(fileno, int):
            raise TypeError('fileno must be int: %r' % fileno)

        mode = (mode or 'rb').replace('b', '')
        if 'U' in mode:
            self._translate = True
            mode = mode.replace('U', '')
        else:
            self._translate = False
        assert len(mode) == 1, 'mode can only be [rb, rU, wb]'

        self._fobj = fobj
        self._closed = False
        self._close = close

        self.fileio = GreenFileDescriptorIO(fileno, mode, closefd=close)

        if bufsize < 0:
            bufsize = self.default_bufsize
        if mode == 'r':
            if bufsize == 0:
                bufsize = 1
            elif bufsize == 1:
                bufsize = self.default_bufsize
            self.io = BufferedReader(self.fileio, bufsize)
        elif mode == 'w':
            if bufsize == 0:
                bufsize = 1
            elif bufsize == 1:
                bufsize = self.default_bufsize
            self.io = BufferedWriter(self.fileio, bufsize)
        else:
            # QQQ: not used
            self.io = BufferedRandom(self.fileio, bufsize)
        if self._translate:
            self.io = TextIOWrapper(self.io)

    @property
    def closed(self):
        """True if the file is cloed"""
        return self._closed

    def close(self):
        if self._closed:
            # make sure close() is only run once when called concurrently
            return
        self._closed = True
        try:
            self.io.close()
            self.fileio.close()
        finally:
            self._fobj = None

    def flush(self):
        self.io.flush()

    def fileno(self):
        return self.io.fileno()

    def write(self, data):
        self.io.write(data)

    def writelines(self, lines):
        self.io.writelines(lines)

    def read(self, size=-1):
        return self.io.read(size)

    def readline(self, size=-1):
        return self.io.readline(size)

    def readlines(self, sizehint=0):
        return self.io.readlines(sizehint)

    def __iter__(self):
        return self.io
Example #33
class FileObjectPosix(object):
    """
    A file-like object that operates on non-blocking files but
    provides a synchronous, cooperative interface.

    .. caution::
         This object is most effective wrapping files that can be used appropriately
         with :func:`select.select` such as sockets and pipes.

         In general, on most platforms, operations on regular files
         (e.g., ``open('/etc/hosts')``) are considered non-blocking
         already, even though they can take some time to complete as
         data is copied to the kernel and flushed to disk (this time
         is relatively bounded compared to sockets or pipes, though).
         A :func:`~os.read` or :func:`~os.write` call on such a file
         will still effectively block for some small period of time.
         Therefore, wrapping this class around a regular file is
         unlikely to make IO gevent-friendly: reading or writing large
         amounts of data could still block the event loop.

         If you'll be working with regular files and doing IO in large
         chunks, you may consider using
         :class:`~gevent.fileobject.FileObjectThread` or
         :func:`~gevent.os.tp_read` and :func:`~gevent.os.tp_write` to bypass this
         concern.

    .. note::
         Random read/write (e.g., ``mode='rwb'``) is not supported.
         For that, use :class:`io.BufferedRWPair` around two instances of this
         class.

    .. tip::
         Although this object provides a :meth:`fileno` method and
         so can itself be passed to :func:`fcntl.fcntl`, setting the
         :data:`os.O_NONBLOCK` flag will have no effect; however, removing
         that flag will cause this object to no longer be cooperative.

    .. versionchanged:: 1.1
       Now uses the :mod:`io` package internally. Under Python 2, previously
       used the undocumented class :class:`socket._fileobject`. This provides
       better file-like semantics (and portability to Python 3).
    """

    #: platform specific default for the *bufsize* parameter
    default_bufsize = io.DEFAULT_BUFFER_SIZE

    def __init__(self, fobj, mode='rb', bufsize=-1, close=True):
        """
        :keyword fobj: Either an integer fileno, or an object supporting the
            usual :meth:`socket.fileno` method. The file *will* be
            put in non-blocking mode using :func:`gevent.os.make_nonblocking`.
        :keyword str mode: The manner of access to the file, one of "rb", "rU" or "wb"
            (where the "b" or "U" can be omitted).
            If "U" is part of the mode, IO will be done on text, otherwise bytes.
        :keyword int bufsize: If given, the size of the buffer to use. The default
            value means to use a platform-specific default, and a value of 0 is translated
            to a value of 1. Other values are interpreted as for the :mod:`io` package.
            Buffering is ignored in text mode.
        """
        if isinstance(fobj, int):
            fileno = fobj
            fobj = None
        else:
            fileno = fobj.fileno()
        if not isinstance(fileno, int):
            raise TypeError('fileno must be int: %r' % fileno)

        orig_mode = mode
        mode = (mode or 'rb').replace('b', '')
        if 'U' in mode:
            self._translate = True
            mode = mode.replace('U', '')
        else:
            self._translate = False

        if len(mode) != 1 and mode not in 'rw': # pragma: no cover
            # Python 3 builtin `open` raises a ValueError for invalid modes;
            # Python 2 ignores it. In the past, we raised an AssertionError, if __debug__ was
            # enabled (which it usually was). Match Python 3 because it makes more sense
            # and because __debug__ may not be enabled.
            # NOTE: This is preventing a mode like 'rwb' for binary random access;
            # that code was never tested and was explicitly marked as "not used"
            raise ValueError('mode can only be [rb, rU, wb], not %r' % (orig_mode,))

        self._fobj = fobj
        self._closed = False
        self._close = close

        self.fileio = GreenFileDescriptorIO(fileno, mode, closefd=close)

        if bufsize < 0 or bufsize == 1:
            bufsize = self.default_bufsize
        elif bufsize == 0:
            bufsize = 1

        if mode == 'r':
            self.io = BufferedReader(self.fileio, bufsize)
        else:
            assert mode == 'w'
            self.io = BufferedWriter(self.fileio, bufsize)
        #else: # QQQ: not used, not reachable
        #
        #    self.io = BufferedRandom(self.fileio, bufsize)

        if self._translate:
            self.io = TextIOWrapper(self.io)

    @property
    def closed(self):
        """True if the file is closed"""
        return self._closed

    def close(self):
        if self._closed:
            # make sure close() is only run once when called concurrently
            return
        self._closed = True
        try:
            self.io.close()
            self.fileio.close()
        finally:
            self._fobj = None

    def flush(self):
        self.io.flush()

    def fileno(self):
        return self.io.fileno()

    def write(self, data):
        self.io.write(data)

    def writelines(self, lines):
        self.io.writelines(lines)

    def read(self, size=-1):
        return self.io.read(size)

    def readline(self, size=-1):
        return self.io.readline(size)

    def readlines(self, sizehint=0):
        return self.io.readlines(sizehint)

    def readable(self):
        """
        .. versionadded:: 1.1b2
        """
        return self.io.readable()

    def writable(self):
        """
        .. versionadded:: 1.1b2
        """
        return self.io.writable()

    def seek(self, *args, **kwargs):
        return self.io.seek(*args, **kwargs)

    def seekable(self):
        return self.io.seekable()

    def tell(self):
        return self.io.tell()

    def truncate(self, size=None):
        return self.io.truncate(size)

    def __iter__(self):
        return self.io

    def __getattr__(self, name):
        # XXX: Should this really be _fobj, or self.io?
        # _fobj can easily be None but io never is
        return getattr(self._fobj, name)
Example #34
class FileObjectPosix(object):
    """
    A file-like object that operates on non-blocking files.

    .. seealso:: :func:`gevent.os.make_nonblocking`
    """
    default_bufsize = io.DEFAULT_BUFFER_SIZE

    def __init__(self, fobj, mode='rb', bufsize=-1, close=True):
        """
        :param fobj: Either an integer fileno, or an object supporting the
            usual :meth:`socket.fileno` method. The file will be
            put in non-blocking mode.
        """
        if isinstance(fobj, int):
            fileno = fobj
            fobj = None
        else:
            fileno = fobj.fileno()
        if not isinstance(fileno, int):
            raise TypeError('fileno must be int: %r' % fileno)

        mode = (mode or 'rb').replace('b', '')
        if 'U' in mode:
            self._translate = True
            mode = mode.replace('U', '')
        else:
            self._translate = False
        assert len(mode) == 1, 'mode can only be [rb, rU, wb]'

        self._fobj = fobj
        self._closed = False
        self._close = close

        self.fileio = GreenFileDescriptorIO(fileno, mode, closefd=close)

        if bufsize < 0:
            bufsize = self.default_bufsize
        if mode == 'r':
            if bufsize == 0:
                bufsize = 1
            elif bufsize == 1:
                bufsize = self.default_bufsize
            self.io = BufferedReader(self.fileio, bufsize)
        elif mode == 'w':
            if bufsize == 0:
                bufsize = 1
            elif bufsize == 1:
                bufsize = self.default_bufsize
            self.io = BufferedWriter(self.fileio, bufsize)
        else:
            # QQQ: not used
            self.io = BufferedRandom(self.fileio, bufsize)
        if self._translate:
            self.io = TextIOWrapper(self.io)

    @property
    def closed(self):
        """True if the file is cloed"""
        return self._closed

    def close(self):
        if self._closed:
            # make sure close() is only run once when called concurrently
            return
        self._closed = True
        try:
            self.io.close()
            self.fileio.close()
        finally:
            self._fobj = None

    def flush(self):
        self.io.flush()

    def fileno(self):
        return self.io.fileno()

    def write(self, data):
        self.io.write(data)

    def writelines(self, lines):
        self.io.writelines(lines)

    def read(self, size=-1):
        return self.io.read(size)

    def readline(self, size=-1):
        return self.io.readline(size)

    def readlines(self, sizehint=0):
        return self.io.readlines(sizehint)

    def seek(self, *args, **kwargs):
        return self.io.seek(*args, **kwargs)

    def seekable(self):
        return self.io.seekable()

    def tell(self):
        return self.io.tell()

    def truncate(self, size=None):
        return self.io.truncate(size)

    def __iter__(self):
        return self.io

    def __getattr__(self, name):
        return getattr(self._fobj, name)
Example #35
class FileObjectPosix(object):
    """
    A file-like object that operates on non-blocking files.

    .. seealso:: :func:`gevent.os.make_nonblocking`
    """
    default_bufsize = io.DEFAULT_BUFFER_SIZE

    def __init__(self, fobj, mode='rb', bufsize=-1, close=True):
        """
        :param fobj: Either an integer fileno, or an object supporting the
            usual :meth:`socket.fileno` method. The file will be
            put in non-blocking mode.
        """
        if isinstance(fobj, int):
            fileno = fobj
            fobj = None
        else:
            fileno = fobj.fileno()
        if not isinstance(fileno, int):
            raise TypeError('fileno must be int: %r' % fileno)

        orig_mode = mode
        mode = (mode or 'rb').replace('b', '')
        if 'U' in mode:
            self._translate = True
            mode = mode.replace('U', '')
        else:
            self._translate = False
        if len(mode) != 1:
            # Python 3 builtin `open` raises a ValueError for invalid modes;
            # Python 2 ignores it. In the past, we raised an AssertionError, if __debug__ was
            # enabled (which it usually was). Match Python 3 because it makes more sense
            # and because __debug__ may not be enabled
            raise ValueError('mode can only be [rb, rU, wb], not %r' % (orig_mode,))

        self._fobj = fobj
        self._closed = False
        self._close = close

        self.fileio = GreenFileDescriptorIO(fileno, mode, closefd=close)

        if bufsize < 0:
            bufsize = self.default_bufsize
        if mode == 'r':
            if bufsize == 0:
                bufsize = 1
            elif bufsize == 1:
                bufsize = self.default_bufsize
            self.io = BufferedReader(self.fileio, bufsize)
        elif mode == 'w':
            if bufsize == 0:
                bufsize = 1
            elif bufsize == 1:
                bufsize = self.default_bufsize
            self.io = BufferedWriter(self.fileio, bufsize)
        else:
            # QQQ: not used
            self.io = BufferedRandom(self.fileio, bufsize)
        if self._translate:
            self.io = TextIOWrapper(self.io)

    @property
    def closed(self):
        """True if the file is cloed"""
        return self._closed

    def close(self):
        if self._closed:
            # make sure close() is only run once when called concurrently
            return
        self._closed = True
        try:
            self.io.close()
            self.fileio.close()
        finally:
            self._fobj = None

    def flush(self):
        self.io.flush()

    def fileno(self):
        return self.io.fileno()

    def write(self, data):
        self.io.write(data)

    def writelines(self, lines):
        self.io.writelines(lines)

    def read(self, size=-1):
        return self.io.read(size)

    def readline(self, size=-1):
        return self.io.readline(size)

    def readlines(self, sizehint=0):
        return self.io.readlines(sizehint)

    def readable(self):
        return self.io.readable()

    def writable(self):
        return self.io.writable()

    def seek(self, *args, **kwargs):
        return self.io.seek(*args, **kwargs)

    def seekable(self):
        return self.io.seekable()

    def tell(self):
        return self.io.tell()

    def truncate(self, size=None):
        return self.io.truncate(size)

    def __iter__(self):
        return self.io

    def __getattr__(self, name):
        # XXX: Should this really be _fobj, or self.io?
        # _fobj can easily be None but io never is
        return getattr(self._fobj, name)
Example #36
class FileObjectPosix(object):
    """
    A file-like object that operates on non-blocking files but
    provides a synchronous, cooperative interface.

    .. note::
         Random read/write (e.g., ``mode='rwb'``) is not supported.
         For that, use :class:`io.BufferedRWPair` around two instances of this
         class.

    .. tip::
         Although this object provides a :meth:`fileno` method and
         so can itself be passed to :func:`fcntl.fcntl`, setting the
         :data:`os.O_NONBLOCK` flag will have no effect; likewise, removing
         that flag will cause this object to no longer be cooperative.
    """

    #: platform specific default for the *bufsize* parameter
    default_bufsize = io.DEFAULT_BUFFER_SIZE

    def __init__(self, fobj, mode='rb', bufsize=-1, close=True):
        """
        :keyword fobj: Either an integer fileno, or an object supporting the
            usual :meth:`socket.fileno` method. The file *will* be
            put in non-blocking mode using :func:`gevent.os.make_nonblocking`.
        :keyword str mode: The manner of access to the file, one of "rb", "rU" or "wb"
            (where the "b" or "U" can be omitted).
            If "U" is part of the mode, IO will be done on text, otherwise bytes.
        :keyword int bufsize: If given, the size of the buffer to use. The default
            value means to use a platform-specific default, and a value of 0 is translated
            to a value of 1. Other values are interpreted as for the :mod:`io` package.
            Buffering is ignored in text mode.
        """
        if isinstance(fobj, int):
            fileno = fobj
            fobj = None
        else:
            fileno = fobj.fileno()
        if not isinstance(fileno, int):
            raise TypeError('fileno must be int: %r' % fileno)

        orig_mode = mode
        mode = (mode or 'rb').replace('b', '')
        if 'U' in mode:
            self._translate = True
            mode = mode.replace('U', '')
        else:
            self._translate = False

        if len(mode) != 1 and mode not in 'rw': # pragma: no cover
            # Python 3 builtin `open` raises a ValueError for invalid modes;
            # Python 2 ignores it. In the past, we raised an AssertionError, if __debug__ was
            # enabled (which it usually was). Match Python 3 because it makes more sense
            # and because __debug__ may not be enabled.
            # NOTE: This is preventing a mode like 'rwb' for binary random access;
            # that code was never tested and was explicitly marked as "not used"
            raise ValueError('mode can only be [rb, rU, wb], not %r' % (orig_mode,))

        self._fobj = fobj
        self._closed = False
        self._close = close

        self.fileio = GreenFileDescriptorIO(fileno, mode, closefd=close)

        if bufsize < 0 or bufsize == 1:
            bufsize = self.default_bufsize
        elif bufsize == 0:
            bufsize = 1

        if mode == 'r':
            self.io = BufferedReader(self.fileio, bufsize)
        else:
            assert mode == 'w'
            self.io = BufferedWriter(self.fileio, bufsize)
        #else: # QQQ: not used, not reachable
        #
        #    self.io = BufferedRandom(self.fileio, bufsize)

        if self._translate:
            self.io = TextIOWrapper(self.io)

    @property
    def closed(self):
        """True if the file is cloed"""
        return self._closed

    def close(self):
        if self._closed:
            # make sure close() is only run once when called concurrently
            return
        self._closed = True
        try:
            self.io.close()
            self.fileio.close()
        finally:
            self._fobj = None

    def flush(self):
        self.io.flush()

    def fileno(self):
        return self.io.fileno()

    def write(self, data):
        self.io.write(data)

    def writelines(self, lines):
        self.io.writelines(lines)

    def read(self, size=-1):
        return self.io.read(size)

    def readline(self, size=-1):
        return self.io.readline(size)

    def readlines(self, sizehint=0):
        return self.io.readlines(sizehint)

    def readable(self):
        return self.io.readable()

    def writable(self):
        return self.io.writable()

    def seek(self, *args, **kwargs):
        return self.io.seek(*args, **kwargs)

    def seekable(self):
        return self.io.seekable()

    def tell(self):
        return self.io.tell()

    def truncate(self, size=None):
        return self.io.truncate(size)

    def __iter__(self):
        return self.io

    def __getattr__(self, name):
        # XXX: Should this really be _fobj, or self.io?
        # _fobj can easily be None but io never is
        return getattr(self._fobj, name)