class UnicodeWriter:
    """A CSV writer which will write rows to CSV file "f",
    which is encoded in the given encoding.

    The standard `csv` module in Python 2 cannot handle Unicode directly, so we
    work around it: each row is first encoded to plain UTF-8 byte strings and
    written to an in-memory buffer (`StringIO`), then the resulting CSV data is
    decoded back to Unicode and written to the target file.
    """

    def __init__(self, f, dialect=csv.excel, encoding="utf-8"):
        self.buffer = StringIO()
        self.writer = csv.writer(self.buffer, dialect=dialect)
        self.target_stream = f

    def writerow(self, row):
        # Row elements may be Unicode strings; encode each one to a plain
        # UTF-8 byte string before handing it to the csv module.
        encoded_row = [s.encode("utf-8") for s in row]
        # Write encoded row with the standard CSV writer.
        self.writer.writerow(encoded_row)
        # Valid CSV row is now in the memory. Get it ...
        data = self.buffer.getvalue()
        # and convert back into Unicode.
        data = data.decode("utf-8")
        # Now we can easily write valid CSV row in Unicode
        # into the target file.
        self.target_stream.write(data)
        # Empty the buffer.
        self.buffer.truncate(0)
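
A minimal usage sketch for the writer above (assuming Python 2). Since writerow() hands decoded Unicode to the target stream, the target should encode on write, e.g. a file opened with codecs.open(); the file name is illustrative only.

import codecs

with codecs.open('people.csv', 'w', encoding='utf-8') as target:
    writer = UnicodeWriter(target)
    writer.writerow([u'name', u'city'])
    writer.writerow([u'Jos\xe9', u'Z\xfcrich'])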
Example #2
    def __patch_jenkins_config( self ):
        """
        A context manager that retrieves the Jenkins configuration XML, deserializes it into an
        XML ElementTree, yields the XML tree, then serializes the tree and saves it back to
        Jenkins.
        """
        config_file = StringIO( )
        if run( 'test -f ~/config.xml', quiet=True ).succeeded:
            fresh_instance = False
            get( remote_path='~/config.xml', local_path=config_file )
        else:
            # Get the in-memory config as the on-disk one may be absent on a fresh instance.
            # Luckily, a fresh instance won't have any configured security.
            fresh_instance = True
            config_url = 'http://localhost:8080/computer/(master)/config.xml'
            with hide( 'output' ):
                config_file.write( run( 'curl "%s"' % config_url ) )
        config_file.seek( 0 )
        config = ElementTree.parse( config_file )

        yield config

        config_file.truncate( 0 )
        config.write( config_file, encoding='utf-8', xml_declaration=True )
        if fresh_instance:
            self.__service_jenkins( 'stop' )
        try:
            put( local_path=config_file, remote_path='~/config.xml' )
        finally:
            if fresh_instance:
                self.__service_jenkins( 'start' )
            else:
                log.warn( 'Visit the Jenkins web UI and click Manage Jenkins - Reload '
                          'Configuration from Disk' )
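
A hypothetical caller sketch, assuming the method above is wrapped with contextlib.contextmanager (it yields exactly once) and is invoked from another method of the same class; the <label> element is illustrative. Changes made to the yielded tree are written back to Jenkins when the block exits.

with self.__patch_jenkins_config( ) as config:
    # e.g. relabel the master node before attaching Docker-based slaves
    config.getroot( ).find( 'label' ).text = 'docker linux'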
Example #3
def test_str_round():
    # from report by Angus McMorland on mailing list 3 May 2010
    stream = StringIO()
    in_arr = np.array(['Hello', 'Foob'])
    out_arr = np.array(['Hello', 'Foob '])
    savemat_future(stream, dict(a=in_arr))
    res = loadmat(stream)
    # resulted in [u'HloolFoa', u'elWrdobr']
    yield assert_array_equal, res['a'], out_arr
    stream.truncate(0)
    # Make Fortran ordered version of string
    in_str = in_arr.tostring(order='F')
    in_from_str = np.ndarray(shape=in_arr.shape,
                             dtype=in_arr.dtype,
                             order='F',
                             buffer=in_str)
    savemat_future(stream, dict(a=in_from_str))
    res = loadmat(stream)
    yield assert_array_equal, res['a'], out_arr
    # unicode save did lead to buffer too small error
    stream.truncate(0)
    in_arr_u = in_arr.astype('U')
    out_arr_u = out_arr.astype('U')
    savemat_future(stream, {'a': in_arr_u})
    res = loadmat(stream)
    yield assert_array_equal, res['a'], out_arr_u
Example #4
    def get(self, request):
        report = MeaningfulConsentReport()

        # setup zip file for the key & value file
        response = HttpResponse(content_type='application/zip')

        disposition = 'attachment; filename=meaningfulconsent.zip'
        response['Content-Disposition'] = disposition

        z = ZipFile(response, 'w')

        output = StringIO()  # temp output file
        writer = csv.writer(output)

        # report on all hierarchies
        hierarchies = Hierarchy.objects.all()

        # Key file
        for row in report.metadata(hierarchies):
            writer.writerow(row)

        z.writestr("meaningfulconsent_key.csv", output.getvalue())

        # Results file
        output.truncate(0)
        output.seek(0)

        writer = csv.writer(output)

        for row in report.values(hierarchies):
            writer.writerow(row)

        z.writestr("meaningfulconsent_values.csv", output.getvalue())

        return response
Example #5
def test_read_opts():
    # tests if read is seeing option sets, at initialization and after
    # initialization
    arr = np.arange(6).reshape(1, 6)
    stream = StringIO()
    savemat_future(stream, {'a': arr})
    rdr = MatFile5Reader_future(stream)
    back_dict = rdr.get_variables()
    rarr = back_dict['a']
    yield assert_array_equal, rarr, arr
    rdr = MatFile5Reader_future(stream, squeeze_me=True)
    yield assert_array_equal, rdr.get_variables()['a'], arr.reshape((6, ))
    rdr.squeeze_me = False
    yield assert_array_equal, rarr, arr
    rdr = MatFile5Reader_future(stream, byte_order=boc.native_code)
    yield assert_array_equal, rdr.get_variables()['a'], arr
    # inverted byte code leads to error on read because of swapped
    # header etc
    rdr = MatFile5Reader_future(stream, byte_order=boc.swapped_code)
    yield assert_raises, Exception, rdr.get_variables
    rdr.byte_order = boc.native_code
    yield assert_array_equal, rdr.get_variables()['a'], arr
    arr = np.array(['a string'])
    stream.truncate(0)
    savemat_future(stream, {'a': arr})
    rdr = MatFile5Reader_future(stream)
    yield assert_array_equal, rdr.get_variables()['a'], arr
    rdr = MatFile5Reader_future(stream, chars_as_strings=False)
    carr = np.atleast_2d(np.array(list(arr.item()), dtype='U1'))
    yield assert_array_equal, rdr.get_variables()['a'], carr
    rdr.chars_as_strings = True
    yield assert_array_equal, rdr.get_variables()['a'], arr
Example #6
class UnicodeWriter:
    """
    A CSV writer which will write rows to CSV file "f",
    which is encoded in the given encoding.
    Copied from https://docs.python.org/2/library/csv.html#examples
    """

    def __init__(self, f, dialect=csv.excel, encoding='utf-8', **kwds):
        # Redirect output to a queue
        self.queue = StringIO()
        self.writer = csv.writer(self.queue, dialect=dialect, **kwds)
        self.stream = f
        self.encoder = codecs.getincrementalencoder(encoding)()

    def writerow(self, row):
        self.writer.writerow([s.encode('utf-8') for s in row])
        # Fetch UTF-8 output from the queue ...
        data = self.queue.getvalue()
        data = data.decode('utf-8')
        # ... and reencode it into the target encoding
        data = self.encoder.encode(data)
        # write to the target stream
        self.stream.write(data)
        # empty queue
        self.queue.truncate(0)

    def writerows(self, rows):
        for row in rows:
            self.writerow(row)
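
A hypothetical usage sketch (Python 2). Unlike the first example, this writer re-encodes to the target encoding itself, so the file is opened in binary mode; the incremental encoder keeps stateful encodings such as UTF-16 (one BOM, emitted once) consistent across rows.

with open('people.csv', 'wb') as target:
    writer = UnicodeWriter(target, encoding='utf-16')
    writer.writerows([[u'name', u'city'],
                      [u'Jos\xe9', u'Z\xfcrich']])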
Example #7
def get_lines(data, filename=OUTPUT_PATH):
    output = StringIO()
    text = [
        ";;; generated by: {} at {}".format(
            'csirtg-indicator',
            time.strftime('%Y-%m-%dT%H:%M:%S %Z')), ';RPZ DATA!', '$TTL    1',
        '@       IN      SOA     localhost. root.localhost. (',
        '                     {}         ; Serial'.format(int(time.time())),
        '                         604800         ; Refresh',
        '                          86400         ; Retry',
        '                        2419200         ; Expire',
        '                          86400 )       ; Negative Cache TTL', ';',
        '@       IN      NS      localhost.'
    ]

    for t in text:
        output.write(t)
        yield str(output.getvalue())

        if isinstance(output, StringIO):
            output.truncate(0)

    for i in data:
        if isinstance(i, Indicator):
            i = i.__dict__()

        if i['itype'] != 'fqdn':
            continue

        output.write('{}        CNAME .'.format(i['indicator']))
        output.write('*.{}        CNAME .'.format(i['indicator']))
        yield output.getvalue()

        if isinstance(output, StringIO):
            output.truncate(0)
Example #8
class MockLogger(object):
    def __init__(self):
        self.local = local
        self.log_file = StringIO()
        self.logger = None

    def get_logger(self, name):
        if not self.logger:
            logger = logging.getLogger(name)
            logger.setLevel(1)  # capture everything
            handler = logging.StreamHandler(self.log_file)
            handler.setFormatter(
                logging.Formatter('%(name)s:%(levelname)s:%(message)s'))
            logger.addHandler(handler)
            self.logger = LunrLoggerAdapter(logger)
        return self.logger

    def pop_log_messages(self):
        rv = self.log_file.getvalue()
        self.log_file.seek(0)
        self.log_file.truncate()
        return rv

    def __getattribute__(self, name):
        try:
            return object.__getattribute__(self, name)
        except AttributeError:
            # prevent recursive lookup
            logger = object.__getattribute__(self, 'logger')
            if hasattr(logger, name):
                return getattr(logger, name)
            raise
Example #9
    def test_validate_with_obsolete_str(self):
        """
        Test the handling for the obsolete 'str' option type (it's
        'string' now). Remove support for it after a while, and take
        this test with it.
        """
        config = yaml.load(sample_configuration)
        config["options"]["title"]["type"] = "str"
        obsolete_config = yaml.dump(config)

        sio = StringIO()
        self.patch(sys, "stderr", sio)

        self.config.parse(obsolete_config)
        data = self.config.validate({"title": "Helpful Title"})
        self.assertEqual(data["title"], "Helpful Title")
        self.assertIn("obsolete 'str'", sio.getvalue())

        # Trying it again, it should not warn since we don't want
        # to pester the charm author.
        sio.truncate(0)
        self.config.parse(obsolete_config)
        data = self.config.validate({"title": "Helpful Title"})
        self.assertEqual(data["title"], "Helpful Title")
        self.assertEqual(sio.getvalue(), "")
Example #10
  def test_01displayLogs(self):
    """
    Test the display of the logs according to the value of the boolean in the method.
    """
    # Enabled
    gLogger.enableLogsFromExternalLibs()

    # modify the output to capture logs of the root logger
    bufferRoot = StringIO()
    logging.getLogger().handlers[0].stream = bufferRoot

    logging.getLogger().info("message")
    logstring1 = cleaningLog(bufferRoot.getvalue())

    self.assertEqual("UTCExternalLibrary/rootINFO:message\n", logstring1)
    bufferRoot.truncate(0)

    # this is a direct child of root, as the logger in DIRAC
    logging.getLogger("sublog").info("message")
    logstring1 = cleaningLog(bufferRoot.getvalue())

    self.assertEqual("UTCExternalLibrary/sublogINFO:message\n", logstring1)
    bufferRoot.truncate(0)

    # Disabled
    gLogger.disableLogsFromExternalLibs()

    logging.getLogger().info("message")
    # this is a direct child of root, as the logger in DIRAC
    logging.getLogger("sublog").info("message")

    self.assertEqual("", bufferRoot.getvalue())
Example #11
class OutputLogger(object):
    def __init__(self):
        self.old_stdout = sys.stdout
        self.old_stderr = sys.stderr
        self.new_stdout = StringIO()
        self.new_stderr = StringIO()

        sys.stdout = self.new_stdout
        sys.stderr = self.new_stderr

    def __enter__(self):
        sys.stdout = self.new_stdout
        sys.stderr = self.new_stderr
        self.new_stdout.truncate(0)
        self.new_stderr.truncate(0)
        return self

    def __exit__(self, type, value, traceback):
        sys.stdout = self.old_stdout
        sys.stderr = self.old_stderr

    def getStdout(self):
        self.new_stdout.seek(0)
        return self.new_stdout.read()

    def getStderr(self):
        self.new_stderr.seek(0)
        return self.new_stderr.read()
Example #12
    def decrypt(self, cipherbytes):
        """Decrypts a string using AES-256."""
        fin = StringIO(cipherbytes)
        fout = StringIO()

        try:
            input_size = struct.unpack("<Q", fin.read(struct.calcsize("Q")))[0]
            iv = fin.read(16)
            aes = AES.new(self._key, AES.MODE_CBC, iv)

            while True:
                chunk = fin.read(self.chunksize)
                if len(chunk) == 0:
                    break  # done
                fout.write(aes.decrypt(chunk))

            # truncate any padded random noise
            fout.truncate(input_size)

            text = fout.getvalue()
        finally:
            fin.close()
            fout.close()

        return text
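
The layout that decrypt() expects can be read off the code above: an 8-byte little-endian plaintext length, a 16-byte IV, then CBC ciphertext padded to the AES block size. A hedged sketch of the matching encrypt side under those assumptions (PyCrypto's AES, random padding bytes), not the library's actual implementation:

    def encrypt(self, text):
        """Hedged sketch of the inverse of decrypt() above, not the library's code."""
        import os
        import struct
        from Crypto.Cipher import AES  # same PyCrypto module decrypt() relies on

        iv = os.urandom(16)
        aes = AES.new(self._key, AES.MODE_CBC, iv)
        # Pad with random noise up to a 16-byte boundary; decrypt() truncates
        # the noise away using the stored plaintext length.
        pad_len = (16 - len(text) % 16) % 16
        padded = text + os.urandom(pad_len)
        return struct.pack("<Q", len(text)) + iv + aes.encrypt(padded)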
Example #13
    def get(self, request):
        report = TeachDentistryReport()

        # setup zip file for the key & value file
        response = HttpResponse(content_type='application/zip')

        disposition = 'attachment; filename=teachdentistry.zip'
        response['Content-Disposition'] = disposition

        z = ZipFile(response, 'w')

        output = StringIO()  # temp output file
        writer = csv.writer(output)

        # report on all hierarchies
        hierarchies = Hierarchy.objects.filter(name='main')

        # Key file
        for row in report.metadata(hierarchies):
            writer.writerow(row)

        z.writestr("teachdentistry_key.csv", output.getvalue())

        # Results file
        output.truncate(0)
        output.seek(0)

        writer = csv.writer(output)

        for row in report.values(hierarchies):
            writer.writerow(row)

        z.writestr("teachdentistry_values.csv", output.getvalue())

        return response
Example #14
class AssertTestingHelper(object):
    def __init__(self,b_raise_exception=True):
        self.b_raise_exception = b_raise_exception
        
    def install_hooks(self):
        self._orig_assert_logger = assertions.Assertions.logger

        # install a logger for the assertions module that captures log output in the StringIO buffer self.output
        logger = logging.getLogger('TestAssertions')
        logger.propagate = False
        logger.setLevel(logging.WARNING) # assertions should not log below this level    

        # remove previous loggers, since we may be getting a logger from previous invocations
        for h in copy.copy(logger.handlers): # safer to copy list, since we modify it during iteration
            logger.removeHandler(h)

        self.output = StringIO()
        handler = logging.StreamHandler(self.output)
        logger.addHandler(handler)
        
        assertions.Assertions.logger = logger
        
    def uninstall_hooks(self):
        assertions.Assertions.logger = self._orig_assert_logger

    def get_output(self, b_reset=True):
        s = self.output.getvalue()
        if b_reset:
            self.output.seek(0)
            self.output.truncate()
        return s
        
Example #15
def test_tv_class():
    tvf = tv.TrackvisFile([])
    yield assert_equal(tvf.streamlines, [])
    yield assert_true(isinstance(tvf.header, np.ndarray))
    yield assert_equal(tvf.endianness, tv.native_code)
    yield assert_equal(tvf.filename, None)
    out_f = StringIO()
    tvf.to_file(out_f)
    yield assert_equal(out_f.getvalue(), tv.empty_header().tostring())
    out_f.truncate(0)
    # Write something not-default
    tvf = tv.TrackvisFile([], {'id_string':'TRACKb'})
    tvf.to_file(out_f)
    # read it back
    out_f.seek(0)
    tvf_back = tv.TrackvisFile.from_file(out_f)
    yield assert_equal(tvf_back.header['id_string'], 'TRACKb')
    # check that we check input values
    out_f.truncate(0)
    yield assert_raises(tv.HeaderError,
                        tv.TrackvisFile,
                        [],{'id_string':'not OK'})
    yield assert_raises(tv.HeaderError,
                        tv.TrackvisFile,
                        [],{'version': 2})
    yield assert_raises(tv.HeaderError,
                        tv.TrackvisFile,
                        [],{'hdr_size':0})
    affine = np.diag([1,2,3,1])
    affine[:3,3] = [10,11,12]
    tvf.set_affine(affine)
    yield assert_true(np.all(tvf.get_affine() == affine))
Example #16
def _log_chk(hdr, level):
    # utility function to check header checking / logging
    # If level == 0, this header should always be OK
    str_io = StringIO()
    logger = logging.getLogger('test.logger')
    handler = logging.StreamHandler(str_io)
    logger.addHandler(handler)
    str_io.truncate(0)
    hdrc = hdr.copy()
    if level == 0:  # Should never log or raise error
        logger.setLevel(0)
        hdrc.check_fix(logger=logger, error_level=0)
        assert_true(str_io.getvalue() == '')
        logger.removeHandler(handler)
        return hdrc, '', ()
    # Non zero level, test above and below threshold
    # Logging level above threshold, no log
    logger.setLevel(level + 1)
    e_lev = level + 1
    hdrc.check_fix(logger=logger, error_level=e_lev)
    assert_true(str_io.getvalue() == '')
    # Logging level below threshold, log appears
    logger.setLevel(level + 1)
    logger.setLevel(level - 1)
    hdrc = hdr.copy()
    hdrc.check_fix(logger=logger, error_level=e_lev)
    assert_true(str_io.getvalue() != '')
    message = str_io.getvalue().strip()
    logger.removeHandler(handler)
    hdrc2 = hdr.copy()
    raiser = (HeaderDataError, hdrc2.check_fix, logger, level)
    return hdrc, message, raiser
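
A hypothetical caller sketch: for level > 0 the helper returns a raiser tuple that unpacks straight into nose's assert_raises; hdr stands for a header object whose check_fix() reports a problem at the given level.

fixed_hdr, message, raiser = _log_chk(hdr, 30)
assert message != ''      # a warning was captured for the level-30 problem
assert_raises(*raiser)    # check_fix() raises HeaderDataError at error_level 30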
Example #17
class BufferLogger(object):
    def __init__(self, format, level):
        self.rootLogger = logging.getLogger()
        self.formatter = logging.Formatter(format)
        self.level = level

    def startCapture(self):
        self.__buffer__ = StringIO()
        self.__logHandler__ = logging.StreamHandler(self.__buffer__)
        self.__logHandler__.setFormatter(self.formatter)
        self.rootLogger.addHandler(self.__logHandler__)

        std_output = logging.StreamHandler(sys.stdout)
        self.rootLogger.addHandler(std_output)

        self.rootLogger.setLevel(self.level)

    def getLogger(self):
        return self.rootLogger

    def getBufferVal(self):
        return self.__buffer__.getvalue()

    def stopCapture(self):
        self.__logHandler__.flush()
        self.__buffer__.truncate(0)
        self.__buffer__.seek(0)

    def cleanup(self):
        self.__buffer__.close()
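
A hypothetical usage sketch: capture root-logger output around one block of work, read it back, then reset the buffer.

buffered = BufferLogger('%(levelname)s:%(message)s', logging.INFO)
buffered.startCapture()
buffered.getLogger().info('processing started')
captured = buffered.getBufferVal()   # 'INFO:processing started\n'
buffered.stopCapture()               # flush the handler and empty the buffer
buffered.cleanup()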
Example #18
def test_read_opts():
    # tests if read is seeing option sets, at initialization and after
    # initialization
    arr = np.arange(6).reshape(1,6)
    stream = StringIO()
    savemat(stream, {'a': arr})
    rdr = MatFile5Reader(stream)
    back_dict = rdr.get_variables()
    rarr = back_dict['a']
    yield assert_array_equal, rarr, arr
    rdr = MatFile5Reader(stream, squeeze_me=True)
    yield assert_array_equal, rdr.get_variables()['a'], arr.reshape((6,))
    rdr.squeeze_me = False
    yield assert_array_equal, rarr, arr
    rdr = MatFile5Reader(stream, byte_order=boc.native_code)
    yield assert_array_equal, rdr.get_variables()['a'], arr
    # inverted byte code leads to error on read because of swapped
    # header etc
    rdr = MatFile5Reader(stream, byte_order=boc.swapped_code)
    yield assert_raises, Exception, rdr.get_variables
    rdr.byte_order = boc.native_code
    yield assert_array_equal, rdr.get_variables()['a'], arr
    arr = np.array(['a string'])
    stream.truncate(0)
    savemat(stream, {'a': arr})
    rdr = MatFile5Reader(stream)
    yield assert_array_equal, rdr.get_variables()['a'], arr
    rdr = MatFile5Reader(stream, chars_as_strings=False)
    carr = np.atleast_2d(np.array(list(arr.item()), dtype='U1'))
    yield assert_array_equal, rdr.get_variables()['a'], carr
    rdr.chars_as_strings=True
    yield assert_array_equal, rdr.get_variables()['a'], arr
Example #19
class EmpyEngine(Engine):

    """Empy templating engine."""

    handle = 'empy'

    def __init__(self, template, dirname=None, **kwargs):
        """Initialize empy template."""
        super(EmpyEngine, self).__init__(**kwargs)

        if dirname is not None:
            # FIXME: This is a really bad idea, as it works like a global.
            # Blame EmPy.
            em.theSubsystem = SubsystemWrapper(basedir=dirname)

        self.output = StringIO()
        self.interpreter = em.Interpreter(output=self.output)
        self.template = template

    def apply(self, mapping):
        """Apply a mapping of name-value-pairs to a template."""
        self.output.seek(0)
        self.output.truncate(0)
        self.interpreter.string(self.template, locals=mapping)
        return self.output.getvalue()
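
A hypothetical usage sketch, assuming the em (EmPy) package is importable. EmPy expands @-prefixed expressions against the mapping passed to apply(), and the seek/truncate pair above lets one engine render the template repeatedly.

engine = EmpyEngine('Hello, @name!')
print(engine.apply({'name': 'world'}))   # Hello, world!
print(engine.apply({'name': 'again'}))   # buffer was reset, so no leftover output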
Example #20
def crypt_CFB(instream, outstream, algorithm, key, register, direction):
    """'Crypt a string in cipher-feedback mode.

    :Parameters:
        - `instream`: StringIO/file incoming
        - `outstream`: StringIO/file outgoing
        - `algorithm`: integer symmetric cipher constant
        - `key`: string encryption/decryption key
        - `register`: string initialization vector (IV) to feed register
        - `direction`: string 'encrypt' or 'decrypt' setting CFB mode

    :Returns: string ciphertext or cleartext

    OpenPGP performs CFB shifts on blocks of characters the same size
    as the block used by the symmetric cipher - for example, CAST5
    works on 64-bit blocks, therefore CAST5 CFB shifts use 8 bytes at
    a time (the remaining cleartext bytes that do not completely fill
    an 8-byte block at the end of a message are XOR'ed with the
    "left-most" bytes of the encrypted mask).
    """
    ciphermod = _import_cipher(algorithm)
    cipher = ciphermod.new(key, ciphermod.MODE_ECB)
    encrypt = cipher.encrypt
    shift = ciphermod.block_size # number of bytes to process (normally 8)

    # after tweaking, there's still not much difference in speed (~2% max)
    int2str = STN.int2str
    str2int = STN.str2int
    apply_mask = lambda c,m: int2str(str2int(c) ^ str2int(m))
 
    if register is None:
        register = STN.prepad(shift) # use an IV full of 0x00

    if shift > len(register):
        raise PGPCryptoError, "CFB shift amount->(%s) can't be larger than the feedback register->(%s)." % (shift, len(register))
  
    while True:
        inblock = instream.read(shift) # block size = shift size
        chunk = StringIO()

        if inblock:
            mask = encrypt(register)
            chunk.seek(0)

            for i, c in enumerate(inblock):
                chunk.write(apply_mask(c, mask[i]))

            chunk.truncate()
            chunk.seek(0)
            outblock = chunk.read()

            if 'encrypt' == direction:
                register = outblock
            elif 'decrypt' == direction:
                register = inblock

            outstream.write(outblock)

        else:
            break
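
A hypothetical round-trip sketch, assuming _import_cipher resolves OpenPGP algorithm id 3 (CAST5 in RFC 4880) and a 16-byte session key; register=None falls back to the all-zero IV prepared by prepad() above.

session_key = '\x01' * 16
ciphertext = StringIO()
crypt_CFB(StringIO('attack at dawn'), ciphertext, 3, session_key, None, 'encrypt')

recovered = StringIO()
crypt_CFB(StringIO(ciphertext.getvalue()), recovered, 3, session_key, None, 'decrypt')
assert recovered.getvalue() == 'attack at dawn'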
Example #21
def get_lines(data, cols=COLUMNS, quoting=csv.QUOTE_ALL):
    output = StringIO()
    csvWriter = csv.DictWriter(output, cols, quoting=quoting)
    csvWriter.writeheader()

    for i in data:
        if isinstance(i, Indicator):
            i = i.__dict__()

        r = dict()
        for c in cols:
            y = i.get(c, u'')

            if type(y) is list:
                y = u','.join(y)

            if PYVERSION < 3:
                r[c] = y
                if isinstance(r[c], basestring):
                    r[c] = unicode(r[c]).replace('\n', r'\\n')
                    r[c] = r[c].encode('utf-8', 'ignore')
            else:
                r[c] = y
                if isinstance(r[c], basestring):
                    r[c] = r[c].replace('\n', r'\\n')

        csvWriter.writerow(r)
        yield output.getvalue().rstrip('\r\n')

        if isinstance(output, StringIO):
            output.truncate(0)
Example #22
    def test_01displayLogs(self):
        """
        Test the display of the logs according to the value of the boolean in the method.
        """
        # Enabled
        gLogger.enableLogsFromExternalLibs()

        # modify the output to capture logs of the root logger
        bufferRoot = StringIO()
        logging.getLogger().handlers[0].stream = bufferRoot

        logging.getLogger().info("message")
        logstring1 = cleaningLog(bufferRoot.getvalue())

        self.assertEqual("UTCExternalLibrary/rootINFO:message\n", logstring1)
        bufferRoot.truncate(0)

        # this is a direct child of root, as the logger in DIRAC
        logging.getLogger("sublog").info("message")
        logstring1 = cleaningLog(bufferRoot.getvalue())

        self.assertEqual("UTCExternalLibrary/sublogINFO:message\n", logstring1)
        bufferRoot.truncate(0)

        # Disabled
        gLogger.disableLogsFromExternalLibs()

        logging.getLogger().info("message")
        # this is a direct child of root, as the logger in DIRAC
        logging.getLogger("sublog").info("message")

        self.assertEqual("", bufferRoot.getvalue())
Example #23
    def _legacy_decrypt(self, b64_ciphertext):
        """Decrypts a string that's encoded with a SimpleAES version < 1.0.
        To convert a ciphertext to the new-style algo, use:

            aes = SimpleAES('my secret')
            aes.convert(legacy_ciphertext)
        """
        cipherbytes = base64.b64decode(b64_ciphertext)
        fin = StringIO(cipherbytes)
        fout = StringIO()

        key = hashlib.sha256(self._password).digest()
        chunksize = 64 * 1024
        try:
            input_size = struct.unpack('<Q', fin.read(struct.calcsize('Q')))[0]
            iv = fin.read(16)
            aes = AES.new(key, AES.MODE_CBC, iv)

            while True:
                chunk = fin.read(chunksize)
                if len(chunk) == 0:
                    break  # done
                fout.write(aes.decrypt(chunk))

            # truncate any padded random noise
            fout.truncate(input_size)

            text = fout.getvalue()
        finally:
            fin.close()
            fout.close()

        return text
Example #24
    def test_deprecated_decorator(self):
        import sys
        try:
            from StringIO import StringIO
        except ImportError:
            from io import StringIO

        saved_stderr = sys.stderr
        try:
            out = StringIO()
            sys.stderr = out
            self.deprecated_decorator()
            assert (
                'DeprecationWarning: Call to deprecated method deprecated_decorator'
                in out.getvalue())
            out.truncate(0)  # clean the buffer
            self.old_method()
            assert (
                'DeprecationWarning: Call to deprecated method old_method; use new_method instead'
                in out.getvalue())
            out.truncate(0)  # clean the buffer
            self.new_method()
            assert ('DeprecationWarning' not in out.getvalue())
        finally:
            sys.stderr = saved_stderr
Example #25
    class UnicodeWriter:
        """
        A CSV writer for Unicode data.
        """

        def __init__(self, f, dialect=csv.excel, encoding="utf-8", **kwds):
            # Redirect output to a queue
            self.queue = StringIO()
            self.writer = csv.writer(self.queue, dialect=dialect, **kwds)
            self.stream = f
            self.encoder = codecs.getincrementalencoder(encoding)()

        def writerow(self, row):
            self.writer.writerow(
                [s.encode("utf-8") if hasattr(s, "encode") else "" for s in row]
            )
            # Fetch UTF-8 output from the queue ...
            data = self.queue.getvalue()
            data = data.decode("utf-8")
            # ... and reencode it into the target encoding
            data = self.encoder.encode(data)
            # write to the target stream
            self.stream.write(data)
            # empty queue
            self.queue.truncate(0)

        def writerows(self, rows):
            for row in rows:
                self.writerow(row)
Example #26
def gzip_generator(string_generator):
    """Return generator for gzipping given string generator.

    Example:

        >>> import StringIO
        >>> z = ''.join(gzip_generator(iter(['hello,', ' ', 'world!'])))
        >>> ''.join(gunzip_generator(StringIO.StringIO(z)))
        'hello, world!'

    """
    # Use gzip and not zlib to make proper gzip header.
    buffer = StringIO()
    gzip = GzipFile(fileobj=buffer, mode='w')

    # Yield header
    yield buffer.getvalue()
    buffer.truncate(0)

    for string in string_generator:

        gzip.write(string)
        gzip.flush()

        yield buffer.getvalue()
        buffer.truncate(0)

    # Flush
    gzip.close()

    yield buffer.getvalue()
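
The repeated buffer.truncate(0) calls rely on Python 2 StringIO semantics, where truncating also rewinds the stream position. A hedged sketch of the same generator on Python 3 needs io.BytesIO and an explicit seek(0), because io streams leave the position untouched when truncating:

import io
from gzip import GzipFile

def gzip_generator_py3(chunks):
    buffer = io.BytesIO()
    gz = GzipFile(fileobj=buffer, mode='wb')

    # Yield the gzip header that GzipFile wrote on construction.
    yield buffer.getvalue()
    buffer.seek(0)
    buffer.truncate(0)

    for chunk in chunks:
        gz.write(chunk)
        gz.flush()
        yield buffer.getvalue()
        buffer.seek(0)
        buffer.truncate(0)

    gz.close()  # flushes remaining data and writes the gzip trailer
    yield buffer.getvalue()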
Example #29
class IPythonView(TkConsoleView, IterableIPShell):
  def __init__(self,root,banner=None, debug=False):
    self.debug = debug
    TkConsoleView.__init__(self,root)
    self.cout = StringIO()
    IterableIPShell.__init__(self, cout=self.cout,cerr=self.cout,
                             input_func=self.raw_input)

    if banner:
      self.showBanner(banner)
    self.execute()
    self.cout.truncate(0)
    self.showPrompt(self.prompt)
    self.interrupt = False

  def raw_input(self, prompt=''):
    if self.interrupt:
      self.interrupt = False
      raise KeyboardInterrupt
    return self.getCurrentLine()

  def _processLine(self):
    self.history_pos = 0
    self.execute()
    rv = self.cout.getvalue()
    if self.debug:
        print >> self.o, "_processLine got rv: %s" % rv
    if rv: rv = rv.strip('\n')
    self.showReturned(rv)
    self.cout.truncate(0)
Example #31
class MemoryLogs(object):
  """Collects logs in memory."""

  def __init__(self, logger):
    self._logger = logger
    self._log_buffer = StringIO()
    self._log_handler = logging.StreamHandler(self._log_buffer)
    formatter = logging.Formatter("[%(asctime)s][%(levelname)s] %(message)s",
                                  "%y-%m-%d %H:%M:%S")
    self._log_handler.setFormatter(formatter)

  def Start(self):
    """Starts collecting the logs."""
    self._logger.addHandler(self._log_handler)

  def Flush(self):
    """Stops collecting the logs and returns the logs collected since Start()
    was called.
    """
    self._logger.removeHandler(self._log_handler)
    self._log_handler.flush()
    self._log_buffer.flush()
    result = self._log_buffer.getvalue()
    self._log_buffer.truncate(0)
    return result
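
A hypothetical usage sketch: attach the collector around one unit of work and read back whatever the logger emitted in that window.

worker_log = logging.getLogger('worker')
worker_log.setLevel(logging.INFO)
memory_logs = MemoryLogs(worker_log)

memory_logs.Start()
worker_log.info('step one done')
captured = memory_logs.Flush()   # e.g. '[17-03-02 10:15:00][INFO] step one done\n'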
Example #32
class UnicodeCSVWriter(object):
    """
    A CSV writer which will write rows to CSV file "f",
    which is encoded in the given encoding.
    """

    def __init__(self, f, dialect=csv.excel, encoding="utf-8", **kwds):
        # Redirect output to a queue
        self.queue = StringIO()
        self.writer = csv.writer(self.queue, dialect=dialect, **kwds)
        self.stream = f
        self.encoder = codecs.getincrementalencoder(encoding)()

    def writerow(self, row):
        self.writer.writerow([s.encode("utf-8") for s in row])
        # Fetch UTF-8 output from the queue ...
        data = self.queue.getvalue()
        data = data.decode("utf-8")
        # ... and reencode it into the target encoding
        data = self.encoder.encode(data)
        # write to the target stream
        self.stream.write(data)
        # empty queue
        self.queue.truncate(0)

    def writerows(self, rows):
        for row in rows:
            self.writerow(row)
Example #34
def test_tv_class():
    tvf = tv.TrackvisFile([])
    yield assert_equal(tvf.streamlines, [])
    yield assert_true(isinstance(tvf.header, np.ndarray))
    yield assert_equal(tvf.endianness, tv.native_code)
    yield assert_equal(tvf.filename, None)
    out_f = StringIO()
    tvf.to_file(out_f)
    yield assert_equal(out_f.getvalue(), tv.empty_header().tostring())
    out_f.truncate(0)
    # Write something not-default
    tvf = tv.TrackvisFile([], {'id_string': 'TRACKb'})
    tvf.to_file(out_f)
    # read it back
    out_f.seek(0)
    tvf_back = tv.TrackvisFile.from_file(out_f)
    yield assert_equal(tvf_back.header['id_string'], 'TRACKb')
    # check that we check input values
    out_f.truncate(0)
    yield assert_raises(tv.HeaderError, tv.TrackvisFile, [],
                        {'id_string': 'not OK'})
    yield assert_raises(tv.HeaderError, tv.TrackvisFile, [], {'version': 3})
    yield assert_raises(tv.HeaderError, tv.TrackvisFile, [], {'hdr_size': 0})
    affine = np.diag([1, 2, 3, 1])
    affine[:3, 3] = [10, 11, 12]
    tvf.set_affine(affine)
    yield assert_true(np.all(tvf.get_affine() == affine))
Example #35
def help(constructor, identity, style, flags, aemreference, commandname=''):
	id = (constructor, identity, style)
	if id not in _cache:
		if constructor == 'path':
			appobj = appscript.app(identity)
		elif constructor == 'pid':
			appobj = appscript.app(pid=identity)
		elif constructor == 'url':
			appobj = appscript.app(url=identity)
		elif constructor == 'aemapp':
			appobj = appscript.app(aemapp=aem.Application(desc=identity))
		elif constructor == 'current':
			appobj = appscript.app()
		else:
			raise RuntimeError, 'Unknown constructor: %r' % constructor
		output = StringIO()		
		helpobj = Help(appobj, style, output)
		_cache[id] = (appobj, helpobj, output)
	ref, helpobj, output = _cache[id]
	output.truncate(0)
	if aemreference is not None:
		ref = ref.AS_newreference(aemreference)
	if commandname:
		ref = getattr(ref, commandname)
	helpobj.help(flags, ref)
	s = output.getvalue()
	if NSUserDefaults.standardUserDefaults().boolForKey_(u'enableLineWrap'):
		res = []
		textwrapper = textwrap.TextWrapper(width=NSUserDefaults.standardUserDefaults().integerForKey_(u'lineWrap'), 
				subsequent_indent=' ' * 12)
		for line in s.split('\n'):
			res.append(textwrapper.fill(line))
		s = u'\n'.join(res)
	return s
Example #37
def get_lines(data, cols=COLUMNS, quoting=csv.QUOTE_ALL):
    output = StringIO()
    csvWriter = csv.DictWriter(output, cols, quoting=quoting)
    csvWriter.writeheader()

    for i in data:
        if isinstance(i, Indicator):
            i = i.__dict__()

        r = dict()
        for c in cols:
            y = i.get(c, u'')

            if type(y) is list:
                y = u','.join(y)

            if c == 'confidence' and y is None:
                y = 0.0

            if PYVERSION < 3:
                r[c] = y
                if isinstance(r[c], basestring):
                    r[c] = unicode(r[c]).replace('\n', r'\\n')
                    r[c] = r[c].encode('utf-8', 'ignore')
            else:
                r[c] = y
                if isinstance(r[c], basestring):
                    r[c] = r[c].replace('\n', r'\\n')

        csvWriter.writerow(r)
        yield output.getvalue().rstrip('\r\n')

        if isinstance(output, StringIO):
            output.truncate(0)
Example #38
    def __patch_jenkins_config(self):
        """
        A context manager that retrieves the Jenkins configuration XML, deserializes it into an
        XML ElementTree, yields the XML tree, then serializes the tree and saves it back to
        Jenkins.
        """
        config_file = StringIO()
        if run('test -f ~/config.xml', quiet=True).succeeded:
            fresh_instance = False
            get(remote_path='~/config.xml', local_path=config_file)
        else:
            # Get the in-memory config as the on-disk one may be absent on a fresh instance.
            # Luckily, a fresh instance won't have any configured security.
            fresh_instance = True
            config_url = 'http://localhost:8080/computer/(master)/config.xml'
            with hide('output'):
                config_file.write(run('curl "%s"' % config_url))
        config_file.seek(0)
        config = ElementTree.parse(config_file)

        yield config

        config_file.truncate(0)
        config.write(config_file, encoding='utf-8', xml_declaration=True)
        if fresh_instance:
            self.__service_jenkins('stop')
        try:
            put(local_path=config_file, remote_path='~/config.xml')
        finally:
            if fresh_instance:
                self.__service_jenkins('start')
            else:
                log.warn(
                    'Visit the Jenkins web UI and click Manage Jenkins - Reload '
                    'Configuration from Disk')
Example #41
def _log_chk(hdr, level):
    # utility function to check header checking / logging
    # If level == 0, this header should always be OK
    str_io = StringIO()
    logger = logging.getLogger('test.logger')
    handler = logging.StreamHandler(str_io)
    logger.addHandler(handler)
    str_io.truncate(0)
    hdrc = hdr.copy()
    if level == 0: # Should never log or raise error
        logger.setLevel(0)
        hdrc.check_fix(logger=logger, error_level=0)
        assert_true(str_io.getvalue() == '')
        logger.removeHandler(handler)
        return hdrc, '', ()
    # Non zero level, test above and below threshold
    # Logging level above threshold, no log
    logger.setLevel(level+1)
    e_lev = level+1
    hdrc.check_fix(logger=logger, error_level=e_lev)
    assert_true(str_io.getvalue() == '')
    # Logging level below threshold, log appears
    logger.setLevel(level+1)
    logger.setLevel(level-1)
    hdrc = hdr.copy()
    hdrc.check_fix(logger=logger, error_level=e_lev)
    assert_true(str_io.getvalue() != '')
    message = str_io.getvalue().strip()
    logger.removeHandler(handler)
    hdrc2 = hdr.copy()
    raiser = (HeaderDataError,
              hdrc2.check_fix,
              logger,
              level)
    return hdrc, message, raiser
Example #42
def report(request):
    if request.method == "GET":
        exclusions = ['faculty', 'resources']
        hierarchies = Hierarchy.objects.all().exclude(
            name__in=exclusions).order_by("id")
        return {'hierarchies': hierarchies}
    else:
        hierarchy_id = request.POST.get('hierarchy-id', None)
        hierarchy = Hierarchy.objects.get(id=hierarchy_id)

        include_superusers = request.POST.get('include-superusers', False)

        response = HttpResponse(mimetype='application/zip')
        response['Content-Disposition'] = 'attachment; filename=tobacco.zip'

        z = ZipFile(response, 'w')

        output = StringIO()  # temp output file
        _all_results_key(output, hierarchy)
        z.writestr("tobacco_%s_key.csv" % hierarchy.name, output.getvalue())

        output.truncate(0)
        output.seek(0)
        _all_results(output, hierarchy, include_superusers)
        z.writestr("tobacco_%s_values.csv" % hierarchy.name, output.getvalue())

        return response
Example #43
def get_lines(data, cols=COLUMNS):
    output = StringIO()
    output.write("{0}\n".format(HEADER))

    for i in data:
        if isinstance(i, Indicator):
            i = i.__dict__()

        r = []
        if i['itype'] == 'url':
            i['indicator'] = re.sub(r'(https?\:\/\/)', '', i['indicator'])

        for c in cols:
            y = i.get(c, '-')

            if type(y) is list:
                y = SEP.join(y)

            y = str(y)
            if c == 'itype':
                y = 'Intel::{0}'.format(itype[i[c]])
            r.append(y)

        # do_notice
        # https://www.bro.org/bro-exchange-2013/exercises/intel.html
        # https://github.com/csirtgadgets/massive-octo-spice/issues/438
        r.append('T')

        output.write("\t".join(r))
        output.write("\n")
        yield output.getvalue()

        if isinstance(output, StringIO):
            output.truncate(0)
Example #44
    def get(self, request):
        report = PedialabsReport()

        # setup zip file for the key & value file
        response = HttpResponse(content_type="application/zip")

        disposition = "attachment; filename=pedialabs.zip"
        response["Content-Disposition"] = disposition

        z = ZipFile(response, "w")

        output = StringIO()  # temp output file
        writer = csv.writer(output)

        # report on all hierarchies
        hierarchies = Hierarchy.objects.filter(name="labs")

        # Key file
        for row in report.metadata(hierarchies):
            writer.writerow(row)

        z.writestr("pedialabs_key.csv", output.getvalue())

        # Results file
        output.truncate(0)
        output.seek(0)

        writer = csv.writer(output)

        for row in report.values(hierarchies):
            writer.writerow(row)

        z.writestr("pedialabs_values.csv", output.getvalue())

        return response
Example #45
class OutputWrapper(object):
    def __init__(self, ioStream):
        self.__buffer = StringIO()
        self.__original = ioStream
        self.capturing = False

    def write(self, s):
        if self.capturing: self.__buffer.write(s)
        self.__original.write(s)

    def writelines(self, strs):
        if self.capturing: self.__buffer.writelines(strs)
        self.__original.writelines(strs)

    def getvalue(self):
        return self.__buffer.getvalue()

    def reset(self):
        self.__buffer.truncate(0)

    def getOriginalStream(self):
        return self.__original

    def startCapturing(self):
        self.capturing = True

    def stopCapturing(self):
        self.capturing = False

    def __getattr__(self, *args, **kwargs):
        return self.__original.__getattribute__(*args, **kwargs)
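
A hypothetical usage sketch: the wrapper mirrors everything to the original stream and, while capturing is switched on, keeps a copy that can be read back and reset.

wrapper = OutputWrapper(sys.stdout)
sys.stdout = wrapper
wrapper.startCapturing()
print 'hello'                    # reaches the real stdout and the buffer
wrapper.stopCapturing()
captured = wrapper.getvalue()    # 'hello\n'
wrapper.reset()
sys.stdout = wrapper.getOriginalStream()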
Example #46
class IPythonView(TkConsoleView, IterableIPShell):
  def __init__(self, root, banner=None):
    TkConsoleView.__init__(self, root)
    self.cout = StringIO()
    IterableIPShell.__init__(self, cout=self.cout, cerr=self.cout,
                             input_func=self.raw_input)

    if banner:
      self.showBanner(banner)
    self.execute()
    self.cout.truncate(0)
    self.showPrompt(self.prompt)
    self.interrupt = False

  def raw_input(self, prompt=''):
    if self.interrupt:
      self.interrupt = False
      raise KeyboardInterrupt
    return self.getCurrentLine()

  def _processLine(self):
    self.history_pos = 0
    self.execute()
    rv = self.cout.getvalue()
    if self.debug:
        print >> self.o, "_processLine got rv: %s" % rv
    if rv: rv = rv.strip('\n')
    self.showReturned(rv)
    self.cout.truncate(0)
Example #47
    class UnicodeWriter:
        """
        A CSV writer for Unicode data.
        """
        def __init__(self, f, dialect=csv.excel, encoding="utf-8", **kwds):
            # Redirect output to a queue
            self.queue = StringIO()
            self.writer = csv.writer(self.queue, dialect=dialect, **kwds)
            self.stream = f
            self.encoder = codecs.getincrementalencoder(encoding)()

        def writerow(self, row):
            self.writer.writerow([
                s.encode("utf-8") if hasattr(s, "encode") else "" for s in row
            ])
            # Fetch UTF-8 output from the queue ...
            data = self.queue.getvalue()
            data = data.decode("utf-8")
            # ... and reencode it into the target encoding
            data = self.encoder.encode(data)
            # write to the target stream
            self.stream.write(data)
            # empty queue
            self.queue.truncate(0)

        def writerows(self, rows):
            for row in rows:
                self.writerow(row)
Example #49
def export_csv_iter(*args, **kwargs):
    s = StringIO()
    w = csv.writer(s)
    for row in export_iter(*args, **kwargs):
        w.writerow(row)
        s.flush()
        yield s.getvalue()
        s.truncate(0)
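
A hypothetical usage sketch, assuming export_iter yields one row (a sequence of values) per iteration; each chunk handed back by the generator is a single encoded CSV row, so the result can be streamed instead of accumulated in memory.

with open('export.csv', 'wb') as target:
    for chunk in export_csv_iter(queryset):   # queryset: whatever export_iter expects
        target.write(chunk)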
Example #51
	def truncate(self, size=None):
		if size is None:
			size = self._read_pos

		StringIO.truncate(self, size)

		self._read_pos = 0
		self._write_pos = 0
Example #52
class Collector(object):
    """
  Collector for map and reduce output values
  """
    def __init__(self, scheme=None, outputClient=None):
        """

    Parameters
    ---------------------------------------------
    scheme - The scheme for the datums to output - can be a json string
           - or an instance of Schema
    outputClient - The output client used to send messages to the parent
    """

        if not (isinstance(scheme, schema.Schema)):
            scheme = schema.parse(scheme)

        if (outputClient is None):
            raise ValueError("output client can't be none.")

        self.scheme = scheme
        self.buff = StringIO()
        self.encoder = avio.BinaryEncoder(self.buff)

        self.datum_writer = avio.DatumWriter(writers_schema=self.scheme)
        self.outputClient = outputClient

    def collect(self, record, partition=None):
        """Collect a map or reduce output value

    Parameters
    ------------------------------------------------------
    record - The record to write
    partition - Indicates the partition for a pre-partitioned map output
              - currently not supported
    """

        self.buff.truncate(0)
        self.datum_writer.write(record, self.encoder)
        self.buff.flush()
        self.buff.seek(0)

        # delete all the data in the buffer
        if (partition is None):

            # TODO: Is there a more efficient way to read the data in self.buff?
            # we could use self.buff.read() but that returns the byte array as a string
            # will that work?  We can also use self.buff.readinto to read it into
            # a bytearray but the byte array must be pre-allocated
            # self.outputClient.output(self.buff.buffer.read())

            #its not a StringIO
            self.outputClient.request("output", {"datum": self.buff.read()})
        else:
            self.outputClient.request("outputPartitioned", {
                "datum": self.buff.read(),
                "partition": partition
            })
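
A hypothetical usage sketch, assuming schema and avio come from the avro package and that outputClient is normally the RPC proxy back to the parent process, exposing request(message_name, params); a stand-in client here just records what would have been sent.

class RecordingClient(object):
    def __init__(self):
        self.calls = []

    def request(self, message_name, params):
        self.calls.append((message_name, params))

client = RecordingClient()
collector = Collector(scheme='"string"', outputClient=client)
collector.collect(u'a map output value')
message_name, params = client.calls[0]   # ('output', {'datum': <avro-encoded bytes>})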
Example #53
class TestWorkerProtocol(TestCaseInTempDir, PullerWorkerMixin):
    """Tests for the client-side implementation of the protocol used to
    communicate to the master process.
    """

    def setUp(self):
        TestCaseInTempDir.setUp(self)
        self.output = StringIO()
        self.protocol = PullerWorkerProtocol(self.output)
        self.factory = ObjectFactory()

    def assertSentNetstrings(self, expected_netstrings):
        """Assert that the protocol sent the given netstrings (in order)."""
        observed_netstrings = get_netstrings(self.output.getvalue())
        self.assertEqual(expected_netstrings, observed_netstrings)

    def resetBuffers(self):
        # Empty the test output and error buffers.
        self.output.truncate(0)
        self.assertEqual('', self.output.getvalue())

    def test_nothingSentOnConstruction(self):
        # The protocol sends nothing until it receives an event.
        self.branch_to_mirror = self.makePullerWorker(protocol=self.protocol)
        self.assertSentNetstrings([])

    def test_startMirror(self):
        # Calling startMirroring sends 'startMirroring' as a netstring.
        self.protocol.startMirroring()
        self.assertSentNetstrings(['startMirroring', '0'])

    def test_branchChanged(self):
        # Calling 'branchChanged' sends the arguments.
        arbitrary_args = [self.factory.getUniqueString() for x in range(6)]
        self.protocol.startMirroring()
        self.resetBuffers()
        self.protocol.branchChanged(*arbitrary_args)
        self.assertSentNetstrings(['branchChanged', '6'] + arbitrary_args)

    def test_mirrorFailed(self):
        # Calling 'mirrorFailed' sends the error message.
        self.protocol.startMirroring()
        self.resetBuffers()
        self.protocol.mirrorFailed('Error Message', 'OOPS')
        self.assertSentNetstrings(
            ['mirrorFailed', '2', 'Error Message', 'OOPS'])

    def test_progressMade(self):
        # Calling 'progressMade' sends an arbitrary string indicating
        # progress.
        self.protocol.progressMade('test')
        self.assertSentNetstrings(['progressMade', '0'])

    def test_log(self):
        # Calling 'log' sends 'log' as a netstring and its arguments, after
        # formatting as a string.
        self.protocol.log('logged %s', 'message')
        self.assertSentNetstrings(['log', '1', 'logged message'])
Example #54
class TestMain(unittest.TestCase):
    def setUp(self):
        self.logger = logging.getLogger()
        self.stream = StringIO()
        self.logger.addHandler(logging.StreamHandler(self.stream))
        self.conf = {
            'migrations': [],
            'migration_status': None,
            'internal_pool': None,
            'logger': self.logger,
            'items_chunk': None,
            'node_id': 0,
            'nodes': 1,
            'poll_interval': 30,
            'once': True,
        }

    @contextmanager
    def patch(self, name):
        with mock.patch('s3_sync.migrator.' + name) as mocked:
            yield mocked

    def pop_log_lines(self):
        lines = self.stream.getvalue()
        self.stream.seek(0)
        self.stream.truncate()
        return lines

    def test_run_once(self):
        start = time.time()
        with self.patch('time') as mocktime:
            mocktime.time.side_effect = [start, start + 1]
            s3_sync.migrator.run(**self.conf)
            # with once = True we don't sleep
            self.assertEqual(mocktime.sleep.call_args_list, [])
            self.assertEqual('Finished cycle in 1.00s\n', self.pop_log_lines())

    def test_run_forever(self):
        start = time.time()
        self.conf['once'] = False

        class StopDaemon(Exception):
            pass

        with self.patch('process_migrations') as mock_process, \
                self.patch('time') as mocktime:
            mock_process.side_effect = [None, None, StopDaemon()]
            mocktime.time.side_effect = [start + i for i in range(5)]
            with self.assertRaises(StopDaemon):
                s3_sync.migrator.run(**self.conf)
            self.assertEqual(mocktime.sleep.call_args_list,
                             [mock.call(29)] * 2)
            self.assertEqual(
                ['Finished cycle in 1.00s, sleeping for 29.00s.',
                 'Finished cycle in 1.00s, sleeping for 29.00s.'],
                self.pop_log_lines().splitlines())
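
pop_log_lines drains the captured log by rewinding before truncating, which matters on Python 3's io.StringIO: truncate(0) alone does not move the stream position, so later writes would not start at the beginning of the buffer. A small standalone sketch of the same drain pattern (the logger name is made up for illustration):

import io
import logging

stream = io.StringIO()
logger = logging.getLogger('migrator-demo')   # hypothetical logger name
logger.propagate = False
logger.addHandler(logging.StreamHandler(stream))

def pop_log_lines(buf):
    # Grab everything captured so far, then reset the buffer for the next cycle.
    lines = buf.getvalue()
    buf.seek(0)      # rewind first ...
    buf.truncate()   # ... then cut at the new position (0)
    return lines

logger.warning('Finished cycle in 1.00s')
assert pop_log_lines(stream) == 'Finished cycle in 1.00s\n'
assert stream.getvalue() == ''   # empty and ready for the next cycle
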
Example #56
0
class BaseToolchainTest(BaseConfigureTest):
    def setUp(self):
        super(BaseToolchainTest, self).setUp()
        self.out = StringIO()
        self.logger = logging.getLogger('BaseToolchainTest')
        self.logger.setLevel(logging.ERROR)
        self.handler = logging.StreamHandler(self.out)
        self.logger.addHandler(self.handler)

    def tearDown(self):
        self.logger.removeHandler(self.handler)
        del self.handler
        del self.out
        super(BaseToolchainTest, self).tearDown()

    def do_toolchain_test(self, paths, results, args=[], environ={}):
        '''Helper to test the toolchain checks from toolchain.configure.

        - `paths` is a dict associating compiler paths to FakeCompiler
          definitions from above.
        - `results` is a dict associating result variable names from
          toolchain.configure (c_compiler, cxx_compiler, host_c_compiler,
          host_cxx_compiler) with a result.
          The result can either be an error string, or a CompilerResult
          corresponding to the object returned by toolchain.configure checks.
          When the results for host_c_compiler are identical to c_compiler,
          they can be omitted. Likewise for host_cxx_compiler vs.
          cxx_compiler.
        '''
        environ = dict(environ)
        if 'PATH' not in environ:
            environ['PATH'] = os.pathsep.join(
                mozpath.abspath(p) for p in ('/bin', '/usr/bin'))

        sandbox = self.get_sandbox(paths, {},
                                   args,
                                   environ,
                                   logger=self.logger)

        for var in ('c_compiler', 'cxx_compiler', 'host_c_compiler',
                    'host_cxx_compiler'):
            if var in results:
                result = results[var]
            elif var.startswith('host_'):
                result = results.get(var[5:], {})
            else:
                result = {}
            try:
                self.out.truncate(0)
                compiler = sandbox._value_for(sandbox[var])
                # Add var on both ends to make it clear which of the
                # variables is failing the test when that happens.
                self.assertEquals((var, compiler), (var, result))
            except SystemExit:
                self.assertEquals((var, result),
                                  (var, self.out.getvalue().strip()))
                return
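
The (var, compiler) == (var, result) comparison above labels each assertion so a failure message names the variable being checked. A tiny illustration of that pattern outside the configure sandbox (the compiler names are invented for the example):

import unittest

class LabelledAssertDemo(unittest.TestCase):
    def test_labelled_comparison(self):
        expected = {'c_compiler': 'clang', 'cxx_compiler': 'clang++'}
        observed = {'c_compiler': 'clang', 'cxx_compiler': 'clang++'}
        for var in ('c_compiler', 'cxx_compiler'):
            # Putting `var` on both sides makes a mismatch report name the
            # variable, e.g. ('cxx_compiler', 'g++') != ('cxx_compiler', 'clang++').
            self.assertEqual((var, observed[var]), (var, expected[var]))

if __name__ == '__main__':
    unittest.main()
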
Example #57
0
class Serialization(unittest.TestCase):
    def setUp(self):
        self.t1 = m.SporadicTask(10, 100)
        self.t2 = m.SporadicTask(5, 19, 15, id=3)
        self.t3 = m.SporadicTask(25, 50, id=5, deadline=75)
        self.ts = m.TaskSystem([self.t1, self.t2, self.t3])
        self.f  = StringIO()

    def test_serialize_task(self):
        for t in self.ts:
            s.write_xml(s.task(t), self.f)
            self.f.seek(0)
            x = s.load(self.f)
            self.assertIsInstance(x, m.SporadicTask)
            self.assertEqual(x.cost, t.cost)
            self.assertEqual(x.deadline, t.deadline)
            self.assertEqual(x.period, t.period)
            self.assertEqual(x.id, t.id)
            self.f.seek(0)
            self.f.truncate()

    def test_serialize_taskset(self):
        s.write(self.ts, self.f)
        self.f.seek(0)
        xs = s.load(self.f)
        self.assertIsInstance(xs, m.TaskSystem)
        self.assertEqual(len(xs), len(self.ts))
        for x,t in zip(xs, self.ts):
            self.assertEqual(x.cost, t.cost)
            self.assertEqual(x.deadline, t.deadline)
            self.assertEqual(x.period, t.period)
            self.assertEqual(x.id, t.id)

    def test_serialize_resmodel(self):
        r.initialize_resource_model(self.ts)
        self.t1.resmodel[1].add_request(1)
        self.t2.resmodel[1].add_read_request(2)
        self.t2.resmodel['serial I/O'].add_request(2)
        self.t3.resmodel['serial I/O'].add_request(3)

        for t in self.ts:
            s.write_xml(s.task(t), self.f)
            self.f.seek(0)
            x = s.load(self.f)
            self.assertIsInstance(x.resmodel, r.ResourceRequirements)
            self.assertEqual(len(x.resmodel), len(t.resmodel))
            self.assertEqual(x.resmodel.keys(), t.resmodel.keys())
            for res_id in x.resmodel:
                self.assertEqual(x.resmodel[res_id].max_reads, t.resmodel[res_id].max_reads)
                self.assertEqual(x.resmodel[res_id].max_writes, t.resmodel[res_id].max_writes)
                self.assertEqual(x.resmodel[res_id].max_requests, t.resmodel[res_id].max_requests)
                self.assertEqual(x.resmodel[res_id].max_read_length, t.resmodel[res_id].max_read_length)
                self.assertEqual(x.resmodel[res_id].max_write_length, t.resmodel[res_id].max_write_length)
                self.assertEqual(x.resmodel[res_id].max_length, t.resmodel[res_id].max_length)
            self.f.seek(0)
            self.f.truncate()
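
Each iteration above reuses a single StringIO: write, rewind, parse, then rewind and truncate for the next task. The same round-trip-and-reset cycle with a stdlib serializer, as a standalone sketch:

import io
import json

buf = io.StringIO()
for record in ({'id': 1}, {'id': 2}, {'id': 3}):
    json.dump(record, buf)      # serialize into the shared buffer
    buf.seek(0)                 # rewind so the parser reads from the start
    assert json.load(buf) == record
    buf.seek(0)                 # rewind again ...
    buf.truncate()              # ... and clear it for the next record
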
Example #58
0
class IPythonView(ConsoleView, IterableIPShell):

    def __init__(self, main):
        ConsoleView.__init__(self)
        self.cout = StringIO()
        IterableIPShell.__init__(self, cout=self.cout,
                                 cerr=self.cout, input_func=self.raw_input,
                                 user_ns={'main': main})
        self.connect('key_press_event', self.keyPress)
        self.execute()
        self.cout.truncate(0)
        self.showPrompt(self.prompt)
        self.interrupt = False
        self.changeLine("main.canvas.inspect()")
        self._processLine()

    def raw_input(self, prompt=''):
        if self.interrupt:
            self.interrupt = False
            raise KeyboardInterrupt
        return self.getCurrentLine()

    def keyPress(self, widget, event):
        if event.state & gtk.gdk.CONTROL_MASK and event.keyval == 99:
            self.interrupt = True
            self._processLine()
            return True
        elif event.keyval == gtk.keysyms.Return:
            self._processLine()
            return True
        elif event.keyval == gtk.keysyms.Up:
            self.changeLine(self.historyBack())
            return True
        elif event.keyval == gtk.keysyms.Down:
            self.changeLine(self.historyForward())
            return True
        elif event.keyval == gtk.keysyms.Tab:
            current_line = self.getCurrentLine()
            if not current_line.strip():
                return False
            completed, possibilities = self.complete(current_line)
            if len(possibilities) > 1:
                self.write('\n')
                for symbol in possibilities:
                    self.write(symbol + '\n')
                self.showPrompt(self.prompt)
            # Fall back to the unmodified line if completion produced nothing.
            self.changeLine(completed or current_line)
            return True

    def _processLine(self):
        self.history_pos = 0
        self.execute()
        rv = self.cout.getvalue()
        if rv: rv = rv.strip('\n')
        self.showReturned(rv)
        self.cout.truncate(0)
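
The view hands one StringIO to the embedded shell as both cout and cerr, then drains it after every executed line. A rough, GTK-free sketch of that capture-and-drain cycle using only the standard library (process_line and the namespace dict are illustrative, not the IterableIPShell API):

import io
from contextlib import redirect_stdout

cout = io.StringIO()

def process_line(source, namespace):
    # Run one line of user input with its output captured in `cout`.
    with redirect_stdout(cout):
        exec(source, namespace)
    rv = cout.getvalue().strip('\n')
    cout.seek(0)
    cout.truncate(0)   # empty the buffer before the next line
    return rv

ns = {}
print(process_line("print('hello from the shell')", ns))  # hello from the shell
print(repr(cout.getvalue()))                              # ''
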