Example #1
    def test_any_sequences_to_fasta(self):
        for fn, obj in ((_sequence_collection_to_fasta, self.seq_coll),
                        (_alignment_to_fasta, self.align)):
            # test writing with default parameters
            fh = StringIO()
            fn(obj, fh)
            obs = fh.getvalue()
            fh.close()

            with open(get_data_path('fasta_3_seqs_defaults'), 'U') as fh:
                exp = fh.read()

            self.assertEqual(obs, exp)

            # test writing with non-defaults
            fasta_fh = StringIO()
            qual_fh = StringIO()
            fn(obj, fasta_fh, id_whitespace_replacement='*',
               description_newline_replacement='+', max_width=3, qual=qual_fh)
            obs_fasta = fasta_fh.getvalue()
            obs_qual = qual_fh.getvalue()
            fasta_fh.close()
            qual_fh.close()

            with open(get_data_path('fasta_3_seqs_non_defaults'), 'U') as fh:
                exp_fasta = fh.read()
            with open(get_data_path('qual_3_seqs_non_defaults'), 'U') as fh:
                exp_qual = fh.read()

            self.assertEqual(obs_fasta, exp_fasta)
            self.assertEqual(obs_qual, exp_qual)
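
Both this example and Example #7 read the writer output back with getvalue(); most of the snippets below use seek(0) followed by read() instead. A minimal sketch of the difference, assuming six.StringIO resolves to io.StringIO as it does on Python 3:

from io import StringIO

buf = StringIO()
buf.write("line one\n")
buf.write("line two\n")

# getvalue() returns the whole buffer regardless of the cursor position...
assert buf.getvalue() == "line one\nline two\n"

# ...while read() starts at the cursor, which sits at the end after writing,
# so an explicit seek(0) is needed first.
assert buf.read() == ""
buf.seek(0)
assert buf.read() == "line one\nline two\n"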
Example #2
 def csv_iter():
     rows = iter(table)
     fo = IO()
     csv_writer = csv.writer(fo)
     csv_writer.writerow(converter.header2())
     while True:
         try:
           for _ in range(1000):
             row = next(rows)
             #print row
             csv_writer.writerow(row)
         except StopIteration:
             fo.seek(0)
             yield fo.read().encode('utf-8')
             del fo
             break
         fo.seek(0)
         data = fo.read().encode('utf-8')
         fo.seek(0)
         fo.truncate()
         yield data
     if converter.errors:
         yield 'The following errors were found at unspecified points in processing:\n'
         for error in converter.errors:
             yield str(error)+'\n'
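
The loop above fills one in-memory buffer per batch: write up to 1000 rows, seek(0) and read the text out, then seek(0) and truncate() before the next batch. The same idiom isolated into a self-contained sketch (csv_chunks and its arguments are hypothetical names, not part of the original converter):

import csv
from io import StringIO

def csv_chunks(rows, header, batch_size=1000):
    """Yield CSV text in batches while reusing a single in-memory buffer."""
    buf = StringIO()
    writer = csv.writer(buf)
    writer.writerow(header)
    for i, row in enumerate(rows, 1):
        writer.writerow(row)
        if i % batch_size == 0:
            buf.seek(0)
            yield buf.read()
            buf.seek(0)
            buf.truncate()  # empty the buffer before the next batch
    buf.seek(0)
    yield buf.read()        # whatever has not been flushed yet

# e.g. b"".join(chunk.encode("utf-8") for chunk in csv_chunks(data, ["a", "b"]))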
Example #3
def compress_string(s):

    # avg_block_size is actually the reciprocal of the average
    # intended interflush distance.

    rnd = Random(s)

    flushes_remaining = FLUSH_LIMIT

    if len(s) < AVERAGE_SPAN_BETWEEN_FLUSHES * APPROX_MIN_FLUSHES:
        avg_block_size = APPROX_MIN_FLUSHES / float(len(s) + 1)
    else:
        avg_block_size = 1.0 / AVERAGE_SPAN_BETWEEN_FLUSHES

    s = StringIO(s) if isinstance(s, six.text_type) else BytesIO(s)
    zbuf = BytesIO()
    zfile = GzipFile(mode='wb', compresslevel=6, fileobj=zbuf)
    chunk = s.read(MIN_INTERFLUSH_INTERVAL + int(rnd.expovariate(avg_block_size)))
    while chunk and flushes_remaining:
        zfile.write(chunk)
        zfile.flush()
        flushes_remaining -= 1
        chunk = s.read(MIN_INTERFLUSH_INTERVAL + int(rnd.expovariate(avg_block_size)))
    zfile.write(chunk)
    zfile.write(s.read())
    zfile.close()
    return zbuf.getvalue()
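
Assuming the module-level constants used above (FLUSH_LIMIT, AVERAGE_SPAN_BETWEEN_FLUSHES, APPROX_MIN_FLUSHES and MIN_INTERFLUSH_INTERVAL) are defined, the randomly placed flushes only change where the deflate blocks end; the result is still ordinary gzip data and should round-trip:

import gzip

payload = b"some payload " * 1000
compressed = compress_string(payload)

# Flush points vary with the seed derived from the input, but the
# decompressed bytes always equal the original payload.
assert gzip.decompress(compressed) == payload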
Example #4
    def test_any_sequence_to_fasta(self):
        # store writer function, sequence object to write, expected
        # fasta filepath for default parameters, expected fasta filepath for
        # non-defaults, and expected qual filepath for non-defaults
        id_ = 'f o o'
        desc = 'b\na\nr'
        test_data = (
            (_biological_sequence_to_fasta,
             Sequence('ACGT', id=id_, description=desc,
                      quality=range(1, 5)),
             ('fasta_single_bio_seq_defaults',
              'fasta_single_bio_seq_non_defaults',
              'qual_single_bio_seq_non_defaults')),
            (_dna_sequence_to_fasta,
             DNA('TACG', id=id_, description=desc, quality=range(4)),
             ('fasta_single_dna_seq_defaults',
              'fasta_single_dna_seq_non_defaults',
              'qual_single_dna_seq_non_defaults')),
            (_rna_sequence_to_fasta,
             RNA('UACG', id=id_, description=desc, quality=range(2, 6)),
             ('fasta_single_rna_seq_defaults',
              'fasta_single_rna_seq_non_defaults',
              'qual_single_rna_seq_non_defaults')),
            (_protein_sequence_to_fasta,
             Protein('PQQ', id=id_, description=desc, quality=[42, 41, 40]),
             ('fasta_single_prot_seq_defaults',
              'fasta_single_prot_seq_non_defaults',
              'qual_single_prot_seq_non_defaults')))

        for fn, obj, fps in test_data:
            defaults_fp, non_defaults_fasta_fp, non_defaults_qual_fp = fps

            # test writing with default parameters
            fh = StringIO()
            fn(obj, fh)
            obs = fh.getvalue()
            fh.close()

            with open(get_data_path(defaults_fp), 'U') as fh:
                exp = fh.read()

            self.assertEqual(obs, exp)

            # test writing with non-defaults
            fasta_fh = StringIO()
            qual_fh = StringIO()
            fn(obj, fasta_fh, id_whitespace_replacement='-',
               description_newline_replacement='_', max_width=1, qual=qual_fh)
            obs_fasta = fasta_fh.getvalue()
            obs_qual = qual_fh.getvalue()
            fasta_fh.close()
            qual_fh.close()

            with open(get_data_path(non_defaults_fasta_fp), 'U') as fh:
                exp_fasta = fh.read()
            with open(get_data_path(non_defaults_qual_fp), 'U') as fh:
                exp_qual = fh.read()

            self.assertEqual(obs_fasta, exp_fasta)
            self.assertEqual(obs_qual, exp_qual)
Example #5
def patch(old_file_name, new_file_name, patch_file_name):

    patch_file = open(patch_file_name, "rb")
    patch_file.read(8) #magic number

    compressed_control_len = offtin(patch_file.read(8))
    compressed_diff_len = offtin(patch_file.read(8))
    new_file_len = offtin(patch_file.read(8))

    compressed_control_block = patch_file.read(compressed_control_len)
    compressed_diff_block    = patch_file.read(compressed_diff_len)
    compressed_extra_block   = patch_file.read()

    control_stream = StringIO(bz2.decompress(compressed_control_block))
    diff_string    = bz2.decompress(compressed_diff_block)
    extra_string   = bz2.decompress(compressed_extra_block)

    control_tuples_list = []
    while True:
        r = control_stream.read(8)
        if not r:
            break
        x = offtin(r)
        y = offtin(control_stream.read(8))
        z = offtin(control_stream.read(8))
        control_tuples_list.append((x,y,z))

    old_data = open(old_file_name, "rb").read()
    new_data = Patch(old_data, new_file_len, control_tuples_list, diff_string, extra_string)

    new_file = open(new_file_name, "wb")
    new_file.write(new_data)
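
offtin() is not defined in this snippet; in the reference bsdiff implementation it decodes an 8-byte little-endian sign-and-magnitude integer, so a Python 3 stand-in might look like:

def offtin(buf):
    """Decode bsdiff's 8-byte little-endian sign-and-magnitude integer."""
    y = int.from_bytes(buf[:8], "little")
    if y & (1 << 63):        # the high bit of the last byte carries the sign
        y = -(y ^ (1 << 63))
    return y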
Example #6
    def _parse10(self):

        """Messages are delimited by MSG_DELIM. The buffer could have grown by
        a maximum of BUF_SIZE bytes everytime this method is called. Retains
        state across method calls and if a byte has been read it will not be
        considered again."""

        logger.debug("parsing netconf v1.0")
        delim = MSG_DELIM
        n = len(delim) - 1
        expect = self._parsing_state10
        buf = self._buffer
        buf.seek(self._parsing_pos10)
        while True:
            x = buf.read(1)
            if isinstance(x, bytes):
                x = x.decode('UTF-8')
            if not x: # done reading
                break
            elif x == delim[expect]: # what we expected
                expect += 1 # expect the next delim char
            else:
                expect = 0
                continue
            # loop till last delim char expected, break if other char encountered
            for i in range(expect, n):
                x = buf.read(1)
                if isinstance(x, bytes):
                    x = x.decode('UTF-8')
                if not x: # done reading
                    break
                if x == delim[expect]: # what we expected
                    expect += 1 # expect the next delim char
                else:
                    expect = 0 # reset
                    break
            else: # if we didn't break out of the loop, full delim was parsed
                msg_till = buf.tell() - n
                buf.seek(0)
                logger.debug('parsed new message')
                if sys.version < '3':
                    self._dispatch_message(buf.read(msg_till).strip())
                    buf.seek(n+1, os.SEEK_CUR)
                    rest = buf.read()
                    buf = StringIO()
                else:
                    self._dispatch_message(buf.read(msg_till).strip().decode('UTF-8'))
                    buf.seek(n+1, os.SEEK_CUR)
                    rest = buf.read()
                    buf = BytesIO()
                buf.write(rest)
                buf.seek(0)
                expect = 0
        self._buffer = buf
        self._parsing_state10 = expect
        self._parsing_pos10 = self._buffer.tell()
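
In ncclient, MSG_DELIM is the NETCONF 1.0 end-of-message marker "]]>]]>". The state machine above is an incremental, byte-at-a-time version of this one-shot split (shown on a plain string for clarity):

MSG_DELIM = "]]>]]>"   # NETCONF 1.0 end-of-message marker

buffered = "<rpc-reply><ok/></rpc-reply>]]>]]><rpc-reply><ok/></rpc-reply>]]>]]><rpc-r"
*complete, remainder = buffered.split(MSG_DELIM)
# complete  -> the two finished messages, ready to dispatch
# remainder -> '<rpc-r', kept in the buffer until more data arrives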
Example #7
    def test_any_sequences_to_fasta(self):
        for fn, obj in ((_sequence_collection_to_fasta, self.seq_coll),
                        (_alignment_to_fasta, self.align)):
            # test writing with default parameters
            fh = StringIO()
            fn(obj, fh)
            obs = fh.getvalue()
            fh.close()

            with open(get_data_path('fasta_3_seqs_defaults'), 'U') as fh:
                exp = fh.read()

            self.assertEqual(obs, exp)

            # test writing with non-defaults
            fasta_fh = StringIO()
            qual_fh = StringIO()
            fn(obj, fasta_fh, id_whitespace_replacement='*',
               description_newline_replacement='+', max_width=3, qual=qual_fh)
            obs_fasta = fasta_fh.getvalue()
            obs_qual = qual_fh.getvalue()
            fasta_fh.close()
            qual_fh.close()

            with open(get_data_path('fasta_3_seqs_non_defaults'), 'U') as fh:
                exp_fasta = fh.read()
            with open(get_data_path('qual_3_seqs_non_defaults'), 'U') as fh:
                exp_qual = fh.read()

            self.assertEqual(obs_fasta, exp_fasta)
            self.assertEqual(obs_qual, exp_qual)

            fh2 = StringIO()
            with self.assertRaisesRegexp(AttributeError,
                                         "lowercase specified but class "
                                         "Sequence does not support lowercase "
                                         "functionality"):
                fn(obj, fh2, lowercase='introns')
            fh2.close()

            fasta_fh2 = StringIO()
            qual_fh2 = StringIO()
            with self.assertRaisesRegexp(AttributeError,
                                         "lowercase specified but class "
                                         "Sequence does not support lowercase "
                                         "functionality"):
                fn(obj, fasta_fh2, id_whitespace_replacement='*',
                   description_newline_replacement='+', max_width=3,
                   qual=qual_fh2, lowercase='introns')
            fasta_fh2.close()
            qual_fh2.close()
Example #8
    def test_start_subshell(self, call_mock, tempfile_mock):
        memfile = StringIO()
        memfile.name = 'FILENAME'
        tempfile_mock.return_value = memfile
        credentials = {'AWS_VALID_SECONDS': 600}
        start_subshell(credentials, 'ACCOUNT', 'ROLE')
        call_mock.assert_called_once_with(
            ["bash", "--rcfile", 'FILENAME'],
            stdout=sys.stdout, stderr=sys.stderr, stdin=sys.stdin)
        expected = dedent("""
            # Pretend to be an interactive, non-login shell
            for file in /etc/bash.bashrc ~/.bashrc; do
                [ -f "$file" ] && . "$file"
            done

            function afp_minutes_left {
                if ((SECONDS >= 600)) ; then
                    echo EXPIRED
                else
                    echo $(((600-SECONDS)/60)) Min
                fi
            }

            PS1="(AWS ACCOUNT/ROLE \$(afp_minutes_left)) $PS1"
            export AWS_VALID_SECONDS='600'""")
        memfile.seek(0)
        received = memfile.read()
        self.assertEqual(received, expected)
Example #9
def to_string(table):
    """
    Returns the table rendered as a unicode string with numbered columns and rows
    >>> type(to_string([['foo', 'goodbye'], ['llama', 'bar']]))
    <type 'unicode'>
    """
    result = StringIO()

    (columns, rows) = get_dimensions(table)
        
    result.write("     {} columns, {} rows\n".format(columns, rows))
    col_widths = find_column_widths(table)
    table_width = sum(col_widths) + len(col_widths) + 2
    hbar = '    {}\n'.format('-' * table_width)

    result.write("      {}\n".format(' '.join(
        [six.text_type(col_index).rjust(width, ' ') for (col_index, width)
         in enumerate(col_widths)])))

    result.write(hbar)
    for row_index, row in enumerate(table):
        cells = [cell.rjust(width, ' ') for (cell, width)
                 in zip(row, col_widths)]
        result.write("{:>3} | {}|\n".format(row_index, '|'.join(cells)))
    result.write(hbar)
    result.seek(0)
    return six.text_type(result.read())
Example #10
def write_f90namelist(f90namelist, stream=None):
    """ Writes namelist to file or string, or stream

        - if stream is None (default), then returns a string containing namelist in fortran
            format
        - if stream is a string, then it should a path to a file
        - otherwise, stream is assumed to be a stream of some sort, with a `write` method

        Keywords are passed on to :py:method:`Namelist.namelist`
    """
    from f90nml import Namelist as F90Namelist
    from six import StringIO
    from ..misc import local_path
    if stream is None:
        result = StringIO()
        write_f90namelist(f90namelist, result)
        result.seek(0)
        return result.read()

    if isinstance(stream, str):
        path = local_path(stream)
        logger.log(10, "Writing fortran namelist to %s" % path)
        with open(path, 'w') as file:
            write_f90namelist(f90namelist, file)
        return
    f90namelist.write(stream)
Example #11
    def _run_doit(self, sel_tasks, reporter=None, doit_vars=None):
        """load this file as dodo file to collect tasks"""
        inc = IncrementalTasks(self.py_files, test_files=list(self.test_files))
        output = StringIO()
        config = {
            'dep_file': self.DB_FILE,
            'continue': True,
            'outfile': output,
        }
        if reporter:
            config['reporter'] = reporter

        ctx = {
            'tasks_generator': inc,
            'DOIT_CONFIG': config,
        }
        doit_cmd.reset_vars()
        if doit_vars:
            for key, value in doit_vars.items():
                doit_cmd.set_var(key, value)
        loader = ModuleTaskLoader(ctx)
        cmd = Run(task_loader=loader)
        cmd.parse_execute(sel_tasks)
        output.seek(0)
        return inc.graph, output.read()
Example #12
def test_file_output():
    """ Tests that output to arbitrary file-like objects works """
    our_file = StringIO()
    for i in tqdm(range(3), file=our_file):
        if i == 1:
            our_file.seek(0)
            assert '0/3' in our_file.read()
Example #13
 def test_log(self):
     # Logger
     logger = logging.getLogger(__name__)
     logger.level = logging.INFO
     stream = StringIO()
     stream_handler = logging.StreamHandler(stream)
     logger.addHandler(stream_handler)
     # Actual test
     request = testing.DummyRequest()
     apply_request_extensions(request)
     self.assertTrue(request.authenticated_userid, 'jane')
     obj = self._cut(request)
     # Patch logger
     obj.logger = logger
     obj.add('uid', 'darth', [ROLE_VIEWER], [ROLE_OWNER])
     payload = obj.prepare()
     formatted = obj.format(payload)
     try:
         obj.log(formatted)
         stream.seek(0)
         output = stream.read()
     finally:
         logger.removeHandler(stream_handler)
     self.assertIn('"+": ["role:Viewer"]', output)
     self.assertIn('"-": ["role:Owner"]', output)
     self.assertIn('"jane"', output)
     # Make sure json can read this
     json_row = loads(output)
     self.assertIn('contexts', json_row)
Example #14
def test_to_string3():
    # Test printing
    outcomes = ['00', '01', '10', '11']
    pmf = [1/4]*4
    d = Distribution(outcomes, pmf)
    s_ = """Class:          Distribution
Alphabet:       ('0', '1') for all rvs
Base:           linear
Outcome Class:  str
Outcome Length: 2
RV Names:       None

x    p(x)
00   0.25
01   0.25
10   0.25
11   0.25"""

    # context manager?
    import sys
    from six import StringIO
    sio = StringIO()
    try:
        old = sys.stdout
        sys.stdout = sio
        print(d, end='')
    finally:
        sys.stdout = old
    sio.seek(0)
    s = sio.read()
    assert_equal(s, s_)
Example #15
def runquery_csv():
	global out

	q = frappe.form_dict.get('query')

	rep_name = frappe.form_dict.get('report_name')
	if not frappe.form_dict.get('simple_query'):

		# Report Name
		if not rep_name:
			rep_name = get_sql_tables(q)[0]

	if not rep_name: rep_name = 'DataExport'

	rows = [[rep_name], out['colnames']] + out['values']

	from six import StringIO
	import csv

	f = StringIO()
	writer = csv.writer(f)
	for r in rows:
		# encode only unicode type strings and not int, floats etc.
		writer.writerow(map(lambda v: isinstance(v, text_type) and v.encode('utf-8') or v, r))

	f.seek(0)
	out['result'] = text_type(f.read(), 'utf-8')
	out['type'] = 'csv'
	out['doctype'] = rep_name
Example #16
 def toXml(self, filename='', compress=False):
     xml = StringIO()
     xml.write("<?xml version='1.0' encoding='UTF-8'?>\n")
     xml.write(
         "<!DOCTYPE svg PUBLIC \"-//W3C//DTD SVG 1.0//EN\" \"http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/svg10.dtd \">\n")
     self.svg.toXml(0, xml)
     if not filename:
         if compress:
             import gzip
             f = StringIO()
             zf = gzip.GzipFile(fileobj=f, mode='wb')
             zf.write(xml.getvalue())
             zf.close()
             f.seek(0)
             return f.read()
         else:
             return xml.getvalue()
     else:
         if filename[-4:] == 'svgz':
             import gzip
             f = gzip.GzipFile(
                 filename=filename, mode="wb", compresslevel=9)
             f.write(xml.getvalue())
             f.close()
         else:
             f = file(filename, 'w')
             f.write(xml.getvalue())
             f.close()
Example #17
def test_write_csv_to_stream():
    observation = OrderedDict([
        ('datetime', datetime.datetime(2015, 1, 24, 18, tzinfo=pytz.UTC)),
        ('wind_direction', 'W'),
        ('wind_direction_degrees', 270.0),
        ('pressure_tendency', 'R'),
        ('screen_relative_humidity', 82.5),
        ('pressure', 1029.0),
        ('wind_speed', 16.0),
        ('temperature', 6.1),
        ('weather_type', 'Overcast'),
        ('visibility', 9000.0),
        ('dew_point', 3.4),
    ])

    stream = StringIO()
    write_csv_to_stream(stream, [observation])
    stream.seek(0)

    lines = stream.read().split('\r\n')

    expected_header = (
        'datetime,wind_direction,wind_direction_degrees,pressure_tendency,'
        'screen_relative_humidity,pressure,wind_speed,wind_gust,temperature,'
        'weather_type,visibility,dew_point')

    expected_line_1 = ','.join([
        '2015-01-24T18:00:00Z', 'W', '270.0', 'R', '82.5', '1029.0', '16.0',
        '', '6.1', 'Overcast', '9000.0', '3.4'])

    yield assert_equal, expected_header, lines[0]
    yield assert_equal, expected_line_1, lines[1]
Example #18
def execute_code(string, state, data=None):
    string = displayhook_hack(string)

    # Now execute the code capturing the output and files that are
    # generated.
    back = os.path.abspath('.')
    tempdir = tempfile.mkdtemp()
    if data is not None:
        # make a symbolic link from the data directory into local tmp directory
        os.symlink(data, os.path.join(tempdir, os.path.split(data)[1]))
    
    s = StringIO()
    saved_stream = sys.stdout
    sys.stdout = s
    try:
        os.chdir(tempdir)
        exec(string, state)
    except Exception:
        traceback.print_exc(file=s)
    finally:
        sys.stdout = saved_stream
        os.chdir(back)
    s.seek(0)
    out = str(s.read())
    files = [os.path.join(tempdir, x) for x in os.listdir(tempdir)]
    return out, files, tempdir
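
On Python 3 the manual save-and-restore of sys.stdout above can be expressed with contextlib.redirect_stdout, which puts the real stream back even if the executed code raises; a minimal sketch:

import contextlib
import io

buf = io.StringIO()
with contextlib.redirect_stdout(buf):
    exec("print(6 * 7)", {})
assert buf.getvalue().strip() == "42"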
Example #19
def write_f90namelist(f90namelist, stream=None):
    """ Writes namelist to file or string, or stream

        - if stream is None (default), then returns a string containing namelist in fortran
            format
        - if stream is a string, then it should a path to a file
        - otherwise, stream is assumed to be a stream of some sort, with a `write` method

        Keywords are passed on to :py:method:`Namelist.namelist`
    """
    from f90nml import Namelist as F90Namelist
    from six import StringIO
    from ..misc import local_path
    if stream is None:
        result = StringIO()
        write_f90namelist(f90namelist, result)
        result.seek(0)
        return result.read()

    if isinstance(stream, str):
        path = local_path(stream)
        logger.log(10, "Writing fortran namelist to %s" % path)
        with open(path, 'w') as file:
            write_f90namelist(f90namelist, file)
        return

    for key, value in f90namelist.items():
        if isinstance(value, list):
            for g_vars in value:
                f90namelist.write_nmlgrp(key, g_vars, stream)
        elif isinstance(value, F90Namelist):
            f90namelist.write_nmlgrp(key, value, stream)
        else:
            raise RuntimeError("Can only write namelists that consist of namelists")
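
The stream-is-None branch above is a reusable shape: allocate a StringIO, call the same function recursively with it, and return the captured text. A stripped-down, self-contained version of that shape (render() is a hypothetical stand-in, not part of f90nml):

from io import StringIO

def render(data, stream=None):
    """Write key = value lines to `stream`, or return them as a string."""
    if stream is None:
        buf = StringIO()
        render(data, buf)       # recurse with the in-memory stream
        return buf.getvalue()   # same result as buf.seek(0); buf.read()
    for key, value in data.items():
        stream.write("{} = {}\n".format(key, value))

print(render({"alpha": 1, "beta": 2}))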
Example #20
    class _File(object):
        """
        A file like object representing a file in git

        @todo: We don't support any byte ranges yet.
        """
        def __init__(self, content):
            self._iter = iter
            self._data = StringIO(content)

        def readline(self):
            return self._data.readline()

        def readlines(self):
            return self._data.readlines()

        def read(self, size=None):
            return self._data.read(size)

        def close(self):
            return self._data.close()

        def __enter__(self):
            return self

        def __exit__(self, exc_type, exc_val, exc_tb):
            self.close()
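
A usage sketch for the wrapper above; __enter__ returns the instance, so it can drive a with block directly:

with _File("line one\nline two\n") as f:
    assert f.readline() == "line one\n"
    assert f.read() == "line two\n"
# __exit__ closes the underlying StringIO on the way out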
Example #21
def export_query():
	"""export from report builder"""
	form_params = get_form_params()
	form_params["limit_page_length"] = None
	form_params["as_list"] = True
	doctype = form_params.doctype
	add_totals_row = None
	file_format_type = form_params["file_format_type"]

	del form_params["doctype"]
	del form_params["file_format_type"]

	if 'add_totals_row' in form_params and form_params['add_totals_row']=='1':
		add_totals_row = 1
		del form_params["add_totals_row"]

	frappe.permissions.can_export(doctype, raise_exception=True)

	if 'selected_items' in form_params:
		si = json.loads(frappe.form_dict.get('selected_items'))
		form_params["filters"] = {"name": ("in", si)}
		del form_params["selected_items"]

	db_query = DatabaseQuery(doctype)
	ret = db_query.execute(**form_params)

	if add_totals_row:
		ret = append_totals_row(ret)

	data = [['Sr'] + get_labels(db_query.fields, doctype)]
	for i, row in enumerate(ret):
		data.append([i+1] + list(row))

	if file_format_type == "CSV":

		# convert to csv
		import csv
		from frappe.utils.xlsxutils import handle_html

		f = StringIO()
		writer = csv.writer(f)
		for r in data:
			# encode only unicode type strings and not int, floats etc.
			writer.writerow([handle_html(frappe.as_unicode(v)).encode('utf-8') \
				if isinstance(v, string_types) else v for v in r])

		f.seek(0)
		frappe.response['result'] = text_type(f.read(), 'utf-8')
		frappe.response['type'] = 'csv'
		frappe.response['doctype'] = doctype

	elif file_format_type == "Excel":

		from frappe.utils.xlsxutils import make_xlsx
		xlsx_file = make_xlsx(data, doctype)

		frappe.response['filename'] = doctype + '.xlsx'
		frappe.response['filecontent'] = xlsx_file.getvalue()
		frappe.response['type'] = 'binary'
Example #22
 def __str__(self):
     stream = StringIO()
     pprint.pprint(self.extractors, stream)
     stream.seek(0)
     template_data = stream.read()
     if template_data:
         return "%s[\n%s\n]" % (self.__class__.__name__, template_data)
     return "%s[none]" % (self.__class__.__name__)
Example #23
def qtree(q):
    q = optimize(q)
    f = StringIO()
    q.print_tree(out=f)
    f.seek(0)
    out = f.read()
    f.close()
    return out
Example #24
    def to_string(self, digits=None, exact=None, tol=1e-9):
        """
        Returns a string representation of the distribution.

        Parameters
        ----------
        digits : int or None
            The probabilities will be rounded to the specified number of
            digits, using NumPy's around function. If `None`, then no rounding
            is performed. Note, if the number of digits is greater than the
            precision of the floats, then the resultant number of digits will
            match that smaller precision.
        exact : bool
            If `True`, then linear probabilities will be displayed, even if
            the underlying pmf contains log probabilities.  The closest
            rational fraction within a tolerance specified by `tol` is used
            as the display value.
        tol : float
            If `exact` is `True`, then the probabilities will be displayed
            as the closest rational fraction within `tol`.

        Returns
        -------
        s : str
            A string representation of the distribution.

        """
        from six import StringIO
        s = StringIO()

        if exact is None:
            exact = ditParams['print.exact']

        x = prepare_string(self, digits, exact, tol)
        pmf, outcomes, base, colsep, max_length, pstr = x

        headers = ["Class: ",
                   "Alphabet: ",
                   "Base: "]
        vals = [self.__class__.__name__,
                self.alphabet,
                base]

        L = max(map(len, headers))
        for head, val in zip(headers, vals):
            s.write("{0}{1}\n".format(head.ljust(L), val))
        s.write("\n")

        s.write(''.join(['x'.ljust(max_length), colsep, pstr, "\n"]))
        for o, p in zip(outcomes, pmf):
            s.write(''.join([o.ljust(max_length), colsep, str(p), "\n"]))

        s.seek(0)
        s = s.read()
        # Remove the last \n
        s = s[:-1]

        return s
Example #25
def test_json_format():
    broker = dr.Broker()
    output = StringIO()
    with JsonFormat(broker, stream=output):
        dr.run(report, broker=broker)
    output.seek(0)
    data = output.read()
    assert "foo" in data
    assert "bar" in data
Example #26
def test_human_readable():
    broker = dr.Broker()
    output = StringIO()
    with HumanReadableFormat(broker, stream=output):
        dr.run(report, broker=broker)
    output.seek(0)
    data = output.read()
    assert "foo" in data
    assert "bar" in data
Example #27
def test_setmeta_bytes():
    p = Properties()
    p["a key"] = "the value", {b"metakey": b"metaval"}

    out = StringIO()
    p.store(out, strip_meta=False, timestamp=False)

    out.seek(0)
    assert out.read() == "#: metakey=metaval\na\\ key=the value\n"
Example #28
    def test_management_command(self):
        """Test freeze management command"""
        stdout = StringIO()
        call_command('bower_freeze', stdout=stdout)
        stdout.seek(0)
        output = stdout.read()

        self.assertIn('BOWER_INSTALLED_APPS', output)
        self.assertIn('backbone', output)
Example #29
    def _body(self):
        result = self._discovered.spreadsheets().values()\
            .get(spreadsheetId=self._sheet_id, range=self._range).execute()
        out_io = StringIO()
        writer = csv.writer(out_io, delimiter=',')
        writer.writerows(result.get('values', []))
        out_io.seek(0)

        return BytesIO(out_io.read().encode())
Example #30
def test_simple_html_format():
    broker = dr.Broker()
    output = StringIO()
    with SimpleHtmlFormat(broker, stream=output):
        dr.run(report, broker=broker)
    output.seek(0)
    data = output.read()
    assert "foo" in data
    assert "bar" in data
Example #31
def test_syslog_format_no_archive():
    broker = dr.Broker()
    output = StringIO()
    with SysLogFormat(broker, stream=output):
        dr.run(report, broker=broker)
    output.seek(0)
    data = output.read()
    assert SL_MSG in data
    assert SL_CMD in data
Example #32
    def test_management_command(self):
        """Test freeze management command"""
        stdout = StringIO()
        call_command('bower_freeze', stdout=stdout)
        stdout.seek(0)
        output = stdout.read()

        self.assertIn('BOWER_INSTALLED_APPS', output)
        self.assertIn('backbone', output)
Example #33
 def test_color_output_wrapped_as_expected_with_no_args(self):
     out = StringIO()
     with redirect_stderr(out):
         self.assertRaises(SystemExit, rainbow_maker_no_args, ['--bad'])
     out.seek(0)
     self.assertEqual(
         out.read(), 'usage: {rainbow_maker}\n'
         '{rainbow_maker}: error: unrecognized arguments: --bad\n'.format(
             **color_kwargs))
Example #34
def test_leave_option():
    """
    Tests that if leave=True, tqdm will leave the info
    about the last iteration on the screen
    """
    our_file = StringIO()
    for i in tqdm(range(3), file=our_file, leave=True):
        pass
    our_file.seek(0)
    assert '3/3 100%' in our_file.read()
    our_file.close()

    our_file2 = StringIO()
    for i in tqdm(range(3), file=our_file2, leave=False):
        pass
    our_file2.seek(0)
    assert '3/3 100%' not in our_file2.read()
    our_file2.close()
Example #35
    def test_figure(self):
        # http://stackoverflow.com/a/2473445/1694979
        imgfile = StringIO(
            'GIF87a\x01\x00\x01\x00\x80\x01\x00\x00\x00\x00ccc,'
            '\x00\x00\x00\x00\x01\x00\x01\x00\x00\x02\x02D\x01\x00;')
        imgfile.name = 'test_img_file.gif'

        self.q.figure.save('image', ContentFile(imgfile.read()))
        self.assertIsInstance(self.q.figure, ImageFieldFile)
Example #36
class HttpResponse(object):
    def __init__(self, body=None, status=None, reason=None, headers=None):
        """Constructor for an HttpResponse object.

    HttpResponse represents the server's response to an HTTP request from
    the client. The HttpClient.request method returns a httplib.HTTPResponse
    object and this HttpResponse class is designed to mirror the interface
    exposed by httplib.HTTPResponse.

    Args:
      body: A file like object, with a read() method. The body could also
          be a string, and the constructor will wrap it so that
          HttpResponse.read(self) will return the full string.
      status: The HTTP status code as an int. Example: 200, 201, 404.
      reason: The HTTP status message which follows the code. Example:
          OK, Created, Not Found
      headers: A dictionary containing the HTTP headers in the server's
          response. A common header in the response is Content-Length.
    """
        if body:
            if hasattr(body, 'read'):
                self._body = body
            else:
                self._body = StringIO(body)
        else:
            self._body = None
        if status is not None:
            self.status = int(status)
        else:
            self.status = None
        self.reason = reason
        self._headers = headers or {}

    def getheader(self, name, default=None):
        if name in self._headers:
            return self._headers[name]
        else:
            return default

    def read(self, amt=None):
        if not amt:
            return self._body.read()
        else:
            return self._body.read(amt)
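
A usage sketch for the class above with a plain string body; the constructor wraps the string in a StringIO so read() behaves like a file:

resp = HttpResponse(body="hello world", status="200", reason="OK",
                    headers={"Content-Length": "11"})
assert resp.status == 200                        # coerced to int
assert resp.read(5) == "hello"                   # partial read
assert resp.read() == " world"                   # rest of the body
assert resp.getheader("Content-Length") == "11"
assert resp.getheader("X-Missing", default="n/a") == "n/a"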
Example #37
def sitemap_generator(request, maps, page, current_site):
    output = StringIO()
    protocol = request.is_secure() and 'https' or 'http'
    xml = SimplerXMLGenerator(output, settings.DEFAULT_CHARSET)
    xml.startDocument()
    xml.startElement('urlset', {'xmlns':'http://www.sitemaps.org/schemas/sitemap/0.9'})
    yield output.getvalue()
    pos = output.tell()
    for site in maps:
        if callable(site):
            if issubclass(site, RequestSitemap):
                site = site(request=request)
            else:
                site = site()
        elif hasattr(site, 'request'):
            site.request = request
        for url in site.get_urls(page=page, site=current_site, protocol=protocol):
            xml.startElement('url', {})
            xml.addQuickElement('loc', url['location'])
            try:
                if url['lastmod']:
                    xml.addQuickElement('lastmod', url['lastmod'].strftime('%Y-%m-%d'))
            except (KeyError, AttributeError):
                pass
            try:
                if url['changefreq']:
                    xml.addQuickElement('changefreq', url['changefreq'])
            except KeyError:
                pass
            try:
                if url['priority']:
                    xml.addQuickElement('priority', url['priority'])
            except KeyError:
                pass
            xml.endElement('url')
            output.seek(pos)
            yield output.read()
            pos = output.tell()
    xml.endElement('urlset')
    xml.endDocument()
    output.seek(pos)
    last = output.read()
    output.close()
    yield last
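
The generator above interleaves writes from SimplerXMLGenerator with incremental reads: seek back to the last read position, read whatever is new, and remember where the read stopped. The core trick isolated into a small self-contained sketch:

from io import StringIO

buf = StringIO()
pos = 0

def new_output():
    """Return only what has been written to buf since the previous call."""
    global pos
    buf.seek(pos)
    chunk = buf.read()
    pos = buf.tell()   # the shared cursor now sits at the end, where the next write lands
    return chunk

buf.write("<urlset>")
assert new_output() == "<urlset>"
buf.write("<url/>")
assert new_output() == "<url/>"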
Example #38
def check_binary(name, file_path=True):
    # Handles files if file_path is True or text if file_path is False
    if file_path:
        temp = open(name, "U")
    else:
        temp = StringIO(name)
    try:
        return util.is_binary(temp.read(1024))
    finally:
        temp.close()
Example #39
    def test_create_namelist(self):
        stream = StringIO()
        cfg = yaml.load(open('test_conf.yml'))
        create_namelist(cfg, stream)

        stream.seek(0)

        res = stream.read()
        self.assertGreater(len(res), 0)
        self.assertIn('INOBSFOR', res)
Example #40
def check_binary(name, file_path=True):
    # Handles files if file_path is True or text if file_path is False
    if file_path:
        temp = open(name, "U")
    else:
        temp = StringIO(name)
    try:
        return util.is_binary(temp.read(1024))
    finally:
        temp.close()
Example #41
class CaptureStdout(object):
    """Context manager which captures stdout and stderr. """

    def __init__(self):
        self.reset()

    def reset(self):
        self.__mock_stdout = StringIO('')
        self.__mock_stderr = StringIO('')
        self.__mock_stdout.mode = 'w'
        self.__mock_stderr.mode = 'w'
        return self

    def __enter__(self):
        self.__real_stdout = sys.stdout
        self.__real_stderr = sys.stderr

        sys.stdout = self.__mock_stdout
        sys.stderr = self.__mock_stderr


    def __exit__(self, *args, **kwargs):
        sys.stdout = self.__real_stdout
        sys.stderr = self.__real_stderr

        if args[0] is not None:
            print('Error')
            print('stdout:')
            print(self.stdout)
            print('stderr:')
            print(self.stderr)

        return False

    @property
    def stdout(self):
        self.__mock_stdout.seek(0)
        return self.__mock_stdout.read()

    @property
    def stderr(self):
        self.__mock_stderr.seek(0)
        return self.__mock_stderr.read()
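
A usage sketch for CaptureStdout; note that __enter__ above does not return self, so the instance has to be bound to a name before entering the with block:

cap = CaptureStdout()
with cap:
    print("hello from the test")
assert "hello from the test" in cap.stdout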
Example #42
def test_setmeta_unicode():
    p = Properties()
    p["a key"] = "the value", {u"metakey": u"ünicode metävalue!"}

    out = StringIO()
    p.store(out, encoding="utf-8", strip_meta=False, timestamp=False)

    out.seek(0)
    text = "#: metakey=ünicode metävalue\\!\na\\ key=the value\n"
    assert out.read() == text
Example #43
def test_setmeta_unicode():
    p = Properties()
    p["a key"] = "the value", {u"metakey": u"ünicode metävalue!"}

    out = StringIO()
    p.store(out, encoding="utf-8", strip_meta=False, timestamp=False)

    out.seek(0)
    text = "#: metakey=ünicode metävalue\\!\na\\ key=the value\n"
    assert out.read() == text
Example #44
def test_obtain_token_command_should_produce_valid_token(monkeypatch, user):
    output = StringIO()
    monkeypatch.setattr(settings, "DEBUG", True)
    call_command('obtain_token', str(user.pk), stdout=output)
    output.seek(0)
    printed_token = output.read()

    payload = JSONWebTokenAuthentication.jwt_decode_token(
        printed_token.strip().encode())
    assert payload['user_id'] == user.pk
Example #45
def sitemap_generator(request, maps, page, current_site):
    output = StringIO()
    xml = SimplerXMLGenerator(output, settings.DEFAULT_CHARSET)
    xml.startDocument()
    xml.startElement('urlset', {'xmlns':'http://www.sitemaps.org/schemas/sitemap/0.9'})
    yield output.getvalue()
    pos = output.tell()
    for site in maps:
        if callable(site):
            if issubclass(site, RequestSitemap):
                site = site(request=request)
            else:
                site = site()
        elif hasattr(site, 'request'):
            site.request = request
        for url in site.get_urls(page=page, site=current_site):
            xml.startElement('url', {})
            xml.addQuickElement('loc', url['location'])
            try:
                if url['lastmod']:
                    xml.addQuickElement('lastmod', url['lastmod'].strftime('%Y-%m-%d'))
            except (KeyError, AttributeError):
                pass
            try:
                if url['changefreq']:
                    xml.addQuickElement('changefreq', url['changefreq'])
            except KeyError:
                pass
            try:
                if url['priority']:
                    xml.addQuickElement('priority', url['priority'])
            except KeyError:
                pass
            xml.endElement('url')
            output.seek(pos)
            yield output.read()
            pos = output.tell()
    xml.endElement('urlset')
    xml.endDocument()
    output.seek(pos)
    last = output.read()
    output.close()
    yield last
Example #46
    def to_string(self, digits=None, exact=False, tol=1e-9):
        """
        Returns a string representation of the distribution.

        Parameters
        ----------
        digits : int or None
            The probabilities will be rounded to the specified number of
            digits, using NumPy's around function. If `None`, then no rounding
            is performed. Note, if the number of digits is greater than the
            precision of the floats, then the resultant number of digits will
            match that smaller precision.
        exact : bool
            If `True`, then linear probabilities will be displayed, even if
            the underlying pmf contains log probabilities.  The closest
            rational fraction within a tolerance specified by `tol` is used
            as the display value.
        tol : float
            If `exact` is `True`, then the probabilities will be displayed
            as the closest rational fraction within `tol`.

        Returns
        -------
        s : str
            A string representation of the distribution.

        """
        from six import StringIO
        s = StringIO()

        x = prepare_string(self, digits, exact, tol)
        pmf, outcomes, base, colsep, max_length, pstr = x

        headers = ["Class: ",
                   "Alphabet: ",
                   "Base: "]
        vals = [self.__class__.__name__,
                self.alphabet,
                base]

        L = max(map(len, headers))
        for head, val in zip(headers, vals):
            s.write("{0}{1}\n".format(head.ljust(L), val))
        s.write("\n")

        s.write(''.join(['x'.ljust(max_length), colsep, pstr, "\n"]))
        for o, p in zip(outcomes, pmf):
            s.write(''.join([o.ljust(max_length), colsep, str(p), "\n"]))

        s.seek(0)
        s = s.read()
        # Remove the last \n
        s = s[:-1]

        return s
Example #47
    def test_write(self):
        for fp, obj in zip(self.fps, self.objs):
            fh = StringIO()
            _alignment_to_phylip(obj, fh)
            obs = fh.getvalue()
            fh.close()

            with open(fp, 'U') as fh:
                exp = fh.read()

            self.assertEqual(obs, exp)
Example #48
    def test_image_in_question(self):
        imgfile = StringIO(
            'GIF87a\x01\x00\x01\x00\x80\x01\x00\x00\x00\x00ccc,'
            '\x00\x00\x00\x00\x01\x00\x01\x00\x00\x02\x02D\x01\x00;')
        imgfile.name = 'test_img_file.gif'

        self.question1.figure.save('image', ContentFile(imgfile.read()))
        response = self.client.get('/tq1/take/')

        self.assertContains(response, '<img src=')
        self.assertContains(response, 'alt="' + str(self.question1.content))
Example #49
    def test_write(self):
        for fp, obj in zip(self.valid_fps, self.ordination_results_objs):
            fh = StringIO()
            _ordination_results_to_ordres(obj, fh)
            obs = fh.getvalue()
            fh.close()

            with open(fp, 'U') as fh:
                exp = fh.read()

            npt.assert_equal(obs, exp)
Example #50
 def test_extractPortletNode(self):
     node = parseString(_XML_MULTIPLE_INTERFACES).documentElement
     self.importer._initPortletNode(node)
     portlet = getUtility(IPortletType, 'portlets.New')
     node = self.exporter._extractPortletNode('portlets.New', portlet)
     file = StringIO()
     node.writexml(file)
     file.seek(0)
     self.assertEqual(
         """<portlet title="Foo" addview="portlets.New" description="Foo"><for interface="plone.app.portlets.interfaces.IColumn"/><for interface="plone.app.portlets.interfaces.IDashboard"/></portlet>""",
         file.read())
Example #51
 def test_extractPortletNode_defaultManagerInterface(self):
     node = parseString(_XML_EXPLICIT_DEFAULT_INTERFACE).documentElement
     self.importer._initPortletNode(node)
     portlet = getUtility(IPortletType, 'portlets.New')
     node = self.exporter._extractPortletNode('portlets.New', portlet)
     file = StringIO()
     node.writexml(file)
     file.seek(0)
     self.assertEqual(
         """<portlet title="Foo" addview="portlets.New" description="Foo"/>""",
         file.read())
Example #52
 def test_color_output_wrapped_as_expected_with_auto_usage_long_prog_small_width(
         self):
     try:
         os.environ['COLUMNS'] = '42'
         out = StringIO()
         with redirect_stdout(out):
             self.assertRaises(SystemExit,
                               rainbow_maker_auto_usage_long_prog, ['-h'])
         out.seek(0)
         self.assertEqual(
             out.read(),
             'usage: {red-orange-yellow-green-blue-indigo-violet}\n'
             '       [-h]\n'
             '       first second third forth fifth\n'
             '       sixth seventh\n'
             '\n'
             'This script is a test for {rainbow_maker}.\n'
             'This description consists of 140 chars.\n'
             'It should be able to fit onto two 80\n'
             'char lines.\n'
             '\n'
             'positional arguments:\n'
             '  first       {color} used when making\n'
             '              rainbow, {typically} this\n'
             '              would be {red}.\n'
             '  second      {color} used when making\n'
             '              rainbow, {typically} this\n'
             '              would be {orange}.\n'
             '  third       {color} used when making\n'
             '              rainbow, {typically} this\n'
             '              would be {yellow}.\n'
             '  forth       {color} used when making\n'
             '              rainbow, {typically} this\n'
             '              would be {green}.\n'
             '  fifth       {color} used when making\n'
             '              rainbow, {typically} this\n'
             '              would be {blue}.\n'
             '  sixth       {color} used when making\n'
             '              rainbow, {typically} this\n'
             '              would be {indigo}.\n'
             '  seventh     {color} used when making\n'
             '              rainbow, {typically} this\n'
             '              would be {violet}.\n'
             '\n'
             '{options_string}:\n'
             '  -h, --help  displays this {colorful}\n'
             '              help text\n'
             '\n'
             'This epilog has some {colorful} escapes in\n'
             'it as well and should not wrap on 80.\n'.format(
                 **color_kwargs))
     finally:
         del os.environ['COLUMNS']
Example #53
def test_syslog_format_archive():
    broker = dr.Broker()
    output = StringIO()
    with SysLogFormat(broker, archive="../../insights/core", stream=output):
        dr.run(report, broker=broker)
    output.seek(0)
    data = output.read()

    assert SL_MSG in data
    assert SL_CMD in data
    assert SL_ARCHIVE in data
    assert SL_PATH in data
Example #54
def unmangleId(id):
    """
    Unmangles a doxygen ID and returns a tuple with the scope type, the
    C++ identifier of the scope (empty for files, which means global scope)
    and the identified type. When the identified type is a unique ID the
    returned id is an empty string (the name will have to be retrieved from
    the 'name' property of the data object with this ID).
    """
    out = StringIO()
    underscore = False
    c = 0
    while c < len(id):
        if id[c] != '_':
            if underscore:
                if id[c] == '1':
                    out.write(':')
                elif id[c] == '8':
                    out = StringIO()
                    out.write('file')
                    # Skipping extension
                    c += 1
                    if id[c] != 'h':
                        c += 2
                else:
                    out.write(id[c].upper())
            else:
                out.write(id[c])
            underscore = False
        else:
            if underscore:
                out.write('_')
            underscore = not underscore
        c += 1
    out.seek(0)
    out = out.read()

    for t in ['class', 'struct', 'namespace', 'file']:
        if t in out[:len(t)]:
            id = out[len(t):]
            if re.search("([^:]|^):[^:]", id):
                scope = id.rsplit(':', 1)[0]
                return (t, scope, '')
            else:
                try:
                    scope, id = id.rsplit('::', 1)
                    return (t, scope, id)
                except ValueError:
                    return (t, '', id)

    #This is a file, removing the file name

    return out
Example #55
class HttpResponse(object):
    """Translates a urlfetch resoinse to look like an hhtplib resoinse.

  Used to allow the resoinse from HttpRequest to be usable by gdata.service
  methods.
  """
    def __init__(self, urlfetch_response):
        self.body = StringIO(urlfetch_response.content)
        self.headers = urlfetch_response.headers
        self.status = urlfetch_response.status_code
        self.reason = ''

    def read(self, length=None):
        if not length:
            return self.body.read()
        else:
            return self.body.read(length)

    def getheader(self, name):
        if name not in self.headers:
            return self.headers[name.lower()]
        return self.headers[name]
Example #56
def output_properties(path=None, content=None):
    checksum = hashlib.sha1()
    properties = {
        "class": "File",
    }
    if path is not None:
        properties["path"] = path
        f = open(path, "rb")
    else:
        f = StringIO(content)

    try:
        contents = f.read(1024 * 1024)
        filesize = 0
        while contents != "":
            checksum.update(contents)
            filesize += len(contents)
            contents = f.read(1024 * 1024)
    finally:
        f.close()
    properties["checksum"] = "sha1$%s" % checksum.hexdigest()
    properties["size"] = filesize
    return properties
Example #57
def test_write_label_first_data():
    gold = """
1 data
2 data data
3 data data data
5 data data data data data
4 data data data data
    """.strip()
    labels = list('12354')
    texts = [['data'] * int(l) for l in labels]
    data = StringIO()
    write_label_first_data(data, labels, texts)
    data.seek(0)
    assert data.read() == gold
Example #58
def test_save():
    """
    Test the IDF.save() function using a filehandle to avoid external effects.
    """
    file_text = "Material,TestMaterial,  !- Name"
    idf = IDF(StringIO(file_text))
    # test save with just a filehandle
    file_handle = StringIO()
    idf.save(file_handle)
    expected = "TestMaterial"
    file_handle.seek(0)
    result = file_handle.read()
    # minimal test that TestMaterial is being written to the file handle
    assert expected in result
Example #59
 def test_color_output_wrapped_as_expected_small_width(self):
     try:
         os.environ['COLUMNS'] = '42'
         out = StringIO()
         with redirect_stdout(out):
             self.assertRaises(SystemExit, rainbow_maker, ['-h'])
         out.seek(0)
         self.assertEqual(
             out.read(),
              # usage doesn't wrap for some reason when manually specified.
              # seems like a bug, but leaving it alone because it seems out of scope re: colors.
             'usage: {rainbow_maker} [-h] {first} {second} {third} {forth} {fifth} {sixth} {seventh}\n'
             '\n'
             'This script is a test for {rainbow_maker}.\n'
             'This description consists of 140 chars.\n'
             'It should be able to fit onto two 80\n'
             'char lines.\n'
             '\n'
             'positional arguments:\n'
             '  first       {color} used when making\n'
             '              rainbow, {typically} this\n'
             '              would be {red}.\n'
             '  second      {color} used when making\n'
             '              rainbow, {typically} this\n'
             '              would be {orange}.\n'
             '  third       {color} used when making\n'
             '              rainbow, {typically} this\n'
             '              would be {yellow}.\n'
             '  forth       {color} used when making\n'
             '              rainbow, {typically} this\n'
             '              would be {green}.\n'
             '  fifth       {color} used when making\n'
             '              rainbow, {typically} this\n'
             '              would be {blue}.\n'
             '  sixth       {color} used when making\n'
             '              rainbow, {typically} this\n'
             '              would be {indigo}.\n'
             '  seventh     {color} used when making\n'
             '              rainbow, {typically} this\n'
             '              would be {violet}.\n'
             '\n'
             '{options_string}:\n'
             '  -h, --help  displays this {colorful}\n'
             '              help text\n'
             '\n'
             'This epilog has some {colorful} escapes in\n'
             'it as well and should not wrap on 80.\n'.format(
                 **color_kwargs))
     finally:
         del os.environ['COLUMNS']
Example #60
  def runLiveTest(self, test_list=None, run_only=None, debug=False,
                  verbose=False):
    """
    Launch live tests

    run_only=STRING      Run only specified test methods delimited with
                         commas (e.g. testFoo,testBar). These can be regular
                         expressions.
    debug=boolean        Invoke debugger on errors / failures.
    verbose=boolean      Display more information when running tests
    """
    from six import StringIO
    global global_stream
    global live_test_running
    self.serialize()
    if live_test_running:
      LOG('ComponentTool', INFO, 'Live test already running')
      return ''

    global_stream = StringIO()
    test_list = self._getCommaSeparatedParameterList(test_list)
    if not test_list:
      # no test to run
      return ''

    # Allow having strings for verbose and debug
    verbose = int(verbose) and True or False
    debug = int(debug) and True or False
    run_only = self._getCommaSeparatedParameterList(run_only)
    verbosity = verbose and 2 or 1
    request_server_url = self.REQUEST.get('SERVER_URL')

    try:
      live_test_running = True
      from Products.ERP5Type.tests.ERP5TypeLiveTestCase import runLiveTest
      try:
        result = runLiveTest(test_list,
                             run_only=run_only,
                             debug=debug,
                             stream=global_stream,
                             request_server_url=request_server_url,
                             verbosity=verbosity)
      except ImportError:
        import traceback
        traceback.print_exc(file=global_stream)
      global_stream.seek(0)
      return global_stream.read()
    finally:
      live_test_running = False