Example #1
0
class DjangoCommandsTesting(TestCase):
    """Exercise the ``models_stat`` management command's output streams."""

    def setUp(self):
        # In-memory buffers capturing the command's stderr and stdout.
        self.errpart = StringIO()
        self.outpart = StringIO()

    def test_command(self):
        call_command("models_stat", stderr=self.errpart, stdout=self.outpart)
        for stream in (self.errpart, self.outpart):
            stream.seek(0)
        error_list = self.errpart.readlines()
        out_list = self.outpart.readlines()
        for model in get_models():
            count = model._default_manager.count()
            err_line = "error: [%s] - %s objects" % (model.__name__, count)
            out_line = "[%s] - %s objects" % (model.__name__, count)
            # Accept each reported line with or without a trailing newline.
            self.assertTrue(err_line + "\n" in error_list or
                            err_line in error_list)
            self.assertTrue(out_line + "\n" in out_list or
                            out_line in out_list)
        # Exactly one line per model on each stream.
        self.assertEqual(len(get_models()), len(error_list))
        self.assertEqual(len(get_models()), len(out_list))

    def tearDown(self):
        # Drop the buffers so they can be collected between tests.
        self.errpart = None
        self.outpart = None
Example #2
0
    def list_sizes(self, path):
        """Return a dict mapping version number -> "<size> <unit>" string.

        Runs ``rdiff-backup --list-increment-sizes`` against the snapshot
        directory of *path* and parses its parsable output.
        """
        snap_dir = self.snapshot_snap_path(path)
        cmd = [
            'rdiff-backup',
            '--parsable-output',
            '--list-increment-sizes',
            snap_dir,
        ]
        proc = Popen(cmd, stdout=PIPE, stderr=PIPE)
        raw_output = proc.communicate()[0]

        listing = StringIO(raw_output)
        all_lines = listing.readlines()
        # Fewer than three lines means no increments were listed.
        if len(all_lines) < 3:
            return {}

        # The first two lines are headers; the rest describe increments.
        body = all_lines[2:]

        sizes = {}
        # Number increments so that the last listed one becomes version 1.
        for offset, entry in enumerate(reversed(body)):
            fields = entry.split()
            sizes[offset + 1] = "%s %s" % (fields[5], fields[6])

        return sizes
Example #3
0
class ListOrderTest(TestCase):
    """Verify ordering of the ``cli charmodel list`` sub-command."""

    def setUp(self):
        # Buffer capturing the command's stdout.
        self.stdout = StringIO()

    def _make_objects(self):
        # Create records deliberately out of order.
        for value in ('FOO2', 'FOO1', 'FOO3'):
            models.CharModel.objects.create(field=value)

    def _output_lines(self):
        # Rewind and drop the header row of the listing.
        self.stdout.seek(0)
        return self.stdout.readlines()[1:]

    def test_simple_order(self):
        self._make_objects()
        call_command('cli', 'charmodel', 'list', order=['field'], stdout=self.stdout)
        lines = self._output_lines()
        for row, expected in enumerate(('FOO1', 'FOO2', 'FOO3')):
            self.assertIn(expected, lines[row])

    def test_revert_order(self):
        self._make_objects()
        call_command('cli', 'charmodel', 'list', order=['~field'], stdout=self.stdout)
        lines = self._output_lines()
        for row, expected in enumerate(('FOO3', 'FOO2', 'FOO1')):
            self.assertIn(expected, lines[row])
Example #4
0
def diffdump(buf1,
             buf2,
             header1='Buffer 1',
             header2='Buffer 2',
             startaddr=None,
             fd=None):
    """Print a line-by-line diff of the dump() renderings of two buffers.

    buf1, buf2 -- the buffers to compare.
    header1, header2 -- captions forwarded to dump().
    startaddr -- starting address forwarded to dump().
    fd -- file-like object to write the diff to; defaults to sys.stdout.
    """
    from difflib import Differ
    from StringIO import StringIO

    f1 = StringIO()
    f2 = StringIO()

    # Render each buffer into an in-memory file via dump().
    dump(buf1, header=header1, startaddr=startaddr, fd=f1)
    dump(buf2, header=header2, startaddr=startaddr, fd=f2)

    f1.seek(0)  # reposition to file start
    f2.seek(0)

    result = list(Differ().compare(f1.readlines(), f2.readlines()))

    # Bug fix: compare against None with 'is', not '=='.
    if fd is None:
        sys.stdout.writelines(result)
    else:
        fd.writelines(result)
Example #5
0
class DjangoCommandsTesting(TestCase):
    """Tests for the ``models_stat`` management command."""

    def setUp(self):
        self.errpart = StringIO()
        self.outpart = StringIO()

    def test_command(self):
        call_command("models_stat", stderr=self.errpart, stdout=self.outpart)
        self.errpart.seek(0)
        self.outpart.seek(0)
        error_list = self.errpart.readlines()
        out_list = self.outpart.readlines()
        for model in get_models():
            n_objects = model._default_manager.count()
            # The command may or may not newline-terminate each record;
            # accept either form.
            err_candidates = [
                "error: [%s] - %s objects\n" % (model.__name__, n_objects),
                "error: [%s] - %s objects" % (model.__name__, n_objects),
            ]
            out_candidates = [
                "[%s] - %s objects\n" % (model.__name__, n_objects),
                "[%s] - %s objects" % (model.__name__, n_objects),
            ]
            self.assertTrue(any(c in error_list for c in err_candidates))
            self.assertTrue(any(c in out_list for c in out_candidates))
        self.assertEqual(len(get_models()), len(error_list))
        self.assertEqual(len(get_models()), len(out_list))

    def tearDown(self):
        self.errpart = None
        self.outpart = None
Example #6
0
def test_custom_exceptions():
    """Each custom exception raised inside a Console writes to stderr
    only: stdout stays empty and stderr gets one line per exception."""
    curr_stdout = sys.stdout
    curr_stderr = sys.stderr
    try:
        out = StringIO()
        err = StringIO()
        sys.stdout = out
        sys.stderr = err
        excs = [
            NotFound, InvalidParameter, InvalidState, RepositoryError,
            ScriptError
        ]
        for Exc in excs:
            try:
                with Console(exit_on_error=False):
                    raise Exc("urgh")
            except Exception:
                # The exception propagates out of the Console context;
                # swallow it so the loop continues.  (Was a bare
                # ``except:``, which also hid KeyboardInterrupt/SystemExit.)
                pass
        out.seek(0)
        err.seek(0)
        assert len(out.readlines()) == 0
        assert len(err.readlines()) == len(excs)
    finally:
        # Always restore the real streams, even on assertion failure.
        sys.stdout = curr_stdout
        sys.stderr = curr_stderr
Example #7
0
    def list_sizes(self, path):
        """Return {version: "<size> <unit>"} for each increment of *path*.

        Parses ``rdiff-backup --list-increment-sizes`` output for the
        snapshot directory of *path*.
        """
        snap_dir = self.snapshot_snap_path(path)
        process = Popen(
            ['rdiff-backup', '--parsable-output', '--list-increment-sizes',
             snap_dir],
            stdout=PIPE, stderr=PIPE)
        output = process.communicate()[0]

        listing = StringIO(output)
        if len(listing.readlines()) < 3:
            # Less than three lines of output -> nothing to report.
            return {}
        listing.seek(0)

        # Discard the two header lines.
        next(listing)
        next(listing)

        sizes = {}
        # Enumerate in reverse so the last listed increment is version 1.
        for idx, row in enumerate(reversed(listing.readlines())):
            parts = row.split()
            sizes[idx + 1] = "%s %s" % (parts[5], parts[6])

        return sizes
Example #8
0
class ListOrderTest(TestCase):
    """Ordering behaviour of ``cli charmodel list``."""

    def setUp(self):
        self.stdout = StringIO()

    def tearDown(self):
        self.stdout.close()

    def _run_list(self, ordering):
        # Populate out-of-order fixtures, run the command, and return the
        # output lines minus the header row.
        for value in ('FOO2', 'FOO1', 'FOO3'):
            models.CharModel.objects.create(field=value)
        call_command('cli', 'charmodel', 'list', order=[ordering],
                     stdout=self.stdout)
        self.stdout.seek(0)
        return self.stdout.readlines()[1:]

    def test_simple_order(self):
        lines = self._run_list('field')
        self.assertIn('FOO1', lines[0])
        self.assertIn('FOO2', lines[1])
        self.assertIn('FOO3', lines[2])

    def test_revert_order(self):
        lines = self._run_list('~field')
        self.assertIn('FOO3', lines[0])
        self.assertIn('FOO2', lines[1])
        self.assertIn('FOO1', lines[2])
Example #9
0
 def test_all_models_command(self):
     """The ``all_models`` command mirrors each stdout line on stderr
     with an ``Error: `` prefix."""
     stdout = StringIO()
     stderr = StringIO()
     call_command('all_models', stderr=stderr, stdout=stdout)
     # Bug fix: rewind both buffers before reading; without seek(0)
     # readlines() returns nothing and the loop below never runs.
     stdout.seek(0)
     stderr.seek(0)
     # check if stderr output is duplicated
     for l_out, l_err in zip(stdout.readlines(), stderr.readlines()):
         self.assertEqual(l_err, 'Error: %s' % l_out)
     self.check_all_models_output(stdout.getvalue())
Example #10
0
class ParseInfo(object):
    """Accumulates the raw bytes of one HTTP request/response exchange
    and parses them into headers and decoded bodies.

    Python 2 code: relies on httplib.HTTPMessage, byte strings, chardet
    and ``except Exception, e`` syntax.
    """

    def __init__(self):
        # Raw buffers filled incrementally via write().
        self.request = StringIO()
        self.response = StringIO()
        self.request_header = {}
        self.response_header = {}

    def write(self, method, s):
        """Append raw data to the request or response buffer.

        method -- 'request' or 'response'; any other value is ignored.
        """
        if method == 'request':
            self.request.write(s)
        elif method == 'response':
            self.response.write(s)

    def parse(self):
        """Parse the accumulated buffers.

        Sets request_commend, request_header, request_body,
        response_status, response_header, response_body, raw_request and
        raw_response, then deletes the raw buffers.
        """
        #request header
        self.request.seek(0)
        self.request_commend = self.request.readline()
        self.request_header = httplib.HTTPMessage(self.request)
        #request body
        self.request_body = ''.join(self.request.readlines())
        # Guess the body encoding; decode leniently when one is detected.
        request_body_encode = chardet.detect(self.request_body).get('encoding')
        if request_body_encode:
            self.request_body = self.request_body.decode(request_body_encode, 'replace')

        #response header
        self.response.seek(0)
        self.response_status = self.response.readline()
        self.response_header = httplib.HTTPMessage(self.response)
        #response body
        chunked = self.response_header.get('Transfer-Encoding', '')
        if chunked == 'chunked':
            # De-chunk: each chunk is "<hex size>\r\n<data>\r\n",
            # terminated by a zero-size chunk.
            content = []
            chunk_size = int(self.response.readline()[:-2], 16)
            while chunk_size > 0:
                content.append(self.response.read(chunk_size))
                self.response.read(2)  # skip the CRLF after the chunk data
                chunk_size = int(self.response.readline()[:-2], 16)
            self.response_body = ''.join(content)
        else:
            self.response_body = ''.join(self.response.readlines())
        try:
            # Transparently gunzip compressed bodies.
            if self.response_header.get('Content-Encoding') == 'gzip':
                self.response_body = gzip.GzipFile(fileobj=StringIO(self.response_body)).read()
        except Exception, e:
            # Best effort: keep the raw (possibly compressed) body on failure.
            pass
        response_body_encode = chardet.detect(self.response_body).get('encoding')
        if response_body_encode:
            # gb18030 is a superset of gb2312 and decodes more text.
            if response_body_encode.lower() == 'gb2312':
                response_body_encode = 'gb18030'
            self.response_body = self.response_body.decode(response_body_encode, 'replace')
        self.raw_request = self.request.getvalue()
        self.raw_response = self.response.getvalue()
        # Free the buffers; only the parsed attributes remain.
        del self.request
        del self.response
Example #11
0
class CommandLine(api.CommandLine):
    """Stub command line that replays canned stdout/stderr text."""

    def __init__(self, returncode=0, stdout="", stderr=""):
        # Wrap the canned text in file-like objects, mimicking a process.
        self.returncode = returncode
        self.stdout = StringIO(stdout)
        self.stderr = StringIO(stderr)

    def __call__(self, executable, args, input=None, cwd=None):
        # Ignore invocation details; the instance is its own "process".
        return self

    def execute(self):
        # Pair stdout lines with stderr lines via map().
        return map(lambda out_line, err_line: (out_line, err_line),
                   self.stdout.readlines(),
                   self.stderr.readlines())
Example #12
0
class CommandLine(api.CommandLine):
    """Fake CommandLine whose execute() yields pre-seeded output lines."""

    def __init__(self, returncode=0, stdout='', stderr=''):
        self.returncode = returncode
        # File-like wrappers over the canned stream contents.
        self.stdout = StringIO(stdout)
        self.stderr = StringIO(stderr)

    def __call__(self, executable, args, input=None, cwd=None):
        # Calling the object "spawns" it: return self as the handle.
        return self

    def execute(self):
        # Combine the two line streams pairwise via map().
        pair = lambda o, e: (o, e)
        return map(pair, self.stdout.readlines(), self.stderr.readlines())
Example #13
0
class CommandLine(api.CommandLine):
    """Test double returning fixed stdout/stderr through api._combine."""

    def __init__(self, returncode=0, stdout='', stderr=''):
        self.returncode = returncode
        self.stdout = StringIO(stdout)
        self.stderr = StringIO(stderr)

    def __call__(self, executable, args, input=None, cwd=None):
        # The instance doubles as the spawned-process handle.
        return self

    def execute(self):
        # Delegate pairing of the two streams to the api helper.
        out_lines = self.stdout.readlines()
        err_lines = self.stderr.readlines()
        return api._combine(out_lines, err_lines)
Example #14
0
def add_device_details(adb_devices, resolved):
    """Fill in OS version and product model for each adb device.

    adb_devices -- iterable of adb device ids.
    resolved -- dict keyed by device id; each entry receives
    'osVersion' and 'productModel' ("Error" when a query fails).
    """
    for deviceId in adb_devices:
        try:
            # Read the first output line directly; the StringIO
            # round-trip in the original added nothing.
            osVersion = subprocess.check_output(
                [adb_path, '-s', deviceId, 'shell', 'getprop',
                 'ro.build.version.release']).splitlines()[0].strip()
            model = subprocess.check_output(
                [adb_path, '-s', deviceId, 'shell', 'getprop',
                 'ro.product.model']).splitlines()[0].strip()
            resolved[deviceId]["osVersion"] = osVersion
            resolved[deviceId]["productModel"] = model
        except Exception:
            # Was a bare ``except:``; narrowed so Ctrl-C still interrupts.
            resolved[deviceId]["osVersion"] = "Error"
            resolved[deviceId]["productModel"] = "Error"
Example #15
0
 def test_all_models_command(self):
     """``models_info`` prints one line per content type and mirrors
     stdout on stderr with an ``Error: `` prefix."""
     stdout = StringIO()
     stderr = StringIO()
     call_command('models_info', stderr=stderr, stdout=stdout)
     # Bug fix: rewind before reading; without seek(0) readlines()
     # yields nothing and the duplication check below never runs.
     stdout.seek(0)
     stderr.seek(0)
     for out, err in zip(stdout.readlines(), stderr.readlines()):
         self.assertEqual(err, 'Error: %s' % out)
     for model in ContentType.objects.all():
         self.assertIn('%s: %s - %s' % (
             model.app_label,
             model.model,
             model.model_class().objects.count()),
             stdout.getvalue()
         )
Example #16
0
    def test_array(self):
        """savetxt serialises 2-D float and int arrays row by row."""
        fmt = "%.18e"
        data = np.array([[1, 2], [3, 4]], float)
        buf = StringIO()
        np.savetxt(buf, data, fmt=fmt)
        buf.seek(0)
        expected = asbytes_nested([
            (fmt + " " + fmt + "\n") % (1, 2),
            (fmt + " " + fmt + "\n") % (3, 4),
        ])
        assert_equal(buf.readlines(), expected)

        data = np.array([[1, 2], [3, 4]], int)
        buf = StringIO()
        np.savetxt(buf, data, fmt="%d")
        buf.seek(0)
        assert_equal(buf.readlines(), asbytes_nested(["1 2\n", "3 4\n"]))
Example #17
0
    def test_jetpack_core(self):
        """Scan the SDK's api-utils package and spot-check its manifest."""
        # this has a side-effect of asserting that all the SDK's api-utils
        # modules are clean.
        jp_core = "packages/api-utils/lib"
        assert os.path.isdir(jp_core)  # we expect to be run from the SDK top
        stderr = StringIO()
        # Any diagnostics from the scan are written to this stderr buffer.
        manifest, has_problems = scan_package("prefix-", "resource:foo/", "api-utils", "lib", jp_core, stderr)
        stderr.seek(0)
        err = stderr.readlines()
        # Expect no diagnostics; include them in the failure message if any.
        self.failUnlessEqual(err, [], "".join(err))
        self.failUnlessEqual(has_problems, False)
        update_manifest_with_fileinfo(["api-utils"], "api-utils", manifest)

        # look at a few samples from the manifest: this depends upon the
        # behavior of other files in the SDK, so when those files change
        # (specifically when they move or add dependencies), this test must
        # be updated
        self.failUnless("resource:foo/tab-browser.js" in manifest, manifest.keys())
        tb = manifest["resource:foo/tab-browser.js"]
        self.failUnlessEqual(tb.chrome, True)
        self.failUnlessEqual(tb.name, "tab-browser")
        self.failUnlessEqual(tb.packageName, "api-utils")
        self.failUnless("window-utils" in tb.requires, tb.requires.values())
        self.failUnlessEqual(tb.requires["window-utils"].url, "resource:foo/window-utils.js")
        self.failUnlessEqual(tb.sectionName, "lib")
        self.failUnlessEqual(tb.zipname, "resources/prefix-api-utils-lib/tab-browser.js")
        h = tb.hash
        # Only the format is checked: 64 lowercase hex characters.
        self.failUnless(re.search(r"^[0-9a-f]{64}$", h), h)
        # don't assert the actual value, since that will change each time
        # page-mod.js changes

        self.failUnless("resource:foo/api-utils.js" in manifest, manifest.keys())
Example #18
0
def update():
    """Update the remote Django project: git pull, sync pip requirements
    if they changed, run migrations when needed, collect static files
    and touch the uwsgi ini to restart the app."""
    server = servers[env.host_string]
    # check / update requirements
    run('echo "* git update"')
    git_output = run('cd {path} && git pull'.format(**server))
    print(git_output)
    new_requirements = StringIO()
    current_requirements = run('{activate} && pip freeze'.format(**server))
    current_requirements = [
        r.strip() for r in current_requirements.split("\n")
    ]
    get("{path}/requirements.txt".format(**server), new_requirements)
    new_requirements.seek(0)
    # Drop comment lines (even indented ones) and strip the rest.
    new_requirements = [
        r.strip() for r in new_requirements.readlines()
        if not r.lstrip().startswith("#")
    ]

    if env_needs_update(current_requirements, new_requirements):
        run('''{activate} && pip install -q -r \
               {path}/requirements.txt'''.format(**server))
        run('''{activate} && ./manage.py syncdb && \
               ./manage.py migrate'''.format(**server))
    else:
        # Bug fix: git_output is a string, so iterating it yielded single
        # characters and '/migrations/' could never match a character.
        # Search the whole pull output instead.
        if "/migrations/" in git_output:
            run('{activate} && ./manage.py migrate'.format(**server))

    # collect static & restart
    run('{activate} && ./manage.py collectstatic --noinput'.format(**server))
    run('touch {path}/project/uwsgi.ini'.format(**server))
Example #19
0
def _test():
    """Exercise parse_input_file() and prms2lines() on a sample menu file.

    Python 2 module: uses the print statement and the StringIO package.
    """
    from StringIO import StringIO
    # Sample input; '!' introduces trailing comments/units on each line.
    ifile = StringIO("""\
set gridfile = somefile.grid
set add boundary indicator nodes = n=1 b4=[0,1]x[-2,-2]
set time step = 0.5 h ! [s]
set heat heatflux 1 = 0.01  ! [K/m]
set heat heatflux 2 = 0.02  ! [K/m]
set time points for plot = [0, 1.5, 3, 10, 100]
sub heat LinEqAdmFE  ! submenu
sub heat Matrix_prm
set heat matrix type = MatSparse
set max no of iterations = 120
ok  ! return back to previous level
ok
ok
""")
    parsed_lines, dummy = parse_input_file(ifile.readlines())
    import pprint
    print 'parsed_lines:\n', pprint.pformat(parsed_lines)
    print 'output_lines:\n', pprint.pformat(dummy)
    # set some new values (with units :-)
    new_values = {}
    new_values['heat heatflux 1'] = '1 K/cm'
    new_values['heat heatflux 2'] = '4.5 K/cm'
    new_values['gridfile'] = 'my.grid'
    new_values['max no of iterations'] = 30
    # Re-emit the input file with the overridden values.
    lines = prms2lines(new_values, parsed_lines)
    print '\n\nnew input file:\n', ''.join(lines)
Example #20
0
 def test_format(self):
     """'bzr send/bundle --format=X' selects the matching merge-directive
     class / bundle version; an unknown format value is rejected."""
     self.make_trees()
     # format=4 -> MergeDirective2
     s = StringIO(self.run_bzr('send -f branch -o- --format=4')[0])
     md = merge_directive.MergeDirective.from_lines(s.readlines())
     self.assertIs(merge_directive.MergeDirective2, md.__class__)
     # format=0.9 -> a v0.9 revision bundle payload
     s = StringIO(self.run_bzr('send -f branch -o- --format=0.9')[0])
     md = merge_directive.MergeDirective.from_lines(s.readlines())
     self.assertContainsRe(md.get_raw_bundle().splitlines()[0],
         '# Bazaar revision bundle v0.9')
     # 'bundle' with format=0.9 behaves the same and yields the base class.
     s = StringIO(self.run_bzr('bundle -f branch -o- --format=0.9')[0])
     md = merge_directive.MergeDirective.from_lines(s.readlines())
     self.assertContainsRe(md.get_raw_bundle().splitlines()[0],
         '# Bazaar revision bundle v0.9')
     self.assertIs(merge_directive.MergeDirective, md.__class__)
     # Unsupported format values raise an option error.
     self.run_bzr_error(['Bad value .* for option .format.'],
                         'send -f branch -o- --format=0.999')[0]
Example #21
0
    def test_toc_with_code_block(self):
        """Headers inside fenced code blocks must not enter the TOC."""
        source = StringIO(
            '!TOC\n'
            '# Header 1\n'
            '```\n'
            'code block\n'
            '``` \n'
            '# Header 2\n')

        result = """1\.  [Header 1](#header1)
        2\.  [Header 2](#header2)
        <a name="header1"></a>

        # 1\. Header 1
        ```
        code block
        ```
        <a name="header2"></a>
        
        # 2\. Header 2"""

        rendered = StringIO()
        MarkdownPP(input=source, modules=['tableofcontents'], output=rendered)
        rendered.seek(0)
        # Compare stripped lines so indentation differences are ignored.
        actual_lines = [line.strip() for line in rendered.readlines()]
        expected_lines = [line.strip() for line in result.split('\n')]
        self.assertEqual(actual_lines, expected_lines)
Example #22
0
def test_remdups():
    """End-to-end checks of remdups() in its various calling modes.

    Relies on a fixture directory tree (b/a, c/a, u/v/a, ...) and a
    hashes.txt file prepared by the surrounding test setup.
    """
    # Hash-only scan of a single open file.
    remdups(open('b/a'), hashonly=True)

    outfile = StringIO()
    remdups(open('b/a'), outfile, hashonly=True)
    outfile.seek(0)
    # The generated hash line must mention the scanned path.
    assert 'b/a' in outfile.read()

    remdups(hashonly=True, exclude_dir=['k'])
    assert len(remdups()) > 0

    # Locate duplicates by file name ...
    wherename = remdups(where_name='a')
    assert set(wherename) == set(['./c/a', './u/v/a', ''])

    # ... and by the content of a specific file.
    wherefile = remdups(where_file='c/a')
    assert set(wherefile) == set(['./c/a', './u/v/a'])

    outfile = StringIO()
    with open('hashes.txt', 'r') as _file:
        remdups(infile=_file, outfile=outfile, keep_in=[
                'a'], keep_out=['y'], comment_out=['u'])
    outfile.seek(0)
    # Keep non-empty, non-separator lines of the generated script.
    res = [ln.strip()
           for ln in outfile.readlines() if ln.strip() and '####' not in ln]
    assert '#rm -f "./c/x"' in res
Example #23
0
    def test_array(self):
        """np.savetxt writes one formatted row per matrix row."""
        fmt = "%.18e"
        float_mat = np.array([[1, 2], [3, 4]], float)
        out = StringIO()
        np.savetxt(out, float_mat, fmt=fmt)
        out.seek(0)
        row_fmt = fmt + ' ' + fmt + '\n'
        assert_equal(out.readlines(),
                     asbytes_nested([row_fmt % (1, 2), row_fmt % (3, 4)]))

        int_mat = np.array([[1, 2], [3, 4]], int)
        out = StringIO()
        np.savetxt(out, int_mat, fmt='%d')
        out.seek(0)
        assert_equal(out.readlines(), asbytes_nested(['1 2\n', '3 4\n']))
Example #24
0
def main(file_path):
    """Validate every proof line in the zipped proof file and return a
    status dict ('OK' with a hashrate, or 'ERR' with a reason)."""
    with ZipFile(file_path) as archive:
        member = os.path.basename(file_path).rsplit('.', 1)[0] + '.txt'
        stream = StringIO(archive.read(member))
        # The first line is a JSON header carrying the share difficulty.
        header = json.loads(stream.readline())
        proof_count = 0
        for raw_line in stream.readlines():
            block_hash, coinbase, merkle_branch = json.loads(raw_line.replace('\n', ''))
            if not validate_origin(block_hash, coinbase, merkle_branch):
                return {
                    'status': 'ERR',
                    'err':
                    'wrong origin (missing /slush/ in coinbase or wrong merkle root)',
                    'hashrate': None,
                }
            if not validate(block_hash, header['difficulty']):
                return {
                    'status': 'ERR',
                    'err': 'too low difficulty',
                    'hashrate': None,
                }
            proof_count += 1
    return {
        'status': 'OK',
        'err': None,
        'hashrate': hashrate_from_proof(proof_count, header['difficulty'])
    }
    def test_api_key_should_be_revoked(self):
        """revoke_api_keys deactivates the key named in the csv and
        reports every entry it processed or skipped."""
        user = user_factory(id=67890)
        # The test csv contains an entry with this user and the "right" secret.
        right_secret = (
            'ab2228544a061cb2af21af97f637cc58e1f8340196f1ddc3de329b5974694b26')
        apikey = APIKey.objects.create(
            key='user:{}:{}'.format(user.pk, '333'), secret=right_secret,
            user=user, is_active=True)
        captured = StringIO()
        call_command('revoke_api_keys', self.csv_path, stdout=captured)
        captured.seek(0)
        lines = captured.readlines()
        expected = [
            'Ignoring APIKey user:12345:666, it does not exist.\n',
            'Revoked APIKey user:67890:333.\n',
            'Ignoring APIKey garbage, it does not exist.\n',
            'Done. Revoked 1 keys out of 3 entries.\n',
        ]
        for idx, message in enumerate(expected):
            assert lines[idx] == message

        # API key is now inactive, secret hasn't changed, the other user api
        # key is still there, there are no additional APIKeys.
        apikey.reload()
        assert apikey.secret == right_secret
        assert apikey.is_active is None
        assert APIKey.objects.filter(user=user).count() == 2
        assert APIKey.objects.filter(user=user, is_active=True).count() == 1
Example #26
0
def _parse_text(text):
    """Split *text* into six expected non-blank lines, map them onto the
    column names, and hand the result to _parse_data().

    Returns {} when the line count is wrong or OK_UNITpro is blank.
    """
    _columns = [
        'OK_UNITpro', 'IDpro', 'APP_NAMEpro', 'LOCATIONpro', 'CB_DATEpro',
        'APPROVE_DATEpro'
    ]

    # Keep only lines that contain something besides whitespace.
    non_blank = [ln for ln in StringIO(text).readlines() if ln.strip()]
    if len(non_blank) != 6:
        cfg.logger.error('lines != 6: lines: %s', non_blank)
        return {}

    cfg.logger.debug('lines: %s', non_blank)

    # Positional mapping: line i -> column name i.
    data = {
        name: non_blank[pos].strip()
        for (pos, name) in enumerate(_columns) if name
    }
    if not data.get('OK_UNITpro', ''):
        return {}

    return _parse_data(data)
Example #27
0
    def update_postfix_master_cf(self):
        """Install (or replace) this instance's mailer service entry in
        the remote /etc/postfix/master.cf via fabric get/put."""
        master_cf_orig = StringIO()
        get('/etc/postfix/master.cf', master_cf_orig)
        master_cf_orig.seek(0)
        lines = [l.rstrip() for l in master_cf_orig.readlines()]
        # Look for an existing '<name>_mailer' entry; for/else sets
        # found=False only when the loop completes without a break.
        for n, l in enumerate(lines):
            if l.startswith('{name}_mailer'.format(name=self.settings.name)):
                found = True
                break
        else:
            found = False

        if found:
            # Remove the previous 3-line entry before appending a fresh one.
            lines = lines[0:n] + lines[n+3:]
        lines.extend([
                '{name}_mailer  unix  -       n       n       -       -       pipe'.format(name=self.settings.name),
                '  flags=FR user={user} argv={instance_code_dir}/bin/mailpost http://{host_name}/got_mail {upload_dir}'.format(
                    user=self.user,
                    instance_code_dir=self.settings.instance_code_dir,
                    host_name=self.settings.host_name,
                    upload_dir=self.settings.upload_dir),
                '  ${nexthop} ${user}',
            ])
        # Write the reassembled file back with conventional permissions.
        master_cf_new = StringIO('\n'.join(lines) + '\n')
        put(master_cf_new, '/etc/postfix/master.cf', mode=0o644)
Example #28
0
    def test_array(self):
        """2-D arrays are serialised row by row with the given format."""
        matrix = np.array([[1, 2], [3, 4]], float)
        fmt = "%.18e"
        sink = StringIO()
        np.savetxt(sink, matrix, fmt=fmt)
        sink.seek(0)
        expected = asbytes_nested([(fmt + ' ' + fmt + '\n') % (1, 2),
                                   (fmt + ' ' + fmt + '\n') % (3, 4)])
        assert_equal(sink.readlines(), expected)

        matrix = np.array([[1, 2], [3, 4]], int)
        sink = StringIO()
        np.savetxt(sink, matrix, fmt='%d')
        sink.seek(0)
        assert_equal(sink.readlines(), asbytes_nested(['1 2\n', '3 4\n']))
Example #29
0
def use_c_preprocessor():
    """Run Globals.raw_code through the pcpp C preprocessor and return
    the cleaned, non-empty lines of the preprocessed source.

    NOTE(review): relies on module globals ``Globals``, ``Pre``,
    ``print2`` and ``filename`` defined elsewhere -- confirm
    ``filename`` is in scope, otherwise the loop below raises NameError.
    """
    # Strip #include directives so pcpp does not try to resolve headers.
    content = re.sub(r"#include.*", "", Globals.raw_code)

    import pcpp
    try:
        from StringIO import StringIO  # Python 2
    except Exception as e:
        from io import StringIO  # Python 3
    preproc = pcpp.Preprocessor()
    preproc.parse(content)
    output = StringIO()
    preproc.write(output)
    output.seek(0)
    content = output.readlines()
    content = list(content)
    code = []
    flag = True
    # Keep only lines between '#'-prefixed markers that mention our own
    # file; markers naming other files disable copying until the next
    # marker for this file.
    for line in content:
        if flag and not line.startswith('#'):
            code.append(line)
        elif line.startswith('#'):
            if filename in line:
                flag = True
            else:
                flag = False
    code = "\n".join(code)
    code = Pre.process(code)
    code = code.split('\n')
    code = [line.strip() for line in code]
    # Drop blank lines left over after stripping.
    content = []
    for line in code:
        if len(line) > 0:
            content.append(line)
    print2("preprocessed code: ", content)
    return content
Example #30
0
 def test_1D(self):
     """A 1-D int array is written one value per line."""
     vec = np.array([1, 2, 3, 4], int)
     buf = StringIO()
     np.savetxt(buf, vec, fmt='%d')
     buf.seek(0)
     assert_equal(buf.readlines(),
                  asbytes_nested(['1\n', '2\n', '3\n', '4\n']))
Example #31
0
    def test_subchapoverview(self):
        """Render the sub-chapter overview as HTML, then re-render it as
        CSV, spot-checking a few cells of each."""
        auth.login_user(db.auth_user(11))
        session.auth = auth
        request.vars.tablekind = 'sccount'

        res = subchapoverview()
        self.assertIsNotNone(res)
        soup = BeautifulSoup(res['summary'])
        thlist = soup.select('th')
        # Spot-check one column header and two data cells of the table.
        self.assertEqual(thlist[11].text, 'user_1671')
        rl = soup.select('tr')
        cl = rl[10].select('td')
        self.assertEqual(cl[5].text, '4.0')
        self.assertEqual(cl[17].text, '6.0')
        # Re-run the controller in CSV-export mode.
        request.vars.action = 'tocsv'
        request.vars.tablekind = 'dividmin'
        res = subchapoverview()
        csvf = StringIO(res)
        rows = csvf.readlines()
        # Fixture-dependent row/column positions in the CSV output.
        cols = rows[18].split(',')
        self.assertEqual(cols[0], 'Dictionaries')
        self.assertEqual(cols[2], 'ch12_dict11')
        self.assertEqual(cols[-1].strip(), '2017-10-26 22:25:38')
        cols = rows[122].split(',')
        self.assertEqual(cols[0], 'GeneralIntro')
        self.assertEqual(cols[3], '2017-08-30 22:29:30')
Example #32
0
    def test_api_key_should_be_revoked(self):
        """The revoke_api_keys command deactivates the matching key and
        logs one line per csv entry."""
        user = user_factory(id=67890)
        # The test csv contains an entry with this user and the "right" secret.
        right_secret = (
            'ab2228544a061cb2af21af97f637cc58e1f8340196f1ddc3de329b5974694b26')
        apikey = APIKey.objects.create(key='user:{}:{}'.format(user.pk, '333'),
                                       secret=right_secret,
                                       user=user,
                                       is_active=True)
        out = StringIO()
        call_command('revoke_api_keys', self.csv_path, stdout=out)
        out.seek(0)
        reported = out.readlines()
        assert reported[0] == (
            'Ignoring APIKey user:12345:666, it does not exist.\n')
        assert reported[1] == ('Revoked APIKey user:67890:333.\n')
        assert reported[2] == ('Ignoring APIKey garbage, it does not exist.\n')
        assert reported[3] == ('Done. Revoked 1 keys out of 3 entries.\n')

        # The key was deactivated but its secret is untouched; no other
        # keys were created or removed.
        apikey.reload()
        assert apikey.secret == right_secret
        assert not apikey.is_active
        assert APIKey.objects.filter(user=user).count() == 2
        assert APIKey.objects.filter(user=user, is_active=True).count() == 1
Example #33
0
def main(file_path):
    """Check every proof in the zipped share file; return a result dict
    ('OK' plus a hashrate, or 'ERR' plus a reason)."""
    err_wrong_origin = {
        'status': 'ERR',
        'err': 'wrong origin (missing /slush/ in coinbase or wrong merkle root)',
        'hashrate': None,
    }
    err_low_difficulty = {
        'status': 'ERR',
        'err': 'too low difficulty',
        'hashrate': None,
    }
    with ZipFile(file_path) as z:
        inner_name = os.path.basename(file_path).rsplit('.', 1)[0] + '.txt'
        reader = StringIO(z.read(inner_name))
        # First line: JSON header with the share difficulty.
        header = json.loads(reader.readline())
        difficulty = header['difficulty']
        count = 0
        for entry in reader.readlines():
            proof_hash, coinbase, merkle_branch = json.loads(entry.replace('\n', ''))
            if not validate_origin(proof_hash, coinbase, merkle_branch):
                return err_wrong_origin
            if not validate(proof_hash, difficulty):
                return err_low_difficulty
            count += 1
    return {
        'status': 'OK',
        'err': None,
        'hashrate': hashrate_from_proof(count, difficulty)
    }
Example #34
0
 def buildHelp(self):
     """Append the option parser's help text, minus its first two lines,
     to this object's docstring."""
     from StringIO import StringIO  # Python 2 only
     help_file = StringIO()
     self.parser.print_help(help_file)
     help_file.seek(0)
     # Skip the first two lines of the captured help output.
     lines = help_file.readlines()[2:]
     self.__doc__ += "".join(lines)
Example #35
0
def _parse_text(text):
    """Parse six expected lines out of *text* into a result dict and
    enrich it with a parsed time period and geo information.

    NOTE(review): when OK_UNITpro or CB_DATEpro is blank the dict is
    reset to {} but processing continues and the timestamp/geo keys are
    still added -- confirm an early ``return {}`` was not intended
    (compare the sibling _parse_text variant in this file).
    """
    cfg.logger.debug('text: %s', text)
    # Expected order of the six data lines.
    _columns = [
        'OK_UNITpro', 'IDpro', 'APP_NAMEpro', 'LOCATIONpro', 'CB_DATEpro',
        'APPROVE_DATEpro'
    ]

    f = StringIO(text)
    lines = f.readlines()
    # Blank lines are discarded before counting.
    lines = [line for line in lines if line.strip()]
    n_lines = len(lines)
    if n_lines != 6:
        cfg.logger.error('lines != 6: lines: %s', lines)
        return {}

    cfg.logger.debug('lines: %s', lines)

    # Positional mapping: line i -> column name i.
    result = {
        column: lines[idx].strip()
        for (idx, column) in enumerate(_columns) if column
    }
    if not result.get('OK_UNITpro', ''):
        result = {}
    if not result.get('CB_DATEpro', ''):
        result = {}

    cfg.logger.debug('result: %s', result)
    (start_timestamp, end_timestamp) = _parse_time_period(result)
    geo = _parse_geo(result)
    result['start_timestamp'] = start_timestamp
    result['end_timestamp'] = end_timestamp
    result['geo'] = geo

    return result
Example #36
0
def _test():
    """Smoke-test parse_input_file() and prms2lines() on an embedded
    sample input file.

    Python 2 module: uses the print statement and the StringIO package.
    """
    from StringIO import StringIO
    # Sample input; '!' introduces trailing comments/units on each line.
    ifile = StringIO("""\
set gridfile = somefile.grid
set add boundary indicator nodes = n=1 b4=[0,1]x[-2,-2]
set time step = 0.5 h ! [s]
set heat heatflux 1 = 0.01  ! [K/m]
set heat heatflux 2 = 0.02  ! [K/m]
set time points for plot = [0, 1.5, 3, 10, 100]
sub heat LinEqAdmFE  ! submenu
sub heat Matrix_prm
set heat matrix type = MatSparse
set max no of iterations = 120
ok  ! return back to previous level
ok
ok
""")
    parsed_lines, dummy = parse_input_file(ifile.readlines())
    import pprint
    print 'parsed_lines:\n', pprint.pformat(parsed_lines)
    print 'output_lines:\n', pprint.pformat(dummy)
    # set some new values (with units :-)
    new_values = {}
    new_values['heat heatflux 1'] = '1 K/cm'
    new_values['heat heatflux 2'] = '4.5 K/cm'
    new_values['gridfile'] = 'my.grid'
    new_values['max no of iterations'] = 30
    # Re-emit the input file with the overridden values applied.
    lines = prms2lines(new_values, parsed_lines)
    print '\n\nnew input file:\n', ''.join(lines)
Example #37
0
 def test_1D(self):
     """savetxt writes a 1-D array as one number per line."""
     values = np.array([1, 2, 3, 4], int)
     stream = StringIO()
     np.savetxt(stream, values, fmt='%d')
     stream.seek(0)
     written = stream.readlines()
     assert_equal(written, asbytes_nested(['1\n', '2\n', '3\n', '4\n']))
def _parse_text(text):
    """Parse a 6-line scraped record into a dict keyed by _columns.

    Returns {} when the text does not contain exactly 6 non-blank lines or
    when the required OK_UNITpro / CB_DATEpro fields are empty. On success
    the dict also carries 'start_timestamp', 'end_timestamp' and 'geo'
    derived via _parse_time_period and _parse_geo.
    """
    cfg.logger.debug('text: %s', text)
    _columns = ['OK_UNITpro', 'IDpro', 'APP_NAMEpro', 'LOCATIONpro', 'CB_DATEpro', 'APPROVE_DATEpro']

    f = StringIO(text)
    lines = [line for line in f.readlines() if line.strip()]
    if len(lines) != 6:
        cfg.logger.error('lines != 6: lines: %s', lines)
        return {}

    cfg.logger.debug('lines: %s', lines)

    # Map each non-blank line onto its column name, positionally.
    result = {column: lines[idx].strip() for (idx, column) in enumerate(_columns) if column}
    # Bug fix: the original reset result to {} on a failed validation and
    # then FELL THROUGH, so start/end timestamps and geo were still added
    # to the supposedly-empty dict. Return early instead, matching the
    # n_lines check above.
    if not result.get('OK_UNITpro', ''):
        return {}
    if not result.get('CB_DATEpro', ''):
        return {}

    cfg.logger.debug('result: %s', result)
    (start_timestamp, end_timestamp) = _parse_time_period(result)
    geo = _parse_geo(result)
    result['start_timestamp'] = start_timestamp
    result['end_timestamp'] = end_timestamp
    result['geo'] = geo

    return result
Example #39
0
def cmd_dissociate(options, args):
    """Dissociate a checkout from its local mirror"""

    cache_dir = GetGitConfig("oxide.cacheDir", OXIDESRC_DIR)
    if not cache_dir:
        print("This checkout was not cloned from a local mirror")
        sys.exit(0)

    if GetGitConfig("oxide.cacheMode", OXIDESRC_DIR) != "reference":
        print("Cannot dissociate checkouts created with --cache-mode=full",
              file=sys.stderr)
        sys.exit(1)

    DissociateRepo(OXIDESRC_DIR)

    # Walk every dependency reported by gclient and dissociate it too.
    revinfo = StringIO(CheckOutput(["gclient", "revinfo"], TOP_DIR))
    for entry in revinfo.readlines():
        fields = entry.strip().split()
        if fields[1].strip() == "None":
            continue
        # Drop the trailing ':' from the "path:" field.
        rel_path = re.sub(r'([^:]*):', r'\1', fields[0].strip())
        DissociateRepo(os.path.join(TOP_DIR, rel_path))

    # Finally forget the mirror configuration itself.
    for key in ("oxide.cacheDir", "oxide.cacheMode"):
        CheckCall(["git", "config", "--unset", key], OXIDESRC_DIR)
Example #40
0
    def exportDevices(self, deviceClass='/', options=None):
        """Export the device tree under *deviceClass* as sorted dump text.

        :param deviceClass: device class path; the leading '/' is stripped
            before being handed to the dumper as its root.
        :param options: optional dict of extra dumper options, applied via
            self._setOptions.
        :return: tuple (data, dumpedCount) -- the sorted dump text and the
            count returned by listDeviceTree.
        """
        # Fix: the original used a mutable default argument (options={}),
        # which is shared across calls; use None and create a fresh dict.
        if options is None:
            options = {}

        dumper = BatchDeviceDumper(noopts=True)
        output = StringIO()

        # Set command-line options
        dumper.options.root = deviceClass[1:]
        # Hidden 'option' in BatchDeviceDump
        dumper.options.pruneLSGO = True
        self._setOptions(dumper, options)

        # Export our custom list of properties
        def isPropExportable(propdict):
            # zDeviceTemplate is always exported; otherwise only local props.
            if propdict['id'] == 'zDeviceTemplate':
                return True
            return propdict['islocal']
        dumper.isPropExportable = isPropExportable

        # Don't export all getter/setter pairs either
        dumper.ignoreSetters += (
            'setLastChange', 'setHWProductKey', 'setHWSerialNumber',
            'setOSProductKey', 'setPriority',
        )

        dumpedCount = dumper.listDeviceTree(output)
        output.seek(0)
        data = output.readlines()
        output.close()

        # Dump the results in sorted order to make file
        # comparisons easier.
        # NOTE(review): readlines() keeps trailing '\n', so joining with
        # '\n' double-spaces the output and lstrip('\n') drops the leading
        # blank. Preserved as-is -- confirm the double spacing is intended.
        data = '\n'.join(sorted(data))
        return data.lstrip('\n'), dumpedCount
Example #41
0
 def test_1D(self):
     """Round-trip a 1-D integer array through savetxt, checking raw lines."""
     vec = np.array([1, 2, 3, 4], int)
     sink = StringIO()
     np.savetxt(sink, vec, fmt="%d")
     sink.seek(0)
     assert_equal(sink.readlines(),
                  asbytes_nested(["1\n", "2\n", "3\n", "4\n"]))
Example #42
0
File: ivium.py Project: ww334/nplab
 def parse_data(self, data_file_str):
     """Parse an Ivium data-file string.

     'key=value' lines are stored in self.params; the numeric table that
     follows the 'primary_data' marker is loaded into self.data via
     np.genfromtxt as named columns x, y, z.
     """
     s = StringIO(data_file_str)
     lines = [l.strip() for l in s.readlines()]
     print len(lines)
     for line in lines:
         if '=' in line:
             # Header line: stash key=value in the params dict.
             param, value = line.split('=')
             self.params[param] = value
         if 'primary_data' in line:
             # 'primary_data' marks the start of the numeric table.
             i = lines.index(line)
             print 'starting from line {0:d}/{1:d}'.format(
                 i, len(lines)), lines[i]
             #print lines
             #for j in range(i, i+6): print j, lines[j]
             # Assumes the table starts 4 lines below the marker and the
             # file ends with a 3-line footer -- TODO confirm against
             # actual Ivium output files.
             self.data = np.genfromtxt(
                 StringIO(data_file_str),
                 skip_header=i + 4,
                 skip_footer=3,
                 usecols=(0, 1, 2),
                 names=('x', 'y', 'z'),
                 #missing_values = 'x',
                 #filling_values = 0,
                 autostrip=True,
                 unpack=True,
             )
Example #43
0
    def test_toc(self):
        """The tableofcontents module replaces !TOC with a numbered,
        anchor-linked table of contents."""
        source = StringIO('\n# Document Title\n\n'
                          '!TOC\n\n'
                          '## Header 1\n'
                          '### Header 1.a\n'
                          '## Header 2\n')

        expected = """
        # Document Title

        1\.  [Header 1](#header1)
        1.1\.  [Header 1.a](#header1.a)
        2\.  [Header 2](#header2)

        <a name="header1"></a>

        ## 1\. Header 1
        <a name="header1.a"></a>

        ### 1.1\. Header 1.a
        <a name="header2"></a>

        ## 2\. Header 2"""

        rendered = StringIO()
        MarkdownPP(input=source, modules=['tableofcontents'], output=rendered)

        rendered.seek(0)
        actual = [line.strip() for line in rendered.readlines()]
        self.assertEqual(actual,
                         [line.strip() for line in expected.split('\n')])
Example #44
0
        def run_and_parse_output():
            """Run 'modelcount', check stderr mirrors stdout with an
            'error: ' prefix, and return the parsed stdout as a dict."""
            stdout_buf = StringIO()
            stderr_buf = StringIO()
            call_command(
                'modelcount',
                stdout=stdout_buf,
                stderr=stderr_buf)
            stdout_buf.seek(0)
            stderr_buf.seek(0)
            out_lines = stdout_buf.readlines()
            err_lines = stderr_buf.readlines()

            self.assertEqual(len(out_lines), len(err_lines))
            for out_line, err_line in zip(out_lines, err_lines):
                self.assertEqual('error: ' + out_line, err_line)
            return dict(parse_line(s) for s in out_lines)
Example #45
0
    def test_graph_disconnected_to_dot(self):
        """graph_to_dot with skip_disconnected=False emits both the edge
        section and a 'subgraph disconnected' section; check each against
        its expected contents."""
        dependencies_expected = (
            ('towel-stuff', 'bacon', 'bacon (<=0.2)'),
            ('grammar', 'bacon', 'truffles (>=1.2)'),
            ('choxie', 'towel-stuff', 'towel-stuff (0.1)'),
            ('banana', 'strawberry', 'strawberry (>=0.5)'),
        )
        disconnected_expected = ('cheese', 'bacon', 'strawberry')

        dists = []
        for name in self.DISTROS_DIST + self.DISTROS_EGG:
            dist = get_distribution(name, use_egg_info=True)
            self.assertNotEqual(dist, None)
            dists.append(dist)

        graph = depgraph.generate_graph(dists)
        buf = StringIO()
        depgraph.graph_to_dot(graph, buf, skip_disconnected=False)
        buf.seek(0)
        dot_lines = buf.readlines()

        edge_lines = []
        isolated_lines = []

        # Partition the dot output -- minus the surrounding "{" / "}"
        # lines -- into dependency edges and disconnected-subgraph members,
        # skipping attribute lines such as 'label = "Disconnected"'.
        in_disconnected = False
        for line in dot_lines[1:-1]:
            if line.startswith('subgraph disconnected'):
                in_disconnected = True
            elif line.startswith('}') and in_disconnected:
                in_disconnected = False
            elif in_disconnected:
                if ' = ' not in line:
                    isolated_lines.append(line)
            else:
                edge_lines.append(line)

        dependencies_matches = []
        for line in edge_lines:
            if line.endswith('\n'):
                line = line[:-1]
            match = self.EDGE.match(line.strip())
            self.assertIsNot(match, None)
            dependencies_matches.append(match.groups())

        disconnected_matches = []
        for line in isolated_lines:
            if line.endswith('\n'):
                line = line[:-1]
            disconnected_matches.append(line.strip('"'))

        self.checkLists(dependencies_matches, dependencies_expected)
        self.checkLists(disconnected_matches, disconnected_expected)
Example #46
0
    def test_graph_bad_version_to_dot(self):
        """Distributions with bad version metadata are still rendered as
        dot edges by graph_to_dot."""
        expected = (
            ('towel-stuff', 'bacon', 'bacon (<=0.2)'),
            ('grammar', 'bacon', 'truffles (>=1.2)'),
            ('choxie', 'towel-stuff', 'towel-stuff (0.1)'),
            ('banana', 'strawberry', 'strawberry (>=0.5)'),
        )

        dists = []
        for name in self.DISTROS_DIST + self.DISTROS_EGG + self.BAD_EGGS:
            dist = get_distribution(name, use_egg_info=True)
            self.assertNotEqual(dist, None)
            dists.append(dist)

        buf = StringIO()
        depgraph.graph_to_dot(depgraph.generate_graph(dists), buf)
        buf.seek(0)

        matches = []
        # Skip the opening "{" and closing "}" lines of the dot output.
        for raw in buf.readlines()[1:-1]:
            if raw.endswith('\n'):
                raw = raw[:-1]
            edge = self.EDGE.match(raw.strip())
            self.assertIsNot(edge, None)
            matches.append(edge.groups())

        self.checkLists(matches, expected)
Example #47
0
    def test_capacity_csv_checksum(self):
        """The CSV export embeds an md5 checksum of everything except its
        last two lines; every emitted row must be one of the known rows."""
        self._create_capacity_log()
        resp = self.app.get(reverse('CapacityLogCsvHandler'))
        self.assertEqual(200, resp.status_code)

        stream = StringIO(resp.body)
        body_lines = stream.readlines()
        checksum = md5(''.join(body_lines[:-2])).hexdigest()

        stream.seek(0)
        reader = csv.reader(stream,
                            delimiter=',',
                            quotechar='|',
                            quoting=csv.QUOTE_MINIMAL)

        expected_rows = [
            ['Fuel version', '0.1b'],
            ['Fuel UUID', 'Unknown'],
            ['Checksum', checksum],
            ['Environment Name', 'Node Count'],
            ['Total number allocated of nodes', '0'],
            ['Total number of unallocated nodes', '0'],
            ['Node role(s)', 'Number of nodes with this configuration'],
            [],
        ]
        for row in reader:
            self.assertIn(row, expected_rows)
Example #48
0
 def _choices_as_array(self):
     """Return list_values as [value, value] choice pairs, one per line."""
     from StringIO import StringIO
     buf = StringIO(self.list_values)
     pairs = []
     for raw in buf.readlines():
         value = raw.strip()
         pairs.append([value, value])
     buf.close()
     return pairs
Example #49
0
    def test_toc(self):
        """!TOC is replaced by a numbered table of contents whose entries
        link to generated anchors."""
        doc = StringIO('\n# Document Title\n\n'
                       '!TOC\n\n'
                       '## Header 1\n'
                       '### Header 1.a\n'
                       '## Header 2\n')

        expected = """
        # Document Title

        1\.  [Header 1](#header1)
        1.1\.  [Header 1.a](#header1.a)
        2\.  [Header 2](#header2)

        <a name="header1"></a>

        ## 1\. Header 1
        <a name="header1.a"></a>

        ### 1.1\. Header 1.a
        <a name="header2"></a>

        ## 2\. Header 2"""

        processed = StringIO()
        MarkdownPP(input=doc, modules=['tableofcontents'], output=processed)

        processed.seek(0)
        got = [l.strip() for l in processed.readlines()]
        want = [l.strip() for l in expected.split('\n')]
        self.assertEqual(got, want)
Example #50
0
def test_write_tape9():
    """write_tape9 keeps every line within 80 columns and its output
    survives a parse_tape9 round trip."""
    out_buf = StringIO()

    t9 = {
        1: {'_type': 'decay', 'half_life': {10010: 42.0}, 'title': 'decay1'},
        2: {'_type': 'decay', '_bad_key': None, 'title': 'decay2',
            'half_life': {922350: 42.0}},
        3: {'_type': 'decay', 'title': "Sweet Decay",
            'half_life': {10010: 42.0, 421000: 42.0}},
        381: {'_type': 'xsfpy', '_subtype': 'activation_products',
              'sigma_gamma': {10010: 12.0}, 'title': 'xs1'},
        382: {'_type': 'xsfpy', '_subtype': 'actinides',
              'sigma_f': {922350: 16.0}, 'title': 'xs2'},
        383: {'_type': 'xsfpy', '_subtype': 'fission_products',
              'sigma_gamma': {10010: 20.0}, 'title': 'xsfpy3',
              'U235_fiss_yield': {421000: 42.0},
              'fiss_yields_present': {421000: True}},
    }

    # Basic functionality: each emitted line fits in 80 columns
    # (81 here since the trailing newline is counted by len).
    origen22.write_tape9(t9, out_buf)
    out_buf.seek(0)
    for line in out_buf.readlines():
        assert len(line) <= 81

    # Round trip: parse a full sample tape9, write it back out, re-parse.
    full_tape9 = origen22.parse_tape9(StringIO(sample_tape9))

    backout_tape9 = StringIO()
    origen22.write_tape9(full_tape9, backout_tape9)
    backout_tape9.seek(0)

    backin_tape9 = origen22.parse_tape9(backout_tape9)
Example #51
0
    def _choices_as_array(self):
        """Each line of list_values becomes a [value, value] choice pair."""
        from StringIO import StringIO

        buf = StringIO(self.list_values)
        try:
            return [[line.strip(), line.strip()] for line in buf.readlines()]
        finally:
            buf.close()
Example #52
0
 def send_typical_request(self, message):
     """Feed *message* to the handler as its rfile, run one request, and
     return the response lines written to wfile."""
     self.handler.rfile = StringIO(message)
     response = StringIO()
     self.handler.wfile = response
     self.handler.handle_one_request()
     response.seek(0)
     return response.readlines()
class CommandsTest(TestCase):
    """
    This class is the personal_info management commands
    test case
    """
    def setUp(self):
        """Create fresh output buffers and collect 'name count' strings
        for every registered model."""
        self.out_file = StringIO()
        self.err_file = StringIO()

        self.model_data = [
            '%s %s' % (model.__name__, model.objects.count())
            for model in models.get_models()
        ]

    def test_call_command(self):
        """
        Test the whole command functionality
        """
        call_command('modeloutput', stdout=self.out_file, stderr=self.err_file)

        # stdout and stderr must carry the same number of lines.
        self.out_file.seek(0)
        self.err_file.seek(0)
        self.assertEqual(len(self.out_file.readlines()),
                         len(self.err_file.readlines()))

        # Each stderr line is the stdout line with an 'error: ' prefix,
        # and both halves of each stdout line match the model data.
        self.out_file.seek(0)
        self.err_file.seek(0)
        for expected in self.model_data:
            out_line = self.out_file.readline()
            err_line = self.err_file.readline()
            self.assertEqual('error: %s' % out_line, err_line)
            parts = out_line.strip().split('-')
            self.assertIn(parts[0], expected)
            self.assertIn(parts[1], expected)

        # One stdout line per model.
        self.out_file.seek(0)
        self.err_file.seek(0)
        self.assertEqual(len(self.out_file.readlines()), len(self.model_data))
Example #54
0
 def read_sum_text(self, text, is_racy_file=False):
     """Parse summary *text* line by line via parse_sum_line, accumulating
     into [OrderedDict, dict] results."""
     results = [OrderedDict(), dict()]
     for line in StringIO(text).readlines():
         self.parse_sum_line(results, line, is_racy_file=is_racy_file)
     return results
    def test_write_big_csv(self):
        """Converting the bundled Wikidata XML dump yields a CSV with
        exactly 3679 lines."""
        csv_out = StringIO()
        dump_path = resource_filename(__name__, "Wikidata-20131129161111.xml.gz")
        records = XmlReader.read_xml(gzip.open(dump_path))
        CsvWriter.write_csv(records, csv_out)

        csv_out.seek(0)
        self.assertThat(len(csv_out.readlines()), Equals(3679))
Example #56
0
 def get_archives(self):
     """A list of archives as returned by --list-archives. Queried the
     first time it is accessed, then served from the cache afterwards.
     """
     if self._queried_archives is not None:
         return self._queried_archives + self._known_archives
     listing = StringIO(self.call('--list-archives'))
     self._queried_archives = [line.rstrip() for line in listing.readlines()]
     return self._queried_archives + self._known_archives
Example #57
0
    def run(path, code=None, params=None, **meta):
        """ PEP8 code checking.

        :param path: path of the file being checked (used for reporting).
        :param code: the source text to check.
        :param params: optional dict of StyleGuide options.

        :return list: List of errors.

        """
        # Fix: the default params=None crashed with TypeError when expanded
        # via **params; fall back to an empty options dict instead.
        P8Style = StyleGuide(reporter=_PEP8Report, **(params or {}))
        buf = StringIO(code)
        return P8Style.input_file(path, lines=buf.readlines())