Example #1
class TestFastqWriter(object):

    def setup(self):
        self.fastq1 = StringIO("@seq1\n"   +
                               "GATTACA\n" +
                               "+\n"       +
                               "789:;<=\n")
        self.fastq2 = StringIO(self.fastq1.getvalue() +
                               "@seq2\n"   +
                               "CATTAGA\n" +
                               "+\n"       +
                               "@@@@@@@\n")

    def test_writeFastq1(self):
        f = StringIO()
        w = FastqWriter(f)
        for record in FastqReader(self.fastq1):
            w.writeRecord(record)
        assert_equal(self.fastq1.getvalue(), f.getvalue())

    def test_writeFastq2(self):
        f = StringIO()
        w = FastqWriter(f)
        for record in FastqReader(self.fastq2):
            w.writeRecord(record)
        assert_equal(self.fastq2.getvalue(), f.getvalue())

    def test_writeFastq3(self):
        f = StringIO()
        w = FastqWriter(f)
        for record in FastqReader(self.fastq2):
            w.writeRecord(record.header, record.sequence, record.quality)
        assert_equal(self.fastq2.getvalue(), f.getvalue())
Example #2
    def test_import(self, server_proxy, requests):
        """
        Test import operation
        """
        proxy = MagicMock()
        proxy.ImportInfrastructure.return_value = (True, "newinfid")
        server_proxy.return_value = proxy
        options = MagicMock()
        options.auth_file = get_abs_path("../../auth.dat")
        options.restapi = None
        parser = MagicMock()

        out = StringIO()
        oldstdout = sys.stdout
        sys.stdout = out
        res = main("import", options, [get_abs_path("../files/test.radl")], parser)
        self.assertEquals(res, True)
        output = out.getvalue().strip()
        self.assertIn("New Inf: newinfid", output)

        out = StringIO()
        sys.stdout = out
        options.xmlrpc = None
        options.restapi = "https://localhost:8800"
        requests.side_effect = self.get_response
        res = main("import", options, [get_abs_path("../files/test.radl")], parser)
        self.assertEquals(res, True)
        output = out.getvalue().strip()
        self.assertIn("New Inf: newinfid", output)
        sys.stdout = oldstdout
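
A note on the capture pattern used throughout these tests: sys.stdout is reassigned by hand and only restored at the very end, so a failing assertion in the middle leaks the redirect into later tests. A minimal sketch of the same capture with a context manager, assuming Python 3's contextlib.redirect_stdout (main_stub is a hypothetical stand-in for the main() under test):

from io import StringIO
from contextlib import redirect_stdout

def main_stub():
    # hypothetical stand-in for the command dispatcher exercised above
    print("New Inf: newinfid")

out = StringIO()
with redirect_stdout(out):  # sys.stdout is restored even if main_stub() raises
    main_stub()
assert "New Inf: newinfid" in out.getvalue().strip()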
Example #3
    def test_rebootvm(self, server_proxy, requests):
        """
        Test rebootvm operation
        """
        proxy = MagicMock()
        proxy.RebootVM.return_value = (True, "")
        server_proxy.return_value = proxy
        options = MagicMock()
        options.auth_file = get_abs_path("../../auth.dat")
        options.restapi = None
        parser = MagicMock()

        out = StringIO()
        oldstdout = sys.stdout
        sys.stdout = out
        res = main("rebootvm", options, ["infid", "vmid"], parser)
        self.assertEquals(res, True)
        output = out.getvalue().strip()
        self.assertIn("VM successfully rebooted", output)

        out = StringIO()
        sys.stdout = out
        options.xmlrpc = None
        options.restapi = "https://localhost:8800"
        requests.side_effect = self.get_response
        res = main("rebootvm", options, ["infid", "vmid"], parser)
        self.assertEquals(res, True)
        output = out.getvalue().strip()
        self.assertIn("VM successfully rebooted", output)
        sys.stdout = oldstdout
Example #4
    def testApplyFilter(self):
        s, out = commands.getstatusoutput('python scripts/vcf_filter.py --site-quality 30 test/example-4.0.vcf sq')
        #print out
        assert s == 0
        buf = StringIO()
        buf.write(out)
        buf.seek(0)

        print buf.getvalue()
        reader = vcf.Reader(buf)


        # check filter got into output file
        assert 'sq30' in reader.filters

        print reader.filters

        # check sites were filtered
        n = 0
        for r in reader:
            if r.QUAL < 30:
                assert 'sq30' in r.FILTER
                n += 1
            else:
                assert 'sq30' not in r.FILTER
        assert n == 2
Example #5
    def test_getversion(self, server_proxy, requests):
        """
        Test getversion operation
        """
        proxy = MagicMock()
        proxy.GetVersion.return_value = (True, "1.0")
        server_proxy.return_value = proxy
        options = MagicMock()
        options.auth_file = get_abs_path("../../auth.dat")
        options.restapi = None
        parser = MagicMock()

        out = StringIO()
        oldstdout = sys.stdout
        sys.stdout = out
        res = main("getversion", options, [], parser)
        self.assertEquals(res, True)
        output = out.getvalue().strip()
        self.assertIn("1.0", output)

        out = StringIO()
        sys.stdout = out
        options.xmlrpc = None
        options.restapi = "https://localhost:8800"
        requests.side_effect = self.get_response
        res = main("getversion", options, [], parser)
        self.assertEquals(res, True)
        output = out.getvalue().strip()
        self.assertIn("1.0", output)
        sys.stdout = oldstdout
Example #6
    def test_getstate(self, server_proxy, requests):
        """
        Test getstate operation
        """
        proxy = MagicMock()
        proxy.GetInfrastructureState.return_value = (True, {"state": "running", "vm_states": {"vm1": "running"}})
        server_proxy.return_value = proxy
        options = MagicMock()
        options.auth_file = get_abs_path("../../auth.dat")
        options.restapi = None
        parser = MagicMock()

        out = StringIO()
        oldstdout = sys.stdout
        sys.stdout = out
        res = main("getstate", options, ["infid"], parser)
        self.assertEquals(res, True)
        output = out.getvalue().strip()
        self.assertIn("The infrastructure is in state: running\nVM ID: vm1 is in state: running.", output)

        out = StringIO()
        sys.stdout = out
        options.xmlrpc = None
        options.restapi = "https://localhost:8800"
        requests.side_effect = self.get_response
        res = main("getstate", options, ["infid"], parser)
        self.assertEquals(res, True)
        output = out.getvalue().strip()
        self.assertIn("The infrastructure is in state: running\nVM ID: vm1 is in state: running.", output)
        sys.stdout = oldstdout
Example #7
    def test_getinfo(self, server_proxy, requests):
        """
        Test getinfo operation
        """
        proxy = MagicMock()
        proxy.GetVMInfo.return_value = (True, "radltest")
        proxy.GetInfrastructureInfo.return_value = (True, ["vm1"])
        server_proxy.return_value = proxy
        options = MagicMock()
        options.auth_file = get_abs_path("../../auth.dat")
        options.restapi = None
        parser = MagicMock()

        out = StringIO()
        oldstdout = sys.stdout
        sys.stdout = out
        res = main("getinfo", options, ["infid"], parser)
        self.assertEquals(res, True)
        output = out.getvalue().strip()
        self.assertIn("Info about VM with ID: vm1\nradltest", output)

        out = StringIO()
        sys.stdout = out
        options.xmlrpc = None
        options.restapi = "https://localhost:8800"
        requests.side_effect = self.get_response
        res = main("getinfo", options, ["infid"], parser)
        self.assertEquals(res, True)
        output = out.getvalue().strip()
        self.assertIn("Info about VM with ID: vm1\nradltest", output)
        sys.stdout = oldstdout
Example #8
    def test_addresource(self, server_proxy, requests):
        """
        Test addresource operation
        """
        proxy = MagicMock()
        proxy.AddResource.return_value = (True, ["1"])
        server_proxy.return_value = proxy
        options = MagicMock()
        options.auth_file = get_abs_path("../../auth.dat")
        options.restapi = None
        parser = MagicMock()

        out = StringIO()
        oldstdout = sys.stdout
        sys.stdout = out
        res = main("addresource", options, ["infid", get_abs_path("../files/test.radl")], parser)
        self.assertEquals(res, True)
        output = out.getvalue().strip()
        self.assertIn("Resources with IDs: 1 successfully added.", output)

        out = StringIO()
        sys.stdout = out
        options.xmlrpc = None
        options.restapi = "https://localhost:8800"
        requests.side_effect = self.get_response
        res = main("addresource", options, ["infid", get_abs_path("../files/test.radl")], parser)
        self.assertEquals(res, True)
        output = out.getvalue().strip()
        self.assertIn("Resources with IDs: 1 successfully added.", output)
        sys.stdout = oldstdout
Example #9
    def test_getcontmsg(self, server_proxy, requests):
        """
        Test getcontmsg operation
        """
        proxy = MagicMock()
        proxy.GetInfrastructureContMsg.return_value = (True, "contmsg")
        server_proxy.return_value = proxy
        options = MagicMock()
        options.auth_file = get_abs_path("../../auth.dat")
        options.restapi = None
        parser = MagicMock()

        out = StringIO()
        oldstdout = sys.stdout
        sys.stdout = out
        res = main("getcontmsg", options, ["infid"], parser)
        self.assertEquals(res, True)
        output = out.getvalue().strip()
        self.assertIn("Msg Contextualizator: \n\ncontmsg", output)

        out = StringIO()
        sys.stdout = out
        options.xmlrpc = None
        options.restapi = "https://localhost:8800"
        requests.side_effect = self.get_response
        res = main("getcontmsg", options, ["infid"], parser)
        self.assertEquals(res, True)
        output = out.getvalue().strip()
        self.assertIn("Msg Contextualizator: \n\ncontmsg", output)
        sys.stdout = oldstdout
Example #10
    def test_list(self, server_proxy, requests):
        """
        Test list operation
        """
        proxy = MagicMock()
        proxy.GetInfrastructureList.return_value = (True, ["inf1", "inf2"])
        server_proxy.return_value = proxy
        options = MagicMock()
        options.auth_file = get_abs_path("../../auth.dat")
        options.xmlrpc = "https://localhost:8899"
        options.restapi = None
        options.verify = False
        parser = MagicMock()

        out = StringIO()
        oldstdout = sys.stdout
        sys.stdout = out
        res = main("list", options, [], parser)
        output = out.getvalue().strip()
        self.assertEquals(res, True)
        self.assertIn("IDs: \n  inf1\n  inf2", output)
        sys.stdout = oldstdout

        out = StringIO()
        sys.stdout = out
        options.xmlrpc = None
        options.restapi = "https://localhost:8800"
        requests.side_effect = self.get_response
        res = main("list", options, [], parser)
        self.assertEquals(res, True)
        output = out.getvalue().strip()
        self.assertIn("IDs: \n  inf1\n  inf2", output)
        sys.stdout = oldstdout
Example #11
    def test_create(self, server_proxy, requests):
        """
        Test create operation
        """
        proxy = MagicMock()
        proxy.CreateInfrastructure.return_value = (True, "inf1")
        server_proxy.return_value = proxy
        options = MagicMock()
        options.auth_file = get_abs_path("../../auth.dat")
        options.restapi = None
        parser = MagicMock()

        out = StringIO()
        oldstdout = sys.stdout
        sys.stdout = out
        res = main("create", options, [get_abs_path("../files/test.radl")], parser)
        self.assertEquals(res, True)
        output = out.getvalue().strip()
        self.assertIn("Infrastructure successfully created with ID: inf1", output)
        sys.stdout = oldstdout

        out = StringIO()
        sys.stdout = out
        options.xmlrpc = None
        options.restapi = "https://localhost:8800"
        requests.side_effect = self.get_response
        res = main("create", options, [get_abs_path("../files/test.radl")], parser)
        self.assertEquals(res, True)
        output = out.getvalue().strip()
        self.assertIn("Infrastructure successfully created with ID: inf1", output)
        sys.stdout = oldstdout
Example #12
def execute(code):
    if code == 'test_error':
        raise ValueError
    tempdir = gettempdir()
    script_py = os.path.join(tempdir, 'my_script.py')
    write_to_path(script_py, code)
    stdin = AccessRecorder()
    stdout = StringIO()
    stderr = StringIO()
    file_tracker = FileTracker(open)
    with temp_vars([sys, 'stdout', stdout],
                   [sys, 'stderr', stderr],
                   [sys, 'stdin', stdin]):

        # noinspection PyBroadException
        try:
            exec (
                compile(code, script_py, 'exec', dont_inherit=True),
                {'open': file_tracker, '__name__': '__main__'},
                {})
        except QuietExit:
            pass
        except:
            tb_list = [e for e in tb.extract_tb(sys.exc_info()[2]) if os.path.dirname(__file__) not in e[0]]
            print('Traceback (most recent call last):\n' +
                  ''.join(tb.format_list(tb_list) +
                          tb.format_exception_only(*sys.exc_info()[:2])).replace(tempdir, ''),
                  file=sys.stderr)
    return {'stdout': stdout.getvalue(),
            'stderr': stderr.getvalue(),
            'files': file_tracker.result(),
            'stdin_used': stdin.accessed,
            'non_ascii_files': file_tracker.non_ascii_files,
            'version': platform.python_version()}
Example #13
def image(path):
  if '..' in path:
    abort(500)
  fd = open(join(app.root_path, "images", path))
  data = fd.read()

  hsize = int(request.args.get("h", 0))
  vsize = int(request.args.get("v", 0))
  if hsize > 1000 or vsize > 1000:
    abort(500)

  if hsize:
    image = Image.open(StringIO(data))
    x, y = image.size

    x1 = hsize
    y1 = int(1.0 * y * hsize / x)
    image.thumbnail((x1, y1), Image.ANTIALIAS)
    output = StringIO()
    image.save(output, "PNG")
    data = output.getvalue()
  if vsize:
    image = Image.open(StringIO(data))
    x, y = image.size

    x1 = int(1.0 * x * vsize / y)
    y1 = vsize
    image.thumbnail((x1, y1), Image.ANTIALIAS)
    output = StringIO()
    image.save(output, "PNG")
    data = output.getvalue()

  response = make_response(data)
  response.headers['content-type'] = mimetypes.guess_type(path)[0]  # guess_type returns (type, encoding)
  return response
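
Image.open(StringIO(data)) relies on Python 2's StringIO holding raw bytes; on Python 3 the same step needs io.BytesIO, and current Pillow spells the filter Image.LANCZOS rather than Image.ANTIALIAS. A sketch of the horizontal-resize branch under those assumptions (note also that mimetypes.guess_type() returns a (type, encoding) pair, hence the [0] above):

from io import BytesIO
from PIL import Image

def thumbnail_png(data, hsize):
    # data: raw image bytes; returns PNG bytes scaled to width hsize
    image = Image.open(BytesIO(data))
    x, y = image.size
    image.thumbnail((hsize, int(1.0 * y * hsize / x)), Image.LANCZOS)
    output = BytesIO()
    image.save(output, "PNG")
    return output.getvalue()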
Example #14
 def _serialize(obj, keypos):
     if keypos:
         if isinstance(obj, (int, long, float, bool)):
             return ('i:%i;' % obj).encode('latin1')
         if isinstance(obj, basestring):
             encoded_obj = obj
             if isinstance(obj, unicode):
                 encoded_obj = obj.encode(charset, errors)
             s = BytesIO()
             s.write(b's:')
             s.write(str(len(encoded_obj)).encode('latin1'))
             s.write(b':"')
             s.write(encoded_obj)
             s.write(b'";')
             return s.getvalue()
         if obj is None:
             return b's:0:"";'
         raise TypeError('can\'t serialize %r as key' % type(obj))
     else:
         if obj is None:
             return b'N;'
         if isinstance(obj, bool):
             return ('b:%i;' % obj).encode('latin1')
         if isinstance(obj, (int, long)):
             return ('i:%s;' % obj).encode('latin1')
         if isinstance(obj, float):
             return ('d:%s;' % obj).encode('latin1')
         if isinstance(obj, basestring):
             encoded_obj = obj
             if isinstance(obj, unicode):
                 encoded_obj = obj.encode(charset, errors)
             s = BytesIO()
             s.write(b's:')
             s.write(str(len(encoded_obj)).encode('latin1'))
             s.write(b':"')
             s.write(encoded_obj)
             s.write(b'";')
             return s.getvalue()
         if isinstance(obj, (list, tuple, dict)):
             out = []
             if isinstance(obj, dict):
                 iterable = obj.items()
             else:
                 iterable = enumerate(obj)
             for key, value in iterable:
                 out.append(_serialize(key, True))
                 out.append(_serialize(value, False))
             return b''.join([
                 b'a:',
                 str(len(obj)).encode('latin1'),
                 b':{',
                 b''.join(out),
                 b'}'
             ])
         if isinstance(obj, phpobject):
             return b'O' + _serialize(obj.__name__, True)[1:-1] + \
                    _serialize(obj.__php_vars__, False)[1:]
         if object_hook is not None:
             return _serialize(object_hook(obj), False)
         raise TypeError('can\'t serialize %r' % type(obj))
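
For orientation, the byte strings produced above follow PHP's serialize() wire format; this closure normally lives inside phpserialize's dumps(), which supplies charset, errors and object_hook. A few hand-checked input/output pairs, assuming that context:

# _serialize(None, False)      -> b'N;'
# _serialize(True, False)      -> b'b:1;'
# _serialize(42, False)        -> b'i:42;'
# _serialize('ab', False)      -> b's:2:"ab";'
# _serialize({'k': 1}, False)  -> b'a:1:{s:1:"k";i:1;}'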
Example #15
class LogCaptureTestCase(unittest.TestCase):

	def setUp(self):

		# For extended testing of what gets output into the logging
		# system, we will redirect it to a string
		logSys = logging.getLogger("fail2ban")

		# Keep old settings
		self._old_level = logSys.level
		self._old_handlers = logSys.handlers
		# Let's log everything into a string
		self._log = StringIO()
		logSys.handlers = [logging.StreamHandler(self._log)]
		logSys.setLevel(getattr(logging, 'DEBUG'))

	def tearDown(self):
		"""Call after every test case."""
		# print "O: >>%s<<" % self._log.getvalue()
		logSys = logging.getLogger("fail2ban")
		logSys.handlers = self._old_handlers
		logSys.level = self._old_level

	def _is_logged(self, s):
		return s in self._log.getvalue()

	def printLog(self):
		print(self._log.getvalue())
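
A sketch of how a test might build on this base class, assuming the code under test logs through the "fail2ban" logger (tab indentation kept to match the style above):

import logging

class MyLoggingTest(LogCaptureTestCase):

	def test_warning_is_captured(self):
		# anything logged to "fail2ban" lands in the captured StringIO
		logging.getLogger("fail2ban").warning("banned 10.0.0.1")
		self.assertTrue(self._is_logged("banned 10.0.0.1"))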
Example #16
    def test_subprocess(self):
        """Instead of the ``argv`` shortcut, subclasses can also use the
        ``subprocess`` helper manually.
        """

        class Filter(ExternalTool): pass

        # Without stdin data
        self.popen.return_value.returncode = 0
        self.popen.return_value.communicate.return_value = ['stdout', 'stderr']
        out = StringIO()
        Filter.subprocess(['test'], out)
        assert out.getvalue() == 'stdout'
        self.popen.return_value.communicate.assert_called_with(None)

        # With stdin data
        self.popen.reset_mock()
        self.popen.return_value.returncode = 0
        self.popen.return_value.communicate.return_value = ['stdout', 'stderr']
        out = StringIO()
        Filter.subprocess(['test'], out, data='data')
        assert out.getvalue() == 'stdout'
        self.popen.return_value.communicate.assert_called_with('data')

        # With error
        self.popen.return_value.returncode = 1
        self.popen.return_value.communicate.return_value = ['stdout', 'stderr']
        assert_raises(FilterError, Filter.subprocess, ['test'], StringIO())
Example #17
    def build_zip_from_spreadsheet(self, spreadsheet):
        """
        Returns the ZIP file (as a byte string) containing the requested media files.
        """
        records = []
        zip_file_str = StringIO()
        zip_file = zipfile.ZipFile(zip_file_str, 'w')
        reader = csv.DictReader(StringIO(spreadsheet))
        header_cells = reader.fieldnames
        # Loop through, add media files to the zip file, and
        # update media paths to relative paths:
        for row in reader:
            # if row.get('overlay_type') != 'print':
            for key in self.URL_PATH_FIELDS:
                for cell in row:
                    # "endswith" handles nested file paths, for example
                    # when record objects reference photo objects
                    if cell.endswith(key):
                        self.add_media_to_zip(zip_file, row, cell)
                        row[cell] = self.make_relative_path_for_csv(row, cell)
            records.append(row)

        # Output resulting spreadsheet:
        spreadsheet_buffer = StringIO() # final product of the spreadsheet
        csv_writer = csv.DictWriter(spreadsheet_buffer, fieldnames=header_cells)
        csv_writer.writeheader()
        for row in records:
            csv_writer.writerow(row)

        # Add spreadsheet to zip file, close, and return:
        zip_file.writestr('content.csv', spreadsheet_buffer.getvalue())
        zip_file.close()
        return zip_file_str.getvalue()
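
On Python 2 a single StringIO can back both the CSV writer and the ZIP archive; on Python 3 zipfile needs a bytes buffer while csv works on text. A sketch of the same wiring under that assumption (the field names are illustrative):

import csv
import zipfile
from io import BytesIO, StringIO

zip_buffer = BytesIO()  # zipfile wants bytes on Python 3
with zipfile.ZipFile(zip_buffer, 'w') as zf:
    csv_buffer = StringIO()  # csv works on text
    writer = csv.DictWriter(csv_buffer, fieldnames=['name', 'path'])
    writer.writeheader()
    writer.writerow({'name': 'photo', 'path': 'media/photo.jpg'})
    zf.writestr('content.csv', csv_buffer.getvalue())
zip_bytes = zip_buffer.getvalue()  # the finished archive, as bytes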
Example #18
class TestFastaWriter(object):

    def setup(self):
        self.fasta1 = StringIO(
            ">chr1|blah|blah\n"                                              \
            "GATTACAGATTACAGATTACAGATTACAGATTACAGATTACAGATTACAGATTACAGATT\n" \
            "ACAGATTACAGATTACAGATTACAGATTACAGATTACAGATTACAGATTACAGATTACAG\n" \
            "ATTACAGATTACAGATTACA\n")
        self.fasta2 = StringIO(self.fasta1.getvalue() + "\n" +               \
            ">chr2|blah|blah\n"                                              \
            "GATTACAGATTACAGATTACAGATTACAGATTACAGATTACAGATTACAGATTACAGATT\n" \
            "ACAGATTACAGATTACAGATTACAGATTACAGATTACAGATTACAGATTACAGATTACAG\n" \
            "ATTACAGATTACAGATTACA\n")

    def test_writeFasta1(self):
        f = StringIO()
        w = FastaWriter(f)
        for record in FastaReader(self.fasta1):
            w.writeRecord(record)
        assert_equal(self.fasta1.getvalue(), f.getvalue())

    def test_writeFasta2(self):
        f = StringIO()
        w = FastaWriter(f)
        for record in FastaReader(self.fasta1):
            w.writeRecord(record.header, record.sequence)
        assert_equal(self.fasta1.getvalue(), f.getvalue())
Example #19
def run_coverage(module):
    module_name = module.__name__
    module_path = module_name.replace('.', os.path.sep) + '.' + module_name.rsplit('_', 1)[-1]

    cov = coverage()
    cov.start()
    assert module.func1(1, 2) == (1 * 2) + 2 + 1
    assert module.func2(2) == 2 * 2
    if '_include_' in module_name:
        assert module.main_func(2) == (2 * 3) + ((2 * 2) + 4 + 1) + (2 * 2)
    cov.stop()

    out = StringIO()
    cov.report(file=out)
    #cov.report([module], file=out)
    lines = out.getvalue().splitlines()
    assert any(module_path in line for line in lines), "'%s' not found in coverage report:\n\n%s" % (
        module_path, out.getvalue())

    mod_file, exec_lines, excl_lines, missing_lines, _ = cov.analysis2(source_file_for(module))
    assert module_path in mod_file

    if '_include_' in module_name:
        executed = set(exec_lines) - set(missing_lines)
        assert all(line in executed for line in [7, 12]), '%s / %s' % (exec_lines, missing_lines)

        # rest of test is for the include file
        mod_file, exec_lines, excl_lines, missing_lines, _ = cov.analysis2(
            os.path.join(os.path.dirname(module.__file__), "pkg", "coverage_test_pyx.pxi"))

    executed = set(exec_lines) - set(missing_lines)
    assert all(line in executed for line in [5, 6, 7, 11]), '%s / %s' % (exec_lines, missing_lines)
Example #20
def publish_archive(archive):
    # Now really publish it
    proxy = CONFIG['shinken.io']['proxy']
    api_key = CONFIG['shinken.io']['api_key']
    
    # Ok we will push the file with a 10s timeout
    c = pycurl.Curl()
    c.setopt(c.POST, 1)
    c.setopt(c.CONNECTTIMEOUT, 10)
    c.setopt(c.TIMEOUT, 10)
    if proxy:
        c.setopt(c.PROXY, proxy)
    c.setopt(c.URL, "http://shinken.io/push")
    c.setopt(c.HTTPPOST, [("api_key", api_key),
                          ("data",
                           (c.FORM_FILE, str(archive),
                            c.FORM_CONTENTTYPE, "application/x-gzip"))
                          ])
    response = StringIO()
    c.setopt(pycurl.WRITEFUNCTION, response.write)
    c.setopt(c.VERBOSE, 1)
    c.perform()
    r = c.getinfo(pycurl.HTTP_CODE)
    c.close()
    if r != 200:
        logger.error("There was a critical error : %s" % response.getvalue())
        sys.exit(2)
    else:    
        ret  = json.loads(response.getvalue().replace('\\/', '/'))
        status = ret.get('status')
        text   = ret.get('text')
        if status == 200:
            logger.log(text)
        else:
            logger.error(text)
Example #21
def _log_chk(hdr, level):
    # utility function to check header checking / logging
    # If level == 0, this header should always be OK
    str_io = StringIO()
    logger = logging.getLogger('test.logger')
    handler = logging.StreamHandler(str_io)
    logger.addHandler(handler)
    str_io.truncate(0)
    hdrc = hdr.copy()
    if level == 0: # Should never log or raise error
        logger.setLevel(0)
        hdrc.check_fix(logger=logger, error_level=0)
        assert_true(str_io.getvalue() == '')
        logger.removeHandler(handler)
        return hdrc, '', ()
    # Non zero level, test above and below threshold
    # Logging level above threshold, no log
    logger.setLevel(level+1)
    e_lev = level+1
    hdrc.check_fix(logger=logger, error_level=e_lev)
    assert_true(str_io.getvalue() == '')
    # Logging level below threshold, log appears
    logger.setLevel(level-1)
    hdrc = hdr.copy()
    hdrc.check_fix(logger=logger, error_level=e_lev)
    assert_true(str_io.getvalue() != '')
    message = str_io.getvalue().strip()
    logger.removeHandler(handler)
    hdrc2 = hdr.copy()
    raiser = (HeaderDataError,
              hdrc2.check_fix,
              logger,
              level)
    return hdrc, message, raiser
Example #22
def search(look_at):
    # Now really run the search
    proxy = CONFIG['shinken.io']['proxy']
    api_key = CONFIG['shinken.io']['api_key']
    
    # Ok we will make the request with a 10s timeout
    c = pycurl.Curl()
    c.setopt(c.POST, 0)
    c.setopt(c.CONNECTTIMEOUT, 10)
    c.setopt(c.TIMEOUT, 10)
    if proxy:
        c.setopt(c.PROXY, proxy)

    args = {'keywords':','.join(look_at)}
    c.setopt(c.URL, str('shinken.io/searchcli?'+urllib.urlencode(args)))
    response = StringIO()
    c.setopt(pycurl.WRITEFUNCTION, response.write)
    #c.setopt(c.VERBOSE, 1)
    c.perform()
    r = c.getinfo(pycurl.HTTP_CODE)
    c.close()
    if r != 200:
        logger.error("There was a critical error : %s" % response.getvalue())
        sys.exit(2)
    else:    
        ret  = json.loads(response.getvalue().replace('\\/', '/'))
        status = ret.get('status')
        result   = ret.get('result')
        if status != 200:
            logger.log(result)
            return []
        return result
Example #23
    def do_test_init(self,basedir):
        # Let's init the addon; no errors allowed
        f = open(".ignoreme","w")
        f.write("stuff")
        f.close()

        out, err = StringIO(), StringIO()
        init_run = initializer(None, ["init"], out, err)
        out, err = out.getvalue(), err.getvalue()
        self.assertEqual(init_run["result"], 0)
        self.assertTrue("* lib directory created" in out)
        self.assertTrue("* data directory created" in out)
        self.assertTrue("Have fun!" in out)
        self.assertEqual(err,"")
        self.assertTrue(len(os.listdir(basedir))>0)
        main_js = os.path.join(basedir,"lib","main.js")
        package_json = os.path.join(basedir,"package.json")
        test_main_js = os.path.join(basedir,"test","test-main.js")
        self.assertTrue(os.path.exists(main_js))
        self.assertTrue(os.path.exists(package_json))
        self.assertTrue(os.path.exists(test_main_js))
        self.assertEqual(open(main_js,"r").read(),"")
        self.assertEqual(open(package_json,"r").read() % {"id":"tmp_addon_id" },
                         PACKAGE_JSON % {"name":"tmp_addon_sample",
                                         "fullName": "tmp_addon_SAMPLE",
                                         "id":init_run["jid"] })
        self.assertEqual(open(test_main_js,"r").read(),TEST_MAIN_JS)

        # Let's check that the addon is initialized
        out, err = StringIO(), StringIO()
        init_run = initializer(None, ["init"], out, err)
        out, err = out.getvalue(), err.getvalue()
        self.failIfEqual(init_run["result"],0)
        self.assertTrue("This command must be run in an empty directory." in err)
Example #24
def publish_archive(archive):
    # Now really publish it
    api_key = CONFIG['shinken.io']['api_key']
    c = prepare_curl_connection('/push', post=1, verbose=1)
    c.setopt(c.HTTPPOST, [("api_key", api_key),
                          ("data",
                           (c.FORM_FILE, str(archive),
                            c.FORM_CONTENTTYPE, "application/x-gzip"))
                          ])
    response = StringIO()
    c.setopt(pycurl.WRITEFUNCTION, response.write)

    try:
        c.perform()
    except pycurl.error as exp:
        logger.error("There was a critical error : %s", exp)
        sys.exit(2)
        return
    r = c.getinfo(pycurl.HTTP_CODE)
    c.close()
    if r != 200:
        logger.error("There was a critical error : %s", response.getvalue())
        sys.exit(2)
    else:
        ret  = json.loads(response.getvalue().replace('\\/', '/'))
        status = ret.get('status')
        text   = ret.get('text')
        if status == 200:
            logger.info(text)
        else:
            logger.error(text)
            sys.exit(2)
Example #25
def test_compression():
    arr = np.zeros(100).reshape((5,20))
    arr[2,10] = 1
    stream = StringIO()
    savemat(stream, {'arr':arr})
    raw_len = len(stream.getvalue())
    vals = loadmat(stream)
    yield assert_array_equal, vals['arr'], arr
    stream = StringIO()
    savemat(stream, {'arr':arr}, do_compression=True)
    compressed_len = len(stream.getvalue())
    vals = loadmat(stream)
    yield assert_array_equal, vals['arr'], arr
    yield assert_true, raw_len>compressed_len
    # Concatenate, test later
    arr2 = arr.copy()
    arr2[0,0] = 1
    stream = StringIO()
    savemat(stream, {'arr':arr, 'arr2':arr2}, do_compression=False)
    vals = loadmat(stream)
    yield assert_array_equal, vals['arr2'], arr2
    stream = StringIO()
    savemat(stream, {'arr':arr, 'arr2':arr2}, do_compression=True)
    vals = loadmat(stream)
    yield assert_array_equal, vals['arr2'], arr2
Example #26
def test_write_quoting():
    """
    Especially for writing content, CSV has specific quoting rules. For
    instance, if you want to quote everything, including numeric values,
    you can absolutely do that.
    """
    output = StringIO()
    writer = csv.writer(output, quoting=csv.QUOTE_ALL)
    writer.writerow([1, 1.2])
    assert '''"1","1.2"\r\n''' == output.getvalue()

    # Or don't quote them
    output = StringIO()
    writer = csv.writer(output, quoting=csv.QUOTE_NONNUMERIC)
    writer.writerow([1, 1.2])
    assert '''1,1.2\r\n''' == output.getvalue()

    # Or quote only if it would otherwise confuse the parser
    output = StringIO()
    writer = csv.writer(output, quoting=csv.QUOTE_MINIMAL)
    writer.writerow(["1,2", 1])
    assert '''"1,2",1\r\n''' == output.getvalue()

    # If we select QUOTE_NONE, then we should (and in this case must)
    # set an escape character to prevent the generation of invalid output.
    output = StringIO()
    writer = csv.writer(output, quoting=csv.QUOTE_NONE, escapechar='|')
    writer.writerow(["1,2", 1])
    assert '''1|,2,1\r\n''' == output.getvalue()
Example #27
 def DTC_runTest(self):
     test = self._dt_test
     old = sys.stdout
     new = StringIO()
     optionflags = self._dt_optionflags
     if not (optionflags & REPORTING_FLAGS):
         # The option flags don't include any reporting flags,
         # so add the default reporting flags
         optionflags |= _unittest_reportflags
     # Patching doctestcase to enable verbose mode
     global g_doctest_verbose
     runner = DocTestRunner(optionflags=optionflags,
                            checker=self._dt_checker,
                            verbose=g_doctest_verbose)
     # End of patch
     try:
         runner.DIVIDER = "-"*70
         failures, tries = runner.run(
             test, out=new.write, clear_globs=False)
     finally:
         sys.stdout = old
     if failures:
         raise self.failureException(self.format_failure(new.getvalue()))
     elif g_doctest_verbose:
         print new.getvalue()
Example #28
def create_thumb_and_reduce_size(img,data):
    try:
        import Image as PImage
        from StringIO import StringIO
        from settings import DB_HOST,DB_PORT,DB_NAME
        import pymongo,gridfs
        fs = gridfs.GridFS(pymongo.Connection("%s:%s"%(DB_HOST,DB_PORT))[DB_NAME])

        im = PImage.open(StringIO(data))
        img.width,img.height = im.size

        if (len(data)>500*1024) and (img.ext != "gif"):
            buff = StringIO()
            im.convert("RGB").save(buff,"jpeg",quality=75)
            data = buff.getvalue()

        im = utils.square_crop(im)
        #medium thumb
        im.thumbnail((100,100), PImage.ANTIALIAS)
        buff = StringIO()
        im.convert("RGB").save(buff,"jpeg",quality=75)
        fs.put(buff.getvalue(),filename=img.uid+"m."+img.ext,content_type="image/jpeg")

        #small thumb
        im.thumbnail((40,40), PImage.ANTIALIAS)
        buff = StringIO()
        im.convert("RGB").save(buff,"jpeg",quality=75)
        fs.put(buff.getvalue(),filename=img.uid+"s."+img.ext,content_type="image/jpeg")
    except Exception,what:
        print repr(what)
Example #29
    def test_debug_output(self):
        out = StringIO()
        pdebug.pdebugger.setLevel(logging.ERROR)
        pdebug.set_stream(out)
        pdebug.pdebugger.error('error console')
        pdebug.pdebugger.info('info console')
        pdebug.pdebugger.debug('debug console')
        output = out.getvalue().strip()
        assert output == "error console"


        pdebug.pdebugger.setLevel(logging.INFO)
        pdebug.pdebugger.error('error console')
        pdebug.pdebugger.info('info console')
        pdebug.pdebugger.debug('debug console')
        output = out.getvalue().strip()
        assert output == "error console\nerror console\ninfo console"


        #add in file handler
        filename = "tempunittestdebug.log"
        pdebug.set_file(filename)
        pdebug.pdebugger.error('error file')
        pdebug.pdebugger.info('info file')
        pdebug.pdebugger.debug('debug file')
        with open(filename, 'r') as dbfile:
            data=dbfile.read()
        assert data == "error file\ninfo file\n"
        os.remove("tempunittestdebug.log")
        output = out.getvalue().strip()
        assert output == "error console\nerror console\ninfo console\nerror file\ninfo file"
Example #30
def grab_package(pname):
    print "Trying to grab package", pname

    # Now really grab it
    proxy = CONFIG['shinken.io']['proxy']
    api_key = CONFIG['shinken.io']['api_key']
    
    # Ok we will fetch the file with a 10s timeout
    c = pycurl.Curl()
    c.setopt(c.POST, 0)
    c.setopt(c.CONNECTTIMEOUT, 10)
    c.setopt(c.TIMEOUT, 10)
    if proxy:
        c.setopt(c.PROXY, proxy)

    c.setopt(c.URL, str('shinken.io/grab/%s' % pname))
    response = StringIO()
    c.setopt(pycurl.WRITEFUNCTION, response.write)
    #c.setopt(c.VERBOSE, 1)
    c.perform()
    r = c.getinfo(pycurl.HTTP_CODE)
    c.close()
    if r != 200:
        logger.error("There was a critical error : %s" % response.getvalue())
        sys.exit(2)
    else:
        ret = response.getvalue()
        print "GOT A RETURN OF", len(ret)
        return ret
Example #31
class DummyRequest(object):
    """Dummy request object that imitates twisted.web.http.Request's
    interfaces."""
    
    finished = 0
    response_code = 200
    response_msg = None
    metrics = Metrics(FakeStatsDClient(), 'webprotectme.null')
    
    def __init__(self, api_mode="test", api_version="0.1", api_name="TestAPI", uri="",
                 method="GET", user="", password=""):
        self.headers = {}
        self.args = {}
        self.cookies = []
        self.received_cookies = {}
        self.client_ip = "4.3.2.1"
        self.content = StringIO()
        self._finishedDeferreds = []
        self.api_mode = api_mode
        self.api_version = api_version
        self.api_name = api_name
        self.uri = uri
        self.user = user
        self.password = password
        self.method = method
        self.ignore_auth = True
        self.ignore_secure_cookies = True
        
        x = self.uri.split(b'?', 1)

        if len(x) == 1:
            self.path = self.uri
        else:
            self.path, argstring = x
            self.args = parse_qs(argstring, 1)
    
    def _reset_body(self):
        self.content = StringIO()
    
    def setHeader(self, name, value):
        self.headers[name] = value
    
    def getAllHeaders(self):
        return self.headers
    
    def getHeader(self, header):
        try:
            return self.headers[header]
        except KeyError:
            return
    
    def addCookie(self, k, v, expires=None, domain=None, path=None, max_age=None, comment=None, secure=None):
        """
        Set an outgoing HTTP cookie.

        In general, you should consider using sessions instead of cookies, see
        L{twisted.web.server.Request.getSession} and the
        L{twisted.web.server.Session} class for details.
        """
        cookie = '%s=%s' % (k, v)
        if expires is not None:
            cookie = cookie +"; Expires=%s" % expires
        if domain is not None:
            cookie = cookie +"; Domain=%s" % domain
        if path is not None:
            cookie = cookie +"; Path=%s" % path
        if max_age is not None:
            cookie = cookie +"; Max-Age=%s" % max_age
        if comment is not None:
            cookie = cookie +"; Comment=%s" % comment
        if secure:
            cookie = cookie +"; Secure"
        self.cookies.append(cookie)
    
    def getCookie(self, key):
        """
        Get a cookie that was sent from the network.
        """
        return self.received_cookies.get(key)
    
    def write(self, data):
        cookie_data = ""
        for cookie in self.cookies:
            cookie_data = cookie_data + ('%s: %s\r\n' % ("Set-Cookie", cookie))
        
        if cookie_data != "":
            cookie_data = cookie_data + "\r\n"
            self.content = StringIO(cookie_data + self.content.getvalue() + data)
            self.cookies = []
        else:
            self.content = StringIO(self.content.getvalue() + data)
    
    def notifyFinish(self):
        """
        Return a L{Deferred} which is called back with C{None} when the request
        is finished.  This will probably only work if you haven't called
        C{finish} yet.
        """
        finished = Deferred()
        self._finishedDeferreds.append(finished)
        return finished
    
    def getClientIP(self, honor_xrealip=False):
        """
        Return request IP
        """
        if honor_xrealip and self.getHeader("X-Real-IP"):
            return self.getHeader("X-Real-IP")
        
        return self.client_ip
    
    def finish(self):
        self.finished = self.finished + 1
        if self._finishedDeferreds is not None:
            observers = self._finishedDeferreds
            self._finishedDeferreds = None
            for obs in observers:
                obs.callback(None)
    
    def setResponseCode(self, code, message=None):
        self.response_code = code
        self.response_msg = message
    
    def getUser(self):
        return self.user
    
    def getPassword(self):
        return self.password
Example #32
    def write(self):
        func = ast.Function('nullfunc',
                            ast.Return(ast.TYPE_NONE, transfer=ast.PARAM_TRANSFER_NONE),
                            [], False, self.gen.gen_symbol('nullfunc'))
        self.namespace.append(func)
        body = "  return;\n"
        self.gen.set_function_body(func, body)

        # First pass, generate constant returns
        prefix = 'const return '
        for typeval in ast.INTROSPECTABLE_BASIC:
            name = prefix + uscore_from_type(typeval)
            sym = self.gen.gen_symbol(name)
            func = ast.Function(name,
                                ast.Return(typeval, transfer=ast.PARAM_TRANSFER_NONE),
                                [], False, sym)
            self.namespace.append(func)
            default = get_default_for_typeval(typeval)
            body = "  return %s;\n" % (default, )
            self.gen.set_function_body(func, body)

        # Void return, one parameter
        prefix = 'oneparam '
        for typeval in ast.INTROSPECTABLE_BASIC:
            if typeval is ast.TYPE_NONE:
                continue
            name = prefix + uscore_from_type(typeval)
            sym = self.gen.gen_symbol(name)
            func = ast.Function(name,
                                ast.Return(ast.TYPE_NONE, transfer=ast.PARAM_TRANSFER_NONE),
                                [ast.Parameter('arg0', typeval, transfer=ast.PARAM_TRANSFER_NONE,
                                               direction=ast.PARAM_DIRECTION_IN)], False, sym)
            self.namespace.append(func)
            self.gen.set_function_body(func, "  return;\n")

        # Void return, one (out) parameter
        prefix = 'one_outparam '
        for typeval in ast.INTROSPECTABLE_BASIC:
            if typeval is ast.TYPE_NONE:
                continue
            name = prefix + uscore_from_type(typeval)
            sym = self.gen.gen_symbol(name)
            func = ast.Function(name,
                                ast.Return(ast.TYPE_NONE, transfer=ast.PARAM_TRANSFER_NONE),
                                [ast.Parameter('arg0', typeval, transfer=ast.PARAM_TRANSFER_NONE,
                                               direction=ast.PARAM_DIRECTION_OUT)], False, sym)
            self.namespace.append(func)
            body = StringIO()  # StringIO takes initial contents, not a file mode
            default = get_default_for_typeval(func.retval)
            body.write("  *arg0 = %s;\n" % (default, ))
            body.write("  return;\n")
            self.gen.set_function_body(func, body.getvalue())

        # Passthrough one parameter
        prefix = 'passthrough_one '
        for typeval in ast.INTROSPECTABLE_BASIC:
            if typeval is ast.TYPE_NONE:
                continue
            name = prefix + uscore_from_type(typeval)
            sym = self.gen.gen_symbol(name)
            func = ast.Function(name, ast.Return(typeval, transfer=ast.PARAM_TRANSFER_NONE),
                            [ast.Parameter('arg0', typeval, transfer=ast.PARAM_TRANSFER_NONE,
                                       direction=ast.PARAM_DIRECTION_IN)], False, sym)
            self.namespace.append(func)
            body = StringIO()
            body.write("  return arg0;\n")
            self.gen.set_function_body(func, body.getvalue())

        self.gen.codegen()
Example #33
def generateTypesAndFunctions(file_tlb, out_asn, out_support):
    global sourceName, libName, userDefsSource, userDefsLib

    templates_dir = os.path.join(os.path.dirname(__file__), 'templates')

    source_name = file_tlb.split('/')
    source_name = source_name[-1]
    # str.rstrip strips a character set, not a suffix, so remove '.tlb' explicitly
    if source_name.endswith('.tlb'):
        source_name = source_name[:-4]
    name_lib = source_name.capitalize()
    userDefsSource = 'userdefs-' + source_name + '.asn'
    userDefsLib = 'UserDefs-' + name_lib
    source_name = source_name + '.asn'

    sourceName = source_name
    libName = name_lib

    tree = ET.parse(file_tlb)
    root = tree.getroot()


    # Find all numeric types

    for numeric_types in root.findall('numeric'):
        names = numeric_types.get('name')
        cpp_name = names.strip('/')
        cpp_include =[]
        names = process_name_type(names)
        #names = names.strip('/')
        if names in basicTypes:
            #print (names, basicTypes[names])
            if not basicTypes[names] in allTypes:
                allTypes[names] = ConfigTypes(basicTypes[names])
                allTypes[names].cppName = process_cpp_name(cpp_name)
                allTypes[names].tag = 'numeric'

                for subfields in numeric_types.findall('metadata'):
                    key = subfields.get('key')
                    if key == 'orogen_include':
                        cpp_include.append(subfields.text)
                allTypes[names].cppInclude = cpp_include
        else:

            print('Basic type ' + names + ' does not exist')

    allInfo[userDefsSource] = AsnFile(userDefsLib)

    allInfo[userDefsSource].nameTypes.append('Dummy'+libName+'-T')
    allInfo[userDefsSource].strTypes.append('Dummy'+libName+'-T ::= T-UInt32')
    libraries['Dummy'+libName+'-T'] = userDefsLib + '-Types'

    # Temporary solution
    cpp_name = '/std/string'
    cpp_name = cpp_name.strip('/')
    name = 'Std-string'
    cpp_include =[]
    allTypes[name] = ConfigTypes(basicTypes[name])


    if not basicTypes[name] in allTypes:
        name = 'T-String'

        new_type = ConfigTypes(name)
        new_type.cppName = process_cpp_name(cpp_name)
        new_type.tag = 'inst'
        # Still missing: the cpp includes

        #[max_dim, parameter] = add_parameter_type('T-String')
        max_dim = 'maxT-String'
        parameter = 'numT-String'

        new_type.maxDim.append(max_dim)

        new_type.asnParameters.append(parameter)
        new_type.asn1SccParameters.append(parameter.replace('-','_'))
        new_type.rootTypes.append('T-StringP')

        allTypes['T-StringP'] = ConfigTypes('T-StringP')
        allTypes['T-StringP'].asnParameters.append(parameter)

        # Added by Raquel
        new_type.depsTypes.append(parameter)
        new_type.depsTypes.append('T-UInt32')
        allTypes['Std-string'] = new_type
        allTypes['T-String'] = new_type

    else:
        print('This basic type does not exist')



    # Adding Dummy2-xxx-T to make sure that userdefs-xxx.h is included in xxx.asn
    allInfo[sourceName] = AsnFile(libName)
    allInfo[sourceName].nameTypes.append('Dummy2'+libName+'-T')
    allInfo[sourceName].strTypes.append('Dummy2'+libName+'-T ::= Dummy'+libName+'-T')
    allInfo[sourceName].depsTypes.append('Dummy'+libName+'-T')
    libraries['Dummy2'+libName+'-T'] = libName+'-Types'


    process_opaque(root)

    process_alias(root)

    process_xml(root, 'enum')

    process_xml (root, 'container')

    process_xml(root, 'compound')

    asn_template = Template(filename=os.path.join(templates_dir, 'template.asn.mako'))

    if not os.path.exists(out_asn):
        os.makedirs(out_asn)



    #Adding correct dependencies
    for k in allInfo:
        pkg = allInfo[k]
        for t in pkg.nameTypes:
            if t in allTypes:
                deps_types = allTypes[t].depsTypes
                for deps in deps_types:
                    if deps in allTypes:
                        deps_types2 = allTypes[deps].depsTypes
                        for d in deps_types2:
                            if d not in allInfo[k].depsTypes:
                                allInfo[k].depsTypes.append(d)


    generated_files = ['taste-extended', 'taste-types']

    for k in allInfo:
        asn_file=k.split('.')
        asn_file=asn_file[0]
        generated_files.append(asn_file)
        pkg = allInfo[k]
        buf = StringIO()
        pkg_libs = find_libraries(pkg.depsTypes, pkg.name)
        ctx = Context(buf, config=pkg, librariesConfig=pkg_libs, configTypes=allTypes)
        asn_template.render_context(ctx)
        name_file = os.path.join(out_asn, k)
        f = open(name_file, 'w')
        f.write(buf.getvalue())
        f.close()

    shutil.copyfile(os.path.join(templates_dir, 'taste-types.asn'), os.path.join(out_asn, 'taste-types.asn'))
    shutil.copyfile(os.path.join(templates_dir, 'taste-extended.asn'), os.path.join(out_asn, 'taste-extended.asn'))

    hpp_template = Template(filename=os.path.join(templates_dir, 'template_convert.hpp.mako'))
    cpp_template = Template(filename=os.path.join(templates_dir, 'template_convert.cpp.mako'))
    h_template = Template(filename=os.path.join(templates_dir, 'template_types.h.mako'))
    hpp_convert_template = Template(filename=os.path.join(templates_dir, 'opaque_convert.hpp.mako'))
    cpp_convert_template = Template(filename=os.path.join(templates_dir, 'opaque_convert.cpp.mako'))

    buf = StringIO()
    ctx = Context(buf, root='BASE', generated_files=generated_files)
    h_template.render_context(ctx)
    name_file = os.path.join(out_support, 'baseTypes.h')
    f = open(name_file, 'w')
    f.write(buf.getvalue())
    f.close()

    for t in allTypes:
        type_info = allTypes[t]
        buf = StringIO()
        if type_info.tag != 'numeric':

            if not type_info.asnParameters or type_info.tag == 'inst':



                define_name = type_info.asnName.upper()
                define_name = define_name.replace('-','_')+'_CONVERT'

                deps_include = [];

                if type_info.tag != 'inst':

                    for d in allTypes[t].depsTypes:
                        if d in allTypes:
                            if allTypes[d].tag != 'numeric':
                                deps_include.append(d + 'Convert.hpp')
                else:
                    root_type = allTypes[t].rootTypes[0]
                    for d in allTypes[root_type].depsTypes:
                        if d in allTypes:
                            if allTypes[d].tag != 'numeric':
                                deps_include.append(d + 'Convert.hpp')

                if type_info.tag == 'inst':
                    ctx = Context(buf, config=type_info, name=define_name, all_info=allTypes, includeConvert=deps_include,opaqueTypes=opaqueTypes)
                else:
                    ctx = Context(buf, config=type_info, name=define_name, all_info=allTypes, includeConvert=deps_include)
                hpp_template.render_context(ctx)
                name_file = os.path.join(out_support, t+'Convert.hpp')
                f = open(name_file, 'w')
                f.write(buf.getvalue())
                f.close()




                #Cpp Convert
                if type_info.tag != 'inst':
                    buf = StringIO()
                    ctx = Context(buf, config=type_info, includeName =t+'Convert.hpp', all_info=allTypes, basic_info=basicTypes, includeConvert=deps_include, opaqueTypes=opaqueTypes)
                    cpp_template.render_context(ctx)
                    name_file = os.path.join(out_support, t + 'Convert.cpp')
                    f = open(name_file, 'w')
                    f.write(buf.getvalue())
                    f.close()


    #Creation of OpaqueConversion.hpp
    cpp_includes=[]
    for t in opaqueTypes:
        opaque_info = opaqueTypes[t]
        for f2 in opaque_info.cppInclude:
            if not f2 in cpp_includes:
                cpp_includes.append(f2)

    buf = StringIO()
    ctx = Context(buf, opaqueTypes=opaqueTypes,cppIncludes=cpp_includes)
    hpp_convert_template.render_context(ctx)
    name_file = os.path.join(out_support, 'OpaqueConversion.hpp')
    f = open(name_file, 'w')
    f.write(buf.getvalue())
    f.close()

    # Creation of OpaqueConversion.hpp

    name_file = os.path.join(out_support, 'OpaqueConversion.cpp')
    if not os.path.isfile(name_file):
        buf = StringIO()
        ctx = Context(buf, opaqueTypes=opaqueTypes)
        cpp_convert_template.render_context(ctx)
        f = open(name_file, 'w')
        f.write(buf.getvalue())
        f.close()
Example #34
class NDBStore(Store):
    """
    A triple store using NDB on GAE (Google App Engine)
    
    Every triple is stored in 2 GraphShards. For example, (URIRef('http://s'), URIRef('http://p'), Literal(42)) will be stored in:
      * The GraphShard containing every triple with http://p as the predicate
      * The GraphShard containing every triple with a subject that hashes to the same as http://s
    The hash used for a subject is the last digit of the subject's hex SHA1. 
    
    An NDBStore contains an internal log to which it writes information about SPARQL query execution
    and calls to triples(). This information can be logged by calling flush_log().
    If you do not wish to use memory for this log, set configuration to {'log': False} in the constructor.
    
    This implementation heavily favours
      * batch updates, i.e. using addN() with many triples
      * triple() queries where either subject or predicate is bound
      * not asking for the length of the NDBStore
      
    This module registers a custom SPARQL query evaluator that
      * Writes the parsed form of every SELECT query to the internal log
      * Performs all joins as lazy joins, which is much faster for NDBStore in my experience.
    """
    
    def __init__(self, configuration={}, identifier=None):
        '''@param configuration: A dict mapping 'log' to True or False
           @param identifier: A nonempty string or unicode. Its length must be <64
           to keep internal keys reasonably small.
        '''
        super(NDBStore, self).__init__(configuration)
        assert identifier is not None, "NDBStore requires a basestring identifier"
        assert isinstance(identifier, basestring), "NDBStore requires a basestring identifier"
        assert len(identifier) > 0, "NDBStore requires a non-empty identifier"
        assert len(identifier) < 64, "NDBStore requires a brief identifier"
        self._ID = identifier
        self._log = StringIO()
        self._log_begin = time()
        self._setup(**configuration)
        
    def _setup(self, 
               log = False, 
               no_of_subject_shards = 16, 
               no_of_shards_per_predicate_default = 1,
               no_of_shards_per_predicate_dict = {}):
        assert isinstance(log, bool), _CONF_ERR_MSG.format('log', [True, False], log)
        self._is_logging = log
        assert no_of_subject_shards in _VALID_NO_SHARDS, _CONF_ERR_MSG.format('no_of_subject_shards', _VALID_NO_SHARDS, no_of_subject_shards)
        self._no_of_subject_shard_digits = _NO_OF_SHARDS_TO_NO_OF_HEX_DIGITS[no_of_subject_shards]
        assert no_of_shards_per_predicate_default in _VALID_NO_SHARDS, _CONF_ERR_MSG.format('no_of_shards_per_predicate_default', _VALID_NO_SHARDS, no_of_shards_per_predicate_default)
        self._no_of_shards_per_predicate_default = no_of_shards_per_predicate_default
        assert isinstance(no_of_shards_per_predicate_dict, dict), _CONF_ERR_MSG.format('no_of_shards_per_predicate_dict', 'a dict', no_of_shards_per_predicate_dict)
        for (_, no_of_shards) in no_of_shards_per_predicate_dict.iteritems():
            assert no_of_shards in _VALID_NO_SHARDS, _CONF_ERR_MSG.format('no_of_shards_per_predicate_dict values', _VALID_NO_SHARDS, no_of_shards)
        self._no_of_shards_per_predicate_dict = no_of_shards_per_predicate_dict

    def _hex_digits(self, predicate):
        no_of_shards = self._no_of_shards_per_predicate_dict.get(predicate, self._no_of_shards_per_predicate_default)
        return _NO_OF_SHARDS_TO_NO_OF_HEX_DIGITS[no_of_shards]
    
    def keys_for(self, graph_ID, uri_ref, index):
        '''Assemble all NDB keys for the GraphShards containing triples relevant to the given parameters.
           @param graph_ID: The name of the graph to get triples from, e.g. 'current'
           @param uri_ref: The rdflib.URIRef to get triples for
           @param index: 0, or 1 to indicate at which position the triples should have the given uri_ref.
                         0=subject, 1=predicate
           @return A list of ndb.Keys for the relevant GraphShards.
                   Example Key: 'p-prov#endedAtTime_6f11f819383b6a6bb619fbea25b5696372ba0b62--newdata'
        '''
        assert index in range(2), 'index was {}, must be one of 0 for subject, 1 for predicate'.format(index)
        if index == 1: #A predicate
            no_of_hex_digits = self._hex_digits(uri_ref)
            random_sub_shards = [''.join(t) for t in product(_HEX_DIGITS, repeat = no_of_hex_digits)]
            #Keep last part of the URIRef as a "whiff". This is useful when inspecting data in GAE's datastore viewer
            whiff = uri_ref.split('/')[-1].replace('-','')
            if len(whiff) > 20:
                whiff = whiff[-20:]
            uri_ref_digest = '{}_{}'.format(whiff, sha1(uri_ref))#Example: prov#endedAtTime_6f11f819383b6a6bb619fbea25b5696372ba0b62
        else: #A subject
            uri_ref_digest = sha1(uri_ref)[-self._no_of_subject_shard_digits:]
            random_sub_shards = ['']
        return [ndb.Key(GraphShard, '{}-{}-{}-{}'.format('spo'[index], uri_ref_digest, r, graph_ID)) for r in random_sub_shards]

    def log(self, msg):
        '''Add a message to this object's internal log.
           @param msg: The message, a string. It may contain newlines.
        '''
        if self._is_logging:
            self._log.write('\n{:.3f}s: '.format(time() - self._log_begin))
            self._log.write(msg)

    def flush_log(self, level):
        '''Log all stored messages at the given level and clear the internal log.
           @param level: The (integer) level from the logging module, e.g. logging.DEBUG
        '''
        if self._is_logging:
            logging.log(level, self._log.getvalue())
            self._log = StringIO()
        
    def destroy(self, _configuration):
        more = True
        cursor = None
        while more:
            (results, cursor, more) = GraphShard.query().filter(GraphShard.graph_ID == self._ID).fetch_page(20, keys_only = True, start_cursor=cursor)
            logging.debug('Deleting {}'.format(results))
            ndb.delete_multi(results)
        
    def addN(self, quads):
        #TODO: Handle splitting large graphs into two entities
        #Note: quads is a generator, not a list. It cannot be traversed twice.
        #Step 1: Collect the triples into the Graphs reflecting the GraphShards they will be added to.
        new_shard_dict = defaultdict(Graph)
        for (s, p, o, _) in quads: #Last component ignored as this Store is not context_aware
            subject_shard = choice(self.keys_for(self._ID, s, 0))
            new_shard_dict[subject_shard].add((s, p, o))
            predicate_shard = choice(self.keys_for(self._ID, p, 1))
            new_shard_dict[predicate_shard].add((s, p, o))
        keys = list(new_shard_dict.keys())
        #Step 2: Load all existing, corresponding GraphShards
        keys_models = zip(keys, ndb.get_multi(keys)) #TODO: Use async get
        #Step 3: Update or create GraphShards with the added triples
        updated = list()
        for index in range(len(keys_models)):
            (key, model) = keys_models[index]
            if model is None:
                model = GraphShard(key = key, graph_ID = self._ID, graph_n3 = new_shard_dict[key].serialize(format='n3'))
            else:
                new_shard_dict[key].parse(data = model.graph_n3, format='n3')
                model.graph_n3 = new_shard_dict[key].serialize(format='n3')
            updated.append(model)
        #Step 4: Invalidate and store all created/updated GraphShards
        if len(updated) > 0:
            GraphShard.invalidate(updated)
            ndb.put_multi(updated)
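    # Hedged usage sketch: addN consumes its quads generator exactly once;
    # the fourth (context) component is ignored since this Store is not
    # context_aware.
    #
    #   store.addN((s, p, o, None) for (s, p, o) in triples)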

    def add(self, (subject, predicate, o), context, quoted=False):
Example #35
0
def highlightCode(text):
	if SyntaxHighlighter is None:
		return '<br/>'.join(map(clean, text.split('\n')))
	target = StringIO()
	SyntaxHighlighter.PythonHTMLGenerator().generate_html(target, text)
	return target.getvalue()
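# Hedged usage sketch: when SyntaxHighlighter cannot be imported the function
# degrades to clean()-escaped lines joined with <br/>.
#
#   html = highlightCode('def f():\n\treturn 42')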
Example #36
0
    def train(self, input_train, target_train=None, input_test=None,
              target_test=None, epochs=100, epsilon=None,
              summary_type='table'):
        """
        Train the neural network.

        Parameters
        ----------
        input_train : array-like
        target_train : array-like or None
        input_test : array-like or None
        target_test : array-like or None
        epochs : int
            Defaults to `100`.
        epsilon : float or None
            Defaults to ``None``.
        """
        show_epoch = self.show_epoch
        logs = self.logs
        training = self.training = AttributeKeyDict()

        if epochs <= 0:
            raise ValueError("Number of epochs needs to be greater than 0.")

        if epsilon is not None and epochs <= 2:
            raise ValueError("Network should train at least 3 epochs before "
                             "checking the difference between errors")

        if summary_type == 'table':
            logging_info_about_the_data(self, input_train, input_test)
            logging_info_about_training(self, epochs, epsilon)
            logs.newline()

            summary = SummaryTable(
                table_builder=table.TableBuilder(
                    table.Column(name="Epoch #"),
                    table.NumberColumn(name="Train err"),
                    table.NumberColumn(name="Valid err"),
                    table.TimeColumn(name="Time", width=10),
                    stdout=logs.write
                ),
                network=self,
                delay_limit=1.,
                delay_history_length=10,
            )

        elif summary_type == 'inline':
            summary = InlineSummary(network=self)

        else:
            raise ValueError("`{}` is an unknown summary type"
                             "".format(summary_type))

        iterepochs = create_training_epochs_iterator(self, epochs, epsilon)
        show_epoch = parse_show_epoch_property(self, epochs, epsilon)
        training.show_epoch = show_epoch

        # Storing attributes and methods in local variables avoids a large
        # number of needless __getattr__ calls; these locals speed up the
        # loop when the number of iterations is huge.
        training_errors = self.errors
        validation_errors = self.validation_errors
        shuffle_data = self.shuffle_data

        train_epoch = self.train_epoch
        epoch_end_signal = self.epoch_end_signal
        train_end_signal = self.train_end_signal
        on_epoch_start_update = self.on_epoch_start_update

        is_first_iteration = True
        can_compute_validation_error = (input_test is not None)
        last_epoch_shown = 0


        symMatrix = tt.dmatrix("symMatrix")
        symEigenvalues, eigenvectors = tt.nlinalg.eig(symMatrix)
        get_Eigen = theano.function([symMatrix], [symEigenvalues, eigenvectors] )

        epsilon = []
        alpha = []
        alpha0 = []
        with logs.disable_user_input():
            for epoch in iterepochs:
                validation_error = None
                epoch_start_time = time.time()
                on_epoch_start_update(epoch)

                if shuffle_data:
                    input_train, target_train = shuffle(input_train,
                                                        target_train)
                try:
                    train_error = train_epoch(input_train, target_train)
                    H = self.variables.hessian.get_value()
                    ev, _ = get_Eigen(H)
                    if can_compute_validation_error:
                        validation_error = self.prediction_error(input_test,
                                                                 target_test)
                    epsilon.append(train_error)
                    alpha.append(numpy.sum(ev < 0))
                    alpha0.append(numpy.sum(ev == 0))
                    
                    training_errors.append(train_error)
                    validation_errors.append(validation_error)

                    epoch_finish_time = time.time()
                    training.epoch_time = epoch_finish_time - epoch_start_time

                    if epoch % training.show_epoch == 0 or is_first_iteration:
                        summary.show_last()
                        last_epoch_shown = epoch

                    if epoch_end_signal is not None:
                        epoch_end_signal(self)

                    is_first_iteration = False

                except StopNetworkTraining as err:
                    # TODO: This notification breaks table view in terminal.
                    # I need to show it in a different way.
                    logs.message("TRAIN", "Epoch #{} stopped. {}"
                                          "".format(epoch, str(err)))
                    break

            if epoch != last_epoch_shown:
                summary.show_last()

            if train_end_signal is not None:
                train_end_signal(self)

            summary.finish()
            logs.newline()
            plt.plot(alpha,epsilon,'r')
            plt.plot(alpha0,epsilon,'b')
            plt.xlabel('alpha')
            plt.ylabel('epsilon')
            
            # Collect the output of stdout in a variable: redirect stdout to a
            # StringIO while printing the connection description.
            capture = StringIO()
            save_stdout = sys.stdout
            sys.stdout = capture
            print self.connection
            sys.stdout = save_stdout
            s = capture.getvalue().split('\n')[0]
            # Use a distinct name so the builtin `str` is not shadowed.
            class_name = self.class_name()

            str1 = s + '---' + class_name + '-alpha-epsilon' + '.eps'
            plt.savefig(str1, format='eps', dpi=1000)
            plt.plot(iterepochs, epsilon)
            plt.xlabel('iterepochs')
            plt.ylabel('epsilon')
            str2 = s + '---' + class_name + '-epsilon-iterepochs' + '.eps'
            plt.savefig(str2, format='eps', dpi=1000)
Example #37
0
    def display_current_results(self, visuals, epoch, step):
        if self.tf_log:  # show images in tensorboard output
            img_summaries = []
            for label, image_numpy in visuals.items():
                # Write the image to a string
                try:
                    s = StringIO()
                except:
                    s = BytesIO()
                scipy.misc.toimage(image_numpy).save(s, format="jpeg")
                # Create an Image object
                img_sum = self.tf.Summary.Image(
                    encoded_image_string=s.getvalue(),
                    height=image_numpy.shape[0],
                    width=image_numpy.shape[1])
                # Create a Summary value
                img_summaries.append(
                    self.tf.Summary.Value(tag=label, image=img_sum))

            # Create and write Summary
            summary = self.tf.Summary(value=img_summaries)
            self.writer.add_summary(summary, step)

        if self.use_html:  # save images to a html file
            for label, image_numpy in visuals.items():
                if isinstance(image_numpy, list):
                    for i in range(len(image_numpy)):
                        img_path = os.path.join(
                            self.img_dir,
                            'epoch%.3d_%s_%d.jpg' % (epoch, label, i))
                        utility.save_image(image_numpy[i], img_path)
                else:
                    img_path = os.path.join(
                        self.img_dir, 'epoch%.3d_%s.jpg' % (epoch, label))
                    utility.save_image(image_numpy, img_path)

            # update website
            webpage = html.HTML(self.web_dir,
                                'Experiment name = %s' % self.name,
                                refresh=30)
            for n in range(epoch, 0, -1):
                webpage.add_header('epoch [%d]' % n)
                ims = []
                txts = []
                links = []

                for label, image_numpy in visuals.items():
                    if isinstance(image_numpy, list):
                        for i in range(len(image_numpy)):
                            img_path = 'epoch%.3d_%s_%d.jpg' % (n, label, i)
                            ims.append(img_path)
                            txts.append(label + str(i))
                            links.append(img_path)
                    else:
                        img_path = 'epoch%.3d_%s.jpg' % (n, label)
                        ims.append(img_path)
                        txts.append(label)
                        links.append(img_path)
                if len(ims) < 10:
                    webpage.add_images(ims, txts, links, width=self.win_size)
                else:
                    num = int(round(len(ims) / 2.0))
                    webpage.add_images(ims[:num],
                                       txts[:num],
                                       links[:num],
                                       width=self.win_size)
                    webpage.add_images(ims[num:],
                                       txts[num:],
                                       links[num:],
                                       width=self.win_size)
            webpage.save()
Example #38
0
    def render(self, coord, format):
        """ Render a tile for a coordinate, return PIL Image-like object.
        
            Perform metatile slicing here as well, if required, writing the
            full set of rendered tiles to cache as we go.

            Note that metatiling and pass-through mode of a Provider
            are mutually exclusive options
        """
        if self.bounds and self.bounds.excludes(coord):
            raise NoTileLeftBehind(Image.new('RGB', (self.dim, self.dim), (0x99, 0x99, 0x99)))
        
        srs = self.projection.srs
        xmin, ymin, xmax, ymax = self.envelope(coord)
        width, height = self.dim, self.dim
        
        provider = self.provider
        metatile = self.metatile
        pass_through = provider.pass_through if hasattr(provider, 'pass_through') else False

        
        if self.doMetatile():

            if pass_through:
                raise KnownUnknown('Your provider is configured for metatiling and pass_through mode. That does not work')

            # adjust render size and coverage for metatile
            xmin, ymin, xmax, ymax = self.metaEnvelope(coord)
            width, height = self.metaSize(coord)

            subtiles = self.metaSubtiles(coord)
        
        if self.doMetatile() or hasattr(provider, 'renderArea'):
            # draw an area, defined in projected coordinates
            tile = provider.renderArea(width, height, srs, xmin, ymin, xmax, ymax, coord.zoom)
        
        elif hasattr(provider, 'renderTile'):
            # draw a single tile
            width, height = self.dim, self.dim
            tile = provider.renderTile(width, height, srs, coord)

        else:
            raise KnownUnknown('Your provider lacks renderTile and renderArea methods.')

        if not hasattr(tile, 'save'):
            raise KnownUnknown('Return value of provider.renderArea() must act like an image; e.g. have a "save" method.')

        if hasattr(tile, 'size') and tile.size[1] != height:
            raise KnownUnknown('Your provider returned the wrong image size: %s instead of %d pixels tall.' % (repr(tile.size), height))
        
        if self.bitmap_palette:
            # this is where we apply the palette if there is one

            if pass_through:
                raise KnownUnknown('Cannot apply palette in pass_through mode')

            if format.lower() == 'png':
                t_index = self.png_options.get('transparency', None)
                tile = apply_palette(tile, self.bitmap_palette, t_index)
        
        if self.doMetatile():
            # tile will be set again later
            tile, surtile = None, tile
            
            for (other, x, y) in subtiles:
                buff = StringIO()
                bbox = (x, y, x + self.dim, y + self.dim)
                subtile = surtile.crop(bbox)
                subtile.save(buff, format)
                body = buff.getvalue()

                if self.write_cache:
                    self.config.cache.save(body, self, other, format)
                
                if other == coord:
                    # the one that actually gets returned
                    tile = subtile
                
                _addRecentTile(self, other, format, body)
        
        return tile
Example #39
0
def send_mail(book_id, kindle_mail):
    '''Send email with attachments'''
    is_mobi = False
    is_azw = False
    is_azw3 = False
    is_epub = False
    is_pdf = False
    file_path = None
    settings = ub.get_mail_settings()
    # create MIME message
    msg = MIMEMultipart()
    msg['From'] = settings["mail_from"]
    msg['To'] = kindle_mail
    msg['Subject'] = _('Send to Kindle')
    text = _('This email has been sent via calibre web.')
    msg.attach(MIMEText(text))

    use_ssl = settings.get('mail_use_ssl', 0)

    # attach files

    book = db.session.query(db.Books).filter(db.Books.id == book_id).first()
    data = db.session.query(db.Data).filter(db.Data.book == book.id)

    formats = {}

    for entry in data:
        if entry.format == "MOBI":
            formats["mobi"] = os.path.join(config.DB_ROOT, book.path, entry.name + ".mobi")
        if entry.format == "EPUB":
            formats["epub"] = os.path.join(config.DB_ROOT, book.path, entry.name + ".epub")
        if entry.format == "PDF":
            formats["pdf"] = os.path.join(config.DB_ROOT, book.path, entry.name + ".pdf")

    if len(formats) == 0:
        return _("Could not find any formats suitable for sending by email")

    if 'mobi' in formats:
        msg.attach(get_attachment(formats['mobi']))
    elif 'epub' in formats:
        filepath = make_mobi(book.id)
        if filepath is not None:
            msg.attach(get_attachment(filepath))
        elif 'pdf' in formats:
            # conversion failed; fall back to the PDF copy if one exists
            msg.attach(get_attachment(formats['pdf']))
        else:
            return _("Could not convert epub to mobi")
    elif 'pdf' in formats:
        msg.attach(get_attachment(formats['pdf']))
    else:
        return _("Could not find any formats suitable for sending by email")

    # convert MIME message to string
    fp = StringIO()
    gen = Generator(fp, mangle_from_=False)
    gen.flatten(msg)
    msg = fp.getvalue()

    # send email
    try:
        mailserver = smtplib.SMTP(settings["mail_server"],settings["mail_port"])
        mailserver.set_debuglevel(0)

        if int(use_ssl) == 1:
            mailserver.ehlo()
            mailserver.starttls()
            mailserver.ehlo()

        if settings["mail_password"]:
            mailserver.login(settings["mail_login"], settings["mail_password"])
        mailserver.sendmail(settings["mail_login"], kindle_mail, msg)
        mailserver.quit()
    except (socket.error, smtplib.SMTPRecipientsRefused, smtplib.SMTPException), e:
        app.logger.error(traceback.print_exc())
        return _("Failed to send mail: %s" % str(e))
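# Hedged stand-alone sketch of the StringIO + Generator pattern used above
# (stdlib only): flattening a MIME message into a string buffer, with
# mangle_from_=False so body lines starting with "From " are kept intact.
#
#   from StringIO import StringIO
#   from email.generator import Generator
#   from email.mime.text import MIMEText
#
#   fp = StringIO()
#   Generator(fp, mangle_from_=False).flatten(MIMEText('hello'))
#   flat_message = fp.getvalue()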
        abundant terrestrial carnivore.</animal>
    </zoo>
    '''), 'zoo', Zoo)

for animal in zoo.animal:
    print animal.type

cow = Animal(type='cow')
cow.description = (
    "Cattle-colloquially cows-are the most common type of large "
    "domesticated ungulates.")
zoo.animal.append(cow)

outstream = StringIO()
serialize_xml(outstream, 'zoo', zoo, pretty=True)
print outstream.getvalue()

## Advanced usage I


class AnimalCategory(Animal):
    @property
    def description(self):  # remove the text content from the schema
        raise NotImplementedError

    subtype = SerializableChildObject(Animal, required=True, multiple=True)


class BetterZoo(Zoo):
    category = SerializableChildObject(AnimalCategory,
                                       required=True,
Example #41
0
def get_response_content(fs):
    N_diploid = fs.N_diploid
    N = N_diploid * 2
    k = 2
    gamma = fs.gamma
    # define the fitnesses and the selection value
    f0 = 1.0
    f1 = 1.0 - gamma / N
    s = 1 - f1 / f0
    if f1 <= 0:
        raise ValueError('the extreme selection caused a non-positive fitness')
    # get a wright fisher transition matrix
    P = np.exp(wfengine.create_genic_diallelic(N_diploid, s))
    """
    # condition on no fixation
    for i in range(N):
        P[i] /= 1 - P[i, N]
    # remove the fixed state from the transition matrix
    P = P[:N, :N]
    """
    # add mutations
    P[0, 0] = 0
    P[0, 1] = 1
    P[N, N] = 0
    P[N, 1] = 1
    # compute the stationary distribution
    v = MatrixUtil.get_stationary_distribution(P)
    # get the distribution over dimorphic states
    h = v[1:-1]
    h /= np.sum(h)
    # look at continuous approximations
    w = np.zeros(N + 1)
    for i in range(1, N):
        x = i / float(N)
        #x0 = i / float(N)
        #x1 = (i + 1) / float(N)
        #value = sojourn_definite(x0, x1, gamma)
        value = sojourn_kernel(x, gamma)
        w[i] = value
    w = w[1:-1]
    w /= np.sum(w)
    # get the array for the R plot
    arr = [h.tolist(), w.tolist()]
    # define the r script
    out = StringIO()
    print >> out, 'title.string <- "allele 1 vs allele 2"'
    print >> out, 'mdat <-', RUtil.matrix_to_R_string(arr)
    print >> out, mk_call_str(
        'barplot',
        'mdat',
        'legend.text=' + mk_call_str(
            'c',
            '"exact discrete distribution"',
            '"continuous approximation"',
            #'"two-allele large N limit"',
            #'"two-allele"',
            #'"four-allele without mutational bias"',
            #'"four-allele with mutational bias (kappa_{1,2}=2)"',
            #'"four-allele with mutational bias, large N limit"',
        ),
        'args.legend = list(x="topright", bty="n")',
        'names.arg = 1:%s' % (N - 1),
        main='title.string',
        xlab='"frequency of allele 1"',
        ylab='"frequency"',
        col=mk_call_str(
            'c',
            #'"red"',
            #'"white"',
            '"black"',
            #'"gray"',
            '"red"',
        ),
        beside='TRUE',
        border='NA',
    )
    #print >> out, 'box()'
    script = out.getvalue().rstrip()
    # create the R plot image
    device_name = Form.g_imageformat_to_r_function[fs.imageformat]
    retcode, r_out, r_err, image_data = RUtil.run_plotter_no_table(
        script, device_name)
    if retcode:
        raise RUtil.RError(r_err)
    return image_data
Example #42
0
                    except NoTileLeftBehind, e:
                        tile = e.tile
                        save = False

                    if not self.write_cache:
                        save = False
                    
                    if format.lower() == 'jpeg':
                        save_kwargs = self.jpeg_options
                    elif format.lower() == 'png':
                        save_kwargs = self.png_options
                    else:
                        save_kwargs = {}
                    
                    tile.save(buff, format, **save_kwargs)
                    body = buff.getvalue()
                    
                    if save:
                        cache.save(body, self, coord, format)

                    tile_from = 'layer.render()'

            except TheTileLeftANote, e:
                headers = e.headers
                status_code = e.status_code
                body = e.content
                
                if e.emit_content_type:
                    headers.setdefault('Content-Type', mimetype)

            finally:
Example #43
0
def export():
    """export resource list as JSON"""

    resource_type = None

    if request.args.get('resource_type') in RESOURCE_TYPES.keys():
        resource_type = request.args['resource_type']

    query = request.args.get('q')

    response = views.list_resources(resource_type, query)

    if request.url_rule.rule == '/json':
        json_dict = {'total': response['total'], 'resources': []}
        for r in response['resources']:
            ghc_url = '%s/resource/%s' % (CONFIG['GHC_SITE_URL'], r.identifier)
            json_dict['resources'].append({
                'resource_type':
                r.resource_type,
                'title':
                r.title,
                'url':
                r.url,
                'ghc_url':
                ghc_url,
                'ghc_json':
                '%s/json' % ghc_url,
                'ghc_csv':
                '%s/csv' % ghc_url,
                'first_run':
                r.first_run.checked_datetime.strftime('%Y-%m-%dT%H:%M:%SZ'),
                'last_run':
                r.last_run.checked_datetime.strftime('%Y-%m-%dT%H:%M:%SZ'),
                'status':
                r.last_run.success,
                'min_response_time':
                round(r.min_response_time, 2),
                'average_response_time':
                round(r.average_response_time, 2),
                'max_response_time':
                round(r.max_response_time, 2),
                'reliability':
                round(r.reliability, 2),
                'last_report':
                r.last_run.report
            })
        return jsonify(json_dict)
    elif request.url_rule.rule == '/csv':
        output = StringIO()
        writer = csv.writer(output)
        header = [
            'resource_type', 'title', 'url', 'ghc_url', 'ghc_json', 'ghc_csv',
            'first_run', 'last_run', 'status', 'min_response_time',
            'average_response_time', 'max_response_time', 'reliability'
        ]
        writer.writerow(header)
        for r in response['resources']:
            ghc_url = '%s%s' % (CONFIG['GHC_SITE_URL'],
                                url_for('get_resource_by_id',
                                        identifier=r.identifier))
            writer.writerow([
                r.resource_type, r.title, r.url, ghc_url,
                '%s/json' % ghc_url,
                '%s/csv' % ghc_url,
                r.first_run.checked_datetime.strftime('%Y-%m-%dT%H:%M:%SZ'),
                r.last_run.checked_datetime.strftime('%Y-%m-%dT%H:%M:%SZ'),
                r.last_run.success,
                round(r.min_response_time, 2),
                round(r.average_response_time, 2),
                round(r.max_response_time, 2),
                round(r.reliability, 2)
            ])
        return output.getvalue(), 200, {'Content-type': 'text/csv'}
Example #44
0
def add_publications(generator):
    """
    Populates context with a list of BibTeX publications.

    Configuration
    -------------
    generator.settings['PUBLICATIONS_SRC']:
        local path to the BibTeX file to read.

    Output
    ------
    generator.context['publications']:
        List of tuples (key, year, text, bibtex, pdf, slides, poster, video).
        See Readme.md for more details.
    """
    if 'PUBLICATIONS_SRC' not in generator.settings:
        return
    try:
        from StringIO import StringIO
    except ImportError:
        from io import StringIO
    try:
        from pybtex.database.input.bibtex import Parser
        from pybtex.database.output.bibtex import Writer
        from pybtex.database import BibliographyData, PybtexError
        from pybtex.backends import html
        from pybtex.style.formatting import plain
    except ImportError:
        logger.warn('`pelican_bibtex` failed to load dependency `pybtex`')
        return

    refs_file = generator.settings['PUBLICATIONS_SRC']
    try:
        bibdata_all = Parser().parse_file(refs_file)
    except PybtexError as e:
        logger.warn('`pelican_bibtex` failed to parse file %s: %s' % (
            refs_file,
            str(e)))
        return

    publications = []

    # format entries
    plain_style = plain.Style()
    html_backend = html.Backend()
    formatted_entries = plain_style.format_entries(bibdata_all.entries.values())

    for formatted_entry in formatted_entries:
        key = formatted_entry.key
        entry = bibdata_all.entries[key]
        year = entry.fields.get('year')
        # This shouldn't really stay in the field dict
        # but new versions of pybtex don't support pop
        pdf = entry.fields.get('pdf', None)
        slides = entry.fields.get('slides', None)
        poster = entry.fields.get('poster', None)
        video = entry.fields.get('video', None)

        #render the bibtex string for the entry
        bib_buf = StringIO()
        bibdata_this = BibliographyData(entries={key: entry})
        Writer().write_stream(bibdata_this, bib_buf)
        text = formatted_entry.text.render(html_backend)

        publications.append((key,
                             year,
                             text,
                             bib_buf.getvalue(),
                             pdf,
                             slides,
                             poster,
                             video))

    generator.context['publications'] = publications
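# Hedged consumer-side sketch (names assumed): each entry in
# generator.context['publications'] is the 8-tuple appended above.
#
#   for (key, year, text, bibtex, pdf, slides, poster, video) in publications:
#       print key, year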
    def _pInicializarModulosYSimbolosCadenasOrdenados( self, theParentExecutionRecord=None, ):

        unExecutionRecord = self.fStartExecution( 'method',  'pInicializarModulosYSimbolosCadenasOrdenados', theParentExecutionRecord, False) 

        if cLogInicializarSimbolosCadenasOrdenados:
            unStartTime = self.fMillisecondsNow()
        
        try:
                
            unCatalogo = self.getCatalogo()
            if unCatalogo is None:
                return self
            
            
            unaBusqueda = cBusquedaTodasCadenasOrdenadasPorSimbolo.copy()
            
            unCatalogFiltroCadenas = unCatalogo.fCatalogFiltroCadenas()
            if ( unCatalogFiltroCadenas is None):
                return self
            unosDatosCadenas = unCatalogFiltroCadenas.searchResults( **unaBusqueda ) 
            
            if not unosDatosCadenas or len( unosDatosCadenas) < 1:
                self.setSimbolosCadenasOrdenados( '')
                self.setModulosYSimbolosCadenasOrdenados( '')
                return self
            
            
            unosSimbolosCadenasOrdenadosString  = '\n'.join( [ unosDatosCadena[ 'getSimbolo'] for unosDatosCadena in unosDatosCadenas ])
            
            unosModulosYSimbolosDict         = {}
            unosSimbolosModuloNoEspecificado = []

            for unosDatosCadena in unosDatosCadenas:
                unSimbolo =  unosDatosCadena[ 'getSimbolo']
                
                unosNombresModulosString = unosDatosCadena[ 'getNombresModulos']
                unosNombresModulosString = unosNombresModulosString.strip()
                unosNombresModulosString = unosNombresModulosString.replace( '\n', cTRAModuleNameSeparator)
                unosNombresModulosString = unosNombresModulosString.replace( '\r', cTRAModuleNameSeparator)
                unosNombresModulosString = unosNombresModulosString.strip()
                if unosNombresModulosString:
                    unosNombresModulos = unosNombresModulosString.split( cTRAModuleNameSeparator)
                    if unosNombresModulos:
                        for unNombreModulo in unosNombresModulos:
                            if unNombreModulo:
                                unosSimbolosModulo = unosModulosYSimbolosDict.get( unNombreModulo, None)
                                if not unosSimbolosModulo:
                                    unosModulosYSimbolosDict[ unNombreModulo] = [ unSimbolo,]
                                else:
                                    unosSimbolosModulo.append( unSimbolo)
                else:
                    unosSimbolosModuloNoEspecificado.append( unSimbolo)
    
            todosNombresModulos = unosModulosYSimbolosDict.keys()
            todosNombresModulosOrdenados = sorted( todosNombresModulos)
            
            anOutput = StringIO()

            if unosSimbolosModuloNoEspecificado:
                anOutput.write( '%s\n%s\n%s\n' % ( cModuleStartLine, cNombreModuloNoEspecificadoSentinel, '\n'.join( unosSimbolosModuloNoEspecificado), ))
            else:
                anOutput.write( '%s\n%s\n' % ( cModuleStartLine, cNombreModuloNoEspecificadoSentinel, ))
            
            for unNombreModulo in todosNombresModulosOrdenados:
                unosSimbolosModulo = unosModulosYSimbolosDict[ unNombreModulo]
                if unosSimbolosModulo:
                    anOutput.write( '%s\n%s\n%s\n' % ( cModuleStartLine, unNombreModulo, '\n'.join( unosSimbolosModulo), ))
                else:
                    anOutput.write( '%s\n%s\n' % ( cModuleStartLine, unNombreModulo,  ))
                    
                
            unosModulosYSimbolosCadenasOrdenadosString  = anOutput.getvalue()
            
            
            
            self.setSimbolosCadenasOrdenados( unosSimbolosCadenasOrdenadosString)            
            self.setModulosYSimbolosCadenasOrdenados( unosModulosYSimbolosCadenasOrdenadosString)

        
        finally:
            unExecutionRecord and unExecutionRecord.pEndExecution()

            if cLogInicializarSimbolosCadenasOrdenados:
                unEndTime = self.fMillisecondsNow()
                logging.getLogger( 'gvSIGi18n').info( 'pInicializarModulosYSimbolosCadenasOrdenados::TOTAL milliseconds=%d' % ( unEndTime - unStartTime))
        
        
        return self
Example #46
0
def export_resource(identifier):
    """export resource as JSON or CSV"""

    resource = views.get_resource_by_id(identifier)

    history_csv = '%s/resource/%s/history/csv' % (CONFIG['GHC_SITE_URL'],
                                                  resource.identifier)
    history_json = '%s/resource/%s/history/json' % (CONFIG['GHC_SITE_URL'],
                                                    resource.identifier)
    if 'json' in request.url_rule.rule:
        json_dict = {
            'identifier':
            resource.identifier,
            'title':
            resource.title,
            'url':
            resource.url,
            'resource_type':
            resource.resource_type,
            'owner':
            resource.owner.username,
            'min_response_time':
            resource.min_response_time,
            'average_response_time':
            resource.average_response_time,
            'max_response_time':
            resource.max_response_time,
            'reliability':
            resource.reliability,
            'status':
            resource.last_run.success,
            'first_run':
            resource.first_run.checked_datetime.strftime('%Y-%m-%dT%H:%M:%SZ'),
            'last_run':
            resource.last_run.checked_datetime.strftime('%Y-%m-%dT%H:%M:%SZ'),
            'history_csv':
            history_csv,
            'history_json':
            history_json,
            'last_report':
            resource.last_run.report
        }
        return jsonify(json_dict)
    elif 'csv' in request.url_rule.rule:
        output = StringIO()
        writer = csv.writer(output)
        header = [
            'identifier', 'title', 'url', 'resource_type', 'owner',
            'min_response_time', 'average_response_time', 'max_response_time',
            'reliability', 'status', 'first_run', 'last_run', 'history_csv',
            'history_json'
        ]
        writer.writerow(header)
        writer.writerow([
            resource.identifier, resource.title, resource.url,
            resource.resource_type, resource.owner.username,
            resource.min_response_time, resource.average_response_time,
            resource.max_response_time, resource.reliability,
            resource.last_run.success,
            resource.first_run.checked_datetime.strftime('%Y-%m-%dT%H:%M:%SZ'),
            resource.last_run.checked_datetime.strftime('%Y-%m-%dT%H:%M:%SZ'),
            history_csv, history_json
        ])
        return output.getvalue(), 200, {'Content-type': 'text/csv'}
Example #47
0
    return ''


html_text = StringIO()

#twse : http://isin.twse.com.tw/isin/C_public.jsp?strMode=2
#       http://isin.twse.com.tw/isin/C_public.jsp?strMode=4

c = pycurl.Curl()
c.setopt(c.URL, 'http://www.emega.com.tw/js/StockTable.htm')
c.setopt(c.WRITEFUNCTION, html_text.write)
c.setopt(c.FOLLOWLOCATION, True)
c.perform()
c.close()

lines = html_text.getvalue().split('\n')

lines_count = len(lines)

list_file_path = 'list.txt'

with open(list_file_path, 'w') as list_file:

    list_file.write('0050\n')
    list_file.write('0056\n')
    #for i in range(lines_count):
        #stock_id = GetID(lines[i])
    for i in range(1000, 10000):
        stock_id = str(i).zfill(4)
        if stock_id != '':
            google_stock_url = 'http://finance.google.com/finance/info?client=ig&q=TPE:' + str(stock_id)
Example #48
0
    def dumps_atoms(self):
        sio = StringIO()
        self.dump_atoms(sio)
        result = sio.getvalue()
        sio.close()
        return result
def get_gzip(data):
    # Compress `data` in memory: GzipFile writes into a StringIO buffer.
    gzbuff = StringIO()
    gzfile = gzip.GzipFile(fileobj=gzbuff, mode='wb')
    gzfile.write(data)
    gzfile.close()
    return gzbuff.getvalue()
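# Hedged round-trip sketch for get_gzip (stdlib only): decompressing the
# returned blob through a GzipFile over a fresh StringIO restores the input.
#
#   from StringIO import StringIO
#   blob = get_gzip('hello world')
#   assert gzip.GzipFile(fileobj=StringIO(blob)).read() == 'hello world'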
Example #50
0
    def search_image(self, file_name):
        """Search tags with Google Reverse Image Search

        Send the image to Google Reverse Image Search,
        retrieve the redirect URL, fetch the corresponding
        HTML with pycurl, and parse it for the tags
        using BeautifulSoup.

        Args:
            file_name (str): name of the image file inside self.images_dir

        Returns:
            list: tags for image
        """

        app.logger.info('Getting Google Tags for image {file_name}'.format(
            file_name=file_name))

        image_path = os.path.join(self.images_dir, file_name)

        tags = list()

        html_page = StringIO()
        conn = None  # defined up front so the finally block can close it safely

        with open(image_path, 'rb') as image:

            multipart = {
                'encoded_image': (image_path, image),
                'image_content': ''
            }

            try:
                # Google search image
                response = requests.post(ImageSearch.SEARCH_BY_IMAGE_URL,
                                         files=multipart,
                                         allow_redirects=False)

                search_url = response.headers.get('Location')

                # Retrieve html results
                conn = pycurl.Curl()
                conn.setopt(conn.URL, search_url)
                conn.setopt(conn.FOLLOWLOCATION, 1)
                conn.setopt(
                    conn.USERAGENT, 'Mozilla/5.0 (Windows NT 6.1; WOW64) \
                                            AppleWebKit/537.11 (KHTML, like Gecko) \
                                            Chrome/23.0.1271.97 \
                                            Safari/537.11')
                conn.setopt(conn.WRITEFUNCTION, html_page.write)
                conn.perform()

                # Parse html page for image description
                soup = BeautifulSoup(html_page.getvalue(), 'html.parser')

                tags = soup.find('a', {'class': 'fKDtNb'}).string.split(' ')

                tags = list(map(lambda tag: tag.lower(), tags))

            except requests.exceptions.RequestException as e:
                app.logger.warn(
                    'Request to get tags from Google Images failed')
                app.logger.warn(e)
            except pycurl.error as e:
                app.logger.warn('Performing pycurl connection failed')
                app.logger.warn(e)
            except Exception as e:
                app.logger.warn('Something unexpected happened')
                app.logger.warn(e)
            finally:
                if conn is not None:
                    conn.close()

        self.google_tags[file_name] = set(tags)

        app.logger.info('Received Google Tags: {tags}'.format(tags=tags))

        return tags
        self.name = name
        self.name_backwards = name[::-1]
        return


data = []
data.append(SimpleObject('pickle'))
data.append(SimpleObject('cPickle'))
data.append(SimpleObject('last'))

# Simulate a file with StringIO
out_s = StringIO()

# Write to the stream
for o in data:
    print 'WRITING : %s (%s)' % (o.name, o.name_backwards)
    pickle.dump(o, out_s)
    out_s.flush()

# Set up a read-able stream
in_s = StringIO(out_s.getvalue())

# Read the data
while True:
    try:
        o = pickle.load(in_s)
    except EOFError:
        break
    else:
        print 'READ    : %s (%s)' % (o.name, o.name_backwards)
Example #52
0
    def __str__(self):
        fp = StringIO()
        self.write(fp=fp)
        value = fp.getvalue()
        return value
Example #53
0
def do_print_help(parser):
    """Capture output of parser.print_help()"""
    string_io = StringIO()
    parser.print_help(file=string_io)
    return string_io.getvalue()
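# Hedged usage sketch: argparse's print_help(file=...) accepts any writable
# file-like object, so a StringIO captures the help text for assertions.
#
#   import argparse
#   help_text = do_print_help(argparse.ArgumentParser(prog='demo'))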
def ReplaceOtaKeys(input_tf_zip, output_tf_zip, misc_info):
    try:
        keylist = input_tf_zip.read("META/otakeys.txt").split()
    except KeyError:
        raise common.ExternalError("can't read META/otakeys.txt from input")

    extra_recovery_keys = misc_info.get("extra_recovery_keys")
    if extra_recovery_keys:
        extra_recovery_keys = [
            OPTIONS.key_map.get(k, k) + ".x509.pem"
            for k in extra_recovery_keys.split()
        ]
        if extra_recovery_keys:
            print("extra recovery-only key(s): " +
                  ", ".join(extra_recovery_keys))
    else:
        extra_recovery_keys = []

    mapped_keys = []
    for k in keylist:
        m = re.match(r"^(.*)\.x509\.pem$", k)
        if not m:
            raise common.ExternalError(
                "can't parse \"%s\" from META/otakeys.txt" % (k, ))
        k = m.group(1)
        mapped_keys.append(OPTIONS.key_map.get(k, k) + ".x509.pem")

    if mapped_keys:
        print("using:\n   ", "\n   ".join(mapped_keys))
        print("for OTA package verification")
    else:
        devkey = misc_info.get("default_system_dev_certificate",
                               "build/target/product/security/testkey")
        mapped_devkey = OPTIONS.key_map.get(devkey, devkey)
        if mapped_devkey != devkey:
            misc_info["default_system_dev_certificate"] = mapped_devkey
        mapped_keys.append(mapped_devkey + ".x509.pem")
        print("META/otakeys.txt has no keys; using %s for OTA package"
              " verification." % (mapped_keys[0], ))

    # recovery uses a version of the key that has been slightly
    # predigested (by DumpPublicKey.java) and put in res/keys.
    # extra_recovery_keys are used only in recovery.
    cmd = ([OPTIONS.java_path] + OPTIONS.java_args + [
        "-jar",
        os.path.join(OPTIONS.search_path, "framework", "dumpkey.jar")
    ] + mapped_keys + extra_recovery_keys)
    p = common.Run(cmd, stdout=subprocess.PIPE)
    new_recovery_keys, _ = p.communicate()
    if p.returncode != 0:
        raise common.ExternalError("failed to run dumpkeys")

    # system_root_image puts the recovery keys at BOOT/RAMDISK.
    if misc_info.get("system_root_image") == "true":
        recovery_keys_location = "BOOT/RAMDISK/res/keys"
    else:
        recovery_keys_location = "RECOVERY/RAMDISK/res/keys"
    common.ZipWriteStr(output_tf_zip, recovery_keys_location,
                       new_recovery_keys)

    # Save the base64 key representation in the update for key-change
    # validations
    p = common.Run(
        ["python", "vendor/arrow/build/tools/getb64key.py", mapped_keys[0]],
        stdout=subprocess.PIPE)
    data, _ = p.communicate()
    if p.returncode == 0:
        common.ZipWriteStr(output_tf_zip, "META/releasekey.txt", data)

    # SystemUpdateActivity uses the x509.pem version of the keys, but
    # put into a zipfile system/etc/security/otacerts.zip.
    # We DO NOT include the extra_recovery_keys (if any) here.

    try:
        from StringIO import StringIO
    except ImportError:
        from io import StringIO
    temp_file = StringIO()
    certs_zip = zipfile.ZipFile(temp_file, "w")
    for k in mapped_keys:
        common.ZipWrite(certs_zip, k)
    common.ZipClose(certs_zip)
    common.ZipWriteStr(output_tf_zip, "SYSTEM/etc/security/otacerts.zip",
                       temp_file.getvalue())

    # For A/B devices, update the payload verification key.
    if misc_info.get("ab_update") == "true":
        # Unlike otacerts.zip that may contain multiple keys, we can only specify
        # ONE payload verification key.
        if len(mapped_keys) > 1:
            print(
                "\n  WARNING: Found more than one OTA keys; Using the first one"
                " as payload verification key.\n\n")

        print("Using %s for payload verification." % (mapped_keys[0], ))
        pubkey = common.ExtractPublicKey(mapped_keys[0])
        common.ZipWriteStr(
            output_tf_zip,
            "SYSTEM/etc/update_engine/update-payload-key.pub.pem", pubkey)
        common.ZipWriteStr(
            output_tf_zip,
            "BOOT/RAMDISK/etc/update_engine/update-payload-key.pub.pem",
            pubkey)

    return new_recovery_keys
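# Hedged stand-alone sketch of the in-memory zip pattern used for
# otacerts.zip above (stdlib only; on Python 3 a BytesIO would be needed):
#
#   import zipfile
#   from StringIO import StringIO
#
#   buf = StringIO()
#   zf = zipfile.ZipFile(buf, 'w')
#   zf.writestr('keys/releasekey.x509.pem', pem_bytes)  # pem_bytes assumed
#   zf.close()
#   zip_blob = buf.getvalue()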
Example #55
0
    def on_epoch_end(self, epoch, logs=None):
        logs = logs or {}

        if not self.validation_data and self.histogram_freq:
            raise ValueError("If printing histograms, validation_data must be "
                             "provided, and cannot be a generator.")
        if self.embeddings_data is None and self.embeddings_freq:
            raise ValueError("To visualize embeddings, embeddings_data must "
                             "be provided.")
        if self.validation_data and self.histogram_freq:
            if epoch % self.histogram_freq == 0:

                val_data = self.validation_data
                tensors = (self.model.inputs + self.model.targets +
                           self.model.sample_weights)

                if self.model.uses_learning_phase:
                    tensors += [K.learning_phase()]

                assert len(val_data) == len(tensors)
                val_size = val_data[0].shape[0]
                i = 0
                while i < val_size:
                    step = min(self.batch_size, val_size - i)
                    if self.model.uses_learning_phase:
                        # do not slice the learning phase
                        batch_val = [x[i:i + step] for x in val_data[:-1]]
                        batch_val.append(val_data[-1])
                    else:
                        batch_val = [x[i:i + step] for x in val_data]
                    assert len(batch_val) == len(tensors)
                    feed_dict = dict(zip(tensors, batch_val))
                    result = self.sess.run([self.merged], feed_dict=feed_dict)
                    summary_str = result[0]
                    self.writer.add_summary(summary_str, epoch)
                    i += self.batch_size

        if self.embeddings_freq and self.embeddings_data is not None:
            if epoch % self.embeddings_freq == 0:
                # We need a second forward-pass here because we're passing
                # the `embeddings_data` explicitly. This design allows to pass
                # arbitrary data as `embeddings_data` and results from the fact
                # that we need to know the size of the `tf.Variable`s which
                # hold the embeddings in `set_model`. At this point, however,
                # the `validation_data` is not yet set.

                # More details in this discussion:
                # https://github.com/keras-team/keras/pull/7766#issuecomment-329195622

                embeddings_data = self.embeddings_data
                n_samples = embeddings_data[0].shape[0]

                i = 0
                while i < n_samples:
                    step = min(self.batch_size, n_samples - i)
                    batch = slice(i, i + step)

                    if type(self.model.input) == list:
                        feed_dict = {
                            model_input: embeddings_data[idx][batch]
                            for idx, model_input in enumerate(self.model.input)
                        }
                    else:
                        feed_dict = {
                            self.model.input: embeddings_data[0][batch]
                        }

                    feed_dict.update({self.batch_id: i, self.step: step})

                    if self.model.uses_learning_phase:
                        feed_dict[K.learning_phase()] = False

                    self.sess.run(self.assign_embeddings, feed_dict=feed_dict)
                    self.saver.save(
                        self.sess,
                        os.path.join(self.log_dir, 'keras_embedding.ckpt'),
                        epoch)

                    i += self.batch_size

        for name, value in logs.items():
            if name in ['batch', 'size']:
                continue
            summary = tf.Summary()
            summary_value = summary.value.add()
            summary_value.simple_value = value.item()
            summary_value.tag = name
            self.writer.add_summary(summary, epoch)

        val_x = self.data
        img_vis, eta_vis, p_vis, bbox_vis = val_x
        y_pred = self.fun_lastfeaturemap(val_x)[0]
        x = [y_pred, eta_vis, p_vis, bbox_vis]

        loss1, loss2, loss3 = self.total_loss(x)

        summary_l1 = tf.Summary(value=[
            tf.Summary.Value(tag='loss1(p=1,eta=1)', simple_value=loss1)
        ])
        self.writer.add_summary(summary_l1, epoch)

        summary_l2 = tf.Summary(value=[
            tf.Summary.Value(tag='loss2(p=1,eta=0)', simple_value=loss2)
        ])
        self.writer.add_summary(summary_l2, epoch)

        summary_l3 = tf.Summary(value=[
            tf.Summary.Value(tag='loss3(p=0,eta=0)', simple_value=loss3)
        ])
        self.writer.add_summary(summary_l3, epoch)

        # summary_l1 = tf.Summary.Value(tag='loss1(p=1,eta=1)', simple_value=loss1)
        # summary_l2 = tf.Summary.Value(tag='loss2(p=1,eta=0)', simple_value=loss2)
        # summary_l3 = tf.Summary.Value(tag='loss3(p=0,eta=0)', simple_value=loss3)
        # scalar_summaries = [summary_l1,summary_l2,summary_l3]

        img_summaries = []

        # writer.add_summary(value=[tf.Summary.Value(tag='pred', simple_value=y_pred[0][:10])])
        for i in range(14):
            try:
                s = StringIO()
            except:
                s = BytesIO()
            img = y_pred[0][:, :, i] * 255
            np.save('npout', img)
            image = Image.fromarray(img)
            image = image.convert('RGB')
            image.save(s, format='PNG')
            # scipy.misc.toimage(img).save(s, format="png")
            img_sum = tf.Summary.Image(encoded_image_string=s.getvalue(),
                                       height=img.shape[0],
                                       width=img.shape[1])
            s.close()

            # Create a Summary value
            img_summaries.append(
                tf.Summary.Value(tag='pred%d layer' % i, image=img_sum))

        # Create and write Summary
        summary = tf.Summary(value=img_summaries)
        self.writer.add_summary(summary, epoch)

        self.writer.flush()
Example #56
0
class SubProcess(object):

    """
    Run a subprocess in the background, collecting stdout/stderr streams.
    """

    def __init__(self, cmd, verbose=True, allow_output_check='all',
                 shell=False, env=None, sudo=False):
        """
        Creates the subprocess object, stdout/err, reader threads and locks.

        :param cmd: Command line to run.
        :type cmd: str
        :param verbose: Whether to log the command run and stdout/stderr.
        :type verbose: bool
        :param allow_output_check: Whether to log the command stream outputs
                                   (stdout and stderr) in the test stream
                                   files. Valid values: 'stdout', for
                                   allowing only standard output, 'stderr',
                                   to allow only standard error, 'all',
                                   to allow both standard output and error
                                   (default), and 'none', to allow
                                   none to be recorded.
        :type allow_output_check: str
        :param shell: Whether to run the subprocess in a subshell.
        :type shell: bool
        :param env: Use extra environment variables.
        :type env: dict
        :param sudo: Whether the command requires admin privileges to run,
                     so that sudo will be prepended to the command.
                     The assumption here is that the user running the command
                     has a sudo configuration such that a password won't be
                     prompted. If that's not the case, the command will
                     straight out fail.
        """
        # Now assemble the final command considering the need for sudo
        self.cmd = self._prepend_sudo(cmd, sudo, shell)
        self.verbose = verbose
        self.allow_output_check = allow_output_check
        self.result = CmdResult(self.cmd)
        self.shell = shell
        if env:
            self.env = os.environ.copy()
            self.env.update(env)
        else:
            self.env = None
        self._popen = None
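    # Hedged usage sketch (based only on the methods defined below): start
    # a background command, wait for it, then read the drained streams.
    #
    #   proc = SubProcess('sleep 1; echo done', shell=True)
    #   proc.start()
    #   result = proc.wait()        # fills proc.result with the exit status
    #   output = proc.get_stdout()  # everything collected from stdout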

    def __repr__(self):
        if self._popen is None:
            rc = '(not started)'
        elif self.result.exit_status is None:
            rc = '(running)'
        else:
            rc = self.result.exit_status
        return '%s(cmd=%r, rc=%r)' % (self.__class__.__name__, self.cmd, rc)

    def __str__(self):
        if self._popen is None:
            rc = '(not started)'
        elif self.result.exit_status is None:
            rc = '(running)'
        else:
            rc = '(finished with exit status=%d)' % self.result.exit_status
        return '%s %s' % (self.cmd, rc)

    @staticmethod
    def _prepend_sudo(cmd, sudo, shell):
        if sudo and os.getuid() != 0:
            try:
                sudo_cmd = '%s -n' % path.find_command('sudo')
            except path.CmdNotFoundError as details:
                log.error(details)
                log.error('Parameter sudo=True provided, but sudo was '
                          'not found. Please consider adding sudo to '
                          'your OS image')
                return cmd
            if shell:
                if ' -s' not in sudo_cmd:
                    sudo_cmd = '%s -s' % sudo_cmd
            cmd = '%s %s' % (sudo_cmd, cmd)
        return cmd

    def _init_subprocess(self):
        if self._popen is None:
            if self.verbose:
                log.info("Running '%s'", self.cmd)
            if self.shell is False:
                cmd = shlex.split(self.cmd)
            else:
                cmd = self.cmd
            try:
                self._popen = subprocess.Popen(cmd,
                                               stdout=subprocess.PIPE,
                                               stderr=subprocess.PIPE,
                                               shell=self.shell,
                                               env=self.env)
            except OSError as details:
                if details.errno == 2:
                    exc = OSError("File '%s' not found" % self.cmd.split()[0])
                    exc.errno = 2
                    raise exc
                else:
                    raise

            self.start_time = time.time()
            self.stdout_file = StringIO()
            self.stderr_file = StringIO()
            self.stdout_lock = threading.Lock()
            self.stdout_thread = threading.Thread(target=self._fd_drainer,
                                                  name="%s-stdout" % self.cmd,
                                                  args=[self._popen.stdout])
            self.stdout_thread.daemon = True
            self.stderr_lock = threading.Lock()
            self.stderr_thread = threading.Thread(target=self._fd_drainer,
                                                  name="%s-stderr" % self.cmd,
                                                  args=[self._popen.stderr])
            self.stderr_thread.daemon = True
            self.stdout_thread.start()
            self.stderr_thread.start()

            def signal_handler(signum, frame):
                self.result.interrupted = "signal/ctrl+c"
                self.wait()
            try:
                signal.signal(signal.SIGINT, signal_handler)
            except ValueError:
                if self.verbose:
                    log.info("Command %s running on a thread", self.cmd)

    def _fd_drainer(self, input_pipe):
        """
        Read from input_pipe, storing and logging output.

        :param input_pipe: File like object to the stream.
        """
        stream_prefix = "%s"
        if input_pipe == self._popen.stdout:
            prefix = '[stdout] %s'
            if self.allow_output_check in ['none', 'stderr']:
                stream_logger = None
            else:
                stream_logger = stdout_log
            output_file = self.stdout_file
            lock = self.stdout_lock
        elif input_pipe == self._popen.stderr:
            prefix = '[stderr] %s'
            if self.allow_output_check in ['none', 'stdout']:
                stream_logger = None
            else:
                stream_logger = stderr_log
            output_file = self.stderr_file
            lock = self.stderr_lock

        fileno = input_pipe.fileno()

        bfr = ''
        while True:
            tmp = os.read(fileno, 1024)
            if tmp == '':
                if self.verbose and bfr:
                    for line in bfr.splitlines():
                        log.debug(prefix, line)
                        if stream_logger is not None:
                            stream_logger.debug(stream_prefix, line)
                break
            lock.acquire()
            try:
                output_file.write(tmp)
                if self.verbose:
                    bfr += tmp
                    if tmp.endswith('\n'):
                        for line in bfr.splitlines():
                            log.debug(prefix, line)
                            if stream_logger is not None:
                                stream_logger.debug(stream_prefix, line)
                        bfr = ''
            finally:
                lock.release()

    def _fill_results(self, rc):
        self._init_subprocess()
        self.result.exit_status = rc
        if self.result.duration == 0:
            self.result.duration = time.time() - self.start_time
        if self.verbose:
            log.info("Command '%s' finished with %s after %ss", self.cmd, rc,
                     self.result.duration)
        self._fill_streams()

    def _fill_streams(self):
        """
        Close subprocess stdout and stderr, and put values into result obj.
        """
        # Cleaning up threads
        self.stdout_thread.join()
        self.stderr_thread.join()
        # Clean subprocess pipes and populate stdout/err
        self._popen.stdout.close()
        self._popen.stderr.close()
        self.result.stdout = self.get_stdout()
        self.result.stderr = self.get_stderr()

    def start(self):
        """
        Start running the subprocess.

        This method is particularly useful for background processes, since
        you can start the subprocess and not block your test flow.

        :return: Subprocess PID.
        :rtype: int
        """
        self._init_subprocess()
        return self._popen.pid

    def get_stdout(self):
        """
        Get the full stdout of the subprocess so far.

        :return: Standard output of the process.
        :rtype: str
        """
        self._init_subprocess()
        self.stdout_lock.acquire()
        stdout = self.stdout_file.getvalue()
        self.stdout_lock.release()
        return stdout

    def get_stderr(self):
        """
        Get the full stderr of the subprocess so far.

        :return: Standard error of the process.
        :rtype: str
        """
        self._init_subprocess()
        self.stderr_lock.acquire()
        stderr = self.stderr_file.getvalue()
        self.stderr_lock.release()
        return stderr

    def terminate(self):
        """
        Send a :attr:`signal.SIGTERM` to the process.
        """
        self._init_subprocess()
        self.send_signal(signal.SIGTERM)

    def kill(self):
        """
        Send a :attr:`signal.SIGKILL` to the process.
        """
        self._init_subprocess()
        self.send_signal(signal.SIGKILL)

    def send_signal(self, sig):
        """
        Send the specified signal to the process.

        :param sig: Signal to send.
        """
        self._init_subprocess()
        self._popen.send_signal(sig)

    def poll(self):
        """
        Call the subprocess poll() method, fill results if rc is not None.
        """
        self._init_subprocess()
        rc = self._popen.poll()
        if rc is not None:
            self._fill_results(rc)
        return rc

    def wait(self):
        """
        Call the subprocess wait() method, fill results if rc is not None.
        """
        self._init_subprocess()
        rc = self._popen.wait()
        if rc is not None:
            self._fill_results(rc)
        return rc

    def stop(self):
        """
        Stop background subprocess.

        Call this method to terminate the background subprocess and
        wait for its result.
        """
        self._init_subprocess()
        if self.result.exit_status is None:
            self.terminate()
        return self.wait()

    def get_pid(self):
        """
        Report the PID of this process.
        """
        self._init_subprocess()
        return self._popen.pid

    def run(self, timeout=None, sig=signal.SIGTERM):
        """
        Start a process and wait for it to end, returning the result attr.

        If the process was already started using .start(), this will simply
        wait for it to end.

        :param timeout: Time (seconds) we'll wait until the process is
                        finished. If it's not, we'll try to terminate it
                        and get a status.
        :type timeout: float
        :param sig: Signal to send to the process in case it did not end after
                    the specified timeout.
        :type sig: int
        :returns: The command result object.
        :rtype: A :class:`CmdResult` instance.
        """
        def timeout_handler():
            self.send_signal(sig)
            self.result.interrupted = "timeout after %ss" % timeout

        self._init_subprocess()

        if timeout is None:
            self.wait()
        elif timeout > 0.0:
            timer = threading.Timer(timeout, timeout_handler)
            try:
                timer.start()
                self.wait()
            finally:
                timer.cancel()

        if self.result.exit_status is None:
            stop_time = time.time() + 1
            while time.time() < stop_time:
                self.poll()
                if self.result.exit_status is not None:
                    break
            else:
                self.kill()
                self.poll()

        # If all this work fails, we're dealing with a zombie process.
        e_msg = 'Zombie Process %s' % self._popen.pid
        assert self.result.exit_status is not None, e_msg

        return self.result
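# A self-contained sketch of the drainer pattern used in the class above:
# a daemon thread copies a subprocess pipe into an in-memory buffer under
# a lock, so the collected output can be read while the process is still
# running. The names drain/proc/sink are illustrative, not taken from the
# original class.
import os
import subprocess
import threading
from io import StringIO

def drain(pipe, sink, lock):
    while True:
        chunk = os.read(pipe.fileno(), 1024)
        if not chunk:
            break
        with lock:
            sink.write(chunk.decode())

proc = subprocess.Popen(['echo', 'hello'], stdout=subprocess.PIPE)
sink, lock = StringIO(), threading.Lock()
reader = threading.Thread(target=drain, args=(proc.stdout, sink, lock))
reader.daemon = True
reader.start()
proc.wait()
reader.join()
print(sink.getvalue())  # 'hello\n'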
Example #57
def mon_add(distro, args, monitor_keyring):
    hostname = distro.conn.remote_module.shortname()
    logger = distro.conn.logger
    path = paths.mon.path(args.cluster, hostname)
    monmap_path = paths.mon.monmap(args.cluster, hostname)
    done_path = paths.mon.done(args.cluster, hostname)
    init_path = paths.mon.init(args.cluster, hostname, distro.init)

    configuration = conf.ceph.load(args)
    conf_data = StringIO()
    configuration.write(conf_data)

    # write the configuration file
    distro.conn.remote_module.write_conf(
        args.cluster,
        conf_data.getvalue(),
        args.overwrite_conf,
    )

    # if the mon path does not exist, create it
    distro.conn.remote_module.create_mon_path(path)

    logger.debug('checking for done path: %s' % done_path)
    if not distro.conn.remote_module.path_exists(done_path):
        logger.debug('done path does not exist: %s' % done_path)
        if not distro.conn.remote_module.path_exists(
                paths.mon.constants.tmp_path):
            logger.info('creating tmp path: %s' % paths.mon.constants.tmp_path)
            distro.conn.remote_module.makedir(paths.mon.constants.tmp_path)
        keyring = paths.mon.keyring(args.cluster, hostname)

        logger.info('creating keyring file: %s' % keyring)
        distro.conn.remote_module.write_monitor_keyring(
            keyring,
            monitor_keyring,
        )

        # get the monmap
        remoto.process.run(
            distro.conn,
            [
                'ceph',
                'mon',
                'getmap',
                '-o',
                monmap_path,
            ],
        )

        # now use it to prepare the monitor's data dir
        remoto.process.run(
            distro.conn,
            [
                'ceph-mon',
                '--cluster',
                args.cluster,
                '--mkfs',
                '-i',
                hostname,
                '--monmap',
                monmap_path,
                '--keyring',
                keyring,
            ],
        )

        # add it
        remoto.process.run(
            distro.conn,
            [
                'ceph',
                'mon',
                'add',
                hostname,
                args.address,
            ],
        )

        logger.info('unlinking keyring file %s' % keyring)
        distro.conn.remote_module.unlink(keyring)

    # create the done file
    distro.conn.remote_module.create_done_path(done_path)

    # create init path
    distro.conn.remote_module.create_init_path(init_path)

    # start the mon using the address
    remoto.process.run(
        distro.conn,
        [
            'ceph-mon',
            '-i',
            hostname,
            '--public-addr',
            args.address,
        ],
    )
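# The conf_data block above serializes a parsed cluster config into memory
# before shipping it to the remote host. A standalone sketch of the same
# pattern with the stdlib ConfigParser (conf.ceph.load appears to return a
# compatible object; the section and option values here are made up):
from ConfigParser import ConfigParser   # configparser on Python 3
from StringIO import StringIO           # io.StringIO on Python 3

configuration = ConfigParser()
configuration.add_section('global')
configuration.set('global', 'fsid', '00000000-0000-0000-0000-000000000000')
conf_data = StringIO()
configuration.write(conf_data)
print(conf_data.getvalue())   # full INI text, ready for write_conf()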
Example #58
def _image_tile(self, image):
    out = StringIO()
    image.save(out, self._tile_extension[1:])
    return out.getvalue()
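# _image_tile above encodes a PIL image into an in-memory buffer; the [1:]
# strips the leading dot from an extension such as '.png'. A standalone
# sketch (Pillow assumed installed; on Python 3 the buffer must be a
# BytesIO, because the image encoders write bytes):
from io import BytesIO
from PIL import Image

image = Image.new('RGB', (256, 256), 'white')
out = BytesIO()
image.save(out, 'PNG')       # format name without the dot
tile_bytes = out.getvalue()  # encoded image, never touches disk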
Example #59
class PycViewer(object):
    def __init__(self, filename):
        with open(filename, 'rb') as pysrc:
            self._content = pysrc.read()
        if b'\0' in self._content:
            # Null bytes imply compiled bytecode: assume a pyc file
            self._magic = self._content[:4]
            self._modtime = struct.unpack('I', self._content[4:8])[0]
            self._code = marshal.loads(self._content[8:])
        else:
            # Assume |filename| is a Python script
            self._magic = 'No magic number'
            self._modtime = 'No modification time'
            self._code = compile(self._content, filename, 'exec')
        self._string_io = StringIO()

    def _write_string(self, string, depth=0):
        space = ' '
        self._string_io.write(space * 2 * depth)
        self._string_io.write(string)
        self._string_io.write('\n')

    def _write_string_hex(self, label, string, depth):
        space = ' '
        try:
            # Python 2 str: hex-encode via the 'hex' codec
            string = string.encode('hex')
        except (AttributeError, LookupError):
            # Python 3 bytes: use bytes.hex() instead
            string = string.hex()

        if len(string) < 60:
            self._string_io.write(space * 2 * depth)
            self._string_io.write(label + ': ')
            self._string_io.write(string)
            self._string_io.write('\n')
        else:
            self._string_io.write(space * 2 * depth)
            self._string_io.write(label)
            self._string_io.write(':\n')
            for i in range(0, len(string), 60):
                self._string_io.write(space * 2 * (depth + 1))
                self._string_io.write(string[i:i + 60])
                self._string_io.write('\n')

    def codeobj_to_string(self, code, depth=0):
        self._write_string("CodeObject:", depth)
        self._write_string("argcount: %d" % (code.co_argcount, ), depth + 1)
        self._write_string("nlocals: %d" % (code.co_nlocals, ), depth + 1)
        self._write_string("stacksize: %d" % (code.co_stacksize, ), depth + 1)
        self._write_string(
            "flags: 0x%04x (%s)" %
            (code.co_flags, interpret_code_flags(code.co_flags)), depth + 1)
        self._write_string_hex("code", code.co_code, depth + 1)
        self._write_string("names: %r" % (code.co_names, ), depth + 1)
        self._write_string("consts:", depth + 1)

        codeobj_list = []
        for const in code.co_consts:
            if type(const) == types.CodeType:
                codeobj_list.append(const)
                self._write_string("<code object>", depth + 2)
            else:
                self._write_string("%r" % (const, ), depth=depth + 2)

        self._write_string("varnames: %r" % (code.co_varnames, ), depth + 1)
        self._write_string("freevars: %r" % (code.co_freevars, ), depth + 1)
        self._write_string("cellvars: %r" % (code.co_cellvars, ), depth + 1)
        self._write_string("filename: %r" % (code.co_filename, ), depth + 1)
        self._write_string("name: %r" % (code.co_name, ), depth + 1)
        self._write_string("firstlineno: %d" % (code.co_firstlineno, ),
                           depth + 1)
        self._write_string_hex("lnotab", code.co_lnotab, depth + 1)
        self._write_string("assembly code:", depth + 1)
        self._string_io.write(disassemble(code, depth + 1))

        for codeobj in codeobj_list:
            self.codeobj_to_string(codeobj, depth + 2)
        return self._string_io.getvalue()

    def show(self):
        print(self.codeobj_to_string(self._code))
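# Hypothetical usage of the viewer above; the .pyc path is illustrative:
viewer = PycViewer('example.pyc')
viewer.show()   # prints header fields, constants, and disassembly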
Example #60
def __compress_data(self, data):
    # gzip-compress the payload, then base64-encode it for safe transport
    out = StringIO()
    with gzip.GzipFile(fileobj=out, mode="w") as f:
        f.write(data)
    return base64.standard_b64encode(out.getvalue())
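# A sketch of the inverse operation: base64-decode, then gunzip. The
# decompress_data name is illustrative, and BytesIO is used because gzip
# operates on bytes:
import base64
import gzip
from io import BytesIO

def decompress_data(blob):
    raw = base64.standard_b64decode(blob)
    with gzip.GzipFile(fileobj=BytesIO(raw), mode='r') as f:
        return f.read()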