Example #1
 def printClicked(self):
     try:
         imgData = StringIO()
         self.imageWidget.figure.savefig(imgData, format="svg")  # dpi=...)
         imgData.flush()
         imgData.seek(0)
         svgData = imgData.read()
         svgRenderer = qt.QSvgRenderer()
         svgRenderer.load(
             qt.QXmlStreamReader(svgData.encode(errors="replace")))
         self.printPreview.addSvgItem(svgRenderer)
     except:
         try:
             if hasattr(qt.QPixmap, "grabWidget"):
                 pixmap = qt.QPixmap.grabWidget(self.imageWidget)
             else:
                 pixmap = self.imageWidget.grab()
             self.printPreview.addPixmap(pixmap)
         except:
             msg = qt.QMessageBox(self)
             msg.setIcon(qt.QMessageBox.Critical)
             msg.setText("Error printing image: %s" % sys.exc_info()[1])
             msg.setWindowTitle('Matplotlib Save Image')
             msg.exec_()
             return
     if self.printPreview.isHidden():
         self.printPreview.show()
     self.printPreview.raise_()
Example #2
    def repl(self,
             prompt='lispy> ',
             inport=InPort(sys.stdin),
             out=sys.stdout,
             err=sys.stderr,
             return_value=False,
             catch_exceptions=True):
        "A prompt-read-eval-print loop."
        if out is None:
            out = StringIO()

        if err is None:
            err = StringIO()

        while True:
            try:
                if prompt:
                    sys.stderr.write(prompt)
                x = parse(inport)
                if x is EOF_OBJECT:
                    return
                val = eval(x)
                if val is not None and out and return_value is False:
                    err.write(to_string(val) + "\n")
                    err.flush()
                elif return_value:
                    return val
            except Exception as e:
                if catch_exceptions:
                    exc_type, exc_value, exc_traceback = sys.exc_info()
                    traceback.print_exception(exc_type, exc_value,
                                              exc_traceback)
                else:
                    raise e
Example #3
def download_project_dir(request, job_id):

    job_dir = settings.JOBS_PATH + sep + job_id
    fname = "%s.zip" % job_id

    response = HttpResponse(content_type='application/zip')
    response['Content-Disposition'] = 'filename=%s' % fname
    all_files = []

    try:
        job_status = Job.objects.get(job_id=job_id.replace('/', ''))
    except:
        job_status = None

    for root, dirs, files in os.walk(job_dir):
        for fn in files:
            abs_fn = os.path.join(root, fn)
            #print os.path.relpath(abs_fn, job_dir)
            with open(abs_fn, 'rb') as ifile:
                all_files.append((os.path.relpath(abs_fn,
                                                  job_dir), ifile.read()))

    buffer = StringIO()
    zip = zipfile.ZipFile(buffer, "w", zipfile.ZIP_DEFLATED)
    for name, f in all_files:
        zip.writestr(os.path.join(job_id, name), f)
    zip.close()
    buffer.flush()
    #the important detail - we return the content of the buffer
    ret_zip = buffer.getvalue()
    buffer.close()
    response.write(ret_zip)

    return response
Example #4
    def __init__(self, host, username, password=None, key_file=None):
        self.username = username
        self.transport = paramiko.Transport(sock="{}:{}".format(host, 22))

        # passwd = password
        # if passwd:
        #     password = base64.b64decode(passwd).decode('utf-8')
        # else:
        #     password = None

        if key_file:
            with open(key_file, 'r') as f:
                ssh_key = f.read()
            f.close()
            string_io = StringIO()
            string_io.write(ssh_key)
            string_io.flush()
            string_io.seek(0)
            ssh_key = string_io
            key = get_key_obj(paramiko.RSAKey, pkey_obj=ssh_key, password=password) or \
                  get_key_obj(paramiko.DSSKey, pkey_obj=ssh_key, password=password) or \
                  get_key_obj(paramiko.ECDSAKey, pkey_obj=ssh_key, password=password) or \
                  get_key_obj(paramiko.Ed25519Key, pkey_obj=ssh_key, password=password)
            self.transport.connect(username=self.username, pkey=key)
        else:
            self.transport.connect(username=self.username, password=password)

        self.sftp = paramiko.SFTPClient.from_transport(self.transport)
Example #5
    def _getSvgRendererAndViewbox(self):
        """Return a SVG renderer displaying the plot and its viewbox
        (interactively specified by the user the first time this is called).

        The size of the renderer is adjusted to the printer configuration
        and to the geometry configuration (width, height, ratio) specified
        by the user."""
        imgData = StringIO()
        assert self._plot.saveGraph(imgData, fileFormat="svg"), \
            "Unable to save graph"
        imgData.flush()
        imgData.seek(0)
        svgData = imgData.read()

        svgRenderer = qt.QSvgRenderer()

        viewbox = self._getViewBox()

        svgRenderer.setViewBox(viewbox)

        xml_stream = qt.QXmlStreamReader(svgData.encode(errors="replace"))

        # This is for PyMca compatibility, to share a print preview with PyMca plots
        svgRenderer._viewBox = viewbox
        svgRenderer._svgRawData = svgData.encode(errors="replace")
        svgRenderer._svgRendererData = xml_stream

        if not svgRenderer.load(xml_stream):
            raise RuntimeError("Cannot interpret svg data")

        return svgRenderer, viewbox
Example #6
    def create_gon_file(self):

        output = """        {
            "type": "Goniometer", 
            "properties": {
                "radius": %(radius)f, 
                "divergence": %(divergence)f, 
                "soller1": %(soller1)f, 
                "soller2": %(soller2)f, 
                "min_2theta": %(twotheta_min)f, 
                "max_2theta": %(twotheta_max)f, 
                "steps": %(twotheta_count)f, 
                "wavelength": %(alpha_average)f, 
                "has_ads": false, 
                "ads_fact": 1.0, 
                "ads_phase_fact": 1.0, 
                "ads_phase_shift": 0.0, 
                "ads_const": 0.0
            }
        }""" % dict(
            radius=float(not_none(self.radius, 25)),
            divergence=float(not_none(self.divergence, 0.5)),
            soller1=float(not_none(self.soller1, 2.5)),
            soller2=float(not_none(self.soller2, 2.5)),
            twotheta_min=float(not_none(self.twotheta_min, 3.0)),
            twotheta_max=float(not_none(self.twotheta_max, 45.0)),
            twotheta_count=float(not_none(self.twotheta_count, 2500)),
            alpha_average=float(not_none(self.alpha_average, 0.154056)),
        )
        f = StringIO(output)
        f.flush()
        return f
Example #8
class CaptureOutput(object):
    """docstring for CaptureOutput"""
    def __init__(self):
        self._stdout = None
        self._stderr = None

        self.s_stdout = StringIO()
        self.s_stderr = StringIO()

    def __enter__(self):
        self._stdout = sys.stdout
        self._stderr = sys.stderr

        sys.stdout = self.s_stdout
        sys.stderr = self.s_stderr

        return self

    def __exit__(self, type, value, traceback):
        sys.stdout = self._stdout
        sys.stderr = self._stderr

    def stdout(self):
        self.s_stdout.flush()
        return self.s_stdout.getvalue()

    def stderr(self):
        self.s_stderr.flush()
        return self.s_stderr.getvalue()
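As a usage sketch (not part of the original project): with the CaptureOutput class above, anything printed inside the with block is collected in memory and can be inspected afterwards.

import sys

with CaptureOutput() as captured:
    print("hello")                      # collected by s_stdout
    print("oops", file=sys.stderr)      # collected by s_stderr

assert captured.stdout() == "hello\n"
assert captured.stderr() == "oops\n"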
Example #9
    def test_phenotype_IO(self):
        """Test basic functionalities of phenotype IO methods."""
        p1 = phenotype.read(SMALL_JSON_PLATE, "pm-json")
        p2 = next(phenotype.parse(SMALL_CSV_PLATES, "pm-csv"))

        handle = StringIO()

        c = phenotype.write([p1, p2], handle, "pm-json")
        self.assertEqual(c, 2)

        handle.flush()
        handle.seek(0)
        # Now ready to read back from the handle...
        try:
            records = list(phenotype.parse(handle, "pm-json"))
        except ValueError as e:
            # This is BAD.  We can't read our own output.
            # I want to see the output when called from the test harness,
            # run_tests.py (which can be funny about new lines on Windows)
            handle.seek(0)
            self.fail("%s\n\n%r\n\n%r" % (str(e), handle.read(), records))

        self.assertEqual(p1, records[0])

        handle.close()
        handle = StringIO()
        self.assertRaises(TypeError, phenotype.write, p1, handle, 1)
        self.assertRaises(ValueError, phenotype.write, p1, handle, "PM-JSON")
        self.assertRaises(ValueError, phenotype.write, p1, handle, "pm-csv")
        handle.close()
Example #10
 def executeTemplate(self, name):
     """execeute a single template with the data, return the output buffer"""
     tmplType = self.cfg[name].get('type', 'mako')
     if tmplType == 'mako':
         self.logger.debug('GEN | calling mako template')
         tLookup = TemplateLookup(directories=[self.getTemplateFolder()])
         template = Template("""<%%include file="%s"/>""" %
                             self.cfg[name].get('topFile'),
                             lookup=tLookup,
                             strict_undefined=True)
         buf = StringIO()
         ctx = Context(buf,
                       d=self.data,
                       systemCfg=self.controller.systemCfg,
                       generatorCfg=self.cfg,
                       logger=self.logger)
         template.render_context(ctx)
         buf.flush()
         buf.seek(0)
         return buf
     elif tmplType == 'jinja2':
         self.logger.debug('GEN | calling jinja2 template')
         env = Environment(
             loader=FileSystemLoader(self.getTemplateFolder()))
         template = env.get_template(self.cfg[name].get('topFile'))
         ns = {'d': self.data}
         ns['systemCfg'] = self.controller.systemCfg
         ns['generatorCfg'] = self.cfg
         ns['logger'] = self.logger
         tmp = template.render(ns)
         buf = StringIO(tmp)
         return (buf)
     else:
         raise Exception('Unknown template system: ' + tmplType)
Example #11
 def flush(self):
     self.file.flush()
     StringIO.flush(self)
     if self.queue is not None:
         data = (current_process().pid, ''.join(self.queue_buffer))  # pylint: disable=E1102
         self.queue.put(data)
         self.queue_buffer = []
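Several of the flush() methods on this page (this one and examples #17 and #39) belong to a tee-like StringIO subclass whose full class definition is not shown. A minimal, hypothetical sketch of such a class (the name TeeIO and the file/queue/queue_buffer constructor arguments are assumptions, not the original code) could look like this:

import sys
from io import StringIO
from multiprocessing import current_process

class TeeIO(StringIO):
    # Hypothetical minimal tee: keep everything in memory, mirror writes to a
    # real file, and forward buffered chunks to a multiprocessing queue on flush.
    def __init__(self, file=sys.__stdout__, queue=None):
        super().__init__()
        self.file = file
        self.queue = queue
        self.queue_buffer = []

    def write(self, data):
        self.file.write(data)
        if self.queue is not None:
            self.queue_buffer.append(data)
        return super().write(data)

    def flush(self):
        self.file.flush()
        StringIO.flush(self)
        if self.queue is not None:
            self.queue.put((current_process().pid, ''.join(self.queue_buffer)))
            self.queue_buffer = []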
Example #12
 def flush(self):
     """
     Calls flush on both the StringIO base and the wrapped handle.
     """
     StringIO.flush(self)
     if self.handle:
         self.handle.flush()
Example #13
class LoggerTest(unittest.TestCase):
    """Testcase for the Logger actor.
    """
    def setUp(self):
        self._file_like = StringIO()
        self._logger = Logger.start(file_like=self._file_like).proxy()

    def tearDown(self):
        self._logger.stop()

    def _stop_logger(self):
        self._logger.stop()
        self._logger.actor_stopped.get().wait()

    def test_records_are_recorded(self):
        """Verify that logging records produces messages.
        """
        mutation_record = MutationRecord(
            'foo', 'foo.py', 'operator',
            {'description': 'desc',
             'line_number': 3},
            None)
        test_result = TestResult(Outcome.KILLED, 'ok')

        self._logger.handle_result(mutation_record, test_result)
        self._stop_logger()
        self._file_like.flush()
        self._file_like.seek(0)

        self.assertGreater(len(self._file_like.read()), 0)
Example #14
 def test_writer_log(self, stream, encoding, monkeypatch, caplog):
     writer = StringIO()
     terminal._Write(_writer=writer).raw(self.message)
     writer.flush()
     writer.seek(0)
     output = writer.readlines()[0]
     assert self.octpus_and_squid_en in output
Example #15
def checkInputs(phaseSettingsSeed):

    ampInputSignal = '0'

    for phaseSetting in phaseSettingsSeed:
        # reset the stdin/STDOUT
        myStdIn = StringIO()
        myStdOut = StringIO()

        # Seed the input feed
        sys.stdout = myStdIn
        print(str(phaseSetting))
        print(str(ampInputSignal))
        printToLog(1, "\n===================================================")
        printToLog(
            1, "Excecuting Program with Phase {0} and Amp {1}".format(
                phaseSetting, ampInputSignal))

        # Reset Input File and redirect Stdin
        sys.stdin = myStdIn
        myStdIn.seek(0)
        sys.stdout = myStdOut

        # Execute the program
        computer = IntcodeComputer(projectInput, logLevel)
        computer.execCode()

        # Now get the output
        myStdOut.seek(0)
        ampInputSignal = str(myStdOut.getvalue().split()[0]).zfill(5)
        printToLog(1, "Amp Out Signal: {0}".format(ampInputSignal))
        myStdIn.flush()
        myStdOut.flush()

    return ampInputSignal
Example #16
def check_print(do_train, expected_warnings, nr_dead=None, perc_dead=None):
    """
    Capture stdout to check that the correct warning message is delivered
    :param nr_dead: int
    :param perc_dead: float, 10% should be written as 0.1
    """

    saved_stdout = sys.stdout

    out = StringIO()
    out.flush()
    sys.stdout = out  # overwrite current stdout

    do_train()

    # get prints, can be something like: "Layer
    # dense (#0) has 2 dead neurons (20.00%)!"
    stdoutput = out.getvalue().strip()
    str_to_count = "dead neurons"
    count = stdoutput.count(str_to_count)

    sys.stdout = saved_stdout  # restore stdout
    out.close()

    assert expected_warnings == count
    if expected_warnings and (nr_dead is not None):
        str_to_check = 'has {} dead'.format(nr_dead)
        assert str_to_check in stdoutput, '"{}" not in "{}"'.format(
            str_to_check, stdoutput)
    if expected_warnings and (perc_dead is not None):
        str_to_check = 'neurons ({:.2%})!'.format(perc_dead)
        assert str_to_check in stdoutput, '"{}" not in "{}"'.format(
            str_to_check, stdoutput)
Example #17
 def flush(self):
     self.file.flush()
     #super(TeeFile, self).flush()
     StringIO.flush(self)
     if self.queue is not None:
         self.queue.put((current_process().pid, ''.join(self.queue_buffer)))
         self.queue_buffer = []
Example #18
    def testTaskCreateViewWithTwoCsv(self):
        # given
        tmpFile1 = StringIO()
        tmpFile1.write('key,col1,col2\n')
        tmpFile1.write('key1,value2,value3\n')
        tmpFile1.write('key2,value4,value5\n')
        tmpFile1.flush()
        tmpFile1.seek(0)

        tmpFile2 = StringIO()
        tmpFile2.write('key,col1,col2\n')
        tmpFile2.write('key1,value2,value3\n')
        tmpFile2.write('key2,value4,value5\n')
        tmpFile2.flush()
        tmpFile2.seek(0)

        # when
        response = self.client.post(
            reverse('task_create'),
            {
                'file1': tmpFile1,
                'file2': tmpFile2,
                'summary': 'summary for test! TEST'
            }
        )
        # then
        self.assertRedirects(response, reverse(
            'task_detail', kwargs={'pk': self.taskModel.id + 1}))
Example #19
 def printClicked(self):
     try:
         imgData = StringIO()
         self.imageWidget.figure.savefig(imgData, format="svg")  # dpi=...)
         imgData.flush()
         imgData.seek(0)
         svgData = imgData.read()
         svgRenderer = qt.QSvgRenderer()
         svgRenderer.load(qt.QXmlStreamReader(svgData.encode(errors="replace")))
         self.printPreview.addSvgItem(svgRenderer)
     except:
         try:
             if hasattr(qt.QPixmap,"grabWidget"):
                 pixmap = qt.QPixmap.grabWidget(self.imageWidget)
             else:
                 pixmap = self.imageWidget.grab()
             self.printPreview.addPixmap(pixmap)
         except:
             msg = qt.QMessageBox(self)
             msg.setIcon(qt.QMessageBox.Critical)
             msg.setText("Error printing image: %s" % sys.exc_info()[1])
             msg.setWindowTitle('Matplotlib Save Image')
             msg.exec_()
             return
     if self.printPreview.isHidden():
         self.printPreview.show()
     self.printPreview.raise_()
Example #20
 def _parser_to_string_io(parser):
     """Turns a ConfigParser into a StringIO stream."""
     memory_file = StringIO()
     parser.write(memory_file)
     memory_file.flush()
     memory_file.seek(0)
     return memory_file
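For context, a self-contained sketch of the same pattern: ConfigParser.write() accepts any text stream, so an in-memory StringIO can stand in for a file (the section and option names below are made up for illustration).

from configparser import ConfigParser
from io import StringIO

parser = ConfigParser()
parser["server"] = {"host": "localhost", "port": "8080"}

memory_file = StringIO()
parser.write(memory_file)   # ConfigParser writes INI text to the stream
memory_file.flush()
memory_file.seek(0)
print(memory_file.read())   # [server] / host = localhost / port = 8080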
Example #21
def dumpIO_source(object, **kwds):
    """write object source to a buffer (instead of dill.dump)
Loads with dill.temp.loadIO_source.  Returns the buffer object.

    >>> f = lambda x:x**2
    >>> pyfile = dill.temp.dumpIO_source(f, alias='_f')
    >>> _f = dill.temp.loadIO_source(pyfile)
    >>> _f(4)
    16

Optional kwds:
    If 'alias' is specified, the object will be renamed to the given string.
    """
    from .source import importable, getname
    if PY3:
        from io import BytesIO as StringIO
    else:
        from StringIO import StringIO
    alias = kwds.pop('alias', '')  #XXX: include an alias so a name is known
    name = str(alias) or getname(object)
    name = "\n#NAME: %s\n" % name
    #XXX: assumes kwds['dir'] is writable and on $PYTHONPATH
    file = StringIO()
    file.write(b(''.join([importable(object, alias=alias), name])))
    file.flush()
    return file
Example #23
def create_movies_similarity(md):
    tf = CountVectorizer(analyzer='word',
                         ngram_range=(1, 2),
                         min_df=0,
                         stop_words='english')
    count_matrix = tf.fit_transform(md['soup'])
    cosine_sim = cosine_similarity(count_matrix, count_matrix)
    md.set_index(md['id'], inplace=True)
    cols = md.index.values
    inx = md.index
    movies_sim = pd.DataFrame(cosine_sim, columns=cols, index=inx)

    output = StringIO()
    csv_writer = writer(output)

    csv_writer.writerow(['id', 'sim_movieId', 'relevance'])
    for x in movies_sim.index.tolist():
        for row in get_similar(x, movies_sim).iterrows():
            csv_writer.writerow(row[1])

    output.seek(0)  # we need to get back to the start of the StringIO
    movies_similarity = pd.read_csv(output)
    output.flush()
    output.close()
    return movies_similarity
Example #25
def _run_multi_helper(func, i, args, kwargs, log_level):  # pragma: no cover
    # Note: This is covered by test_wcs.py:test_parallel, but for some reason it's not
    # showing up in codecov.  It's supposed to get captured by the combination of
    # concurrency=multiprocessing and calling coverage combine before uploading.
    # We're doing both of those things, but it's still not showing up.
    from io import StringIO
    import logging

    # In multiprocessing, we cannot pass in the logger, so log to a string and then
    # return that back at the end to be logged by the parent process.
    logger = logging.getLogger('logtostring_%d' % i)
    buf = StringIO()
    handler = logging.StreamHandler(buf)
    logger.addHandler(handler)
    logger.setLevel(
        log_level)  # Input logger in this case is the level to use.

    try:
        out = func(*args, logger=logger, **kwargs)
    except Exception as e:
        # Exceptions don't propagate through multiprocessing.  So best alternative
        # is to catch it and return it.  We can deal with it somehow on the other end.
        # Also add more details here with verbose>=2 to help with debugging.
        tr = traceback.format_exc()
        logger.info("Caught exception:\n%s", tr)
        out = e

    handler.flush()
    buf.flush()
    return i, out, buf.getvalue()
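The same capture-log-output-in-memory idea, reduced to a self-contained sketch (the logger name and message below are illustrative only):

import logging
from io import StringIO

buf = StringIO()
handler = logging.StreamHandler(buf)          # handler writes into the buffer
logger = logging.getLogger("capture_demo")
logger.addHandler(handler)
logger.setLevel(logging.INFO)

logger.info("work item %d done", 3)

handler.flush()
buf.flush()
print(buf.getvalue())                         # "work item 3 done"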
Example #26
        def parse_pdf_log(self, logfile):
            """
            Strip down tex output to only the warnings, errors etc. and discard all the noise
            :param logfile:
            :return: string
            """
            with logger.debug("Parsing LaTeX log file"):
                from io import StringIO
                log_buffer = StringIO()
                log_handler = logging.StreamHandler(log_buffer)

                typesetter = Typesetter(self.tmp('tex'))
                typesetter.halt_on_errors = False

                handlers = typesetter.logger.handlers
                for handler in handlers:
                    typesetter.logger.removeHandler(handler)

                typesetter.logger.addHandler(log_handler)
                typesetter.process_log(logfile)

                typesetter.logger.removeHandler(log_handler)

                log_handler.flush()
                log_buffer.flush()

                return log_buffer.getvalue()
Example #27
def dict_to_csv(dicts: list,
                output_file: str = "",
                csv_delimiter: str = ";",
                write_header: bool = True):
    if output_file:
        output = open(output_file, mode="w", encoding="utf-8")
    else:
        output = StringIO()

    if not dicts:
        if output_file:
            write_empty_file(output_file)
            return
        else:
            return ""

    w = csv.DictWriter(output, dicts[0].keys(), delimiter=csv_delimiter)
    if write_header:
        w.writeheader()
    w.writerows(dicts)

    if not output_file:
        contents = output.getvalue()
        output.close()
        return contents
    else:
        output.flush()
        output.close()
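A hypothetical call to the dict_to_csv() helper above, using its in-memory branch (no output_file), so the CSV text is returned as a string:

rows = [
    {"name": "ada", "lang": "python"},
    {"name": "grace", "lang": "cobol"},
]
csv_text = dict_to_csv(rows)
# csv_text is roughly "name;lang\r\nada;python\r\ngrace;cobol\r\n"
# (the helper uses ';' as delimiter; csv.DictWriter terminates lines with \r\n by default)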
Example #28
def github_release():
    msg = request.get_json()

    buffer = StringIO()
    logHandler = logging.StreamHandler(buffer)
    logHandler.setLevel(logging.INFO)
    formatter = logging.Formatter(
        "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
    )
    logHandler.setFormatter(formatter)
    logger.addHandler(logHandler)

    logger.debug(
        f"Received release event: "
        f"{msg['repository']['owner']}/{msg['repository']['name']} - {msg['release']['tag_name']}"
    )

    config = Config()
    api = PackitBotAPI(config)
    # Using fedmsg since the fields are the same
    api.sync_upstream_release_with_fedmsg({"msg": msg})

    logger.removeHandler(logHandler)
    buffer.flush()

    return buffer.getvalue()
Example #29
def insert_into_file(fileobj, data, start, end):
    """
    Insert data into fileobj at position C{start}.

    This function inserts data into a file, overwriting all data between start
    and end. If end == start no data is overwritten. Do not use this function to
    append data to a file.

    @param fileobj: file like object
    @param data:    data to be inserted into fileobj
    @param start:   The position at which to start inserting data
    @param end:     The position in fileobj of data that must not be overwritten
    @return:        C{start + len(data) - end}
    """
    buffer = StringIO()
    fileobj.seek(end)
    copyfileobj(fileobj, buffer, -1)
    buffer.flush()
    buffer.seek(0)
    fileobj.seek(start)
    fileobj.write(data)
    fileobj.flush()
    fileobj.truncate()
    delta = fileobj.tell() - end  # < 0 if len(data) < end-start
    copyfileobj(buffer, fileobj, -1)
    fileobj.flush()
    buffer.close()
    return delta
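A quick, hypothetical check of insert_into_file() above (it assumes StringIO and shutil.copyfileobj are already imported by the surrounding module):

f = StringIO("hello world")
delta = insert_into_file(f, "XX", 5, 5)   # insert at position 5, overwrite nothing
assert f.getvalue() == "helloXX world"
assert delta == 2                         # start + len(data) - end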
Example #30
def dumpIO_source(object, **kwds):
    """write object source to a buffer (instead of dill.dump)
Loads with dill.temp.loadIO_source.  Returns the buffer object.

    >>> f = lambda x:x**2
    >>> pyfile = dill.temp.dumpIO_source(f, alias='_f')
    >>> _f = dill.temp.loadIO_source(pyfile)
    >>> _f(4)
    16

Optional kwds:
    If 'alias' is specified, the object will be renamed to the given string.
    """
    from .source import importable, getname
    if PY3:
        from io import BytesIO as StringIO
    else:
        from StringIO import StringIO
    alias = kwds.pop('alias', '') #XXX: include an alias so a name is known
    name = str(alias) or getname(object)
    name = "\n#NAME: %s\n" % name
    #XXX: assumes kwds['dir'] is writable and on $PYTHONPATH
    file = StringIO()
    file.write(b(''.join([importable(object, alias=alias),name])))
    file.flush()
    return file
Example #32
def bcompile(source):
    """Return the compiled bytecode from the given filename as a string ."""
    f = open(source, 'U')
    try:
        try:
            timestamp = int(os.fstat(f.fileno()).st_mtime)
        except AttributeError:
            timestamp = int(os.stat(source).st_mtime)
        codestring = f.read()
        f.close()
        if codestring and codestring[-1] != '\n':
            codestring = codestring + '\n'
        try:
            codeobject = builtins.compile(codestring, source, 'exec')
        except Exception as err:
            raise PyCompileError(err.__class__, err.args, source)
        fc = StringIO()
        try:
            fc.write('\0\0\0\0')
            wr_long(fc, timestamp)
            fc.write(marshal.dumps(codeobject))
            fc.flush()
            fc.seek(0, 0)
            fc.write(MAGIC)
            return fc.getvalue()
        finally:
            fc.close()
    finally:
        f.close()
Example #33
    def redirect_log(self, host: str, addr: str) -> Iterator[None]:
        log_string = StringIO()
        ch = logging.StreamHandler(log_string)
        ch.setLevel(logging.DEBUG)
        asyncssh_logger.addHandler(ch)

        try:
            yield
        except OSError as e:
            self.mgr.offline_hosts.add(host)
            log_content = log_string.getvalue()
            msg = f"Can't communicate with remote host `{addr}`, possibly because python3 is not installed there. {str(e)}" + \
                '\n' + f'Log: {log_content}'
            logger.exception(msg)
            raise OrchestratorError(msg)
        except asyncssh.Error as e:
            self.mgr.offline_hosts.add(host)
            log_content = log_string.getvalue()
            msg = f'Failed to connect to {host} ({addr}). {str(e)}' + '\n' + f'Log: {log_content}'
            logger.debug(msg)
            raise OrchestratorError(msg)
        except Exception as e:
            self.mgr.offline_hosts.add(host)
            log_content = log_string.getvalue()
            logger.exception(str(e))
            raise OrchestratorError(
                f'Failed to connect to {host} ({addr}): {repr(e)}' + '\n'
                f'Log: {log_content}')
        finally:
            log_string.flush()
            asyncssh_logger.removeHandler(ch)
Example #34
def main_script(page, rev=None, params=NotImplemented):  # pylint: disable=unused-argument
    """Main thread."""
    # http://opensourcehacker.com/2011/02/23/temporarily-capturing-python-logging-output-to-a-string-buffer/

    # safety; default mode is safe (no writing)
    pywikibot.config.simulate = True

    pywikibot.output(u'--- ' * 20)

    buffer = StringIO()
    rootLogger = logging.getLogger()

    logHandler = logging.StreamHandler(buffer)
    formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
    logHandler.setFormatter(formatter)
    rootLogger.addHandler(logHandler)

    sys.stdout = buffer
    sys.stderr = buffer

    # all output to logging and stdout/stderr is caught BUT NOT lua output (!)
    if rev is None:
        code = page.get()               # shell; "on demand"
    else:
        code = page.getOldVersion(rev)  # crontab; scheduled
    try:
        exec(code)
    except:
        # (done according to subster in trunk and submit in rewrite/.../data/api.py)
        pywikibot.exception(tb=True)  # secure traceback print (from api.py submit)

    sys.stdout = sys.__stdout__
    sys.stderr = sys.__stderr__

    # Remove our handler
    rootLogger.removeHandler(logHandler)

    logHandler.flush()
    buffer.flush()

    pywikibot.output(u'--- ' * 20)

    # safety; restore settings
    pywikibot.config.simulate = __simulate
    sys.argv = __sys_argv
    if resource:
        pywikibot.output(
            u'environment: garbage; %s / memory; %s / members; %s' % (
                gc.collect(),
                resource.getrusage(resource.RUSAGE_SELF).ru_maxrss * resource.getpagesize(),
                len(dir())))
    else:
        pywikibot.output(
            u'environment: garbage; %s / members; %s' % (
                gc.collect(), len(dir())))
    # 'len(dir())' is equivalent to 'len(inspect.getmembers(__main__))'

    # append result to output page
    if rev is None:
        wiki_logger(buffer.getvalue(), page, rev)
Example #35
def main_script(page, rev=None, params=NotImplemented):  # pylint: disable=unused-argument
    """Main thread."""
    # http://opensourcehacker.com/2011/02/23/temporarily-capturing-python-logging-output-to-a-string-buffer/
    # https://docs.python.org/release/2.6/library/logging.html
    from io import StringIO
    import logging

    # safety; default mode is safe (no writing)
    pywikibot.config.simulate = True

    pywikibot.output(u'--- ' * 20)

    buffer = StringIO()
    rootLogger = logging.getLogger()

    logHandler = logging.StreamHandler(buffer)
    formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
    logHandler.setFormatter(formatter)
    rootLogger.addHandler(logHandler)

    sys.stdout = buffer
    sys.stderr = buffer

    # all output to logging and stdout/stderr is caught BUT NOT lua output (!)
    if rev is None:
        code = page.get()               # shell; "on demand"
    else:
        code = page.getOldVersion(rev)  # crontab; scheduled
    try:
        exec(code)
    except:
        # (done according to subster in trunk and submit in rewrite/.../data/api.py)
        pywikibot.exception(tb=True)  # secure traceback print (from api.py submit)

    sys.stdout = sys.__stdout__
    sys.stderr = sys.__stderr__

    # Remove our handler
    rootLogger.removeHandler(logHandler)

    logHandler.flush()
    buffer.flush()

    pywikibot.output(u'--- ' * 20)

    # safety; restore settings
    pywikibot.config.simulate = __simulate
    sys.argv = __sys_argv

    pywikibot.output(
        u'environment: garbage; %s / memory; %s / members; %s' % (
            gc.collect(),
            resource.getrusage(resource.RUSAGE_SELF).ru_maxrss * resource.getpagesize(),
            len(dir())))
    # 'len(dir())' is equivalent to 'len(inspect.getmembers(__main__))'

    # append result to output page
    if rev is None:
        wiki_logger(buffer.getvalue(), page, rev)
Example #36
class _LoggingTee(object):
    """A tee object to redirect streams to the logger."""

    def __init__(self, src_filename):
        self.logger = logger
        self.src_filename = src_filename
        self.logger_buffer = ''
        self.set_std_and_reset_position()

    def set_std_and_reset_position(self):
        if not isinstance(sys.stdout, _LoggingTee):
            self.origs = (sys.stdout, sys.stderr)
        sys.stdout = sys.stderr = self
        self.first_write = True
        self.output = StringIO()
        return self

    def restore_std(self):
        sys.stdout.flush()
        sys.stderr.flush()
        sys.stdout, sys.stderr = self.origs

    def write(self, data):
        self.output.write(data)

        if self.first_write:
            self.logger.verbose('Output from %s', self.src_filename,
                                color='brown')
            self.first_write = False

        data = self.logger_buffer + data
        lines = data.splitlines()
        if data and data[-1] not in '\r\n':
            # Wait to write last line if it's incomplete. It will write next
            # time or when the LoggingTee is flushed.
            self.logger_buffer = lines[-1]
            lines = lines[:-1]
        else:
            self.logger_buffer = ''

        for line in lines:
            self.logger.verbose('%s', line)

    def flush(self):
        self.output.flush()
        if self.logger_buffer:
            self.logger.verbose('%s', self.logger_buffer)
            self.logger_buffer = ''

    # When called from a local terminal seaborn needs it in Python3
    def isatty(self):
        return self.output.isatty()

    # When called in gen_rst, conveniently use context managing
    def __enter__(self):
        return self

    def __exit__(self, type_, value, tb):
        self.restore_std()
Example #37
def export_csv_iter(*args, **kwargs):
    s = StringIO()
    w = csv.writer(s)
    for row in export_iter(*args, **kwargs):
        w.writerow(row)
        s.flush()
        yield s.getvalue()
        s.seek(0)       # io.StringIO.truncate() does not move the position
        s.truncate(0)
Example #38
    def flush(self):
        """

        :type self: StringIONotifying or StringIO
        """
        # noinspection PyArgumentList
        StringIO.flush(self)
        self.listener()
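The type hint refers to a StringIONotifying class that is not shown; a minimal, hypothetical sketch consistent with this flush override could be:

from io import StringIO

class StringIONotifying(StringIO):
    # Hypothetical: a StringIO that invokes a callback whenever it is flushed.
    def __init__(self, listener):
        super().__init__()
        self.listener = listener

    def flush(self):
        StringIO.flush(self)
        self.listener()

events = []
buf = StringIONotifying(listener=lambda: events.append(buf.getvalue()))
buf.write("progress: 50%")
buf.flush()
assert events == ["progress: 50%"]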
Example #39
 def flush(self):
     self.file.flush()
     #super(TeeFile, self).flush()
     StringIO.flush(self)
     if self.queue is not None:
         data = (current_process().pid, ''.join(self.queue_buffer)) # pylint: disable=E1102
         self.queue.put(data)
         self.queue_buffer = []
Example #40
def command_name(cls):
    '''Command names are calculated as class names in lower case inserting a
    hyphen before each new capital letter. For example "MyCommand" will be
    used as "my-command".

    It's defined as an external function because a class method doesn't apply to
    minimal commands (those with only the "run" method).

    Example::

        >>> class SomeCommand(object):
        ...     pass

        >>> command_name(SomeCommand) == 'some-command'
        True

    If the command class has an attribute `command_cli_name`, this will be
    used instead::

        >>> class SomeCommand(object):
        ...    command_cli_name = 'adduser'

        >>> command_name(SomeCommand) == 'adduser'
        True

    It's an error to have a non-string `command_cli_name` attribute::

        >>> class SomeCommand(object):
        ...    command_cli_name = None

        >>> command_name(SomeCommand)  # doctest: +ELLIPSIS
        Traceback (most recent call last):
           ...
        TypeError: Attribute 'command_cli_name' must be a string.

    '''
    Unset = object()
    res = getattr(cls, 'command_cli_name', Unset)
    if res is not Unset:
        from xoutil.eight import string_types
        if not isinstance(res, string_types):
            raise TypeError("Attribute 'command_cli_name' must be a string.")
    else:
        from io import StringIO
        from xoutil.string import safe_decode
        buf = StringIO()
        start = True
        for letter in cls.__name__:
            if letter.isupper():
                if not start:
                    buf.write(safe_decode('-'))
                letter = letter.lower()
            buf.write(safe_decode(letter))
            start = False
        buf.flush()
        res = buf.getvalue()
        buf.close()
    return res
Example #41
class cnabWriter(object):
    def __init__(self,banco):
        self.banco = banco
        self.file = StringIO()
    def parse_json(self,path,data):
        with open(os.path.join(script_dir, path)) as data_file:
            json_model = json.load(data_file)
        tmp_line = [' '] * 241
        for item in json_model['campos']:
            length_field = int(item['posicao_fim']) - int(item['posicao_inicio']) + 1
            if item['nome'] in data:
                value = str(data[item['nome']])
            elif 'default' in item:
                value = str(item['default'])
            else:
                value = ''
            len_value = len(value)
            comp = length_field - len_value
            if comp > 0:
                default_field = '0'*comp if item['formato'] == 'num' else ' ' * comp
            else:
                default_field = ''
                value = value[:int(item['posicao_fim'])]
            if item['formato'] == 'alfa':
                value_field = value + default_field
            elif item['formato'] == 'num':
                value_field = default_field + value
            tmp_line[item['posicao_inicio']:item['posicao_fim']] = value_field
        del tmp_line[0]
        return ''.join(tmp_line[:240])
    def header_arquivo(self,data):
        rel_path = 'bancos/'+self.banco+'/header_arquivo.json'
        self.file.write(self.parse_json(rel_path,data)+'\n')
    def header_lote(self,data):
        rel_path = 'bancos/'+self.banco+'/header_lote.json'
        self.file.write(self.parse_json(rel_path,data)+'\n')
    def segmento_p(self,data):
        rel_path = 'bancos/'+self.banco+'/segmento_p.json'
        self.file.write(self.parse_json(rel_path,data)+'\n')
    def segmento_q(self,data):
        rel_path = 'bancos/'+self.banco+'/segmento_q.json'
        self.file.write(self.parse_json(rel_path,data)+'\n')
    def segmento_r(self,data):
        rel_path = 'bancos/'+self.banco+'/segmento_r.json'
        self.file.write(self.parse_json(rel_path,data)+'\n')
    def trailer_lote(self,data):
        rel_path = 'bancos/'+self.banco+'/trailer_lote.json'
        self.file.write(self.parse_json(rel_path,data)+'\n')
    def trailer_arquivo(self,data):
        rel_path = 'bancos/'+self.banco+'/trailer_arquivo.json'
        self.file.write(self.parse_json(rel_path,data)+'\n')
    def close(self):
        self.file.flush()
        self.file.seek(0)
        return self.file.read()
Example #42
def string2file(raw_string):
    """The function return a file like object contaiing the given string.
    """
    filelike = StringIO()
    if sys.version_info[0] < 3:  # Python 2
        filelike.write(unicode(raw_string))
    else:  # Python 3
        filelike.write(raw_string)
    filelike.flush()
    filelike.seek(0)
    return filelike
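Illustrative use of string2file() above: the returned object behaves like an open text file positioned at the start.

filelike = string2file("line 1\nline 2\n")
assert filelike.readline() == "line 1\n"
assert filelike.read() == "line 2\n"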
Example #43
class StringFileDescriptor(object):
    """File descriptor that writes to string buffer"""
    def __init__(self, fd):
        self.out = StringIO()
        self.fd = fd
        for a in ['encoding']:
            setattr(self, a, getattr(sys_stdout, a))

    def write(self, *data, **kwargs):
        # io.StringIO requires unicode strings.
        print(unicode(*data), file=self.out, end='')

    def flush(self):
        self.out.flush()
Example #44
    def _make_key_file(self, data):
        if hasattr(data, 'readlines'):
            key_file = data
        else:
            key_file = StringIO()
            key_file.write(data)
            key_file.flush()
            key_file.seek(0)

        my_pkey = paramiko.RSAKey.from_private_key(
            key_file
        )

        return my_pkey
Example #45
def dumpIO(object, **kwds):
    """dill.dump of object to a buffer.
Loads with "dill.temp.loadIO".  Returns the buffer object.

    >>> dumpfile = dill.temp.dumpIO([1, 2, 3, 4, 5])
    >>> dill.temp.loadIO(dumpfile)
    [1, 2, 3, 4, 5]
    """
    import dill as pickle
    if PY3:
        from io import BytesIO as StringIO
    else:
        from StringIO import StringIO
    file = StringIO()
    pickle.dump(object, file)
    file.flush()
    return file
Example #46
    def from_data_frame(df, table_id, table_type, table_name='',
                        document_name='', document='', unit='',
                        sbtab_version='1.0'):
        '''
        Creates SBtab table object from pandas dataframe.

        Parameters
        ----------
        df : pandas.DataFrame
            Dataframe of the Python library pandas.
        table_id: str
            Mandatory table ID for the SBtab object.
        table_type: str
            Mandatory table type for the SBtab object.
        table_name: str
            Optional table name for the SBtab object.
        document_name: str
            Optional document name for the SBtab object.
        document: str
            Optional document for the SBtab object.
        unit: str
            Optional unit for an SBtab TableType Quantity.
        sbtab_version: str
            Optional SBtab Version.

        Returns: SBtab.SBtabTable
            SBtab table object created from pandas dataframe.
        '''        
        table_string = StringIO()
        csv_writer = csv.writer(table_string, delimiter=',')

        header = [('TableID', table_id),
                  ('TableType', table_type),
                  ('TableName', table_name or table_id),
                  ('DocumentName', document_name),
                  ('Document', document),
                  ('Unit', unit),
                  ('SBtabVersion', sbtab_version)]
        
        header_strings = ['!!SBtab'] + list(map(lambda x: "%s='%s'" % x, header))
        
        csv_writer.writerow([' '.join(header_strings)] + [''] * (df.shape[1]-1))
        csv_writer.writerow(map(lambda s: '!' + s, df.columns))
        csv_writer.writerows([row.tolist() for _, row in df.iterrows()])
        table_string.flush()
        
        return SBtabTable(table_string.getvalue(), 'unnamed_sbtab.tsv')
Example #47
    def table_from_html(self, html):
        soup = BeautifulSoup(html, 'html.parser')
        try:
            html_table = soup.find_all('table')[-1]
        except IndexError:
            raise DataEmptyError

        if '<h2>Anal' in html or 'div_analiza_' in html:
            raise DataIsAnalError

        def _header_row_strings(row):
            return chain.from_iterable(
                repeat(th.get_text(), int(th.get('colspan') or 1))
                for th in html_table.select('thead tr:nth-of-type(%d) th[title]' % row))

        # self.DATETIME_VAR (available when Paradata is enabled in 1ka UI)
        # should match this variable name format
        header = [th1.rstrip(':') + ('' if th3 == th1 else ' ({})').format(th3.rstrip(':'))
                  for th1, th3 in zip(_header_row_strings(1),
                                      _header_row_strings(3))]
        values = [[(# If no span, feature is a number or a text field
                    td.get_text() if td.span is None else
                    # If have span, it's a number, but if negative, replace with NaN
                    '' if td.contents[0].strip().startswith('-') else
                    # Else if span, the number is its code, but we want its value
                    td.span.get_text()[1:-1])
                   for td in tr.select('td')
                   if 'data_uid' not in td.get('class', ())]
                  for tr in html_table.select('tbody tr')]

        # Save parsed values into in-mem file for default values processing
        buffer = StringIO()
        writer = csv.writer(buffer, delimiter='\t')
        writer.writerow(header)
        writer.writerows(values)
        buffer.flush()
        buffer.seek(0)

        data = TabReader(buffer).read()

        title = soup.select('body h2:nth-of-type(1)')[0].get_text().split(': ', maxsplit=1)[-1]
        data.name = title

        return data
Example #48
File: tests.py Project: xigt/freki
    def test_read(self):
        args = namedtuple('args', ('format', 'infile', 'outfile'))
        args.format = 'tetml'
        args.infile = self.tetml_path
        inout = StringIO()
        args.outfile = inout
        args.block = 0.
        args.deindent_blocks = False

        # Run the tetml to freki conversion
        run_freki.run(args)

        # Retrieve the contents of the output
        inout.flush()
        inout.seek(0)
        outstr = inout.read()

        # Compare that against the saved document.
        freki_f = open(self.freki_path, 'r')

        self.assertEqual(freki_f.read(), outstr)
Example #49
class TestPrefilterFrontEnd(PrefilterFrontEnd):
    
    input_prompt_template = string.Template('')
    output_prompt_template = string.Template('')
    banner = ''

    def __init__(self):
        self.out = StringIO()
        PrefilterFrontEnd.__init__(self)
        # Some more code for isolation (yeah, crazy)
        self._on_enter()
        self.out.flush()
        self.out.reset()
        self.out.truncate()

    def write(self, string, *args, **kwargs):
       self.out.write(string) 

    def _on_enter(self):
        self.input_buffer += '\n'
        PrefilterFrontEnd._on_enter(self)
Example #50
    def repl(
        self,
        prompt='lispy> ',
        inport=InPort(sys.stdin),
        out=sys.stdout,
        err=sys.stderr,
        return_value=False,
        catch_exceptions=True
    ):
        "A prompt-read-eval-print loop."
        if out is None:
            out = StringIO()

        if err is None:
            err = StringIO()

        while True:
            try:
                if prompt:
                    sys.stderr.write(prompt)
                x = parse(inport)
                if x is EOF_OBJECT:
                    return
                val = eval(x)
                if val is not None and out and return_value is False:
                    err.write(to_string(val) + "\n")
                    err.flush()
                elif return_value:
                    return val
            except Exception as e:
                if catch_exceptions:
                    exc_type, exc_value, exc_traceback = sys.exc_info()
                    traceback.print_exception(
                        exc_type,
                        exc_value,
                        exc_traceback
                    )
                else:
                    raise e
class RubyDebuggerConnector(DebuggerConnector):
	"""Connector used to communication with debugged process"""
	def __init__(self, debugger):
		super(RubyDebuggerConnector, self).__init__(debugger)
		self.debugger = debugger
		self.process = None
		self.client = None
		self.control_client = None
		self.connected = False
		self.ruby_version = None

	def start(self, current_directory, file_name, *args):
		'''
		Start and attach the process
		'''
		# Validate ruby versions and gem version
		if not self.validation_environment():
			return

		# Start the debuggee process
		self.start_process(current_directory, file_name, args)

		# Try to connect to process with sockets
		if not self.connect_debugger():
			return

		# Start read from socket, output, errors
		self.errors_reader = self.start_tread(lambda stream = self.process.stderr: self.output_thread(stream))
		self.outputer = self.start_tread(lambda stream = self.process.stdout: self.output_thread(stream))
		self.reader = self.start_tread(self.reader_thread)

	def validation_environment(self):
		try:
			self.ruby_version = subprocess.Popen(["ruby", PathHelper.get_ruby_version_discoverer()], stdout=subprocess.PIPE).communicate()[0].splitlines()
		except Exception as ex:
			self.log_message("Connection could not start process: "+str(ex)+'\n')
			return False

		self.ruby_version[0] = self.ruby_version[0].decode("UTF-8")
		self.ruby_version[1] = self.ruby_version[1].decode("UTF-8")

		if self.ruby_version[1] == "UNSUPPORTED":
			self.log_message("Ruby version: "+self.ruby_version[0]+" is not supported.")
			return False

		return True

	def start_process(self, current_directory, file_name, args):
		# Initialize params according to OS type
		if os.name == "posix":
			# On Unix using exec and shell to get environment variables of ruby version
			process_params = "exec ruby '-C"+current_directory+"' '-r"+PathHelper.get_sublime_require()+"' '"+ file_name+"' "
			process_params += " ".join(args)
			self.process = subprocess.Popen(process_params, stdin = subprocess.PIPE, stderr = subprocess.PIPE, stdout=subprocess.PIPE, bufsize=1, shell=True)
		else:
			# On Windows not using shell, so the process is not visible to the user
			process_params = ["ruby", "-C"+current_directory, "-r"+PathHelper.get_sublime_require(), file_name]
			process_params += args
			self.process = subprocess.Popen(process_params, stdin = subprocess.PIPE, stderr = subprocess.PIPE, stdout=subprocess.PIPE, bufsize=1, shell=False)

	def connect_debugger(self):
		self.data = StringIO()
		self.requests = Queue()
		self.requests.put({"signal":False, "reason":"get_location"})

		self.connected = False
		self.log_message("Connecting... ")
		for i in range(1,9):
			try:
				self.client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
				self.client.connect(("localhost", 8989))
				self.control_client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
				self.control_client.connect(("localhost", 8990))
				self.connected = True
				self.log_message("Connected"+'\n')
				break
			except Exception as ex:
				if i == 8:
					self.log_message("Connection could not be made: "+str(ex)+'\n')
					return False
				else:
					time.sleep(1)

		return True

	def start_tread(self, threads_method):
		thread  = Thread(target=threads_method)
		thread.daemon = True
		thread.start()
		return thread

	def output_thread(self, stream):
		# Always read stream
		try:
			while True:
				bytes = stream.readline()

				if len(bytes) == 0:
					break

				result = str(bytes, "UTF-8")
				self.log_message(result)
		except Exception:
			pass

	def reader_thread(self):
		# Always read stream
		try:
			while True:
				bytes = self.client.recv(4096)

				if len(bytes) == 0:
					break

				result = str(bytes, "UTF-8")
				self.data.write(result)
				self.data.flush()
				# self.log_message(result)

				if self.has_end_stream():
					self.handle_response()

		except Exception as ex:
			if self.connected:
				self.log_message("Debugger exception: "+str(ex)+'\n'+" StackTrace: "+traceback.format_exc())
				self.connected = False

		self.outputer.join()
		self.errors_reader.join()

		# Signal that the process has exited
		self.log_message("Debugger stopped")
		self.debugger.signal_process_ended()

	def handle_response(self):
		results = self.split_by_results()
		next_result = results.pop()

		for result in results:
			if result:
				pass

			file_name, line_number = self.get_current_position()

			# Check whether position was updated
			if file_name != "" and not PathHelper.is_same_path(PathHelper.get_sublime_require(), file_name):
				self.debugger.signal_position_changed(file_name, line_number)
				# self.log_message("New position: "+file_name+":"+str(line_number))

			try:
				request = self.requests.get_nowait()
				# self.log_message("Pop request: "+str(request)+", current queue size: "+str(self.requests.qsize())+", request result:"+result)

				# Check if should return the result
				if request["signal"]:
					prefix = request.get("prefix")
					data = result.strip()

					if prefix:
						data = (prefix, data)

					# Return result
					self.debugger.signal_text_result(data, request["reason"])
				else:
					pass

				if PathHelper.is_same_path(PathHelper.get_sublime_require(), file_name):
					self.debugger.run_command(DebuggerModel.COMMAND_CONTINUTE)
			except queue.Empty:
				pass

		self.data = StringIO()
		self.data.write(next_result)

	def send_data(self, command, reason):
		self.requests.put({"signal": False, "reason": reason, "command": command})
		self.send_data_internal(command)

	def send_without_outcome(self, command):
		self.send_data_internal(command)

	def send_input(self, command):
		self.process.stdin.write(bytes(command+'\n',"UTF-8"))
		self.process.stdin.flush()

	def send_control_command(self, command):
		if not self.connected:
			return

		try:
			self.control_client.sendall(bytes(command+'\n', 'UTF-8'))
		except Exception as e:
			if self.connected:
				self.log_message("Failed communicate with process ("+command+"): "+str(e))

	def send_data_internal(self, command):
		if not self.connected:
			return

		try:
			self.client.sendall(bytes(command+'\n', 'UTF-8'))
		except Exception as e:
			if self.connected:
				self.log_message("Failed communicate with process ("+command+"): "+str(e))

	def send_for_result(self, command, reason):
		self.requests.put({"signal": True, "reason": reason, "command": command})
		self.send_data_internal(command)

	def send_with_result(self, command, reason, prefix):
		self.requests.put({"signal": True, "prefix": prefix, "reason": reason, "command": command})
		self.send_data_internal(command)

	def split_by_results(self):
		result = [""]
		for line in self.get_lines():
			if self.debugger.match_ending(self.ruby_version[0], line):
				result.insert(len(result), "")
			else:
				result[len(result)-1] += line + '\n'

		return result

	def has_end_stream(self):
		end_of_stream = False
		for line in self.get_lines():
				if self.debugger.match_ending(self.ruby_version[0], line):
					end_of_stream = True;

		return end_of_stream

	def get_current_position(self):
		current_line = -1
		current_file = ""
		end_of_stream = False

		for line in self.get_lines():
			match = self.debugger.match_line_cursor(self.ruby_version[0], line)

			if match:
				current_line = match.groups()[0]

			match = self.debugger.match_file_cursor(self.ruby_version[0], line)
			if match:
				current_file = match.groups()[0]

		return current_file, int(current_line)

	def get_lines(self):
		return self.data.getvalue().split('\n')

	def stop(self):
		self.connected = False
		self.log_message("Stopping...")
		self.send_control_command("kill")
		if self.process:
			self.process.kill()
		self.process = None
class RubyDebuggerConnector(DebuggerConnector):
	"""Connector used to communication with debugged process"""
	def __init__(self, debugger):
		super(RubyDebuggerConnector, self).__init__(debugger)
		self.debugger = debugger
		self.process = None
		self.client = None
		self.control_client = None
		self.connected = False
		self.ruby_version = None
		self.ruby_protocol_type = None
		self.errors_reader = None
		self.outputer = None
		self.reader = None

	def set_settings(self, use_bundler, ruby_binaries, ruby_supported_versions, ruby_arguments, debug_logs):
		self.settings_use_bundler = use_bundler
		self.settings_ruby_binaries = ruby_binaries
		self.settings_ruby_supported_versions = ruby_supported_versions
		self.settings_ruby_arguments = ruby_arguments
		self.settings_debug_logs = debug_logs

	def start(self, current_directory, file_name, *args):
		'''
		Start and attach the process
		'''
		# Validate Ruby version and gem version
		if not self.validation_environment():
			return

		# Start the debuggee process
		self.start_process(current_directory, file_name, args)

		# Start reading from the socket, output and errors
		self.errors_reader = self.start_tread(lambda stream = self.process.stderr: self.output_thread(stream))
		self.outputer = self.start_tread(lambda stream = self.process.stdout: self.output_thread(stream))

		# Try to connect to process with sockets
		if not self.connect_debugger():
			return

		self.reader = self.start_tread(self.reader_thread)

	def validation_environment(self):
		try:
			if os.name == "posix":
				# Fixing permissions
				subprocess.Popen("bash -c \"chmod +x '" + PathHelper.get_ruby_executor() + "'\"", stdin = subprocess.PIPE, stderr = subprocess.PIPE, stdout=subprocess.PIPE, bufsize=1, shell=True).communicate()[0]

				# On Unix using rvm and bash
				ruby_binaries = "'" + self.settings_ruby_binaries+"'"

				# On Unix, use exec and a shell to pick up the environment variables of the Ruby version
				process_command = "'"+PathHelper.get_ruby_executor()+"' " + ruby_binaries + " False '" + PathHelper.get_ruby_version_discoverer() + "'"
				process_params = ["bash", "-c", "\""+process_command+"\""]
				self.ruby_version = subprocess.Popen(" ".join(process_params), stdin = subprocess.PIPE, stderr = subprocess.PIPE, stdout=subprocess.PIPE, bufsize=1, shell=True).communicate()[0]
			else:
				# On Windows, don't use a shell so the process window is not visible to the user
				startupinfo = subprocess.STARTUPINFO()
				startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
				process_params = ["ruby", PathHelper.get_ruby_version_discoverer()]
				self.ruby_version = subprocess.Popen(process_params, stdout=subprocess.PIPE, startupinfo=startupinfo).communicate()[0]
		except Exception as ex:
			self.log_message("Could not start process: "+str(ex)+'\n')
			return False

		self.ruby_version = self.ruby_version.decode("UTF-8").replace("\n", "").replace("\r", "")

		if self.ruby_version not in self.settings_ruby_supported_versions:
			self.log_message("Ruby version: "+self.ruby_version+" is not supported.")
			return False

		if self.ruby_version == "1.9.3":
			self.ruby_protocol_type = "debugger"
		else:
			self.ruby_protocol_type = "byebug"

		return True

	def start_process(self, current_directory, file_name, args):
		settings = sublime.load_settings('Ruby Debugger.sublime-settings')
		requires = " '-r"+PathHelper.get_sublime_require()+"'"
		directory = " '-C"+current_directory+"'"
		program = " '"+file_name+"' "+" ".join(args)

		# Case of running rails
		if self.settings_use_bundler:
			requires = requires + " '-rbundler/setup'"
			directory = " '-C"+sublime.active_window().folders()[0]+"'"

		# Initialize params according to OS type
		if os.name == "posix":
			ruby_binaries = "'"+self.settings_ruby_binaries+"'"
			debug_logs_enabled = str(self.settings_debug_logs)
			ruby_arguments = directory + requires + " " + self.settings_ruby_arguments+ " " +program

			# On Unix, use exec and a shell to pick up the environment variables of the Ruby version
			process_command = "'"+PathHelper.get_ruby_executor()+"' " + ruby_binaries + " " + debug_logs_enabled + " " + ruby_arguments
			process_params = ["bash", "-c", "\""+process_command+"\""]
			self.process = subprocess.Popen(" ".join(process_params), stdin = subprocess.PIPE, stderr = subprocess.PIPE, stdout=subprocess.PIPE, bufsize=1, shell=True, cwd=sublime.active_window().folders()[0])

			if self.is_debug():
				self.log_message("Started process command: " + " ".join(process_params))
		else:
			# On Windows, don't use a shell so the process window is not visible to the user
			startupinfo = subprocess.STARTUPINFO()
			startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
			process_params = ["ruby", "-C"+current_directory, "-r"+PathHelper.get_sublime_require(), file_name]
			process_params += args
			self.process = subprocess.Popen(process_params, stdin = subprocess.PIPE, stderr = subprocess.PIPE, stdout=subprocess.PIPE, bufsize=1, startupinfo=startupinfo)

			if self.is_debug():
				self.log_message("Started process command: " + " ".join(process_params) )

	def connect_debugger(self):
		self.data = StringIO()
		self.requests = Queue()
		self.requests.put({"signal":False, "reason":"get_location"})

		self.connected = False
		self.log_message("Connecting... ")
		for i in range(1, 10):  # 9 attempts; the last one reports the failure
			try:
				self.client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
				self.client.connect(("localhost", 8989))
				self.control_client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
				self.control_client.connect(("localhost", 8990))
				self.connected = True
				self.log_message("Connected"+'\n')
				break
			except Exception as ex:
				if i == 9:
					self.log_message("Connection could not be made: "+str(ex)+'\n')
					return False
				else:
					time.sleep(1)

		return True

	def start_tread(self, threads_method):
		thread  = Thread(target=threads_method)
		thread.daemon = True
		thread.start()
		return thread

	def output_thread(self, stream):
		# Always read the stream
		try:
			while True:
				bytes = stream.readline()

				if len(bytes) == 0:
					break

				result = bytes.decode("UTF-8")
				self.log_message(result)
		except Exception:
			pass

	def reader_thread(self):
		# Always read the stream
		try:
			while True:
				bytes = self.client.recv(4096)

				if len(bytes) == 0:
					break

				result = bytes.decode("UTF-8")
				self.data.write(result)
				self.data.flush()

				if self.has_end_stream():
					self.handle_response()

		except Exception as ex:
			if self.connected:
				self.log_message("Debugger exception: "+str(ex)+'\n'+" StackTrace: "+traceback.format_exc())
				self.connected = False

		self.outputer.join()
		self.errors_reader.join()

		# Signal that the process has exited
		self.log_message("Debugger stopped")
		self.debugger.signal_process_ended()

	def handle_response(self):
		results = self.split_by_results()
		next_result = results.pop()

		for result in results:
			if result:
				pass

			file_name, line_number = self.get_current_position()

			# Check whether the position was updated
			if file_name != "" and not PathHelper.is_same_path(PathHelper.get_sublime_require(), file_name) and not "kernel_require.rb" in file_name:
				self.debugger.signal_position_changed(file_name, line_number)
				# self.log_message("New position: "+file_name+":"+str(line_number))

			try:
				request = self.requests.get_nowait()
				# self.log_message("Pop request: "+str(request)+", current queue size: "+str(self.requests.qsize())+", request result:"+result)

				# Check if should return the result
				if request["signal"]:
					prefix = request.get("prefix")
					data = result.strip()

					if prefix:
						data = (prefix, data)

					# Return result
					self.debugger.signal_text_result(data, request["reason"])
				else:
					pass

				if PathHelper.is_same_path(PathHelper.get_sublime_require(), file_name) or "kernel_require.rb" in file_name:
					self.debugger.run_command(DebuggerModel.COMMAND_STEP_OVER)
			except Empty:
				pass

		self.data = StringIO()
		self.data.write(next_result)

	def send_data(self, command, reason):
		self.requests.put({"signal": False, "reason": reason, "command": command})
		self.send_data_internal(command)

	def send_without_outcome(self, command):
		self.send_data_internal(command)

	def send_input(self, command):
		self.process.stdin.write(bytearray(command+'\n', "UTF-8"))
		self.process.stdin.flush()

	def send_control_command(self, command):
		if not self.connected:
			return

		try:
			self.control_client.sendall(bytearray(command+'\n', "UTF-8"))
		except Exception as e:
			if self.connected:
				self.log_message("Failed communicate with process ("+command+"): "+str(e))

	def send_data_internal(self, command):
		if not self.connected:
			return

		try:
			self.client.sendall(bytearray(command+'\n', "UTF-8"))
		except Exception as e:
			if self.connected:
				self.log_message("Failed communicate with process ("+command+"): "+str(e))

	def send_for_result(self, command, reason):
		self.requests.put({"signal": True, "reason": reason, "command": command})
		self.send_data_internal(command)

	def send_with_result(self, command, reason, prefix):
		self.requests.put({"signal": True, "prefix": prefix, "reason": reason, "command": command})
		self.send_data_internal(command)

	def split_by_results(self):
		result = [""]
		for line in self.get_lines():
			if self.debugger.match_ending(self.ruby_protocol_type, line):
				result.insert(len(result), "")
			else:
				result[len(result)-1] += line + '\n'

		return result

	def has_end_stream(self):
		end_of_stream = False
		for line in self.get_lines():
			if self.debugger.match_ending(self.ruby_protocol_type, line):
				end_of_stream = True

		return end_of_stream

	def get_current_position(self):
		current_line = -1
		current_file = ""
		end_of_stream = False

		for line in self.get_lines():
			match = self.debugger.match_line_cursor(self.ruby_protocol_type, line)

			if match:
				current_line = match.groups()[0]

			match = self.debugger.match_file_cursor(self.ruby_protocol_type, line)
			if match:
				current_file = match.groups()[0]

		return current_file, int(current_line)

	def get_lines(self):
		return self.data.getvalue().split('\n')

	def stop(self):
		self.log_message("Stopping...")
		self.send_control_command("kill")
		if self.process:
			self.process.kill()

		self.connected = False
		self.process = None

	def is_debug(self):
		settings = sublime.load_settings('Ruby Debugger.sublime-settings')
		return self.settings_debug_logs
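
The reader thread in RubyDebuggerConnector appends every chunk received from the debugger socket to a StringIO, checks for the protocol's end-of-response marker, splits the buffered text into complete responses, and carries the unfinished remainder over to the next chunk. A minimal, self-contained sketch of that buffering pattern follows; the end marker and the sample data are invented for illustration and are not part of the real debugger protocol.

from io import StringIO

END_MARKER = "PROMPT"  # hypothetical end-of-response marker

buffer = StringIO()

def feed(chunk):
    """Append a received chunk and return the list of completed responses."""
    buffer.write(chunk)
    results, current = [], ""
    for line in buffer.getvalue().split('\n'):
        if line == END_MARKER:
            results.append(current)
            current = ""
        else:
            current += line + '\n'
    # Keep the incomplete tail for the next chunk, much like handle_response()
    # rebuilds self.data from the last (unterminated) result.
    buffer.seek(0)
    buffer.truncate()
    buffer.write(current)
    return results

print(feed("line 1\nPROMPT\nline 2\n"))  # one complete response; 'line 2' stays buffered
print(feed("line 3\nPROMPT\n"))          # the buffered 'line 2' and 'line 3' complete now
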
Ejemplo n.º 53
0
class BuilderClient(object):
    def __init__(self, client, follow_build_steps=False, logger=None):
        self.client = client
        self.follow_build_steps = follow_build_steps
        self.tmp_containers = {}
        self.tmp_images = {}
        self.image = None
        self.maintainer = None
        self.tag = None
        self.need_commit = False
        self.config = {}

        if logger:
            self.logger = logger
        else:
            self.logger = logging.getLogger(__name__)
            logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s',
                        level='INFO')
            self.logs = StringIO()
            self.logger.addHandler(logging.StreamHandler(self.logs))

    def done(self):
        if self.image is None:
            # The build was unsuccessful; remove temporary containers and images
            self.client.remove_container(*self.tmp_containers)
            self.client.remove_image(*self.tmp_images)

        res = ''
        try:
            self.logs.flush()
            res = self.logs.getvalue()
            #self.logs.close()
        except AttributeError:
            pass

        return res

    def build(self, dockerfile, tag=None):
        if tag:
            self.tag = tag
        for line in dockerfile:
            line = line.strip().replace("\t", " ", 1)
            if len(line) == 0 or line[0] == '#':
                continue
            instr, sep, args = line.partition(' ')
            if sep == '':
                self.logger.error('Invalid Dockerfile format: "{0}"'.format(line))
                return
            args = args.strip()
            self.logger.info('{0} {1} ({2})'.format(instr.upper(), args,
                self.image))
            try:
                method = getattr(self, 'cmd_{0}'.format(instr.lower()))
                try:
                    method(args)
                except Exception as e:
                    self.logger.exception(str(e))
                    return
            except Exception as e:
                self.logger.warning("Skipping unknown instruction {0}".
                    format(instr.upper()))
            self.logger.info('===> {0}'.format(self.image))
        if self.need_commit:
            try:
                self.commit()
            except Exception as e:
                self.logger.exception(str(e))
                return
        if self.image is not None:
            self.logger.info("Build finished, image id: {0}".format(self.image))
            return self.image
        self.logger.error("An error has occured during the build.")
        return

    def commit(self, id=None):
        if self.image is None:
            raise Exception("Please provide a source image with `from` prior to"
                "run")
        self.config['Image'] = self.image
        if id is None:
            cmd = self.config['Cmd']
            self.config['Cmd'] = ['true']
            id = self.run()
            self.config['Cmd'] = cmd

        res = self.client.commit(id, author=self.maintainer,
            repository=self.tag)
        if 'Id' not in res:
            raise Exception('No ID returned by commit operation: {0}'.format(res))

        self.tmp_images[res['Id']] = {}
        self.image = res['Id']
        self.need_commit = False

    def run(self):
        if self.image is None:
            raise Exception("Please provide a source image with `from` prior to"
                "run")
        self.config['Image'] = self.image
        container = self.client.create_container_from_config(self.config)
        if container.get('Warnings', None):
            for warning in container['Warnings']:
                self.logger.warning(warning)
        self.client.start(container['Id'])
        if self.follow_build_steps:
            for log_line in self.client.attach(container['Id']):
                self.logger.info(" {0}".format(log_line))
        self.tmp_containers[container['Id']] = {}
        status = self.client.wait(container['Id'])
        if status != 0:
            raise Exception("The command `{0}` returned a non-zero status: {1}".
                format(self.config['Cmd'], status))
        return container['Id']

    def merge_config(self, a, b):
        if not a.get('Hostname'):
            a['Hostname'] = b.get('Hostname')
        if not a.get('User'):
            a['User'] = b.get('User')
        if not a.get('Memory'):
            a['Memory'] = b.get('Memory')
        if not a.get('MemorySwap'):
            a['MemorySwap'] = b.get('MemorySwap')
        if not a.get('CpuShares'):
            a['CpuShares'] = b.get('CpuShares')
        if not a.get('PortSpecs') or len(a.get('PortSpecs')) == 0:
            a['PortSpecs'] = b.get('PortSpecs')

        a['Tty'] = a.get('Tty') or b.get('Tty')
        a['OpenStdin'] = a.get('OpenStdin') or b.get('OpenStdin')
        a['StdinOnce'] = a.get('StdinOnce') or b.get('StdinOnce')

        if not a.get('Env') or len(a.get('Env')) == 0:
            a['Env'] = b.get('Env')
        if not a.get('Cmd') or len(a.get('Cmd')) == 0:
            a['Cmd'] = b.get('Cmd')
        if not a.get('Dns') or len(a.get('Dns')) == 0:
            a['Dns'] = b.get('Dns')

    def cmd_from(self, name):
        img = None
        try:
            img = self.client.inspect_image(name)
        except:
            self.client.pull(name)
            img = self.client.inspect_image(name)

        self.image = img['id']
        self.logger.debug("Using image {0}".format(self.image))

    def cmd_run(self, args):
        if self.image is None:
            raise Exception("Please provide a source image with `from` prior to"
                "run")
        config = self.client._container_config(self.image,
            ['/bin/sh', '-c', args])
        cmd = self.config.get('Cmd', None)
        env = self.config.get('Env', None)

        self.config['Cmd'] = None
        self.merge_config(self.config, config)
        container = self.run()

        self.config['Cmd'] = cmd
        self.config['Env'] = env
        self.commit(container)

    def cmd_maintainer(self, name):
        self.need_commit = True
        self.maintainer = name

    def cmd_env(self, args):
        self.need_commit = True
        try:
            k, v = args.split(None, 1)
            if 'Env' not in self.config:
                self.config['Env'] = ['{0}={1}'.format(k, v)]  # Env is a list of "KEY=value" strings
                return
            env = self.config['Env']
            for line in env:
                if line.startswith(k):
                    env[env.index(line)] = '{0}={1}'.format(k, v)
                    return
            env.append('{0}={1}'.format(k, v))
        except ValueError:
            raise Exception("Invalid ENV format")

    def cmd_cmd(self, args):
        self.need_commit = True
        try:
            self.config['Cmd'] = json.loads(args)
        except:
            self.logger.debug("Error decoding json, using /bin/sh -c")
            self.config['Cmd'] = ['/bin/sh', '-c', args]

    def cmd_expose(self, args):
        ports = args.split()
        if 'PortSpecs' not in self.config or self.config['PortSpecs'] is None:
            self.config['PortSpecs'] = ports
            return
        self.config['PortSpecs'].extend(ports)

    def cmd_insert(self, args):
        raise NotImplementedError("INSERT is deprecated, please use ADD instead")

    def cmd_add(self, args):
        src, dst = args.split()
        if not (src.startswith('http://') or src.startswith('https://')):
            raise NotImplementedError("Contextual build is not supported")
        output = self.client.insert(self.image, src, dst)
        srch = r'\{"Id":"(.*)"}'
        match = re.search(srch, output)
        if not match:
            raise Exception("ADD failed to retrieve the new image ID in API"
                "output")
        self.image = match.group(1)
        if self.image == "":
            raise Exception("ADD failed to retrieve the new image ID in API"
                "output")
Ejemplo n.º 54
0
class Sphinx(object):

    def __init__(self, srcdir, confdir, outdir, doctreedir, buildername,
                 confoverrides=None, status=sys.stdout, warning=sys.stderr,
                 freshenv=False, warningiserror=False, tags=None):
        self.next_listener_id = 0
        self._extensions = {}
        self._listeners = {}
        self.domains = BUILTIN_DOMAINS.copy()
        self.builderclasses = BUILTIN_BUILDERS.copy()
        self.builder = None
        self.env = None

        self.srcdir = srcdir
        self.confdir = confdir
        self.outdir = outdir
        self.doctreedir = doctreedir

        if status is None:
            self._status = StringIO()
            self.quiet = True
        else:
            self._status = status
            self.quiet = False

        if warning is None:
            self._warning = StringIO()
        else:
            self._warning = warning
        self._warncount = 0
        self.warningiserror = warningiserror

        self._events = events.copy()

        # say hello to the world
        self.info(bold('Running Sphinx v%s' % sphinx.__version__))

        # status code for command-line application
        self.statuscode = 0

        # read config
        self.tags = Tags(tags)
        self.config = Config(confdir, CONFIG_FILENAME,
                             confoverrides or {}, self.tags)
        self.config.check_unicode(self.warn)

        # set confdir to srcdir if -C given (!= no confdir); a few pieces
        # of code expect a confdir to be set
        if self.confdir is None:
            self.confdir = self.srcdir

        # backwards compatibility: activate old C markup
        self.setup_extension('sphinx.ext.oldcmarkup')
        # load all user-given extension modules
        for extension in self.config.extensions:
            self.setup_extension(extension)
        # the config file itself can be an extension
        if self.config.setup:
            self.config.setup(self)

        # now that we know all config values, collect them from conf.py
        self.config.init_values()

        # check the Sphinx version if requested
        if self.config.needs_sphinx and \
           self.config.needs_sphinx > sphinx.__version__[:3]:
            raise VersionRequirementError(
                'This project needs at least Sphinx v%s and therefore cannot '
                'be built with this version.' % self.config.needs_sphinx)

        # set up translation infrastructure
        self._init_i18n()
        # set up the build environment
        self._init_env(freshenv)
        # set up the builder
        self._init_builder(buildername)

    def _init_i18n(self):
        """Load translated strings from the configured localedirs if enabled in
        the configuration.
        """
        if self.config.language is not None:
            self.info(bold('loading translations [%s]... ' %
                           self.config.language), nonl=True)
            locale_dirs = [None, path.join(package_dir, 'locale')] + \
                [path.join(self.srcdir, x) for x in self.config.locale_dirs]
        else:
            locale_dirs = []
        self.translator, has_translation = locale.init(locale_dirs,
                                                       self.config.language)
        if self.config.language is not None:
            if has_translation:
                self.info('done')
            else:
                self.info('locale not available')

    def _init_env(self, freshenv):
        if freshenv:
            self.env = BuildEnvironment(self.srcdir, self.doctreedir,
                                        self.config)
            self.env.find_files(self.config)
            for domain in list(self.domains.keys()):
                self.env.domains[domain] = self.domains[domain](self.env)
        else:
            try:
                self.info(bold('loading pickled environment... '), nonl=True)
                self.env = BuildEnvironment.frompickle(self.config,
                    path.join(self.doctreedir, ENV_PICKLE_FILENAME))
                self.env.domains = {}
                for domain in list(self.domains.keys()):
                    # this can raise if the data version doesn't fit
                    self.env.domains[domain] = self.domains[domain](self.env)
                self.info('done')
            except Exception as err:
                if type(err) is IOError and err.errno == ENOENT:
                    self.info('not yet created')
                else:
                    self.info('failed: %s' % err)
                return self._init_env(freshenv=True)

        self.env.set_warnfunc(self.warn)

    def _init_builder(self, buildername):
        if buildername is None:
            print('No builder selected, using default: html', file=self._status)
            buildername = 'html'
        if buildername not in self.builderclasses:
            raise SphinxError('Builder name %s not registered' % buildername)

        builderclass = self.builderclasses[buildername]
        if isinstance(builderclass, tuple):
            # builtin builder
            mod, cls = builderclass
            builderclass = getattr(
                __import__('sphinx.builders.' + mod, None, None, [cls]), cls)
        self.builder = builderclass(self)
        self.emit('builder-inited')

    def build(self, force_all=False, filenames=None):
        try:
            if force_all:
                self.builder.build_all()
            elif filenames:
                self.builder.build_specific(filenames)
            else:
                self.builder.build_update()
        except Exception as err:
            self.emit('build-finished', err)
            raise
        else:
            self.emit('build-finished', None)
        self.builder.cleanup()

    def warn(self, message, location=None, prefix='WARNING: '):
        if isinstance(location, tuple):
            docname, lineno = location
            if docname:
                location = '%s:%s' % (self.env.doc2path(docname), lineno or '')
            else:
                location = None
        warntext = location and '%s: %s%s\n' % (location, prefix, message) or \
                   '%s%s\n' % (prefix, message)
        if self.warningiserror:
            raise SphinxWarning(warntext)
        self._warncount += 1
        try:
            self._warning.write(warntext)
        except UnicodeEncodeError:
            encoding = getattr(self._warning, 'encoding', 'ascii') or 'ascii'
            self._warning.write(warntext.encode(encoding, 'replace'))

    def info(self, message='', nonl=False):
        try:
            self._status.write(message)
        except UnicodeEncodeError:
            encoding = getattr(self._status, 'encoding', 'ascii') or 'ascii'
            self._status.write(message.encode(encoding, 'replace'))
        if not nonl:
            self._status.write('\n')
        self._status.flush()

    # general extensibility interface

    def setup_extension(self, extension):
        """Import and setup a Sphinx extension module. No-op if called twice."""
        if extension in self._extensions:
            return
        try:
            mod = __import__(extension, None, None, ['setup'])
        except ImportError as err:
            raise ExtensionError('Could not import extension %s' % extension,
                                 err)
        if not hasattr(mod, 'setup'):
            self.warn('extension %r has no setup() function; is it really '
                      'a Sphinx extension module?' % extension)
        else:
            try:
                mod.setup(self)
            except VersionRequirementError as err:
                # add the extension name to the version required
                raise VersionRequirementError(
                    'The %s extension used by this project needs at least '
                    'Sphinx v%s; it therefore cannot be built with this '
                    'version.' % (extension, err))
        self._extensions[extension] = mod

    def require_sphinx(self, version):
        # check the Sphinx version if requested
        if version > sphinx.__version__[:3]:
            raise VersionRequirementError(version)

    def import_object(self, objname, source=None):
        """Import an object from a 'module.name' string."""
        try:
            module, name = objname.rsplit('.', 1)
        except ValueError as err:
            raise ExtensionError('Invalid full object name %s' % objname +
                                 (source and ' (needed for %s)' % source or ''),
                                 err)
        try:
            return getattr(__import__(module, None, None, [name]), name)
        except ImportError as err:
            raise ExtensionError('Could not import %s' % module +
                                 (source and ' (needed for %s)' % source or ''),
                                 err)
        except AttributeError as err:
            raise ExtensionError('Could not find %s' % objname +
                                 (source and ' (needed for %s)' % source or ''),
                                 err)

    # event interface

    def _validate_event(self, event):
        event = sys.intern(event)
        if event not in self._events:
            raise ExtensionError('Unknown event name: %s' % event)

    def connect(self, event, callback):
        self._validate_event(event)
        listener_id = self.next_listener_id
        if event not in self._listeners:
            self._listeners[event] = {listener_id: callback}
        else:
            self._listeners[event][listener_id] = callback
        self.next_listener_id += 1
        return listener_id

    def disconnect(self, listener_id):
        for event in self._listeners.values():
            event.pop(listener_id, None)

    def emit(self, event, *args):
        results = []
        if event in self._listeners:
            for _, callback in self._listeners[event].items():
                results.append(callback(self, *args))
        return results

    def emit_firstresult(self, event, *args):
        for result in self.emit(event, *args):
            if result is not None:
                return result
        return None

    # registering addon parts

    def add_builder(self, builder):
        if not hasattr(builder, 'name'):
            raise ExtensionError('Builder class %s has no "name" attribute'
                                 % builder)
        if builder.name in self.builderclasses:
            if isinstance(self.builderclasses[builder.name], tuple):
                raise ExtensionError('Builder %r is a builtin builder' %
                                     builder.name)
            else:
                raise ExtensionError(
                    'Builder %r already exists (in module %s)' % (
                    builder.name, self.builderclasses[builder.name].__module__))
        self.builderclasses[builder.name] = builder

    def add_config_value(self, name, default, rebuild):
        if name in self.config.values:
            raise ExtensionError('Config value %r already present' % name)
        if rebuild in (False, True):
            rebuild = rebuild and 'env' or ''
        self.config.values[name] = (default, rebuild)

    def add_event(self, name):
        if name in self._events:
            raise ExtensionError('Event %r already present' % name)
        self._events[name] = ''

    def add_node(self, node, **kwds):
        nodes._add_node_class_names([node.__name__])
        for key, val in kwds.items():
            try:
                visit, depart = val
            except ValueError:
                raise ExtensionError('Value for key %r must be a '
                                     '(visit, depart) function tuple' % key)
            if key == 'html':
                from sphinx.writers.html import HTMLTranslator as translator
            elif key == 'latex':
                from sphinx.writers.latex import LaTeXTranslator as translator
            elif key == 'text':
                from sphinx.writers.text import TextTranslator as translator
            elif key == 'man':
                from sphinx.writers.manpage import ManualPageTranslator \
                    as translator
            elif key == 'texinfo':
                from sphinx.writers.texinfo import TexinfoTranslator \
                    as translator
            else:
                # ignore invalid keys for compatibility
                continue
            setattr(translator, 'visit_'+node.__name__, visit)
            if depart:
                setattr(translator, 'depart_'+node.__name__, depart)

    def _directive_helper(self, obj, content=None, arguments=None, **options):
        if isinstance(obj, (types.FunctionType, types.MethodType)):
            obj.content = content
            obj.arguments = arguments or (0, 0, False)
            obj.options = options
            return convert_directive_function(obj)
        else:
            if content or arguments or options:
                raise ExtensionError('when adding directive classes, no '
                                     'additional arguments may be given')
            return obj

    def add_directive(self, name, obj, content=None, arguments=None, **options):
        directives.register_directive(
            name, self._directive_helper(obj, content, arguments, **options))

    def add_role(self, name, role):
        roles.register_local_role(name, role)

    def add_generic_role(self, name, nodeclass):
        # don't use roles.register_generic_role because it uses
        # register_canonical_role
        role = roles.GenericRole(name, nodeclass)
        roles.register_local_role(name, role)

    def add_domain(self, domain):
        if domain.name in self.domains:
            raise ExtensionError('domain %s already registered' % domain.name)
        self.domains[domain.name] = domain

    def override_domain(self, domain):
        if domain.name not in self.domains:
            raise ExtensionError('domain %s not yet registered' % domain.name)
        if not issubclass(domain, self.domains[domain.name]):
            raise ExtensionError('new domain not a subclass of registered %s '
                                 'domain' % domain.name)
        self.domains[domain.name] = domain

    def add_directive_to_domain(self, domain, name, obj,
                                content=None, arguments=None, **options):
        if domain not in self.domains:
            raise ExtensionError('domain %s not yet registered' % domain)
        self.domains[domain].directives[name] = \
            self._directive_helper(obj, content, arguments, **options)

    def add_role_to_domain(self, domain, name, role):
        if domain not in self.domains:
            raise ExtensionError('domain %s not yet registered' % domain)
        self.domains[domain].roles[name] = role

    def add_index_to_domain(self, domain, index):
        if domain not in self.domains:
            raise ExtensionError('domain %s not yet registered' % domain)
        self.domains[domain].indices.append(index)

    def add_object_type(self, directivename, rolename, indextemplate='',
                        parse_node=None, ref_nodeclass=None, objname='',
                        doc_field_types=[]):
        StandardDomain.object_types[directivename] = \
            ObjType(objname or directivename, rolename)
        # create a subclass of GenericObject as the new directive
        new_directive = type(directivename, (GenericObject, object),
                             {'indextemplate': indextemplate,
                              'parse_node': staticmethod(parse_node),
                              'doc_field_types': doc_field_types})
        StandardDomain.directives[directivename] = new_directive
        # XXX support more options?
        StandardDomain.roles[rolename] = XRefRole(innernodeclass=ref_nodeclass)

    # backwards compatible alias
    add_description_unit = add_object_type

    def add_crossref_type(self, directivename, rolename, indextemplate='',
                          ref_nodeclass=None, objname=''):
        StandardDomain.object_types[directivename] = \
            ObjType(objname or directivename, rolename)
        # create a subclass of Target as the new directive
        new_directive = type(directivename, (Target, object),
                             {'indextemplate': indextemplate})
        StandardDomain.directives[directivename] = new_directive
        # XXX support more options?
        StandardDomain.roles[rolename] = XRefRole(innernodeclass=ref_nodeclass)

    def add_transform(self, transform):
        SphinxStandaloneReader.transforms.append(transform)

    def add_javascript(self, filename):
        from sphinx.builders.html import StandaloneHTMLBuilder
        if '://' in filename:
            StandaloneHTMLBuilder.script_files.append(filename)
        else:
            StandaloneHTMLBuilder.script_files.append(
                posixpath.join('_static', filename))

    def add_stylesheet(self, filename):
        from sphinx.builders.html import StandaloneHTMLBuilder
        if '://' in filename:
            StandaloneHTMLBuilder.css_files.append(filename)
        else:
            StandaloneHTMLBuilder.css_files.append(
                posixpath.join('_static', filename))

    def add_lexer(self, alias, lexer):
        from sphinx.highlighting import lexers
        if lexers is None:
            return
        lexers[alias] = lexer

    def add_autodocumenter(self, cls):
        from sphinx.ext import autodoc
        autodoc.add_documenter(cls)
        self.add_directive('auto' + cls.objtype, autodoc.AutoDirective)

    def add_autodoc_attrgetter(self, type, getter):
        from sphinx.ext import autodoc
        autodoc.AutoDirective._special_attrgetters[type] = getter

    def add_search_language(self, cls):
        from sphinx.search import languages, SearchLanguage
        assert issubclass(cls, SearchLanguage)
        languages[cls.lang] = cls
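
The Sphinx application above swaps in a StringIO when the status (or warning) stream is None, so progress output is buffered silently instead of printed; info() then writes to whichever stream was chosen. A reduced sketch of that pattern, using an illustrative class that is not part of the Sphinx API:

import sys
from io import StringIO

class StatusWriter:
    """Write progress messages to a given stream, or buffer them in memory."""

    def __init__(self, status=None):
        # No stream given: collect output in an in-memory buffer (quiet mode).
        self.quiet = status is None
        self.stream = StringIO() if status is None else status

    def info(self, message='', nonl=False):
        self.stream.write(message)
        if not nonl:
            self.stream.write('\n')
        self.stream.flush()

quiet = StatusWriter()                       # buffered, nothing is printed
quiet.info('loading pickled environment... ', nonl=True)
quiet.info('done')
print(quiet.stream.getvalue(), end='')       # loading pickled environment... done

StatusWriter(sys.stdout).info('writing straight to stdout')
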
Ejemplo n.º 55
0
class Protocol(object):

    """
    This is the base class for all protocols; it defines the common portions
    of the API.

    The goal of all protocol classes is to provide an interface that
    is unified across protocols, such that the adapters may be used
    interchangeably without changing any other code.

    In order to achieve this, the main challenge is dealing with the
    differences arising from the authentication methods that are used.
    The reason is that many devices may support the following variety of
    authentication/authorization methods:

        1. Protocol level authentication, such as SSH's built-in
           authentication.

                - p1: password only
                - p2: username
                - p3: username + password
                - p4: username + key
                - p5: username + key + password

        2. App level authentication, such that the authentication may
           happen long after a connection is already accepted.
           This type of authentication is normally used in combination with
           Telnet, but some SSH hosts also do this (users have reported
           devices from Enterasys). These devices may also combine
           protocol-level authentication with app-level authentication.
           The following types of app-level authentication exist:

                - a1: password only
                - a2: username
                - a3: username + password

        3. App level authorization: In order to implement the AAA protocol,
           some devices ask for two separate app-level logins, where the
           first serves to authenticate the user and the second serves to
           authorize them.
           App-level authorization may support the same methods as app-level
           authentication:

                - A1: password only
                - A2: username
                - A3: username + password

    We are assuming that the following methods are used:

        - Telnet:

          - p1 - p5: never
          - a1 - a3: optional
          - A1 - A3: optional

        - SSH:

          - p1 - p5: optional
          - a1 - a3: optional
          - A1 - A3: optional

    To achieve authentication method compatibility across different
    protocols, we must hide all this complexity behind one single API
    call, and figure out which ones are supported.

    As a use-case, our goal is that the following code will always work,
    regardless of which combination of authentication methods a device
    supports::

        key = PrivateKey.from_file('~/.ssh/id_rsa', 'my_key_password')

        # The user account to use for protocol level authentication.
        # The key defaults to None, in which case key authentication is
        # not attempted.
        account = Account(name     = 'myuser',
                          password = '******',
                          key      = key)

        # The account to use for app-level authentication.
        # password2 defaults to password.
        app_account = Account(name      = 'myuser',
                              password  = '******',
                              password2 = 'my_app_password2')

        # app_account defaults to account.
        conn.login(account, app_account = None, flush = True)

    Another important consideration is that once the login is complete, the
    device must be in a clearly defined state, i.e. we need to
    have processed the data that was retrieved from the connected host.

    More precisely, the buffer that contains the incoming data must be in
    a state such that the following call to expect_prompt() will either
    always work, or always fail.

    We hide the following methods behind the login() call::

        # Protocol level authentication.
        conn.protocol_authenticate(...)
        # App-level authentication.
        conn.app_authenticate(...)
        # App-level authorization.
        conn.app_authorize(...)

    The code produces the following result::

        Telnet:
            conn.protocol_authenticate -> NOP
            conn.app_authenticate
                -> waits for username or password prompt, authenticates,
                   returns after a CLI prompt was seen.
            conn.app_authorize
                -> calls driver.enable(), waits for username or password
                   prompt, authorizes, returns after a CLI prompt was seen.

        SSH:
            conn.protocol_authenticate -> authenticates using user/key/password
            conn.app_authenticate -> like Telnet
            conn.app_authorize -> like Telnet

    We can see the following:

        - protocol_authenticate() must not wait for a prompt, because else
          app_authenticate() has no way of knowing whether an app-level
          login is even necessary.

        - app_authenticate() must check the buffer first, to see if
          authentication has already succeeded. In the case that
          app_authenticate() is not necessary (i.e. the buffer contains a
          CLI prompt), it just returns.

          app_authenticate() must NOT eat the prompt from the buffer, because
          else the result may be inconsistent with devices that do not do
          any authentication; i.e., when app_authenticate() is not called.

        - Since the prompt must still be contained in the buffer,
          conn.driver.app_authorize() needs to eat it before it sends the
          command for starting the authorization procedure.

          This has a drawback - if a user attempts to call app_authorize()
          at a time where there is no prompt in the buffer, it would fail.
          So we need to eat the prompt only in cases where we know that
          auto_app_authorize() will attempt to execute a command. Hence
          the driver requires the Driver.supports_auto_authorize() method.

          However, app_authorize() must not eat the CLI prompt that follows.

        - Once all logins are processed, it makes sense to eat the prompt
          depending on the wait parameter. Wait should default to True,
          because it's better that the connection stalls waiting forever,
          than to risk that an error is not immediately discovered due to
          timing issues (this is a race condition that I'm not going to
          detail here).
    """

    def __init__(self,
                 driver=None,
                 stdout=None,
                 stderr=None,
                 debug=0,
                 connect_timeout=30,
                 timeout=30,
                 logfile=None,
                 termtype='dumb',
                 verify_fingerprint=True,
                 account_factory=None,
                 banner_timeout=20,
                 encoding='latin-1'):
        """
        Constructor.
        The following events are provided:

          - data_received_event: A packet was received from the connected host.
          - otp_requested_event: The connected host requested a
            one-time-password to be entered.

        :keyword driver: Driver()|str
        :keyword stdout: Where to write the device response. Defaults to
            an in-memory buffer.
        :keyword stderr: Where to write debug info. Defaults to stderr.
        :keyword debug: An integer between 0 (no debugging) and 5 (very
            verbose debugging) that specifies the amount of debug info
            sent to the terminal. The default value is 0.
        :keyword connect_timeout: Timeout for the initial TCP connection attempt
        :keyword timeout: See set_timeout(). The default value is 30.
        :keyword logfile: A file into which a log of the conversation with the
            device is dumped.
        :keyword termtype: The terminal type to request from the remote host,
            e.g. 'vt100'.
        :keyword verify_fingerprint: Whether to verify the host's fingerprint.
        :keyword account_factory: A function that produces a new :class:`Account`.
        :type banner_timeout: int
        :keyword banner_timeout: The time to wait for the banner.
        :type encoding: str
        :keyword encoding: The encoding of data received from the remote host.
        """
        self.data_received_event = Event()
        self.otp_requested_event = Event()
        self.os_guesser = OsGuesser()
        self.auto_driver = driver_map[self.guess_os()]
        self.proto_authenticated = False
        self.app_authenticated = False
        self.app_authorized = False
        self.manual_user_re = None
        self.manual_password_re = None
        self.manual_prompt_re = None
        self.manual_error_re = None
        self.manual_login_error_re = None
        self.driver_replaced = False
        self.host = None
        self.port = None
        self.last_account = None
        self.termtype = termtype
        self.verify_fingerprint = verify_fingerprint
        self.manual_driver = None
        self.debug = debug
        self.connect_timeout = connect_timeout
        self.timeout = timeout
        self.logfile = logfile
        self.response = None
        self.buffer = MonitoredBuffer()
        self.account_factory = account_factory
        self.banner_timeout = banner_timeout
        self.encoding = encoding
        self.send_data = None
        if stdout is None:
            self.stdout = StringIO()
        else:
            self.stdout = stdout
        if stderr is None:
            self.stderr = sys.stderr
        else:
            self.stderr = stderr
        if logfile is None:
            self.log = None
        else:
            self.log = open(logfile, 'a')

        # set manual_driver
        if driver is not None:
            if isinstance(driver, str):
                if driver in driver_map:
                    self.manual_driver = driver_map[driver]
                else:
                    self._dbg(1, 'Invalid driver string given. Ignoring...')
            elif isinstance(driver, Driver):
                self.manual_driver = driver
            else:
                self._dbg(1, 'Invalid driver given. Ignoring...')

    def __copy__(self):
        """
        Overwritten to return the very same object instead of copying the
        stream, because copying a network connection is impossible.

        :rtype:  Protocol
        :return: self
        """
        return self

    def __deepcopy__(self, memo):
        """
        Overwritten to return the very same object instead of copying the
        stream, because copying a network connection is impossible.

        :type  memo: object
        :param memo: Please refer to Python's standard library documentation.
        :rtype:  Protocol
        :return: self
        """
        return self

    def _driver_replaced_notify(self, old, new):
        self.driver_replaced = True
        self.cancel_expect()
        msg = 'Protocol: driver replaced: %s -> %s' % (old.name, new.name)
        self._dbg(1, msg)

    def _receive_cb(self, data, remove_cr=True):
        # Clean the data up.
        if remove_cr:
            text = data.replace('\r', '')
        else:
            text = data

        # Write to a logfile.
        self.stdout.write(text)
        self.stdout.flush()
        if self.log is not None:
            self.log.write(text)

        # Check whether a better driver is found based on the incoming data.
        old_driver = self.get_driver()
        self.os_guesser.data_received(data, self.is_app_authenticated())
        self.auto_driver = driver_map[self.guess_os()]
        new_driver = self.get_driver()
        if old_driver != new_driver:
            self._driver_replaced_notify(old_driver, new_driver)

        # Send signals to subscribers.
        self.data_received_event(data)

    def is_dummy(self):
        """
        Returns True if the adapter implements a virtual device, i.e.
        it isn't an actual network connection.

        :rtype:  Boolean
        :return: True for dummy adapters, False for network adapters.
        """
        return False

    def _dbg(self, level, msg):
        if self.debug < level:
            return
        self.stderr.write(self.get_driver().name + ': ' + msg + '\n')

    def set_driver(self, driver=None):
        """
        Defines the driver that is used to recognize prompts and implement
        behavior depending on the remote system.
        The driver argument may be an instance of a protocols.drivers.Driver
        subclass, a known driver name (string), or None.
        If the driver argument is None, the adapter automatically chooses
        a driver using the guess_os() function.

        :type  driver: Driver()|str
        :param driver: The driver instance, the name of a known driver, or None.
        """
        if driver is None:
            self.manual_driver = None
        elif isinstance(driver, str):
            if driver not in driver_map:
                raise TypeError('no such driver:' + repr(driver))
            self.manual_driver = driver_map[driver]
        elif isinstance(driver, Driver):
            self.manual_driver = driver
        else:
            raise TypeError('unsupported argument type: ' + str(type(driver)))

    def get_driver(self):
        """
        Returns the currently used driver.

        :rtype:  Driver
        :return: The currently used driver.
        """
        if self.manual_driver:
            return self.manual_driver
        return self.auto_driver

    def get_banner(self):
        """
        Returns the banner that was received upon login.
        Only supported on SSH2; returns None on all other protocols.
        Also returns None if the client is not yet connected.

        :rtype: str|None
        :return: The banner as a string
        """
        return None

    def get_remote_version(self):
        """
        Returns the remote version idstring that was received upon login.
        Only supported on SSH2; returns None on all other protocols.
        Also returns None if the client is not yet connected.

        :rtype: str|None
        :return: The idstring.
        """
        return None

    def autoinit(self):
        """
        Make the remote host more script-friendly by automatically executing
        one or more commands on it.
        The commands executed depend on the currently used driver.
        For example, the driver for Cisco IOS would execute the
        following commands::

            term len 0
            term width 0
        """
        self.get_driver().init_terminal(self)

    def set_username_prompt(self, regex=None):
        """
        Defines a pattern that is used to monitor the response of the
        connected host for a username prompt.

        :type  regex: RegEx
        :param regex: The pattern that matches a username prompt of the remote host.
        """
        if regex is None:
            self.manual_user_re = regex
        else:
            self.manual_user_re = to_regexs(regex)

    def get_username_prompt(self):
        """
        Returns the regular expression that is used to monitor the response
        of the connected host for a username prompt.

        :rtype:  regex
        :return: A regular expression.
        """
        if self.manual_user_re:
            return self.manual_user_re
        return self.get_driver().user_re

    def set_password_prompt(self, regex=None):
        """
        Defines a pattern that is used to monitor the response of the
        connected host for a password prompt.

        :type  regex: RegEx
        :param regex: The pattern that matches a password prompt of the remote host.
        """
        if regex is None:
            self.manual_password_re = regex
        else:
            self.manual_password_re = to_regexs(regex)

    def get_password_prompt(self):
        """
        Returns the regular expression that is used to monitor the response
        of the connected host for a password prompt.

        :rtype:  regex
        :return: A regular expression.
        """
        if self.manual_password_re:
            return self.manual_password_re
        return self.get_driver().password_re

    def set_prompt(self, prompt=None):
        """
        Defines a pattern that is waited for when calling the expect_prompt()
        method.
        If the set_prompt() method is not called, or if it is called with the
        prompt argument set to None, a default prompt is used that should
        work with many devices running Unix, IOS, IOS-XR, or Junos and others.

        :type  prompt: RegEx
        :param prompt: The pattern that matches the prompt of the remote host.
        """
        if prompt is None:
            self.manual_prompt_re = prompt
        else:
            self.manual_prompt_re = to_regexs(prompt)

    def get_prompt(self):
        """
        Returns the regular expressions that are matched against the host
        response when calling the expect_prompt() method.

        :rtype:  list(re.RegexObject)
        :return: A list of regular expression objects.
        """
        if self.manual_prompt_re:
            return self.manual_prompt_re
        return self.get_driver().prompt_re

    def set_error_prompt(self, error=None):
        """
        Defines a pattern that is used to monitor the response of the
        connected host. If the pattern matches (any time the expect() or
        expect_prompt() methods are used), an error is raised.

        :type  error: RegEx
        :param error: The pattern that, when matched, causes an error.
        """
        if error is None:
            self.manual_error_re = error
        else:
            self.manual_error_re = to_regexs(error)

    def get_error_prompt(self):
        """
        Returns the regular expression that is used to monitor the response
        of the connected host for errors.

        :rtype:  regex
        :return: A regular expression.
        """
        if self.manual_error_re:
            return self.manual_error_re
        return self.get_driver().error_re

    def set_login_error_prompt(self, error=None):
        """
        Defines a pattern that is used to monitor the response of the
        connected host during the authentication procedure.
        If the pattern matches an error is raised.

        :type  error: RegEx
        :param error: The pattern that, when matched, causes an error.
        """
        if error is None:
            self.manual_login_error_re = error
        else:
            self.manual_login_error_re = to_regexs(error)

    def get_login_error_prompt(self):
        """
        Returns the regular expression that is used to monitor the response
        of the connected host for login errors; this is only used during
        the login procedure, i.e. app_authenticate() or app_authorize().

        :rtype:  regex
        :return: A regular expression.
        """
        if self.manual_login_error_re:
            return self.manual_login_error_re
        return self.get_driver().login_error_re

    def set_connect_timeout(self, timeout):
        """
        Defines the maximum time that the adapter waits for initial connection.

        :type  timeout: int
        :param timeout: The maximum time in seconds.
        """
        self.connect_timeout = int(timeout)

    def get_connect_timeout(self):
        """
        Returns the current connect_timeout in seconds.

        :rtype:  int
        :return: The connect_timeout in seconds.
        """
        return self.connect_timeout

    def set_timeout(self, timeout):
        """
        Defines the maximum time that the adapter waits before a call to
        :class:`expect()` or :class:`expect_prompt()` fails.

        :type  timeout: int
        :param timeout: The maximum time in seconds.
        """
        self.timeout = int(timeout)

    def get_timeout(self):
        """
        Returns the current timeout in seconds.

        :rtype:  int
        :return: The timeout in seconds.
        """
        return self.timeout

    def _connect_hook(self, host, port):
        """
        Should be overwritten.
        """
        raise NotImplementedError()

    def connect(self, hostname=None, port=None):
        """
        Opens the connection to the remote host or IP address.

        :type  hostname: string
        :param hostname: The remote host or IP address.
        :type  port: int
        :param port: The remote TCP port number.
        """
        if hostname is not None:
            self.host = hostname
        conn = self._connect_hook(self.host, port)
        self.os_guesser.protocol_info(self.get_remote_version())
        self.auto_driver = driver_map[self.guess_os()]
        if self.get_banner():
            self.os_guesser.data_received(self.get_banner(), False)
        return conn

    def _get_account(self, account):
        if isinstance(account, Context) or isinstance(account, _Context):
            return account.context()
        if account is None:
            account = self.last_account
        if self.account_factory:
            account = self.account_factory(account)
        else:
            if account is None:
                raise TypeError('An account is required')
            account.__enter__()
        self.last_account = account
        return account.context()

    def login(self, account=None, app_account=None, flush=True):
        """
        Log into the connected host using the best method available.
        If an account is not given, default to the account that was
        used during the last call to login(). If a previous call was not
        made, use the account that was passed to the constructor. If that
        also fails, raise a TypeError.

        The app_account is passed to :class:`app_authenticate()` and
        :class:`app_authorize()`.
        If app_account is not given, default to the value of the account
        argument.

        :type  account: Account
        :param account: The account for protocol level authentication.
        :type  app_account: Account
        :param app_account: The account for app level authentication.
        :type  flush: bool
        :param flush: Whether to flush the last prompt from the buffer.
        """
        with self._get_account(account) as account:
            if app_account is None:
                app_account = account
            self.authenticate(account, app_account, flush=False)
            if self.get_driver().supports_auto_authorize():
                self.expect_prompt()
            self.auto_app_authorize(app_account, flush=flush)

    def authenticate(self, account=None, app_account=None, flush=True):
        """
        Like login(), but skips the authorization procedure.

        .. HINT::
           If you are unsure whether to use :class:`authenticate()` or
           :class:`login()`, stick with :class:`login`.

        :type  account: Account
        :param account: The account for protocol level authentication.
        :type  app_account: Account
        :param app_account: The account for app level authentication.
        :type  flush: bool
        :param flush: Whether to flush the last prompt from the buffer.
        """
        with self._get_account(account) as account:
            if app_account is None:
                app_account = account

            if not self.proto_authenticated:
                self.protocol_authenticate(account)
            self.app_authenticate(app_account, flush=flush)

    def _protocol_authenticate(self, user, password):
        pass

    def _protocol_authenticate_by_key(self, user, key):
        pass

    def protocol_authenticate(self, account=None):
        """
        Low-level API to perform protocol-level authentication on protocols
        that support it.

        .. HINT::
           In most cases, you want to use the login() method instead, as
           it automatically chooses the best login method for each protocol.

        :type  account: Account
        :param account: An account object, like login().
        """
        with self._get_account(account) as account:
            user = account.get_name()
            password = account.get_password()
            key = account.get_key()
            if key is None:
                self._dbg(1, "Attempting to authenticate %s." % user)
                self._protocol_authenticate(user, password)
            else:
                self._dbg(1, "Authenticate %s with key." % user)
                self._protocol_authenticate_by_key(user, key)
        self.proto_authenticated = True

    def is_protocol_authenticated(self):
        """
        Returns True if the protocol-level authentication procedure was
        completed, False otherwise.

        :rtype:  bool
        :return: Whether the authentication was completed.
        """
        return self.proto_authenticated

    def _app_authenticate(self,
                          account,
                          password,
                          flush=True,
                          bailout=False):
        user = account.get_name()

        while True:
            # Wait for any prompt. Once a match is found, we need to be able
            # to find out which type of prompt was matched, so we build a
            # structure to allow for mapping the match index back to the
            # prompt type.
            prompts = (('login-error', self.get_login_error_prompt()),
                       ('username',    self.get_username_prompt()),
                       ('skey',        [_skey_re]),
                       ('password',    self.get_password_prompt()),
                       ('cli',         self.get_prompt()))
            prompt_map = []
            prompt_list = []
            for section, sectionprompts in prompts:
                for prompt in sectionprompts:
                    prompt_map.append((section, prompt))
                    prompt_list.append(prompt)

            # Wait for the prompt.
            try:
                index, match = self._waitfor(prompt_list)
            except TimeoutException:
                if self.response is None:
                    self.response = ''
                msg = "Buffer: %s" % repr(self.response)
                raise TimeoutException(msg)
            except DriverReplacedException:
                # Driver replaced, retry.
                self._dbg(1, 'Protocol.app_authenticate(): driver replaced')
                continue
            except ExpectCancelledException:
                self._dbg(1, 'Protocol.app_authenticate(): expect cancelled')
                raise
            except EOFError:
                self._dbg(1, 'Protocol.app_authenticate(): EOF')
                raise

            # Login error detected.
            section, prompt = prompt_map[index]
            if section == 'login-error':
                raise LoginFailure("Login failed")

            # User name prompt.
            elif section == 'username':
                self._dbg(1, "Username prompt %s received." % index)
                self.expect(prompt)  # consume the prompt from the buffer
                self.send(user + '\r')
                continue

            # s/key prompt.
            elif section == 'skey':
                self._dbg(1, "S/Key prompt received.")
                self.expect(prompt)  # consume the prompt from the buffer
                seq = int(match.group(1))
                seed = match.group(2)
                self.otp_requested_event(account, seq, seed)
                self._dbg(2, "Seq: %s, Seed: %s" % (seq, seed))
                phrase = otp(password, seed, seq)

                # A password prompt is now required.
                self.expect(self.get_password_prompt())
                self.send(phrase + '\r')
                self._dbg(1, "Password sent.")
                if bailout:
                    break
                continue

            # Cleartext password prompt.
            elif section == 'password':
                self._dbg(1, "Cleartext password prompt received.")
                self.expect(prompt)  # consume the prompt from the buffer
                self.send(password + '\r')
                if bailout:
                    break
                continue

            # Shell prompt.
            elif section == 'cli':
                self._dbg(1, 'Shell prompt received.')
                if flush:
                    self.expect_prompt()
                break

            else:
                assert False  # No such section

    def app_authenticate(self, account=None, flush=True, bailout=False):
        """
        Attempt to perform application-level authentication. Application
        level authentication is needed on devices where the username and
        password are requested from the user after the connection was
        already accepted by the remote device.

        The difference between app-level authentication and protocol-level
        authentication is that in the latter case, the prompting is handled
        by the client, whereas app-level authentication is handled by the
        remote device.

        App-level authentication comes in a large variety of forms, and
        while this method tries hard to support them all, there is no
        guarantee that it will always work.

        We attempt to smartly recognize the user and password prompts;
        for a list of supported operating systems please check the
        Exscript.protocols.drivers module.

        Returns upon finding the first command line prompt. Depending
        on whether the flush argument is True, it also removes the
        prompt from the incoming buffer.

        :type  account: Account
        :param account: An account object, like login().
        :type  flush: bool
        :param flush: Whether to flush the last prompt from the buffer.
        :type  bailout: bool
        :param bailout: Whether to return immediately after sending the
          password, instead of waiting for a prompt.
        """
        with self._get_account(account) as account:
            user = account.get_name()
            password = account.get_password()
            self._dbg(1, "Attempting to app-authenticate %s." % user)
            self._app_authenticate(account, password, flush, bailout)
        self.app_authenticated = True

    def is_app_authenticated(self):
        """
        Returns True if the application-level authentication procedure was
        completed, False otherwise.

        :rtype:  bool
        :return: Whether the authentication was completed.
        """
        return self.app_authenticated

    def app_authorize(self, account=None, flush=True, bailout=False):
        """
        Like app_authenticate(), but uses the authorization password
        of the account.

        For the difference between authentication and authorization
        please google for AAA.

        :type  account: Account
        :param account: An account object, like login().
        :type  flush: bool
        :param flush: Whether to flush the last prompt from the buffer.
        :type  bailout: bool
        :param bailout: Whether to return immediately after sending the
          password, instead of waiting for a prompt.
        """
        with self._get_account(account) as account:
            user = account.get_name()
            password = account.get_authorization_password()
            if password is None:
                password = account.get_password()
            self._dbg(1, "Attempting to app-authorize %s." % user)
            self._app_authenticate(account, password, flush, bailout)
        self.app_authorized = True

    def auto_app_authorize(self, account=None, flush=True, bailout=False):
        """
        Like authorize(), but instead of just waiting for a user or
        password prompt, it automatically initiates the authorization
        procedure by sending a driver-specific command.

        In the case of devices that understand AAA, that means sending
        a command to the device. For example, on routers running Cisco
        IOS, this method sends the 'enable' command before expecting
        the password.

        In the case of a device that is not recognized to support AAA, this
        method does nothing.

        :type  account: Account
        :param account: An account object, like login().
        :type  flush: bool
        :param flush: Whether to flush the last prompt from the buffer.
        :type  bailout: bool
        :param bailout: Whether to return immediately after sending the
          password, instead of waiting for a prompt.
        """
        with self._get_account(account) as account:
            self._dbg(1, 'Calling driver.auto_authorize().')
            self.get_driver().auto_authorize(self, account, flush, bailout)

    def is_app_authorized(self):
        """
        Returns True if the application-level authorization procedure was
        completed, False otherwise.

        :rtype:  bool
        :return: Whether the authorization was completed.
        """
        return self.app_authorized

    def send(self, data):
        """
        Sends the given data to the remote host.
        Returns without waiting for a response.

        :type  data: string
        :param data: The data that is sent to the remote host.
        :rtype:  Boolean
        :return: True on success, False otherwise.
        """
        raise NotImplementedError()

    def execute(self, command, consume=True):
        """
        Sends the given data to the remote host (with a newline appended)
        and waits for a prompt in the response. The prompt matching uses
        a sane default that works with many devices running Unix, IOS,
        IOS-XR, Junos, and others. If that fails, a custom prompt may
        also be defined using the set_prompt() method.
        This method also modifies the value of the response (self.response)
        attribute, for details please see the documentation of the
        expect() method.

        :type  command: string
        :param command: The data that is sent to the remote host.
        :type  consume: boolean (Default: True)
        :param consume: Whether to consume the prompt from the buffer or not.
        :rtype:  int, re.MatchObject
        :return: The index of the prompt regular expression that matched,
          and the match object.
        """
        self.send(command + '\r')
        return self.expect_prompt(consume)

    def _domatch(self, prompt, flush):
        """
        Should be overridden by subclasses.
        """
        raise NotImplementedError()

    def _waitfor(self, prompt):
        re_list = to_regexs(prompt)
        patterns = [p.pattern for p in re_list]
        self._dbg(2, 'waiting for: ' + repr(patterns))
        result = self._domatch(re_list, False)
        return result

    def waitfor(self, prompt):
        """
        Monitors the data received from the remote host and waits until
        the response matches the given prompt.
        Once a match has been found, the buffer containing incoming data
        is NOT changed. In other words, consecutive calls to this function
        will always work, e.g.::

            conn.waitfor('myprompt>')
            conn.waitfor('myprompt>')
            conn.waitfor('myprompt>')

        will always work. Hence in most cases, you probably want to use
        expect() instead.

        This method also stores the received data in the response
        attribute (self.response).

        Returns the index of the regular expression that matched.

        :type  prompt: str|re.RegexObject|list(str|re.RegexObject)
        :param prompt: One or more regular expressions.
        :rtype:  int, re.MatchObject
        :return: The index of the regular expression that matched,
          and the match object.

        @raise TimeoutException: raised if the timeout was reached.

        @raise ExpectCancelledException: raised when cancel_expect() was
        called in a callback.

        @raise ProtocolException: on other internal errors.

        @raise Exception: May raise other exceptions that are caused
        within the underlying protocol implementations.
        """
        while True:
            try:
                result = self._waitfor(prompt)
            except DriverReplacedException:
                continue  # retry
            return result

    def _expect(self, prompt):
        result = self._domatch(to_regexs(prompt), True)
        return result

    def expect(self, prompt):
        """
        Like waitfor(), but also removes the matched string from the buffer
        containing the incoming data. In other words, the following may not
        always complete::

            conn.expect('myprompt>')
            conn.expect('myprompt>') # timeout

        Returns the index of the regular expression that matched.

        .. HINT::
            May raise the same exceptions as :class:`waitfor`.

        :type  prompt: str|re.RegexObject|list(str|re.RegexObject)
        :param prompt: One or more regular expressions.
        :rtype:  int, re.MatchObject
        :return: The index of the regular expression that matched,
          and the match object.
        """
        while True:
            try:
                result = self._expect(prompt)
            except DriverReplacedException:
                continue  # retry
            return result

    def expect_prompt(self, consume=True):
        """
        Monitors the data received from the remote host and waits for a
        prompt in the response. The prompt matching uses
        a sane default that works with many devices running Unix, IOS,
        IOS-XR, Junos, and others. If that fails, a custom prompt may
        also be defined using the set_prompt() method.
        This method also stores the received data in the response
        attribute (self.response).

        :type  consume: boolean (Default: True)
        :param consume: Whether to consume the prompt from the buffer or not.
        :rtype:  int, re.MatchObject
        :return: The index of the prompt regular expression that matched,
          and the match object.
        """
        if consume:
            result = self.expect(self.get_prompt())
        else:
            self._dbg(1, "DO NOT CONSUME PROMPT!")
            result = self.waitfor(self.get_prompt())

        # We skip the first line because it contains the echo of the command
        # sent.
        self._dbg(5, "Checking %s for errors" % repr(self.response))
        for line in self.response.split('\n')[1:]:
            for prompt in self.get_error_prompt():
                if not prompt.search(line):
                    continue
                args = repr(prompt.pattern), repr(line)
                self._dbg(5, "error prompt (%s) matches %s" % args)
                raise InvalidCommandException('Device said:\n' + self.response)

        return result

    def add_monitor(self, pattern, callback, limit=80):
        """
        Calls the given function whenever the given pattern matches the
        incoming data.

        .. HINT::
            If you want to catch all incoming data regardless of a
            pattern, use the Protocol.data_received_event event instead.

        Arguments passed to the callback are the protocol instance, the
        index of the match, and the match object of the regular expression.

        :type  pattern: str|re.RegexObject|list(str|re.RegexObject)
        :param pattern: One or more regular expressions.
        :type  callback: callable
        :param callback: The function that is called.
        :type  limit: int
        :param limit: The maximum size of the tail of the buffer
                      that is searched, in number of bytes.
        """
        self.buffer.add_monitor(pattern, partial(callback, self), limit)

    def cancel_expect(self):
        """
        Cancel the current call to :class:`expect()` as soon as control returns
        to the protocol adapter. This method may be used in callbacks to
        the events emitted by this class, e.g. Protocol.data_received_event.
        """
        raise NotImplementedError()

    def _call_key_handlers(self, key_handlers, data):
        if key_handlers is not None:
            for key, func in list(key_handlers.items()):
                if data == key:
                    func(self)
                    return True
        return False

    def _set_terminal_size(self, rows, cols):
        raise NotImplementedError()

    def _open_posix_shell(self,
                          channel,
                          key_handlers,
                          handle_window_size):
        # We need to make sure to use an unbuffered stdin, else multi-byte
        # chars (such as arrow keys) won't work properly. The data read from
        # it is decoded below, so open it in unbuffered binary mode.
        with os.fdopen(sys.stdin.fileno(), 'rb', 0) as stdin:
            oldtty = termios.tcgetattr(stdin)

            # Update the terminal size whenever the size changes.
            if handle_window_size:
                def handle_sigwinch(signum, frame):
                    rows, cols = get_terminal_size()
                    self._set_terminal_size(rows, cols)
                signal.signal(signal.SIGWINCH, handle_sigwinch)
                handle_sigwinch(None, None)

            # Read from stdin and write to the network, endlessly.
            try:
                tty.setraw(sys.stdin.fileno())
                tty.setcbreak(sys.stdin.fileno())
                channel.settimeout(0.0)

                while True:
                    try:
                        r, w, e = select.select([channel, stdin], [], [])
                    except select.error as e:
                        # select.error is an alias of OSError on Python 3, so
                        # the exception can no longer be unpacked; read the
                        # error code from args instead.
                        if e.args[0] == errno.EINTR:
                            # This may happen when SIGWINCH is called
                            # during the select; we just retry then.
                            continue
                        raise

                    if channel in r:
                        try:
                            data = channel.recv(1024).decode(self.encoding)
                        except socket.timeout:
                            # Nothing to read yet; poll again instead of
                            # falling through with 'data' undefined.
                            continue
                        if not data:
                            self._dbg(1, 'EOF from remote')
                            break
                        self._receive_cb(data, False)
                        self.buffer.append(data)
                    if stdin in r:
                        data = stdin.read(1).decode(self.encoding)
                        self.buffer.clear()
                        if len(data) == 0:
                            break

                        # Temporarily revert stdin behavior while callbacks are
                        # active.
                        curtty = termios.tcgetattr(stdin)
                        termios.tcsetattr(stdin, termios.TCSADRAIN, oldtty)
                        is_handled = self._call_key_handlers(key_handlers, data)
                        termios.tcsetattr(stdin, termios.TCSADRAIN, curtty)

                        if not is_handled:
                            if self.send_data is not None:
                                self.send_data.write(data)
                            channel.send(data)
            finally:
                termios.tcsetattr(stdin, termios.TCSADRAIN, oldtty)

    def _open_windows_shell(self, channel, key_handlers, handle_window_size):
        import threading

        def writeall(sock):
            while True:
                data = sock.recv(256)
                if not data:
                    self._dbg(1, 'EOF from remote')
                    break
                self._receive_cb(data)

        writer = threading.Thread(target=writeall, args=(channel,))
        writer.start()

        try:
            while True:
                data = sys.stdin.read(1)
                if not data:
                    break
                if not self._call_key_handlers(key_handlers, data):
                    if self.send_data is not None:
                        self.send_data.write(data)
                    channel.send(data)
        except EOFError:
            self._dbg(1, 'User hit ^Z or F6')

    def _open_shell(self, channel, key_handlers, handle_window_size):
        if _have_termios:
            return self._open_posix_shell(channel, key_handlers, handle_window_size)
        else:
            return self._open_windows_shell(channel, key_handlers, handle_window_size)

    def interact(self, key_handlers=None, handle_window_size=True):
        """
        Opens a simple interactive shell. Returns when the remote host
        sends EOF.
        The optional key handlers are functions that are called whenever
        the user presses a specific key. For example, to catch CTRL+y::

            conn.interact({'\031': mycallback})

        .. WARNING::
           handle_window_size is not supported on Windows platforms.

        :type  key_handlers: dict(str: callable)
        :param key_handlers: A dictionary mapping chars to functions.
        :type  handle_window_size: bool
        :param handle_window_size: Whether the connected host is notified
          when the terminal size changes.
        """
        raise NotImplementedError()

    def close(self, force=False):
        """
        Closes the connection with the remote host.
        """
        if self.log:
            try:
                self.log.close()
            except:
                pass

    def get_host(self):
        """
        Returns the name or address of the currently connected host.

        :rtype:  string
        :return: A name or an address.
        """
        return self.host

    def guess_os(self):
        """
        Returns an identifier that specifies the operating system that is
        running on the remote host. This OS is obtained by watching the
        response of the remote host, such as any messages retrieved during
        the login procedure.

        The result is a best-effort guess that often depends on volatile
        information, so there is no guarantee that it is always correct.

        :rtype:  string
        :return: A string to help identify the remote operating system.
        """
        return self.os_guesser.get('os')
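
# Hedged usage sketch: how the Protocol API above is typically driven. The
# concrete SSH2 adapter and the Account class are assumed to come from the
# surrounding Exscript-style package; adjust the imports to your layout.
from Exscript import Account
from Exscript.protocols import SSH2

conn = SSH2()                           # concrete subclass providing _connect_hook() etc.
conn.set_connect_timeout(10)            # give up if the transport is not up within 10 s
conn.set_timeout(30)                    # expect()/expect_prompt() time out after 30 s
conn.connect('router.example.com')      # open the transport and start guessing the OS
conn.login(Account('admin', 'secret'))  # protocol + app-level authentication, then authorization
conn.execute('show version')            # send the command and wait for the CLI prompt
print(conn.response)                    # response buffer filled by expect_prompt()
conn.close()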
Ejemplo n.º 56
0
class MemoryFile(MemoryFSNode):
    """In-memory file (behaves like a :term:`file object`).

    Parameters
    ----------
    parent : :class:`MemoryFSNode`, optional
        Parent of the node. Will be set to the node itself, if ``None``.

    Attributes
    ----------
    content : bytes
        Content of the file.
    """
    def __init__(self, parent=None):
        stat = Stat()
        stat.st_mode = (
            st.S_IRUSR | st.S_IWUSR | st.S_IRGRP | st.S_IWGRP |
            st.S_IROTH | st.S_IWOTH | st.S_IFREG)
        super(MemoryFile, self).__init__(parent, stat)
        self.content = b''
        self._delegate = None
        self._mode = None

    def open(self, mode='r'):
        """Opens the file for reading or writing.

        Valid mode flags are:

        * ``'r'``: Open file for reading.
        * ``'w'``: Open file for writing.
        * ``'a'``: Open file for appending.
        * ``'+'``: Open file for reading and writing. In combination with
          ``'w'`` the file will be truncated.
        * ``'t'``: Open file in text mode.
        * ``'b'``: Open file in binary mode.

        Parameters
        ----------
        mode : str, optional
            Combination of mode flags (see above) to define the open mode.

        Returns
        -------
        self : :class:`MemoryFile`

        See also
        --------
        io.open
        """
        need_read_perm = 'r' in mode or '+' in mode
        need_write_perm = 'w' in mode or 'a' in mode
        if need_read_perm and not st.S_IMODE(self.status.st_mode) & st.S_IREAD:
            raise OSError(errno.EACCES, "Permission denied.")
        if (need_write_perm and
                not st.S_IMODE(self.status.st_mode) & st.S_IWRITE):
            raise OSError(errno.EACCES, "Permission denied.")

        if 'w' in mode:
            self.content = b''

        self._mode = mode
        if 'b' in mode:
            self._delegate = BytesIO(self.content)
        else:
            self._delegate = StringIO(self.content.decode())
        if 'a' in mode:
            self._delegate.seek(0, os.SEEK_END)
        return self

    def flush(self):
        """Flushes the written data to :attr:`content`."""
        self._delegate.flush()
        if 'b' in self._mode:
            self.content = self._delegate.getvalue()
        else:
            self.content = self._delegate.getvalue().encode()
        self.status.st_size = len(self.content)
        self.status.st_mtime = _get_time()

    def close(self):
        """Flushes and closes the file.

        See also
        --------
        flush
        """
        self.flush()
        self._delegate.close()

    def __getattr__(self, name):
        return getattr(self._delegate, name)

    def __enter__(self):
        return self

    def __exit__(self, err_type, value, traceback):
        self.close()
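
# Hedged usage sketch for MemoryFile; it assumes MemoryFSNode, Stat and
# _get_time are defined in the same module as the class above.
f = MemoryFile()
with f.open('w') as fh:                 # text mode; open() returns the file itself
    fh.write('hello world')             # delegated to the underlying StringIO
assert f.content == b'hello world'      # close() flushed the text back as bytes

with f.open('rb') as fh:                # binary mode reads the raw content
    assert fh.read() == b'hello world'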
Ejemplo n.º 57
0
  def saveAs(self, newfile, options=None):
    """Saves Jpeg with IPTC data to a given file name."""
    assert self._filename is not None
    print("saveAs: self = %s" % self, file=sys.stderr)
    # Open file and snarf data from it.
    fh = self._getfh()
    if not self.fileIsJpeg(fh):
      self.log("Source file is not a Jpeg; I can only save Jpegs. Sorry.")
      return None
    ret = self.jpegCollectFileParts(fh, options)
    self._closefh(fh)
    if ret is None:
      self.log("collectfileparts failed")
      print(self.error, file=sys.stderr)
      raise Exception('collectfileparts failed')

    (start, end, adobe) = ret
    debug(2, 'start: %d, end: %d, adobe:%d' % tuple(map(len, ret)))
    debug(3, self.hexDump(start), len(end))
    debug(3, 'adobe1', adobe)
    if options is not None and 'discardAdobeParts' in options:
      adobe = None
    debug(3, 'adobe2', adobe)

    debug(1, 'writing...')
    # fh = os.tmpfile() ## 20051011 - Windows doesn't like tmpfile ##
    # Open dest file and stuff data there
    # fh.truncate()
    # fh.seek(0, 0)
    # debug(2, self._filepos(fh))
    fh = StringIO()
    if not fh:
      self.log("Can't open output file")
      return None
    debug(3, len(start), len(end))
    fh.write(start)
    # character set
    ch = self.c_charset_r.get(
        self.inp_charset if self.out_charset is None else self.out_charset,
        None)
    # writing the character set is not the best practice - couldn't find the needed place (record) for it yet!
    if SURELY_WRITE_CHARSET_INFO and ch is not None:
      fh.write(pack("!BBBHH", 0x1c, 1, 90, 4, ch))


    debug(2, self._filepos(fh))
    #$self->PhotoshopIIMBlock($adobe, $self->PackedIIMData());
    data = self.photoshopIIMBlock(adobe, self.packedIIMData())
    debug(3, len(data), self.hexDump(data))
    fh.write(data)
    debug(2, self._filepos(fh))
    fh.flush()
    fh.write(end)
    debug(2, self._filepos(fh))
    fh.flush()

    #copy the successfully written file back to the given file
    fh2 = open(newfile, 'wb')
    fh2.truncate()
    fh2.seek(0,0)
    fh.seek(0, 0)
    while 1:
      buf = fh.read(8192)
      if buf is None or len(buf) == 0: break
      fh2.write(buf)
    self._closefh(fh)
    fh2.flush()
    fh2.close()
    return True
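
# Hedged usage sketch for saveAs(): the enclosing class resembles the IPTCInfo
# JPEG/IPTC writer, so the constructor below and the option name are assumptions.
info = IPTCInfo('input.jpg')                               # parse IPTC data from the source Jpeg
info.saveAs('output.jpg', options=['discardAdobeParts'])  # rewrite, dropping Adobe resource blocks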
Ejemplo n.º 58
0
class BaseLoggerTestCase(unittest.TestCase):
    """
    base class for testing the patching of the logging module to support
    PEP-3101 formatting strings. This class itself does no patching of the
    logging module.

    Subclasses should import and/or patch logging in the appropriate manner
    in either setUp or setUpClass

    """
    maxDiff = None

    def setUp(self):
        """
        Setup a string buffer to capture log output
        """

        self.buffer = StringIO()

    def assertLogOutput(self, expected, msg, *args):
        """
        :param expected: the expected log output
        :param msg: the (preformatted) message to pass to the logging statement
        :param args: the arguments to pass to the logging statement
        """

        self.make_buffered_logger().info(msg, *args)
        self.buffer.flush()
        actual = self.buffer.getvalue()
        # log messages always end in a newline, so account for that
        expected += '\n'
        # reset the buffer for future tests
        self.buffer = StringIO()

        self.assertEqual(actual, expected, 'message not expected:'
                                           ' %r != %r' % (actual, expected))

    def make_buffered_logger(self, string_buffer=None, name='test_log'):
        """
        return a new logger instance, with the buffer set to buffer
        :param string_buffer: a StringIO instance, if None (default), will use
            self.buffer
        :param name: the name of the log (default 'test_log')
        :return: logger instance with the handler set to stream to buffer
        """
        # if this has already been patched, this additional import won't do
        # anything, which is fine
        import logging

        if string_buffer is None:
            string_buffer = self.buffer

        log_format = logging.Formatter('%(message)s')
        log_handler = logging.StreamHandler(string_buffer)
        _log = logging.getLogger(name)
        log_handler.setFormatter(log_format)
        _log.addHandler(log_handler)
        _log.setLevel(logging.DEBUG)

        return _log

    def test_basic_message(self):
        """
        test that a message with no formatting works as expected
        """
        expected = msg = 'hello world!'
        self.assertEqual(expected, msg)
        self.assertLogOutput(expected, msg)

    def test_modulo_syntax(self):
        """
        test that a message with a format using %-syntax works as expected
        """

        test_items = [
            ('hello world!', '%s %s!', ('hello', 'world')),
            ('hello world!', '%(word1)s %(word2)s!', dict(word1='hello',
                                                          word2='world')),
            ('Dramatis Personæ', '%s %s', ('Dramatis', 'Personæ'))
        ]
            
        for expected, msg, args in test_items:
            self.assertEqual(expected, msg % args)
            if isinstance(args, dict):
                args = (args,)
            self.assertLogOutput(expected, msg, *args)

        if sys.version_info < (3, 0):
            # special case test for issue #4
            expected, msg, args = ('Dramatis Personæ', b'%s %s',
                                   ('Dramatis', 'Personæ'))
            self.assertLogOutput(expected, msg, *args)

    def _pep3101_test(self):
        """
        convenience function for testing pep3101 syntax and allowing test cases
        to assert based on the results
        """

        test_dict = dict(word1='hello', word2='world')
        test_items = [
            ('hello world!', '{0} {1}!', ('hello', 'world')),
            (repr(test_dict), '{0!r}', (test_dict,)),
            ('Dramatis Personæ', '{0} {1}', ('Dramatis', 'Personæ'))
        ]
        
        for expected, msg, args in test_items:
            self.assertEqual(expected, msg.format(*args))
            self.assertLogOutput(expected, msg, *args)

        if sys.version_info < (3, 0):
            # special case test for issue #4
            expected, msg, args = ('Dramatis Personæ', b'{0} {1}',
                                   ('Dramatis', 'Personæ'))
            with self.assertRaises(UnicodeEncodeError):
                self.assertEqual(expected, msg.format(*args))
            self.assertLogOutput(expected, msg, *args)

    def test_pep3101_syntax(self):
        """
        test that using str.format syntax fails as expected when unpatched
        """

        self.assertRaises(AssertionError, self._pep3101_test)
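
# Hedged sketch of how the buffered logger helper above captures output; the
# test case is driven by hand here purely for illustration.
case = BaseLoggerTestCase('test_basic_message')
case.setUp()
log = case.make_buffered_logger(name='demo_log')
log.info('hello %s', 'world')
print(case.buffer.getvalue())   # -> 'hello world\n', written via the StreamHandler on the buffer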
Ejemplo n.º 59
0
 def flush(self):
     StringIO.flush(self)
     self.stream.flush()
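
# The flush() above implies a StringIO subclass that mirrors its buffered data
# to a second stream. A minimal sketch of such a wrapper, with assumed names:
class TeeStringIO(StringIO):
    def __init__(self, stream):
        StringIO.__init__(self)
        self.stream = stream            # secondary target, e.g. sys.stdout

    def write(self, data):
        StringIO.write(self, data)      # keep an in-memory copy
        self.stream.write(data)         # and forward it to the wrapped stream

    def flush(self):
        StringIO.flush(self)
        self.stream.flush()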