Example #1
   def test_two_iso_same_exon(self):
      '''SEMANTIC:
      This is an expanded test for the function get_intron_sequences.
      There is one chromosome and one gene, but three different isoforms. Isoforms one and two
      share the same three exons; the third isoform has a single exon. We expect the system to
      report the first isoform with three exons.
      '''
      intron_sequence_file_stream = StringIO()
      five_prime_file_stream = StringIO()
      five_prime_file_stream.close = MagicMock(name= 'close')
      intron_sequence_file_stream.close = MagicMock(name= 'close')


      res = intron.get_intron_sequences(gtf_file = StringIO(two_iso_same_exon_gtf_file_contents),
                                                   fasta_file = StringIO(two_iso_same_exon_fasta_file_contents),
                                                   five_prime_slice_len = 1 ,
                                                   intron_sequence_file = intron_sequence_file_stream,
                                                   five_prime_slice_file = five_prime_file_stream )

      five_prime_file = FastaFile(StringIO(five_prime_file_stream.getvalue()))

      required_five_prime_entries = {
          'chr1_GENE_A_1_+_5_8':
              FastaEntry(header='chr1_GENE_A_1_+_5_8', sequence='T'),
          'chr1_GENE_A_2_+_13_16':
              FastaEntry(header='chr1_GENE_A_2_+_13_16', sequence='T'),
          'chr1_GENE_A_3_+_21_24':
              FastaEntry(header='chr1_GENE_A_3_+_21_24', sequence='T')}

      for entry in five_prime_file:
          try:
              self.assertEqual(required_five_prime_entries[entry.header].header , entry.header)
              self.assertEqual(required_five_prime_entries[entry.header].sequence , entry.sequence)
          except KeyError:
              print("Couldn't find an entry with header", entry.header)
              raise
Example #2
	def marshal( self ):
		oStreamString = StringIO()
		oStreamString.write(unicode('<?xml version="1.0" ?>\n'))
		self.export( oStreamString, 0, name_="XSDataResultWaitMultiFile" )
		oStringXML = oStreamString.getvalue()
		oStreamString.close()
		return oStringXML
Example #3
 def marshal( self ):
     oStreamString = StringIO()
     oStreamString.write(unicode('<?xml version="1.0" ?>\n'))
     self.export( oStreamString, 0, name_="XSDataOutputXdsBurnStrategy" )
     oStringXML = oStreamString.getvalue()
     oStreamString.close()
     return oStringXML
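The marshal methods repeated throughout this listing differ only in the name_ argument passed to export. A generic sketch of the same buffer pattern (Python 2, as the unicode() calls imply; the function name is illustrative):

def marshal_to_xml(exportable, root_name):
    # Serialize an XSData object to an XML string via an in-memory buffer.
    stream = StringIO()
    stream.write(unicode('<?xml version="1.0" ?>\n'))
    exportable.export(stream, 0, name_=root_name)
    xml = stream.getvalue()
    stream.close()
    return xml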
Example #4
   def test_neg_strand(self):
      '''SEMANTIC:      
      This is a simple test for the get_intron_sequences function. 
      The data is from the negative strand of the DNA.
      '''
      intron_sequence_file_stream = StringIO()
      five_prime_file_stream = StringIO()
      five_prime_file_stream.close = MagicMock(name= 'close')
      intron_sequence_file_stream.close = MagicMock(name= 'close')


      res = intron.get_intron_sequences(gtf_file = StringIO(neg_strand_gtf_file_contents),
                                                   fasta_file = StringIO(neg_strand_fasta_file_contents),
                                                   five_prime_slice_len = 4 ,
                                                   intron_sequence_file = intron_sequence_file_stream,
                                                   five_prime_slice_file = five_prime_file_stream )

      five_prime_file = FastaFile(StringIO(five_prime_file_stream.getvalue()))

      required_five_prime_entries = {
          'chr13_GENE_NEG_1_-_5_8':
              FastaEntry(header='chr13_GENE_NEG_1_-_5_8', sequence='ATGC'),
          'chr13_GENE_NEG_2_-_13_16':
              FastaEntry(header='chr13_GENE_NEG_2_-_13_16', sequence='GCAA')}

      for entry in five_prime_file:
          try:
              self.assertEqual(required_five_prime_entries[entry.header].header , entry.header)
              self.assertEqual(required_five_prime_entries[entry.header].sequence , entry.sequence)
          except KeyError:
              print("Couldn't find an entry with header", entry.header)
              raise
Example #5
    def run_pip_main(cls, *args, **kwargs):
        import pip

        args = list(args)
        check_output = kwargs.pop('check_output', False)

        if check_output:
            from io import StringIO

            out = StringIO()
            sys.stdout = out

            try:
                pip.main(args)
            except:
                traceback.print_exc()
            finally:
                sys.stdout = sys.__stdout__

                out.seek(0)
                pipdata = out.read()
                out.close()

                print(pipdata)
                return pipdata
        else:
            return pip.main(args)
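A hedged call sketch for run_pip_main; the enclosing class is not shown in this snippet, so PipRunner is a stand-in name:

listing = PipRunner.run_pip_main('list', check_output=True)  # captured stdout of `pip list`
status = PipRunner.run_pip_main('install', 'requests')       # plain pip return code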
Example #6
def cli_success(resp):
    data = resp["result"]["random"]["data"]
    msg = StringIO()
    msg.write("\n".join(map(str, data)))
    output = msg.getvalue()
    msg.close()
    return output
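A minimal usage sketch for cli_success, assuming only the response shape the function itself reads (resp["result"]["random"]["data"], a random.org-style JSON-RPC reply):

resp = {"result": {"random": {"data": [4, 8, 15]}}}
assert cli_success(resp) == "4\n8\n15"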
Example #7
class DistantInteractiveConsole(InteractiveConsole):
    def __init__(self, ipc):
        InteractiveConsole.__init__(self, globals())

        self.ipc = ipc
        self.set_buffer()

    def set_buffer(self):
        self.out_buffer = StringIO()
        sys.stdout = sys.stderr = self.out_buffer

    def unset_buffer(self):
        sys.stdout, sys.stderr = sys.__stdout__, sys.__stderr__
        value = self.out_buffer.getvalue()
        self.out_buffer.close()

        return value

    def raw_input(self, prompt=""):
        output = self.unset_buffer()
        # payload format: 'prompt' ? '\n' 'output'
        self.ipc.send('\n'.join((prompt, output)))

        cmd = self.ipc.recv()

        self.set_buffer()

        return cmd
Example #8
def _run_complexity_analysis(on_ci):
    """Generates cyclomatic complexity reports for the package

    :param bool on_ci: Indicates whether an automated tool is running this operation. Output will be customized for
                    machine readability
    """
    modlog.debug("Running complexity analysis")

    # generate cyclomatic complexities for source files in XML format for integration with external tools
    pyjen_path = os.path.join(os.getcwd(), "pyjen")
    from radon.cli import cc

    # TODO: output in XML format when running on CI
    standard_output = StringIO()
    with redirect_stdout(standard_output):
        modlog.debug("Calling radon.cc")
        cc(paths=[pyjen_path], show_complexity=True, show_closures=True, total_average=True, xml=on_ci)

    modlog.debug("Writing report to disk")
    cc_report = os.path.join(log_folder, "radon_complexity.xml")
    with open(cc_report, "w") as fh:
        fh.write(standard_output.getvalue())
    standard_output.close()

    modlog.info("Cyclomatic complexity analysis complete. See " + os.path.relpath(cc_report))
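The capture idiom above, isolated as a stdlib-only sketch: redirect_stdout temporarily rebinds sys.stdout to a StringIO buffer, and getvalue() recovers everything printed inside the with block.

from contextlib import redirect_stdout
from io import StringIO

buf = StringIO()
with redirect_stdout(buf):
    print("captured")      # goes into buf, not the terminal
text = buf.getvalue()      # "captured\n"
buf.close()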
Example #9
def extract_diff_header(status, deleted,
                        with_diff_header, suppress_header, diffoutput):
    headers = []

    if diffoutput.startswith('Submodule'):
        if with_diff_header:
            return ('', diffoutput)
        else:
            return diffoutput

    start = False
    del_tag = 'deleted file mode '
    output = StringIO()

    diff = diffoutput.split('\n')
    for line in diff:
        if not start and '@@' == line[:2] and '@@' in line[2:]:
            start = True
        if start or (deleted and del_tag in line):
            output.write(line + '\n')
        else:
            if with_diff_header:
                headers.append(line)
            elif not suppress_header:
                output.write(line + '\n')

    result = output.getvalue()
    output.close()

    if with_diff_header:
        return('\n'.join(headers), result)
    else:
        return result
Example #10
 def __multiline_str(self):
     sio = StringIO()
     sio.write('{0}\n{1}:'.format('=' * self.SEPARATOR_LEN, str(self.file)))
     sio.write(' {0}: [{1}:{2}]'.format(self.linenum, self.match_start_index,
                                        self.match_end_index))
     if self.contained:
         sio.write(': {0}'.format(self.contained))
     sio.write('\n{0}\n'.format('-' * self.SEPARATOR_LEN))
     line_format = ' {0:>' + str(self.linenum_padding()) + '} | {1}\n'
     current_linenum = self.linenum
     if self.lines_before:
         current_linenum -= len(self.lines_before)
         for line_before in self.lines_before:
             sio.write(' ' + line_format.format(current_linenum,
                                                strip_newlines(line_before)))
             current_linenum += 1
     sio.write('>' + line_format.format(self.linenum,
                                        strip_newlines(self.line)))
     if self.lines_after:
         current_linenum = self.linenum + 1
         for line_after in self.lines_after:
             sio.write(' ' + line_format.format(current_linenum,
                                                strip_newlines(line_after)))
             current_linenum += 1
     s = sio.getvalue()
     sio.close()
     return s
Example #11
        def _on_done():
            buf = StringIO()
            for item in results:
                tabulate(item, output=buf)

            new_view = sublime.active_window().new_file()
            new_view.set_scratch(True)
            new_view.settings().set('word_wrap', False)
            new_view.settings().set('line_numbers', False)
            new_view.settings().set('gutter', False)
            new_view.set_name('LaTeXTools System Check')
            if sublime.version() < '3103':
                new_view.settings().set(
                    'syntax',
                    'Packages/LaTeXTools/system_check.hidden-tmLanguage'
                )
            else:
                new_view.settings().set(
                    'syntax', 'Packages/LaTeXTools/system_check.sublime-syntax'
                )

            new_view.set_encoding('UTF-8')

            new_view.run_command(
                'latextools_insert_text',
                {'text': buf.getvalue().rstrip()}
            )

            new_view.set_read_only(True)

            buf.close()
Example #12
	def marshal( self ):
		oStreamString = StringIO()
		oStreamString.write(unicode('<?xml version="1.0" ?>\n'))
		self.export( oStreamString, 0, name_="XSDataNexusArrayGroup" )
		oStringXML = oStreamString.getvalue()
		oStreamString.close()
		return oStringXML
Example #13
def CreateLDIF(dn,record,base64_attrs=None,cols=76):
  """
  Create LDIF single formatted record including trailing empty line.
  This is a compatibility function.

  dn
        string-representation of distinguished name
  record
        Either a dictionary holding the LDAP entry {attrtype:record}
        or a list with a modify list like for LDAPObject.modify().
  base64_attrs
        list of attribute types to be base64-encoded in any case
  cols
        Specifies how many columns a line may have before it's
        folded into many lines.
  """
  warnings.warn(
    'ldif.CreateLDIF() is deprecated. Use LDIFWriter.unparse() instead. It '
    'will be removed in python-ldap 3.1',
    category=DeprecationWarning,
    stacklevel=2,
  )
  f = StringIO()
  ldif_writer = LDIFWriter(f,base64_attrs,cols,'\n')
  ldif_writer.unparse(dn,record)
  s = f.getvalue()
  f.close()
  return s
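A hedged usage sketch for CreateLDIF; the DN and attributes are illustrative, with values as plain strings as in the python-ldap 2.x API this code targets:

ldif_text = CreateLDIF(
    'cn=Jane Doe,dc=example,dc=com',
    {'cn': ['Jane Doe'], 'objectClass': ['person']},
)
# yields one LDIF record ending in a blank line, e.g.
# dn: cn=Jane Doe,dc=example,dc=com
# cn: Jane Doe
# objectClass: person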
Example #14
 def marshal( self ):
     oStreamString = StringIO()
     oStreamString.write(unicode('<?xml version="1.0" ?>\n'))
     self.export( oStreamString, 0, name_="XSDataInputControlBackground3D" )
     oStringXML = oStreamString.getvalue()
     oStreamString.close()
     return oStringXML
Example #15
def limit_featurecollection(content, limit=200):
    """
    Parse a WFS FeatureCollection XML string and produce a
    similar string with at most `limit` features (200 by default).
    """

    parser = make_parser()

    _input = BytesIO(content)

    input_source = InputSource()
    input_source.setByteStream(_input)

    output = StringIO()
    downstream = XMLGenerator(output, 'utf-8')

    _filter = _XMLFilterLimit(parser, downstream, limit=limit)
    _filter.parse(input_source)

    result = output.getvalue()

    _input.close()
    output.close()

    return result
Example #16
	def marshal( self ):
		oStreamString = StringIO()
		oStreamString.write(unicode('<?xml version="1.0" ?>\n'))
		self.export( oStreamString, 0, name_="XSDataInputInterfacev2_2" )
		oStringXML = oStreamString.getvalue()
		oStreamString.close()
		return oStringXML
Example #17
class PythonCapturer(object):
    def __init__(self, stdout=True):
        if stdout:
            self._original = sys.stdout
            self._set_stream = self._set_stdout
        else:
            self._original = sys.stderr
            self._set_stream = self._set_stderr
        self._stream = StringIO()
        self._set_stream(self._stream)

    def _set_stdout(self, stream):
        sys.stdout = stream

    def _set_stderr(self, stream):
        sys.stderr = stream

    def release(self):
        # Original stream must be restored before closing the current
        self._set_stream(self._original)
        try:
            return self._get_value(self._stream)
        finally:
            self._stream.close()

    def _get_value(self, stream):
        try:
            return decode_output(stream.getvalue())
        except UnicodeError:
            stream.buf = decode_output(stream.buf)
            stream.buflist = [decode_output(item) for item in stream.buflist]
            return stream.getvalue()
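A short usage sketch for PythonCapturer, driving only the methods defined above (decode_output comes from the surrounding module):

capturer = PythonCapturer(stdout=True)
print('hello')                 # written to the internal StringIO
captured = capturer.release()  # restores sys.stdout and returns 'hello\n'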
Example #18
    def _check_ssh(self, host, pk=None, user='******'):
        """
        Check for ssh availability on a host.

        :type host: ``str``
        :param host: Hostname or IP address of the host to check.

        :type user: ``str``
        :param user: Username to use when trying to login.

        :type pk: ``str``
        :param pk: Private portion of an ssh key.

        :rtype: ``bool``
        :return: True if ssh connection was successful.
        """
        ssh = paramiko.SSHClient()
        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        pkey = None
        if pk:
            key_file_object = StringIO(pk)
            pkey = paramiko.RSAKey.from_private_key(key_file_object)
            key_file_object.close()
        try:
            log.info("Trying to ssh {0}@{1}".format(user, host))
            ssh.connect(host, username=user, pkey=pkey)
            self._remove_known_host(host)
            return True
        except (BadHostKeyException, AuthenticationException,
                SSHException, socket.error) as e:
            log.warn("ssh connection exception for {0}: {1}".format(host, e))
        self._remove_known_host(host)
        return False
Example #19
 def __init__(self,host,credential):
     self.client = paramiko.SSHClient()
     self.client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
     if 'password' not in credential:
         credential['password'] = None
     if 'port' not in credential:
         credential['port'] = 22
     if 'private_key' in credential:
         private_key_file=StringIO(credential['private_key'])
         credential['pkey']=paramiko.RSAKey.from_private_key(private_key_file)
         private_key_file.close()
     else:
         credential['pkey']=None
     mustend = time.time() + 90
     e=Exception()
     while time.time() < mustend:
         time.sleep(1)
         try:
             self.client.connect(
                 host,
                 username=credential['username'],
                 password=credential['password'],
                 pkey=credential['pkey'],
                 port=credential['port'],
                 timeout=None
             )
         except Exception as ex:
             e=ex
         else:
             return
     raise e
Example #20
def runTest():
    savedStderr = sys.stderr
    totalTests = 0
    hadError = False
    for modName in braille.__all__:
        myIO = StringIO()
        sys.stderr = myIO
        run_module('music21.braille.' + modName, run_name='__main__')
        errOutput = myIO.getvalue()
        myIO.close()
        for thisLine in errOutput.splitlines():
            if re.match(r'^\.*$', thisLine):
                # all dots or blank line
                continue
            if re.match(r'^\-+$', thisLine):
                # separator
                continue
            if thisLine == 'OK':
                continue
            numTests = re.match(r'^Ran (\d+) tests? in ', thisLine)
            if numTests:
                totalTests += int(numTests.group(1))
                continue
            hadError = True
            print(thisLine)
    print("Total tests: ", totalTests)
    if hadError:
        print("ERRORS FOUND")
    else:
        print("All good!")

    sys.stderr = savedStderr
Example #21
    def redraw(self):
        """Create the pulldown menu."""
        text = StringIO()
        text.write(TOP)

        self.undisplay()

        actions = []
        count = 0

        if len(self.bookmarks.get()) > 0:
            text.write('<placeholder name="GoToBook">')
            for item in self.bookmarks.get():
                try:
                    label, obj = self.make_label(item)
                    func = self.callback(item)
                    action_id = "BM:%s" % item
                    actions.append((action_id, None, label, None, None, func))
                    text.write('<menuitem action="%s"/>' % action_id)
                    count += 1
                except AttributeError:
                    pass
            text.write('</placeholder>')

        text.write(BTM)
        self.action_group.add_actions(actions)
        self.uistate.uimanager.insert_action_group(self.action_group, 1)
        self.active = self.uistate.uimanager.add_ui_from_string(text.getvalue())
        self.uistate.uimanager.ensure_update()
        text.close()
Example #22
    def test_warning_raised(self):
        fh = StringIO(u'1\n2\n3\n4')

        @self.module.register_sniffer('format')
        def sniffer(fh):
            self.was_verified = True
            return False, {}

        @self.module.register_reader('format', TestClass)
        def reader(fh):
            return TestClass([int(x) for x in fh.read().split('\n')])

        with warnings.catch_warnings(record=True):
            warnings.simplefilter("error")
            with self.assertRaises(FormatIdentificationWarning):
                self.was_verified = False
                instance = self.module.read(fh, format='format',
                                            into=TestClass, verify=True)
                self.assertEqual(TestClass([1, 2, 3, 4]), instance)
                self.assertTrue(self.was_verified)

        with warnings.catch_warnings(record=True):
            warnings.simplefilter("error")
            with self.assertRaises(FormatIdentificationWarning):
                self.was_verified = False
                instance = self.module.read(fh, format='format',
                                            into=TestClass)
                self.assertEqual(TestClass([1, 2, 3, 4]), instance)
                self.assertTrue(self.was_verified)

        fh.close()
Example #23
def cbFun(sendRequesthandle, errorIndication, errorStatus, errorIndex,
          varBindTable, cbCtx):
    global oid_list
    global maxRepetitions
    if errorIndication:
        print(errorIndication)
        return  # stop on error
    if errorStatus:
        print('%s at %s' % (
            errorStatus.prettyPrint(),
            errorIndex and varBindTable[-1][int(errorIndex)-1] or '?'
            )
        )
        return  # stop on error
    #print(varBindTable)
    for varBindRow in varBindTable:
        if maxRepetitions == 0:  # counter exhausted
            return  # stop and return
        else:
            for oid, val in varBindTable[0]:
                o = StringIO()
                print(oid, file=o)
                oid_get = o.getvalue().strip()  # round-trip through StringIO to get the string form, then read it back
                o.close()
                v = StringIO()
                print(val, file=v)
                val_get = v.getvalue().strip()  # round-trip through StringIO to get the string form, then read it back
                v.close()
                oid_list.append((oid_get, val_get))  # append the (oid, val) pair to the global oid_list
        maxRepetitions -= 1  # decrement the counter
    return True  # signal the dispatcher to continue walking
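The print-into-StringIO round trips above only recover each object's string form; a direct equivalent, assuming the SNMP objects render the same way under str():

for oid, val in varBindTable[0]:
    oid_list.append((str(oid).strip(), str(val).strip()))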
Example #24
def makeFilesManifest(destination, fileName):
    out = StringIO()
    for f in deepListDir(destination):
        print(f, file=out)
    with open(os.path.join(destination, fileName), 'w') as f:
        f.write(out.getvalue())
    out.close()
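Because print() accepts any writable file object, the intermediate StringIO in makeFilesManifest can be dropped; a sketch of the direct-write variant, assuming deepListDir yields path strings and os is imported as above:

def make_files_manifest_direct(destination, fileName):
    with open(os.path.join(destination, fileName), 'w') as f:
        for path in deepListDir(destination):
            print(path, file=f)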
Example #25
    def save(self):
        """
        Create the writer & save
        """
        # GH21227 internal compression is not used when file-like passed.
        if self.compression and hasattr(self.path_or_buf, 'write'):
            msg = ("compression has no effect when passing file-like "
                   "object as input.")
            warnings.warn(msg, RuntimeWarning, stacklevel=2)

        # when zip compression is called.
        is_zip = isinstance(self.path_or_buf, ZipFile) or (
            not hasattr(self.path_or_buf, 'write')
            and self.compression == 'zip')

        if is_zip:
            # zipfile doesn't support writing string to archive. uses string
            # buffer to receive csv writing and dump into zip compression
            # file handle. GH21241, GH21118
            f = StringIO()
            close = False
        elif hasattr(self.path_or_buf, 'write'):
            f = self.path_or_buf
            close = False
        else:
            f, handles = _get_handle(self.path_or_buf, self.mode,
                                     encoding=self.encoding,
                                     compression=self.compression)
            close = True

        try:
            writer_kwargs = dict(lineterminator=self.line_terminator,
                                 delimiter=self.sep, quoting=self.quoting,
                                 doublequote=self.doublequote,
                                 escapechar=self.escapechar,
                                 quotechar=self.quotechar)
            if self.encoding == 'ascii':
                self.writer = csvlib.writer(f, **writer_kwargs)
            else:
                writer_kwargs['encoding'] = self.encoding
                self.writer = UnicodeWriter(f, **writer_kwargs)

            self._save()

        finally:
            if is_zip:
                # GH17778 handles zip compression separately.
                buf = f.getvalue()
                if hasattr(self.path_or_buf, 'write'):
                    self.path_or_buf.write(buf)
                else:
                    f, handles = _get_handle(self.path_or_buf, self.mode,
                                             encoding=self.encoding,
                                             compression=self.compression)
                    f.write(buf)
                    close = True
            if close:
                f.close()
                for _fh in handles:
                    _fh.close()
Example #26
def run_controller(puzzle, commands, filename=''):
    """Simulate running the game on a given puzzle and set of commands.

    If a file name is specified, write output to that file.
    Otherwise, print to the screen.

    Precondition: <commands> must be a sequence of commands which causes
    the controller to terminate (e.g., by entering 'exit' or ':SOLVE').

    @type puzzle: Puzzle
    @type commands: list[str]
    @rtype: None
    """
    out = StringIO('')
    sys.stdout = out
    sys.stdin = StringIO('\n'.join(commands))
    Controller(puzzle)
    r = out.getvalue()
    out.close()
    sys.stdin = sys.__stdin__
    sys.stdout = sys.__stdout__

    outputs = r.split('Enter a command:\n> ')
    messages = []
    for i in range(len(outputs)):
        messages.append(outputs[i])
        if i < len(commands):
            messages.append('Enter a command:\n> ')
            messages.append(commands[i] + '\n')

    if filename == '':
        print(''.join(messages))
    else:
        with open(filename, 'w') as result_file:
            result_file.writelines(messages)
Example #27
def get_stats(statsfile, cluster_alg, oldcols, newcols, sel_cond2=None, sel_val2=None):

    # read the results table and select the right rows
    res_tab = Table.read(statsfile, format='ascii.commented_header')
    if sel_cond2 is not None:
        sel_cond = np.logical_and(res_tab['clustering']== cluster_alg, res_tab[sel_cond2]==sel_val2)
    else:
        sel_cond = res_tab['clustering']== cluster_alg
    res_subtab = res_tab[sel_cond][oldcols]

    # figure out the number of clusters
    if sel_cond2 is None:
        ncl = res_subtab['n_clust'].data
        ncl_list = np.unique(ncl).tolist()
    else:
        ncl_list = []
        
    # rename some columns to get rid of underscores
    for i,col in enumerate(res_subtab.colnames):
        if col != newcols[i]:
            res_subtab.rename_column(col,newcols[i])

    # write the table to a string that pylatex can use
    tmptex = StringIO()
    res_subtab.write(tmptex,format='latex',latexdict = {'tablealign': 'h'})
    tmpstr = NoEscape(tmptex.getvalue())
    tmptex.close()
    return(tmpstr, ncl_list)
Example #28
    def import_from_aster(cls, loc):
        """Imports a spectral library from the ASTER Spectral Library (http://speclib.jpl.nasa.gov/)

        Arguments:

          * ``loc`` -- Location of the data to import. Can either be a URL (eg. http://speclib.jpl.nasa.gov/speclibdata/jhu.becknic.vegetation.trees.conifers.solid.conifer.spectrum.txt) or a file path.

        Returns:

        An ``ndarray`` with two columns: wavelength (um) and reflectance (fraction)

        Example usage::

           from Py6S import *
           s = SixS()
           s.ground_reflectance = GroundReflectance.HomogeneousLambertian(Spectra.import_from_aster("http://speclib.jpl.nasa.gov/speclibdata/jhu.becknic.vegetation.trees.conifers.solid.conifer.spectrum.txt"))
           s.run()
           # Bear in mind this will produce a result for a single Wavelength
           # To see what the whole spectrum will look like after atmospheric
           # radiative transfer has taken place you must run for multiple wavelengths
           # For example
           wavelengths, reflectances = SixSHelpers.Wavelengths.run_vnir(s, output_name="apparent_radiance")

        """
        if loc.startswith("http://"):
            # urlopen returns bytes in Python 3; decode before wrapping in StringIO
            data = urllib.request.urlopen(loc).read().decode()
            f = StringIO(data)
        else:
            f = open(loc, "r")

        npdata = np.loadtxt(f, skiprows=26)
        f.close()
        npdata[:, 1] = npdata[:, 1] / 100
        return npdata
Example #29
def xmlparser(xml, objectify=True):
    """ Parse xml

    :param xml: XML element
    :type xml: Union[text_type, lxml.etree._Element]
    :rtype: lxml.etree._Element
    :returns: An element object
    :raises: TypeError if element is not in accepted type

    """
    doclose = None
    if isinstance(xml, (etree._Element, ObjectifiedElement, etree._ElementTree)):
        return xml
    elif isinstance(xml, text_type):
        xml = StringIO(xml)
        doclose = True
    elif not isinstance(xml, IOBase):
        raise TypeError("Unsupported type of resource {}".format(type(xml)))

    if objectify:
        parsed = etree.parse(xml).getroot()
    else:
        parsed = parse(xml, parser=__parser__)
    if doclose:
        xml.close()
    return parsed
Example #30
 def __init__(self):
     resource = os.path.join('data', 'jeol_transition_data.csv')
     fileobj = pkg_resources.resource_stream('pyxray', resource) #@UndefinedVariable
     buffer = StringIO(fileobj.read().decode('ascii'))
     _BaseTransitionDatabase.__init__(self, buffer)
     fileobj.close()
     buffer.close()
Example #31
    def parse_known_args(self, args = None, namespace = None,
                         config_file_contents = None, env_vars = os.environ):
        """Supports all the same args as the ArgumentParser.parse_args(..),
        as well as the following additional args.

        Additional Args:
            args: a list of args as in argparse, or a string (eg. "-x -y bla")
            config_file_contents: String. Used for testing.
            env_vars: Dictionary. Used for testing.
        """
        if args is None:
            args = sys.argv[1:]
        elif isinstance(args, str):
            args = args.split()
        else:
            args = list(args)

        # normalize args by converting args like --key=value to --key value
        normalized_args = list()
        for arg in args:
            if arg and arg[0] in self.prefix_chars and '=' in arg:
                key, value =  arg.split('=', 1)
                normalized_args.append(key)
                normalized_args.append(value)
            else:
                normalized_args.append(arg)
        args = normalized_args

        for a in self._actions:
            a.is_positional_arg = not a.option_strings

        # maps a string describing the source (eg. env var) to a settings dict
        # to keep track of where values came from (used by print_values()).
        # The settings dicts for env vars and config files will then map
        # the config key to an (argparse Action obj, string value) 2-tuple.
        self._source_to_settings = OrderedDict()
        if args:
            a_v_pair = (None, list(args))  # copy args list to isolate changes
            self._source_to_settings[_COMMAND_LINE_SOURCE_KEY] = {'': a_v_pair}

        # handle auto_env_var_prefix __init__ arg by setting a.env_var as needed
        if self._auto_env_var_prefix is not None:
            for a in self._actions:
                config_file_keys = self.get_possible_config_keys(a)
                if config_file_keys and not (a.env_var or a.is_positional_arg
                    or a.is_config_file_arg or a.is_write_out_config_file_arg or
                    isinstance(a, argparse._VersionAction) or
                    isinstance(a, argparse._HelpAction)):
                    stripped_config_file_key = config_file_keys[0].strip(
                        self.prefix_chars)
                    a.env_var = (self._auto_env_var_prefix +
                                 stripped_config_file_key).replace('-', '_').upper()

        # add env var settings to the commandline that aren't there already
        env_var_args = []
        nargs = False
        actions_with_env_var_values = [a for a in self._actions
            if not a.is_positional_arg and a.env_var and a.env_var in env_vars
                and not already_on_command_line(args, a.option_strings)]
        for action in actions_with_env_var_values:
            key = action.env_var
            value = env_vars[key]
            # Make list-string into list.
            if action.nargs or isinstance(action, argparse._AppendAction):
                nargs = True
                element_capture = re.match(r'\[(.*)\]', value)
                if element_capture:
                    value = [val.strip() for val in element_capture.group(1).split(',') if val.strip()]
            env_var_args += self.convert_item_to_command_line_arg(
                action, key, value)

        if nargs:
            args = args + env_var_args
        else:
            args = env_var_args + args

        if env_var_args:
            self._source_to_settings[_ENV_VAR_SOURCE_KEY] = OrderedDict(
                [(a.env_var, (a, env_vars[a.env_var]))
                    for a in actions_with_env_var_values])

        # before parsing any config files, check if -h was specified.
        supports_help_arg = any(
            a for a in self._actions if isinstance(a, argparse._HelpAction))
        skip_config_file_parsing = supports_help_arg and (
            "-h" in args or "--help" in args)

        # prepare for reading config file(s)
        known_config_keys = dict((config_key, action) for action in self._actions
            for config_key in self.get_possible_config_keys(action))

        # open the config file(s)
        config_streams = []
        if config_file_contents is not None:
            stream = StringIO(config_file_contents)
            stream.name = "method arg"
            config_streams = [stream]
        elif not skip_config_file_parsing:
            config_streams = self._open_config_files(args)

        # parse each config file
        for stream in reversed(config_streams):
            try:
                config_items = self._config_file_parser.parse(stream)
            except ConfigFileParserException as e:
                self.error(e)
            finally:
                if hasattr(stream, "close"):
                    stream.close()

            # add each config item to the commandline unless it's there already
            config_args = []
            nargs = False
            for key, value in config_items.items():
                if key in known_config_keys:
                    action = known_config_keys[key]
                    discard_this_key = already_on_command_line(
                        args, action.option_strings)
                else:
                    action = None
                    discard_this_key = self._ignore_unknown_config_file_keys or \
                        already_on_command_line(
                            args,
                            [self.get_command_line_key_for_unknown_config_file_setting(key)])

                if not discard_this_key:
                    config_args += self.convert_item_to_command_line_arg(
                        action, key, value)
                    source_key = "%s|%s" %(_CONFIG_FILE_SOURCE_KEY, stream.name)
                    if source_key not in self._source_to_settings:
                        self._source_to_settings[source_key] = OrderedDict()
                    self._source_to_settings[source_key][key] = (action, value)
                    if (action and action.nargs or
                        isinstance(action, argparse._AppendAction)):
                        nargs = True

            if nargs:
                args = args + config_args
            else:
                args = config_args + args

        # save default settings for use by print_values()
        default_settings = OrderedDict()
        for action in self._actions:
            cares_about_default_value = (not action.is_positional_arg or
                action.nargs in [OPTIONAL, ZERO_OR_MORE])
            if (already_on_command_line(args, action.option_strings) or
                    not cares_about_default_value or
                    action.default is None or
                    action.default == SUPPRESS or
                    isinstance(action, ACTION_TYPES_THAT_DONT_NEED_A_VALUE)):
                continue
            else:
                if action.option_strings:
                    key = action.option_strings[-1]
                else:
                    key = action.dest
                default_settings[key] = (action, str(action.default))

        if default_settings:
            self._source_to_settings[_DEFAULTS_SOURCE_KEY] = default_settings

        # parse all args (including commandline, config file, and env var)
        namespace, unknown_args = argparse.ArgumentParser.parse_known_args(
            self, args=args, namespace=namespace)
        # handle any args that have is_write_out_config_file_arg set to true
        # check if the user specified this arg on the commandline
        output_file_paths = [getattr(namespace, a.dest, None) for a in self._actions
                             if getattr(a, "is_write_out_config_file_arg", False)]
        output_file_paths = [a for a in output_file_paths if a is not None]
        self.write_config_file(namespace, output_file_paths, exit_after=True)
        return namespace, unknown_args
Example #32
    def write_html(self,
                   xml_path,
                   html_path,
                   json_path,
                   write_append='w',
                   debug_liv='0'):
        """fa il parse del file xml_path scrive i files:
            nel formato comapatto: <html_path>
            formato indentato <html_name>_f.html
        Args:
            xml_path (str]:  file xml
            html_path (str): file html
            json_path (str): file di configurazoine
            write_append(str): modalità output
            deb (bool, optional): flag per gestione debuf
        Returns:
            html_path (str): filr name html 
        """
        try:
            inp.set_liv(debug_liv)
            self.x_data_lst = []
            self.xml_path = xml_path
            self.html_path = html_path
            if write_append not in ['w', 'a']:
                raise Exception(
                    f"ERROR in output write/append. {write_append}")
            # read the configuration file
            self.read_conf(json_path)
            # library for building the HTML
            self.hb = HtmlBuilder()
            # dict of the XML data keyed by tag
            self.x_data_dict = {}
            # stack flagging which nodes are/are not containers
            self.is_container_stack = [False for i in range(1, 20)]
            # tag used for error checking
            self.csv_tag_ctrl = ""
            #
            self.hb.init()
            try:
                parser = etree.XMLParser(ns_clean=True)
                xml_root = etree.parse(self.xml_path, parser)
            except Exception as e:
                logerr.log("ERROR teixml2html.py write_html() parse_xml")
                logerr.log(e)
                sys.exit(1)
            for nd in xml_root.iter():
                self.html_append(nd)
            self.hb.del_tags('XXX')
            self.hb.end()
            """gestisce il settaggio degli overflow
            modifica il parametro html_lst
            Returns:
                str: html modificato
            """
            html_rows = self.hb.get_tag_lst()
            html_over = HtmlOvweflow(self.x_data_lst, html_rows,
                                     self.html_tag_cfg)
            html_over.set_overflow()
            # check for unset %par% parameters
            self.check_tml()
            # single-line HTML for the production version
            html = self.hb.html_onerow()
            html = self.set_html_pramas(html)
            fu.make_dir_of_file(self.html_path)
            with open(self.html_path, write_append) as f:
                f.write(html)
            fu.chmod(self.html_path)
        except Exception as e:
            logerr.log("ERROR teixml2html.py write_html()")
            logerr.log(e)
            ou = StringIO()
            traceback.print_exc(file=ou)
            st = ou.getvalue()
            ou.close()
            logerr.log(st)

            sys.exit(1)
        return self.html_path
Example #33
class IndentingBlockPrinter(object):
    def __init__(self, line_length=LINE_LENGTH, indent_level=4, current_indent=0):
        assert isinstance(current_indent, int)
        assert isinstance(indent_level, int)
        self.buffer = StringIO()
        self.line_length = line_length
        self.current_indent = current_indent
        self.indent_level = indent_level
        self.printer = lambda x: self.buffer.write(x + '\n')

        self._line_so_far = ''

    def append(self, text):
        assert isinstance(text, STRING_TYPES)
        self._line_so_far += text

    def line(self, text):
        assert isinstance(text, STRING_TYPES)
        self.printer(self.current_indent_str + self._line_so_far + text)
        self._line_so_far = ''

    @property
    def current_indent_str(self):
        return ' ' * self.current_indent

    def blank_line(self):
        assert not self._line_so_far, 'Cannot throw away appended strings by calling blank_line'
        self.printer('')

    def increase_indent(self):
        self.current_indent += self.indent_level

    def decrease_indent(self):
        if self.indent_level and self.current_indent <= 0:
            raise Exception('indent cannot be negative')
        self.current_indent -= self.indent_level

    @contextmanager
    def with_indent(self, text=None):
        if text is not None:
            self.line(text)
        self.increase_indent()
        yield
        self.decrease_indent()

    def __enter__(self):
        return self

    def __exit__(self, _exception_type, _exception_value, _traceback):
        self.buffer.close()

    def block(self, text, prefix=''):
        '''Automagically wrap a block of text.'''
        assert isinstance(text, STRING_TYPES)
        wrapper = TextWrapper(
            width=self.line_length - len(self.current_indent_str),
            initial_indent=prefix,
            subsequent_indent=prefix,
            break_long_words=False,
            break_on_hyphens=False,
        )
        for line in wrapper.wrap(text):
            self.line(line)

    def comment(self, text):
        assert isinstance(text, STRING_TYPES)
        self.block(text, prefix='# ')

    def read(self):
        '''Get the value of the backing StringIO.'''
        return self.buffer.getvalue()
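A brief usage sketch driving only the methods defined above (LINE_LENGTH and STRING_TYPES come from the surrounding module):

with IndentingBlockPrinter(indent_level=4) as printer:
    printer.comment('generated module')
    printer.line('def f():')
    with printer.with_indent():
        printer.line('return 1')
    source = printer.read()  # read before __exit__ closes the buffer
print(source)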
Example #34
def resize(image, dims, crop=False):
    """Resize

	Given raw data and a size, a new image is created and returned as raw data

	Arguments:
		image (str): Raw image data to be loaded and resized
		dims (str|dict): New dimensions of the image, "WWWxHHH" or {"w":, "h":}
		crop (bool): Set to true to crop the photo rather than add whitespace

	Returns:
		str
	"""

    # Check the dimensions
    if not isinstance(dims, dict):
        if isinstance(dims, str):
            w, h = dims.split('x')
            dims = {"w": w, "h": h}
        else:
            raise ValueError('dims')

    # Load the image into a new StringIO
    sImg = StringIO(image)
    sNewImg = StringIO('')

    # Create a new Pillow instance from the raw data
    oImg = Pillow.open(sImg)

    # Make sure the values are ints
    dims['w'] = int(dims['w'])
    dims['h'] = int(dims['h'])

    # Create a new blank image
    oNewImg = Pillow.new(oImg.mode, [dims['w'], dims['h']],
                         (255, 255, 255, 255))

    # If we are cropping
    if crop:
        dResize = Resize.crop(oImg.width, oImg.height, dims['w'], dims['h'])

    # Else, we are fitting
    else:
        dResize = Resize.fit(oImg.width, oImg.height, dims['w'], dims['h'])

    # Resize the image
    oImg.thumbnail([dResize['w'], dResize['h']], Pillow.ANTIALIAS)

    # Get the offsets (integer division, as paste() requires ints)
    lOffset = ((dims['w'] - dResize['w']) // 2, (dims['h'] - dResize['h']) // 2)

    # Paste the resized image onto the new canvas
    oNewImg.paste(oImg, lOffset)

    # Save the new image to a StringIO
    oNewImg.save(sNewImg, oImg.format)

    # Pull out the raw string
    sReturn = sNewImg.getvalue()

    # Cleanup
    oNewImg.close()
    oImg.close()
    sImg.close()

    # Return the new string
    return sReturn
Example #35
 def ImageData(self, image_data, x, y):
     file_str = StringIO(str(image_data))
     img_surface = g15cairo.load_surface_from_file(file_str, None)
     file_str.close()
     self._page.image(img_surface, x, y)
Example #36
 def f(file_name, revision):
     fp = StringIO()
     r.print_info(file_name, revision, output='json', fp=fp)
     ret = Artifact.from_dict(json.loads(fp.getvalue()))
     fp.close()
     return ret
Example #37
 def f(file_name, revision):
     fp = StringIO()
     r.print_info(file_name, revision, fp=fp)
     ret = fp.getvalue()
     fp.close()
     return ret
Example #38
def moderatesignups():
    global commentHashesAndComments
    commentHashesAndComments = {}
    stringio = StringIO()
    stringio.write('<html>\n<head>\n</head>\n\n')

    # redditSession = loginAndReturnRedditSession()
    redditSession = loginOAuthAndReturnRedditSession()
    submissions = getSubmissionsForRedditSession(redditSession)
    flat_comments = getCommentsForSubmissions(submissions)
    retiredHashes = retiredCommentHashes()
    i = 1
    stringio.write(
        '<iframe name="invisibleiframe" style="display:none;"></iframe>\n')
    stringio.write("<h3>")
    stringio.write(os.getcwd())
    stringio.write("<br>\n")
    for submission in submissions:
        stringio.write(submission.title)
        stringio.write("<br>\n")
    stringio.write("</h3>\n\n")
    stringio.write(
        '<form action="copydisplayduringsignuptoclipboard.html" method="post" target="invisibleiframe">'
    )
    stringio.write(
        '<input type="submit" value="Copy display-during-signup.py stdout to clipboard">'
    )
    stringio.write('</form>')
    for comment in flat_comments:
        # print comment.is_root
        # print comment.score
        i += 1
        commentHash = sha1()
        commentHash.update(comment.fullname.encode('utf-8'))
        commentHash.update(comment.body.encode('utf-8'))
        commentHash = commentHash.hexdigest()
        if commentHash not in retiredHashes:
            commentHashesAndComments[commentHash] = comment
            authorName = str(
                comment.author
            )  # can be None if author was deleted.  So check for that and skip if it's None.
            stringio.write("<hr>\n")
            stringio.write('<font color="blue"><b>')
            stringio.write(
                authorName
            )  # can be None if author was deleted.  So check for that and skip if it's None.
            stringio.write('</b></font><br>')
            if ParticipantCollection().hasParticipantNamed(authorName):
                stringio.write(
                    ' <small><font color="green">(member)</font></small>')
                # if ParticipantCollection().participantNamed(authorName).isStillIn:
                #    stringio.write(' <small><font color="green">(in)</font></small>')
                # else:
                #    stringio.write(' <small><font color="red">(out)</font></small>')
            else:
                stringio.write(
                    ' <small><font color="red">(not a member)</font></small>')
            stringio.write(
                '<form action="takeaction.html" method="post" target="invisibleiframe">'
            )
            stringio.write(
                '<input type="submit" name="actiontotake" value="Signup" style="color:white;background-color:green">'
            )
            # stringio.write('<input type="submit" name="actiontotake" value="Signup and checkin">')
            # stringio.write('<input type="submit" name="actiontotake" value="Relapse">')
            # stringio.write('<input type="submit" name="actiontotake" value="Reinstate">')
            stringio.write(
                '<input type="submit" name="actiontotake" value="Skip comment">'
            )
            stringio.write(
                '<input type="submit" name="actiontotake" value="Skip comment and don\'t upvote">'
            )
            stringio.write(
                '<input type="hidden" name="username" value="' +
                b64encode(authorName.encode('utf-8')).decode('utf-8') + '">')
            stringio.write('<input type="hidden" name="commenthash" value="' +
                           commentHash + '">')
            # stringio.write('<input type="hidden" name="commentpermalink" value="' + comment.permalink + '">')
            stringio.write('</form>')
            # stringio.write(bleach.clean(markdown.markdown(comment.body.encode('utf-8')), tags=['p']))
            stringio.write(
                bleach.clean(markdown.markdown(comment.body), tags=['p']))
            stringio.write("\n<br><br>\n\n")

    stringio.write('</html>')
    pageString = stringio.getvalue()
    stringio.close()
    return Response(pageString, mimetype='text/html')
Example #39
def write_db(conn, tbname, file):
	'''
	:function:
		- Write xyz data into a database
	:param conn:
		- The connection object
	:param tbname:
		- The table name
	:param file:
		- The input file
	'''	
	if conn is not None:
		try:
			cur = conn.cursor()

			# Method-I: COPY -- copy data between a file and a table
			# cur.execute("\COPY " + tbname + "(x, y, z, objID) FROM '" + file + "' DELIMITER ' ';")

			# Method-II: Read .xyz file line by line
			xyzset = set()		# store xyz in each line from raw data file
			# xyzlist = []
			xMin, yMin, zMin = None, None, None
			xMax, yMax, zMax = None, None, None
			
			with open(file, mode='r', encoding='utf-8') as f:
				while(True):
					line = f.readline().strip()
					if not line:
						break
					x, y, z = int(line.split()[0]), int(line.split()[1]), int(line.split()[2])
					# objID, buildID = int(line.split()[3]), int(line.split()[4])
					buildID = int(line.split()[3])
					# cur.execute("INSERT INTO " + tbname + 
					# 	" (x, y, z, objID) VALUES({0}, {1}, {2}, {3})".format(int(x), int(y), int(z), int(objID)))
					xyzset.add(tuple(int(i) for i in line.split()))
					# xyzlist.append([x,y,z,objID])
					if xMin is None:
						xMin, yMin, zMin = x, y, z
						xMax, yMax, zMax = x, y, z
					if x < xMin:
						xMin = x
					if y < yMin:
						yMin = y
					if z < zMin:
						zMin = z
					if x > xMax:
						xMax = x
					if y > yMax:
						yMax = y
					if z > zMax:
						zMax = z

			depth = 8
			translateX, translateY, translateZ, scale = translation_and_scaling(xMin, xMax, yMin, yMax, zMin, zMax, depth)
			idx = 0
			stringVoxels =  StringIO()
			w = csv.writer(stringVoxels)
			data = []

			for line in xyzset:
				xRaw, yRaw, zRaw = line[0], line[1], line[2]
				buildID = line[3]
				# Scaling coordinates.
				x = (xRaw + translateX) * scale
				y = (yRaw + translateY) * scale
				z = (zRaw + translateZ) * scale
				# Snap point to leaf node by converting float to integer and truncate towards 0.
				leafNode = (int(x), int(y), int(z))
				# Retrieve material path from box.
				octPath = get_material_path(depth, leafNode[0], leafNode[1], leafNode[2])

				voxel_list = [xRaw, yRaw, zRaw, buildID, octPath]
				data.append(voxel_list)
			
				# Flush data to file every 100,000 records
				if idx % 100000 == 0 and idx > 0:
					w.writerows(data)
					stringVoxels.seek(0)
					cur.copy_from(stringVoxels, tbname, sep=',', columns=('x', 'y', 'z', 'buildID', 'octPath'))
					stringVoxels.close()
					stringVoxels = StringIO()
					w = csv.writer(stringVoxels)
					data = []
					print(idx, "voxels written")

				idx += 1

			# Flush any rows accumulated since the last periodic write
			w.writerows(data)
			stringVoxels.seek(0)
			cur.copy_from(stringVoxels, tbname, sep=',', columns=('x', 'y', 'z', 'buildID', 'octPath'))
			conn.commit()
			print(idx, "voxels written")			
		except (Exception, psycopg2.DatabaseError) as error:
			print(error)
		finally:
			cur.close()
			conn.close()
			stringVoxels.close()
			print('Database connection closed.')
Example #40
                            try:
                                subprocess.Popen(args)
                            except OSError:
                                print('command not found.')
                        elif u('チャンネルダウン') in word:  # "channel down"
                            print(word)
                            args = ['irsend', '-#', '1', 'SEND_ONCE', 'TV', 'cdown']
                            try:
                                subprocess.Popen(args)
                            except OSError:
                                print('command not found.')
                        elif u('チャンネルアップ') in word:  # "channel up"
                            print(word)
                            args = ['irsend', '-#', '1', 'SEND_ONCE', 'TV', 'cup']
                            try:
                                subprocess.Popen(args)
                            except OSError:
                                print('command not found.')

            buff.close()
            buff = StringIO(u(''))
            if lines[len(lines)-1] != '.':
            	buff.write(lines[len(lines)-1])

except socket.error:
    print('socket error')
except KeyboardInterrupt:
    pass

sock.close()
Example #41
def setup_save_mock_open(mock_open):
    mock_file = StringIO()
    mock_file.close = lambda: None
    mock_open.side_effect = lambda *args: mock_file
    return mock_file
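A hedged usage sketch for setup_save_mock_open with unittest.mock; save_config is a hypothetical function under test that writes through the patched open():

from unittest.mock import patch

with patch('builtins.open') as mock_open:
    mock_file = setup_save_mock_open(mock_open)
    save_config('/tmp/settings.ini')   # hypothetical code under test
    assert 'expected text' in mock_file.getvalue()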
Example #42
    def build(self):
        """Generate the settings dictionary from the FLAME lattice file."""
        try:
            if isinstance(self._latpath, str):
                # latpath content
                fp = StringIO(self._latpath)
                conf = OrderedDict(GLPSParser().parse(fp))
                fp.close()
        except Exception:
            if not os.path.isfile(self._latpath):
                raise RuntimeError("SettingsFactory: FLAME lattice file not found: {}".format(self._latpath))

            with open(self._latpath, "rb") as fp:
                conf = OrderedDict(GLPSParser().parse(fp))


        settings = Settings()

        if 'elements' not in conf:
            return settings

        def parseName(cname):
            # Too FRIB specific!
            try:
                parts = re.split("[_:]", cname)
                name = parts[0] + "_" + parts[1] + ":" + parts[2] + "_" + parts[3]
            except IndexError:
                return None
            else:
                return name.upper()

        _start = False
        _end = False
        for celem in conf['elements']:
            elem = OrderedDict(celem)
            ftype = elem['type'].lower()
            name = parseName(elem['name'])

            if name is None:
                continue

            if not _start and name != self._start:
                continue
            else:
                _start = True

            if _end:
                break

            if name == self._end:
                _end = True

            if ftype in ['source', 'drift', 'marker']:
                continue

            elif ftype in ['bpm', 'stripper']:
                continue
                # name = parseName(elem['name'])
                # settings[name] = OrderedDict()

            elif ftype == "rfcavity":
                cav = CavityElement(0, 0, 0, name)
                fields = OrderedDict()
                fields[cav.fields.phase_phy] = elem['phi']
                fields[cav.fields.amplitude_phy] = elem['scl_fac']
                fields[cav.fields.frequency] = elem['f']
                settings[name] = fields

            elif ftype == "solenoid":
                sol = SolElement(0, 0, 0, name)
                fields = OrderedDict()
                fields[sol.fields.field_phy] = elem['B']
                settings[name] = fields

            elif ftype == "orbtrim":
                fields = OrderedDict()
                if 'theta_x' in elem:
                    cor = HCorElement(0, 0, 0, name)
                    fields[cor.fields.angle_phy] = elem['theta_x']
                elif 'theta_y' in elem:
                    cor = VCorElement(0, 0, 0, name)
                    fields[cor.fields.angle_phy] = elem['theta_y']
                else:
                    cor = HCorElement(0, 0, 0, name)
                    fields[cor.fields.angle_phy] = 0.0
                settings[name] = fields

            elif ftype == "sbend":
                bend = BendElement(0, 0, 0, name)
                if name not in settings:
                    fields = OrderedDict()
                    fields[bend.fields.field_phy] = elem.get('bg', 0)  # HdipoleFitMode must be nonzero
                    fields[bend.fields.angle] = elem['phi']
                    fields[bend.fields.entrAngle] = elem['phi1']
                    fields[bend.fields.exitAngle] = elem['phi2']
                    settings[name] = fields
                else:
                    settings[name][bend.fields.angle] += elem['phi']
                    if elem['phi2'] != 0.0:
                        if settings[name][bend.fields.exitAngle] == 0.0:
                            settings[name][bend.fields.exitAngle] = elem['phi2']
                        else:
                            raise RuntimeError("Bend element exit angle already defined")

            elif ftype == "quadrupole":
                quad = QuadElement(0, 0, 0, name)
                fields = OrderedDict()
                fields[quad.fields.gradient_phy] = elem['B2']
                settings[name] = fields

            elif ftype == "equad":
                equad = EQuadElement(0, 0, 0, name)
                fields = OrderedDict()
                fields[equad.fields.gradient_phy] = elem['V']
                settings[name] = fields

            elif ftype == "edipole":
                ebend = EBendElement(0, 0, 0, name)
                fields = OrderedDict()
                fields[ebend.fields.field_phy] = elem['beta']
                settings[name] = fields

            elif ftype == "sextupole":
                sextupole = SextElement(0, 0, 0, name)
                fields = OrderedDict()
                fields[sextupole.fields.field_phy] = elem['B3']
                settings[name] = fields

            else:
                raise RuntimeError("Flame element type not supported: " + elem['type'])

        return settings
Example #43
class CensorshipLinter():
    """Run pylint linter and modify it's output."""
    def __init__(self, config):
        """Create CenshoreshipLinter class.

        :param config: configuration class for this Linter
        :type config: CensorshipConfig class instance
        """
        self._stdout = StringIO()
        self._config = config

    def run(self):
        """Run the pylint static linter.

        :return: return code of the linter run
        :rtype: int
        """
        args = self._prepare_args()

        print("Pylint version: ", pylint.__version__)
        print("Running pylint with args: ", args)

        pylint.lint.Run(args,
                        reporter=TextReporter(self._stdout),
                        do_exit=False)

        return self._process_output()

    def _prepare_args(self):
        args = []

        if self._config.command_line_args:
            args = self._config.command_line_args

        if self._config.pylintrc_path and "--rcfile" not in args:
            args.append("--rcfile")
            args.append(self._config.pylintrc_path)

        args.extend(self._config.check_paths)

        return args

    def _filter_false_positives(self, lines):
        if not self._config.false_positives:
            return lines

        lines = lines.split("\n")

        temp_line = ""
        retval = []

        for line in lines:

            # These lines are not error messages; pass them through unfiltered.
            if line.startswith("Using config file"):
                retval.append(line)
            elif not line.strip():
                retval.append(line)
            elif line.startswith("*****"):
                temp_line = line
            else:
                if self._check_false_positive(line):
                    if temp_line:
                        retval.append(temp_line)
                        temp_line = ""

                    retval.append(line)

        return "\n".join(retval)

    def _check_false_positive(self, line):
        valid_error = True

        for regex in self._config.false_positives:
            if re.search(regex.regex, line):
                # The false positive was hit, so record that and ignore
                # the message from pylint.
                regex.used += 1
                valid_error = False
                break

        # If any false positive matched the error message, it's a valid
        # error from pylint.
        return valid_error

    def _report_unused_false_positives(self):
        unused = []

        for fp in self._config.false_positives:
            if fp.used == 0:
                unused.append(fp.regex)

        if unused:
            print("************* Unused False Positives Found:")

            for fp in unused:
                print(fp)

    def _process_output(self):
        stdout = self._stdout.getvalue()
        self._stdout.close()

        rc = 0

        if stdout:
            filtered_stdout = self._filter_false_positives(stdout)
            if filtered_stdout:
                print(filtered_stdout)
                sys.stdout.flush()
                rc = 1

        self._report_unused_false_positives()

        return rc
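A minimal sketch of driving CensorshipLinter, assuming only what the class above reads from its config (command_line_args, pylintrc_path, check_paths, and false_positives entries carrying .regex and .used); FalsePositive and the checked path are hypothetical stand-ins for whatever CensorshipConfig actually provides:

import sys


class FalsePositive:
    def __init__(self, regex):
        self.regex = regex  # pattern matched against each pylint output line
        self.used = 0       # bumped whenever the pattern filters a line


class DummyConfig:
    command_line_args = []
    pylintrc_path = ""  # falsy, so no --rcfile is appended
    check_paths = ["mypackage/"]
    false_positives = [FalsePositive(r"W0511.*TODO")]


if __name__ == "__main__":
    sys.exit(CensorshipLinter(DummyConfig()).run())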
Example #44
def create_mapping_configuration_file(graphid,
                                      include_concepts=True,
                                      data_dir=None):
    files_for_export = []
    graphid = uuid.UUID(graphid)
    nodes = []
    values = {}
    export_json = OrderedDict()
    if graphid != False:
        if graphid is None or graphid == "all" or graphid == [""]:
            node_query = (Node.objects.filter(
                graph_id__isresource=True).exclude(
                    pk=settings.SYSTEM_SETTINGS_RESOURCE_MODEL_ID).order_by(
                        "name"))
        else:
            node_query = Node.objects.filter(graph_id=graphid).exclude(
                datatype="semantic").order_by("name")

        export_json["resource_model_id"] = str(node_query[0].graph_id)
        export_json["resource_model_name"] = JSONSerializer(
        ).serializeToPython(
            Graph.objects.filter(
                graphid=export_json["resource_model_id"]))[0]["name"]
        export_json["nodes"] = []
        file_name_prefix = export_json["resource_model_name"]

        for node in node_query:
            export_node = OrderedDict()
            export_node["arches_nodeid"] = str(node.nodeid)
            export_node["arches_node_name"] = node.name
            export_node["file_field_name"] = node.name
            export_node["data_type"] = node.datatype
            if node.datatype in [
                    "concept", "concept-list", "domain-value",
                    "domain-value-list"
            ]:
                export_node["concept_export_value"] = "label"
            # export_node['value_type'] = ""
            # export_node['data_length'] = ""
            export_node["export"] = False

            export_json["nodes"].append(export_node)

            if include_concepts:
                concept_export = {}

                def get_values(concept, values):
                    for subconcept in concept.subconcepts:
                        for value in subconcept.values:
                            if value.type == "prefLabel":
                                values[value.id] = value.value
                        get_values(subconcept, values)
                    return values

                if node.datatype in [
                        "concept", "concept-list", "domain-value",
                        "domain-value-list"
                ]:
                    if node.datatype in ["concept", "concept-list"]:
                        if node.config["rdmCollection"] is not None:
                            rdmCollection = node.config["rdmCollection"]
                        try:
                            concept = Concept().get(
                                node.config["rdmCollection"],
                                include_subconcepts=True,
                                semantic=False)
                            rdmCollectionLabel = concept.get_preflabel().value
                            collection_values = {}
                            concepts = OrderedDict(
                                sorted(list(
                                    get_values(concept,
                                               collection_values).items()),
                                       key=itemgetter(1)))
                            values[rdmCollectionLabel] = concepts
                        except:
                            values[node.name] = node.name + " does not appear to be configured with a valid concept collectionid"
                    elif node.datatype in [
                            "domain-value", "domain-value-list"
                    ]:
                        concepts = {}
                        if node.config["options"]:
                            for concept in node.config["options"]:
                                concepts[concept["id"]] = concept["text"]

                        values[node.name] = OrderedDict(
                            sorted(list(concepts.items()), key=itemgetter(1)))

        if include_concepts:
            try:
                relation_concepts = OrderedDict(
                    sorted(
                        list(
                            get_values(
                                Concept().get(
                                    "00000000-0000-0000-0000-000000000005",
                                    include_subconcepts=True,
                                    semantic=False), {}).items()),
                        key=itemgetter(1),
                    ))
            except:
                relation_concepts = "You do not appear to have values for resource to resource relationships in your rdm."
            values["Resource to Resource Relationship Types"] = relation_concepts

    # Concept lookup file
    if include_concepts:
        file_name = os.path.join("{0}_{1}.{2}".format(file_name_prefix,
                                                      "concepts", "json"))
        dest = StringIO()
        dest.write(json.dumps(values, indent=4))
        files_for_export.append({"name": file_name, "outputfile": dest})

    # Import/Export mapping file
    file_name = os.path.join("{0}.{1}".format(file_name_prefix, "mapping"))
    dest = StringIO()
    dest.write(json.dumps(export_json, indent=4))
    files_for_export.append({"name": file_name, "outputfile": dest})

    if data_dir is not None:
        with open(os.path.join(data_dir), "w") as config_file:
            json.dump(export_json, config_file, indent=4)

        file_name = Graph.objects.get(graphid=graphid).name
        buffer = StringIO()
        with zipfile.ZipFile(buffer, "w", zipfile.ZIP_DEFLATED) as zip:
            for f in files_for_export:
                f["outputfile"].seek(0)
                zip.writestr(f["name"], f["outputfile"].read())

        buffer.flush()
        zip_stream = buffer.getvalue()
        buffer.close()
        with open(os.path.join(data_dir), "w") as archive:
            archive.write(zip_stream)
    else:
        return files_for_export
Example #45
    print('Project path: {}\n'.format(project_path))
    print('Creating single header for itsy.bitsy ({})'.format(
        processed_files[0]))

ss = StringIO()
ss.write(
    intro.format(time=dt.datetime.utcnow(),
                 revision=revision,
                 version=version,
                 guard=include_guard))
for processed_file in processed_files:
    process_file(processed_file, ss)

ss.write('#endif // {}\n'.format(include_guard))
result = ss.getvalue()
ss.close()

if not args.quiet:
    print('finished creating single header for itsy.bitsy\n')

if not args.quiet:
    print('Creating single forward declaration header for itsy.bitsy ({})'.
          format(forward_processed_files[0]))

forward_ss = StringIO()
forward_ss.write(
    intro.format(time=dt.datetime.utcnow(),
                 revision=revision,
                 version=version,
                 guard=forward_include_guard))
for forward_processed_file in forward_processed_files:
Example #46
        if os.path.isfile(x) and os.path.splitext(x)[1] == '.py'
    ]  # list all the .py files

# Serialization: the process of turning an in-memory variable (one the program uses) into something that can be stored or transmitted (a file)
# Once serialized, the content can be written to disk or sent over the network to another machine.
# Reading content from a file back into an in-memory variable is called deserialization
if False:
    # Python provides the pickle module for serialization and deserialization. (It is not portable: pickle is Python-only and cannot interoperate with other languages; use json for that.)
    # Serialization
    # The main steps: import the pickle module, then either call dumps() to serialize to bytes and write those bytes to a file, or call dump() to serialize to bytes and write them to a file in one step
    import pickle
    d = dict(name="bob", age=20, score=88)
    pickle.dumps(d)  # pickle.dumps() serializes any object into a bytes value, which can then be written to a file
    f = open('dump.txt', 'wb')
    pickle.dump(d, f)  # pickle.dump() serializes the object and writes it straight into a file-like object
    f.close()
    # Deserialization:
    # To read an object from disk back into memory, read the content into a bytes value first and deserialize it with pickle.loads(),
    # or deserialize straight from a file-like object with pickle.load().
    f = open('dump.txt', 'rb')
    d = pickle.load(f)
    f.close()

    # Serialization and deserialization with json, the format shared across languages
    # JSON/Python type mapping: {} -- dict, [] -- list, "string" -- str, 123.456 -- int or float, true -- True, null -- None

    # Serializing and deserializing built-in Python objects to and from JSON
    # Serialization:
    # As before, dumps() serializes to a JSON string, and dump() serializes to a JSON string and writes it straight to a file
    import json
    d = dict(name="bob", age=20, score=33)
Example #47
    def emailException(self, htmlErrMsg):
        """Email the exception.

        Send the exception via mail, either as an attachment,
        or as the body of the mail.
        """
        message = Message()

        # Construct the message headers
        headers = self.setting('ErrorEmailHeaders').copy()
        headers['Date'] = formatdate()
        headers['Mime-Version'] = '1.0'
        headers['Subject'] = '{} {}: {}'.format(
            headers.get('Subject', '[Webware Error]'), *sys.exc_info()[:2])
        for header, value in headers.items():
            if isinstance(value, (list, tuple)):
                value = ','.join(value)
            message.add_header(header, value)

        # Construct the message body
        if self.setting('EmailErrorReportAsAttachment'):
            # start off with a text/plain part
            text = self._emailBody.format(
                path=self.servletPathname(),
                ascTime=asclocaltime(self._time), time=self._time)
            message.set_type('multipart/mixed')
            part = Message()
            part.set_type('text/plain')
            body = StringIO()
            body.write(text)
            traceback.print_exc(file=body)
            part.set_payload(body.getvalue())
            body.close()
            message.attach(part)
            part = Message()
            # now add htmlErrMsg
            part.add_header('Content-Transfer-Encoding', '7bit')
            part.add_header(
                'Content-Description',
                'HTML version of Webware error message')
            part.add_header(
                'Content-Disposition',
                'attachment', filename='WebwareErrorMsg.html')
            part.set_type('text/html')
            part.set_payload(htmlErrMsg)
            message.attach(part)
        else:
            message.set_type('text/html')
            message.set_payload(htmlErrMsg, 'us-ascii')

        # Send the message
        server = self.setting('ErrorEmailServer')
        # This setting can be: server, server:port, server:port:user:password,
        # server:port:user:password:popserver:popport for "SMTP after POP",
        # or with a final ':ssl' part to force POP3 over SSL.
        parts = server.split(':', 6)
        server = port = user = passwd = None
        popserver = popssl = popport = None
        try:
            # fetch individual parts until we get an IndexError
            server = parts[0]
            try:
                port = int(parts[1])
            except ValueError:
                pass
            user = parts[2]
            passwd = parts[3]
            popserver = parts[4]
            try:
                popport = int(parts[5])
            except ValueError:
                popport = None
            if parts[6].lower() == 'ssl':
                popssl = True
        except IndexError:
            pass
        if user and passwd and popserver:
            # SMTP after POP
            if popssl is None and popport == 995:
                popssl = True
            popssl = poplib.POP3_SSL if popssl else poplib.POP3
            if popport:
                popserver = popssl(popserver, popport)
            else:
                popserver = popssl(popserver)
            popserver.set_debuglevel(0)
            popserver.user(user)
            popserver.pass_(passwd)
            try:
                popserver.quit()
            except Exception:
                pass
        if port:
            server = smtplib.SMTP(server, port)
        else:
            server = smtplib.SMTP(server)
        try:
            server.set_debuglevel(0)
            if user and passwd and not popserver:
                # SMTP-AUTH
                server.ehlo()
                if server.has_extn('starttls'):
                    server.starttls()
                    server.ehlo()
                server.login(user, passwd)
            body = message.as_string()
            server.sendmail(headers['From'], headers['To'], body)
        finally:
            try:
                server.quit()
            except Exception:
                pass
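A standalone sketch of the ErrorEmailServer convention parsed above; the host names and credentials are placeholders:

setting = 'mail.example.com:25:user:secret:pop.example.com:995:ssl'
parts = setting.split(':', 6)
server, port = parts[0], int(parts[1])
user, passwd = parts[2], parts[3]
popserver, popport = parts[4], int(parts[5])
popssl = len(parts) > 6 and parts[6].lower() == 'ssl'
print(server, port, user, passwd, popserver, popport, popssl)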
Example #48
    def _turnList(self, symbol):
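        """Prefix each selected line with symbol, turning the selection into a list."""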
        editor = self._application.mainWindow.pagePanel.pageView.codeEditor

        startSelection = editor.GetSelectionStart()
        endSelection = editor.GetSelectionEnd()

        text = editor.GetText()

        if len(text) == 0:
            text = symbol + u" "
            position = len(text)

            editor.SetText(text)
            editor.SetSelection(position, position)
            return

        firstLine = text[:startSelection].rfind("\n")
        lastLine = text[endSelection:].find("\n")

        if firstLine == -1:
            firstLine = 0
        else:
            firstLine += 1

        if lastLine == -1:
            lastLine = len(text)
        else:
            lastLine += endSelection

        selectedText = text[firstLine: lastLine]
        lines = selectedText.splitlines()

        buf = StringIO()

        appendSymbols = 0

        for n, line in enumerate(lines):
            if n != 0:
                buf.write(u"\n")

            buf.write(symbol)
            if not line.startswith(symbol):
                buf.write(u" ")

            buf.write(line)
            appendSymbols = len(symbol)

        if len(lines) == 0:
            buf.write(symbol)
            buf.write(u" ")
            appendSymbols = len(symbol) + 1

        result = buf.getvalue()
        buf.close()

        editor.SetSelection(firstLine, lastLine)
        editor.replaceText(result)

        if len(lines) > 1:
            position = firstLine + len(result)
            editor.SetSelection(firstLine, position)
        elif startSelection == endSelection:
            editor.SetSelection(startSelection + appendSymbols,
                                endSelection + appendSymbols)
Example #49
def export_graphviz(
    decision_tree,
    out_file=None,
    *,
    max_depth=None,
    feature_names=None,
    class_names=None,
    label="all",
    filled=False,
    leaves_parallel=False,
    impurity=True,
    node_ids=False,
    proportion=False,
    rotate=False,
    rounded=False,
    special_characters=False,
    precision=3,
    fontname="helvetica",
):
    """Export a decision tree in DOT format.

    This function generates a GraphViz representation of the decision tree,
    which is then written into `out_file`. Once exported, graphical renderings
    can be generated using, for example::

        $ dot -Tps tree.dot -o tree.ps      (PostScript format)
        $ dot -Tpng tree.dot -o tree.png    (PNG format)

    The sample counts that are shown are weighted with any sample_weights that
    might be present.

    Read more in the :ref:`User Guide <tree>`.

    Parameters
    ----------
    decision_tree : decision tree classifier
        The decision tree to be exported to GraphViz.

    out_file : object or str, default=None
        Handle or name of the output file. If ``None``, the result is
        returned as a string.

        .. versionchanged:: 0.20
            Default of out_file changed from "tree.dot" to None.

    max_depth : int, default=None
        The maximum depth of the representation. If None, the tree is fully
        generated.

    feature_names : list of str, default=None
        Names of each of the features.
        If None, generic names will be used ("feature_0", "feature_1", ...).

    class_names : list of str or bool, default=None
        Names of each of the target classes in ascending numerical order.
        Only relevant for classification and not supported for multi-output.
        If ``True``, shows a symbolic representation of the class name.

    label : {'all', 'root', 'none'}, default='all'
        Whether to show informative labels for impurity, etc.
        Options include 'all' to show at every node, 'root' to show only at
        the top root node, or 'none' to not show at any node.

    filled : bool, default=False
        When set to ``True``, paint nodes to indicate majority class for
        classification, extremity of values for regression, or purity of node
        for multi-output.

    leaves_parallel : bool, default=False
        When set to ``True``, draw all leaf nodes at the bottom of the tree.

    impurity : bool, default=True
        When set to ``True``, show the impurity at each node.

    node_ids : bool, default=False
        When set to ``True``, show the ID number on each node.

    proportion : bool, default=False
        When set to ``True``, change the display of 'values' and/or 'samples'
        to be proportions and percentages respectively.

    rotate : bool, default=False
        When set to ``True``, orient tree left to right rather than top-down.

    rounded : bool, default=False
        When set to ``True``, draw node boxes with rounded corners.

    special_characters : bool, default=False
        When set to ``False``, ignore special characters for PostScript
        compatibility.

    precision : int, default=3
        Number of digits of precision for floating point in the values of
        impurity, threshold and value attributes of each node.

    fontname : str, default='helvetica'
        Name of font used to render text.

    Returns
    -------
    dot_data : str
        String representation of the input tree in GraphViz dot format.
        Only returned if ``out_file`` is None.

        .. versionadded:: 0.18

    Examples
    --------
    >>> from sklearn.datasets import load_iris
    >>> from sklearn import tree

    >>> clf = tree.DecisionTreeClassifier()
    >>> iris = load_iris()

    >>> clf = clf.fit(iris.data, iris.target)
    >>> tree.export_graphviz(clf)
    'digraph Tree {...
    """

    check_is_fitted(decision_tree)
    own_file = False
    return_string = False
    try:
        if isinstance(out_file, str):
            out_file = open(out_file, "w", encoding="utf-8")
            own_file = True

        if out_file is None:
            return_string = True
            out_file = StringIO()

        exporter = _DOTTreeExporter(
            out_file=out_file,
            max_depth=max_depth,
            feature_names=feature_names,
            class_names=class_names,
            label=label,
            filled=filled,
            leaves_parallel=leaves_parallel,
            impurity=impurity,
            node_ids=node_ids,
            proportion=proportion,
            rotate=rotate,
            rounded=rounded,
            special_characters=special_characters,
            precision=precision,
            fontname=fontname,
        )
        exporter.export(decision_tree)

        if return_string:
            return exporter.out_file.getvalue()

    finally:
        if own_file:
            out_file.close()
Example #50
def _get_uml_template(*, types: dict, type_mapping: dict,
                      message_mapping: dict) -> str:
    """
    Return the graphviz dot template for a UML class diagram.
    :param types: protobuf types with indexes
    :param type_mapping: a mapping for the protobuf type indexes and the type text
    :param message_mapping: a dict with which messages were linked, for the relationships
    :return: UML template
    :rtype: str
    """
    relationships = []
    classes = []

    uml_template = """
        digraph "Protobuf UML class diagram" {
            fontname = "Bitstream Vera Sans"
            fontsize = 8

            node [
                fontname = "Bitstream Vera Sans"
                fontsize = 8
                shape = "record"
                style=filled
                fillcolor=gray95
            ]

            edge [
                fontname = "Bitstream Vera Sans"
                fontsize = 8

            ]

    CLASSES

    RELATIONSHIPS
        }
        """

    entry_index = 2
    for _type, message in types.items():
        type_template_text = StringIO()
        type_template_text.write(f"""    {entry_index}[label = "{{{_type}|""")
        fields = []
        for _field in message.fields:
            message_type = _field.message_type
            # this will be 'message' if the field references another protobuf message
            field_type = type_mapping[_field.type]

            if message_type:
                this_node = message_mapping[_type]
                that_node = message_mapping[message_type.name]
                relationships.append(f"    {this_node}->{that_node}")
                field_type = message_type.name  # so we replace the 'message' token by the actual name

            fields.append(f"+ {_field.name}:{field_type}")

        # add fields
        type_template_text.write("\\n".join(fields))
        type_template_text.write("}\"]\n")
        entry_index += 1
        classes.append(type_template_text.getvalue())

        type_template_text.close()

    uml_template = uml_template.replace("CLASSES", "\n".join(classes))
    uml_template = uml_template.replace("RELATIONSHIPS",
                                        "\n".join(relationships))
    return uml_template
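A toy invocation sketch for _get_uml_template: SimpleNamespace objects stand in for protobuf descriptors (only the attributes the function reads: .name, .fields, .type, .message_type), and the mappings mirror the docstring, with node ids starting at 2 to match entry_index above:

from types import SimpleNamespace

address = SimpleNamespace(name="Address", fields=[
    SimpleNamespace(name="street", type=9, message_type=None),
])
person = SimpleNamespace(name="Person", fields=[
    SimpleNamespace(name="name", type=9, message_type=None),
    SimpleNamespace(name="home", type=11, message_type=address),
])

dot = _get_uml_template(
    types={"Person": person, "Address": address},
    type_mapping={9: "string", 11: "message"},
    message_mapping={"Person": 2, "Address": 3})
print(dot)  # render with, e.g.: dot -Tpng uml.dot -o uml.png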
Example #51
charnum = sys.argv[2]
charnum1 = int(charnum) + 1

if commchar == 'y':
    passchars = passchars2

# Shuffle the array a few times
random.shuffle(passchars)
random.shuffle(passchars)
random.shuffle(passchars)

# Write to vfile and print to stdout
for x in range(1, charnum1):
    if commchar == 'y':
        rannum = random.randint(0, 74)
    elif commchar == 'n':
        rannum = random.randint(0, 91)
    vfile.write(passchars[rannum])
readout = vfile.getvalue()
print(readout)

# Clear and close out buffers
readout = vfile.write(' ')
vfile.close()
passchars = [' ']
passchars2 = [' ']
charnum = 1
charnum1 = 1
Example #52
    def send(cls, r, resource):
        """
            Method to retrieve updates for a subscription, render the
            notification message and send it - responds to POST?format=msg
            requests to the respective resource.

            Args:
                r: the S3Request
                resource: the S3Resource
        """

        _debug = current.log.debug
        _debug("S3Notifications.send()")

        json_message = current.xml.json_message

        # Read subscription data
        source = r.body
        source.seek(0)
        data = source.read()
        subscription = json.loads(data)

        #_debug("Notify PE #%s by %s on %s of %s since %s" % \
        #           (subscription["pe_id"],
        #            str(subscription["method"]),
        #            str(subscription["notify_on"]),
        #            subscription["resource"],
        #            subscription["last_check_time"],
        #            ))

        # Check notification settings
        notify_on = subscription["notify_on"]
        methods = subscription["method"]
        if not notify_on or not methods:
            return json_message(message="No notifications configured "
                                "for this subscription")

        # Authorization (pe_id must not be None)
        pe_id = subscription["pe_id"]

        if not pe_id:
            r.unauthorised()

        # Fields to extract
        fields = resource.list_fields(key="notify_fields")
        if "created_on" not in fields:
            fields.append("created_on")

        # Extract the data
        data = resource.select(fields, represent=True, raw_data=True)
        rows = data["rows"]

        # How many records do we have?
        numrows = len(rows)
        if not numrows:
            return json_message(message="No records found")

        #_debug("%s rows:" % numrows)

        # Prepare meta-data
        get_config = resource.get_config
        settings = current.deployment_settings

        page_url = subscription["page_url"]

        crud_strings = current.response.s3.crud_strings.get(resource.tablename)
        if crud_strings:
            resource_name = crud_strings.title_list
        else:
            resource_name = string.capwords(resource.name, "_")

        last_check_time = s3_decode_iso_datetime(
            subscription["last_check_time"])

        email_format = subscription["email_format"]
        if not email_format:
            email_format = settings.get_msg_notify_email_format()

        filter_query = subscription.get("filter_query")

        meta_data = {
            "systemname": settings.get_system_name(),
            "systemname_short": settings.get_system_name_short(),
            "resource": resource_name,
            "page_url": page_url,
            "notify_on": notify_on,
            "last_check_time": last_check_time,
            "filter_query": filter_query,
            "total_rows": numrows,
        }

        # Render contents for the message template(s)
        renderer = get_config("notify_renderer")
        if not renderer:
            renderer = settings.get_msg_notify_renderer()
        if not renderer:
            renderer = cls._render

        contents = {}
        if email_format == "html" and "EMAIL" in methods:
            contents["html"] = renderer(resource, data, meta_data, "html")
            contents["default"] = contents["html"]
        if email_format != "html" or "EMAIL" not in methods or len(
                methods) > 1:
            contents["text"] = renderer(resource, data, meta_data, "text")
            contents["default"] = contents["text"]

        # Subject line
        subject = get_config("notify_subject")
        if not subject:
            subject = settings.get_msg_notify_subject()
        if callable(subject):
            subject = subject(resource, data, meta_data)

        from string import Template
        subject = Template(subject).safe_substitute(S="%(systemname)s",
                                                    s="%(systemname_short)s",
                                                    r="%(resource)s")
        subject = subject % meta_data

        # Attachment
        attachment = subscription.get("attachment", False)
        document_ids = None
        if attachment:
            attachment_fnc = settings.get_msg_notify_attachment()
            if attachment_fnc:
                document_ids = attachment_fnc(resource, data, meta_data)

        # **data for send_by_pe_id function in s3msg
        send_data = {}
        send_data_fnc = settings.get_msg_notify_send_data()
        if callable(send_data_fnc):
            send_data = send_data_fnc(resource, data, meta_data)

        # Helper function to find message templates from a priority list
        join = lambda *f: os.path.join(current.request.folder, *f)

        def get_msg_template(path, filenames):
            for fn in filenames:
                filepath = join(path, fn)
                if os.path.exists(filepath):
                    try:
                        return open(filepath, "rb")
                    except:
                        pass
            return None

        # Render and send the message(s)
        templates = settings.get_template()
        if templates != "default" and not isinstance(templates, (tuple, list)):
            templates = (templates, )
        prefix = resource.get_config("notify_template", "notify")

        send = current.msg.send_by_pe_id

        success = False
        errors = []

        for method in methods:

            error = None

            # Get the message template
            msg_template = None
            filenames = ["%s_%s.html" % (prefix, method.lower())]
            if method == "EMAIL" and email_format:
                filenames.insert(0,
                                 "%s_email_%s.html" % (prefix, email_format))
            if templates != "default":
                for template in templates[::-1]:
                    path = join("modules", "templates", template, "views",
                                "msg")
                    msg_template = get_msg_template(path, filenames)
                    if msg_template is not None:
                        break
            if msg_template is None:
                path = join("views", "msg")
                msg_template = get_msg_template(path, filenames)
            if msg_template is None:
                msg_template = StringIO(
                    s3_str(current.T("New updates are available.")))

            # Select contents format
            if method == "EMAIL" and email_format == "html":
                output = contents["html"]
            else:
                output = contents["text"]

            # Render the message
            try:
                message = current.response.render(msg_template, output)
            except:
                exc_info = sys.exc_info()[:2]
                error = ("%s: %s" % (exc_info[0].__name__, exc_info[1]))
                errors.append(error)
                continue
            finally:
                if hasattr(msg_template, "close"):
                    msg_template.close()

            if not message:
                continue

            # Send the message
            #_debug("Sending message per %s" % method)
            #_debug(message)
            try:
                sent = send(
                    pe_id,
                    # RFC 2822
                    subject=s3_truncate(subject, 78),
                    message=message,
                    contact_method=method,
                    system_generated=True,
                    document_ids=document_ids,
                    **send_data)
            except:
                exc_info = sys.exc_info()[:2]
                error = ("%s: %s" % (exc_info[0].__name__, exc_info[1]))
                sent = False

            if sent:
                # Successful if at least one notification went out
                success = True
            else:
                if not error:
                    error = current.session.error
                    if isinstance(error, list):
                        error = "/".join(error)
                if error:
                    errors.append(error)

        # Done
        if errors:
            message = ", ".join(errors)
        else:
            message = "Success"
        return json_message(success=success,
                            statuscode=200 if success else 403,
                            message=message)
Example #53
    class DeepRacerRacetrackSimTraceData():
        def __init__(self, s3_bucket, s3_key, s3_endpoint_url=None):
            logger.info("simtrace_data init")
            DeepRacerRacetrackSimTraceData.__instance = self
            self.data_state = SIMTRACE_DATA_UPLOAD_UNKNOWN_STATE
            self.s3_bucket = s3_bucket
            self.s3_object_key = s3_key
            self.s3_endpoint_url = s3_endpoint_url
            if s3_key != "None":
                self.setup_mutipart_upload()
            self.s3_extra_args = utils.get_s3_kms_extra_args()

        def setup_mutipart_upload(self):
            logger.info("simtrace_data: setup_mutipart_upload to %s",
                        self.s3_bucket)

            #setup for SIM_TRACE data incremental uploads to S3
            self.simtrace_csv_data = StringIO()
            self.csvwriter = csv.writer(self.simtrace_csv_data)
            self.csvwriter.writerow(SIMTRACE_CSV_DATA_HEADER)

            self.aws_region = rospy.get_param('AWS_REGION')
            logger.info(
                "simtrace_data: setup_mutipart_upload on s3_bucket {} s3_object_key {} region {}"
                .format(self.s3_bucket, self.s3_object_key, self.aws_region))

            #initiate the multipart upload
            s3_client = boto3.session.Session().client(
                's3',
                region_name=self.aws_region,
                endpoint_url=self.s3_endpoint_url,
                config=utils.get_boto_config())
            self.mpu = s3_client.create_multipart_upload(
                Bucket=self.s3_bucket, Key=self.s3_object_key)
            self.mpu_id = self.mpu["UploadId"]
            self.mpu_part_number = 1
            self.mpu_parts = []
            self.mpu_episodes = 0
            self.total_upload_size = 0
            self.data_state = SIMTRACE_DATA_UPLOAD_INIT_DONE
            logger.info(
                "simtrace_data: setup_mutipart_upload done! mpu_id=%s mpu_part_number=%s",
                self.mpu_id, self.mpu_part_number)

        def write_simtrace_data(self, jsondata):
            if self.data_state != SIMTRACE_DATA_UPLOAD_UNKNOWN_STATE:
                try:
                    csvdata = []
                    for key in SIMTRACE_CSV_DATA_HEADER:
                        csvdata.append(jsondata[key])
                    self.csvwriter.writerow(csvdata)
                    self.total_upload_size += sys.getsizeof(csvdata)
                    logger.debug("csvdata={} size data={} csv={}".format(
                        csvdata, sys.getsizeof(csvdata),
                        sys.getsizeof(self.simtrace_csv_data.getvalue())))
                except Exception:
                    log_and_exit("Invalid SIM_TRACE data format",
                                 SIMAPP_SIMULATION_WORKER_EXCEPTION,
                                 SIMAPP_EVENT_ERROR_CODE_500)

        def get_mpu_part_size(self):
            if self.data_state != SIMTRACE_DATA_UPLOAD_UNKNOWN_STATE:
                upload_size = sys.getsizeof(self.simtrace_csv_data.getvalue())
                logger.debug(
                    "simtrace_data: upload size ={}".format(upload_size))
                return upload_size
            else:
                logger.info("simtrace_data: invalid call to get_mpu_part_size")
                return SIMTRACE_DATA_SIZE_ZERO

        def reset_mpu_part(self, episodes):
            if self.data_state != SIMTRACE_DATA_UPLOAD_UNKNOWN_STATE:
                logger.debug(
                    "simtrace_data: reset_episode::: episode {}".format(
                        episodes))
                self.simtrace_csv_data.close()
                self.simtrace_csv_data = StringIO()
                self.csvwriter = csv.writer(self.simtrace_csv_data)
                logger.info(
                    "simtrace_data: reset_part_upload::: done! episode {}".
                    format(episodes))

        def upload_mpu_part_to_s3(self, episodes):
            if self.data_state != SIMTRACE_DATA_UPLOAD_UNKNOWN_STATE:
                logger.debug(
                    "simtrace_data: Uploading mpu_part_to_s3::: mpu_id-{} mpu_part_number-{} episode-{}"
                    .format(self.mpu_id, self.mpu_part_number, episodes))
                self.mpu_episodes = episodes
                s3_client = boto3.session.Session().client(
                    's3',
                    region_name=self.aws_region,
                    endpoint_url=self.s3_endpoint_url,
                    config=utils.get_boto_config())
                metrics_body = self.simtrace_csv_data.getvalue()
                part = s3_client.upload_part(Body=bytes(metrics_body,
                                                        encoding='utf-8'),
                                             Bucket=self.s3_bucket,
                                             Key=self.s3_object_key,
                                             UploadId=self.mpu_id,
                                             PartNumber=self.mpu_part_number)
                self.mpu_parts.append({
                    "PartNumber": self.mpu_part_number,
                    "ETag": part["ETag"]
                })
                self.mpu_part_number += 1
                logger.info(
                    "simtrace_data: Uploaded mpu_part_to_s3::: done! episode-{} mpu_id-{} mpu_part_number-{} mpu_parts-{}"
                    .format(episodes, self.mpu_id, self.mpu_part_number,
                            self.mpu_parts))

        def upload_to_s3(self, episodes):
            if self.data_state != SIMTRACE_DATA_UPLOAD_UNKNOWN_STATE and self.data_state != SIMTRACE_DATA_UPLOAD_DONE:
                part_size = self.get_mpu_part_size()
                if part_size >= SIMTRACE_DATA_MPU_MINSIZE:
                    logger.info(
                        "simtrace_data: upload_to_s3::: episode-{} part_size-{} mpu_part_number-{}"
                        .format(episodes, part_size, self.mpu_part_number))
                    self.data_state = SIMTRACE_DATA_MPU_UPLOAD_IN_PROGRESS
                    self.upload_mpu_part_to_s3(episodes)
                    self.reset_mpu_part(episodes)
                else:
                    logger.info(
                        "simtrace_data: upload_to_s3::: episode-{} part_size-{}, will upload after"
                        .format(episodes, part_size))

        def complete_upload_to_s3(self):
            logger.info(
                "simtrace_data: complete_upload_to_s3::: data_state-{}".format(
                    self.data_state))

            try:
                if self.data_state == SIMTRACE_DATA_MPU_UPLOAD_IN_PROGRESS:
                    #Multi-part upload to s3
                    self.data_state = SIMTRACE_DATA_UPLOAD_DONE
                    logger.info(
                        "simtrace_data: complete_upload_to_s3::Multi-part upload to S3 in progress, upload the last part number-{}, then complete mpu"
                        .format(self.mpu_part_number))
                    self.upload_mpu_part_to_s3(self.mpu_episodes)

                    #now complete the multi-part-upload
                    session = boto3.session.Session()
                    s3_client = session.client(
                        's3',
                        region_name=self.aws_region,
                        endpoint_url=self.s3_endpoint_url,
                        config=utils.get_boto_config())
                    result = s3_client.complete_multipart_upload(
                        Bucket=self.s3_bucket,
                        Key=self.s3_object_key,
                        UploadId=self.mpu_id,
                        MultipartUpload={"Parts": self.mpu_parts})
                    self.data_state = SIMTRACE_DATA_UPLOAD_DONE
                    logger.info(
                        "simtrace_data: complete_upload_to_s3 ::: multi-part-upload done,total raw size={}bytes result={}"
                        .format(self.total_upload_size, result))
                else:
                    #One-time upload to s3
                    if self.data_state == SIMTRACE_DATA_UPLOAD_INIT_DONE:
                        self.data_state = SIMTRACE_DATA_UPLOAD_DONE
                        logger.info(
                            "simtrace_data:  complete_upload_to_s3 ::: write simtrace data to s3"
                        )
                        session = boto3.session.Session()
                        s3_client = session.client(
                            's3',
                            region_name=self.aws_region,
                            endpoint_url=self.s3_endpoint_url,
                            config=utils.get_boto_config())

                        # cancel multipart upload process
                        logger.info(
                            "simtrace_data: multi-part upload not required, cancel it before uploading the complete S3 object"
                        )
                        s3_client.abort_multipart_upload(
                            Bucket=self.s3_bucket,
                            Key=self.s3_object_key,
                            UploadId=self.mpu_id)
                        metrics_body = self.simtrace_csv_data.getvalue()
                        logger.info(
                            "simtrace_data: complete_upload_to_s3:: write to s3 csv-formatted-data size={}bytes"
                            .format(sys.getsizeof(metrics_body)))
                        result = s3_client.put_object(Bucket=self.s3_bucket,
                                                      Key=self.s3_object_key,
                                                      Body=bytes(
                                                          metrics_body,
                                                          encoding='utf-8'),
                                                      **self.s3_extra_args)
                        logger.info(
                            "simtrace_data: complete_upload_to_s3:: done writing simtrace total-unformatted-data size={}bytes to s3. result{}"
                            .format(self.total_upload_size, result))
                self.reset_mpu_part(self.mpu_episodes)
            except Exception as e:
                logger.info(
                    "simtrace_data: complete_upload_to_s3:: exception-{} ".
                    format(e))
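A sketch of how the uploader above might be driven per episode; the bucket and key are placeholders, and simulation is a hypothetical iterable yielding, per episode, the SIM_TRACE json rows (dicts keyed by SIMTRACE_CSV_DATA_HEADER) produced by the simulator:

sim_trace = DeepRacerRacetrackSimTraceData("my-bucket",
                                           "sim-trace/0/iteration.csv")
for episode, episode_steps in enumerate(simulation):
    for step_json in episode_steps:
        sim_trace.write_simtrace_data(step_json)  # buffer one CSV row
    sim_trace.upload_to_s3(episode)  # ship a part once >= the MPU min size
sim_trace.complete_upload_to_s3()  # upload the tail and complete (or abort) the MPU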
Example #54
class BaseOutputHandler(object):
    '''Base class for output handlers'''
    def __init__(self, logger=None):
        super(BaseOutputHandler, self).__init__()
        self._buffer = StringIO()
        self._wpos = self._buffer.tell()

        if logger is None or isinstance(logger, string_types):
            self.logger = logging.getLogger(logger)
        else:
            # @TODO: remove assertion
            assert isinstance(logger, logging.Logger)
            self.logger = logger

        self.percentage_fmt = '%5.1f %%'
        self.handlers = ['progress', 'line']

        # NOTE: pulse, percentage and text are all optional. The regexp
        #       consumes the '\r' character in any case.
        pattern = (r'[ \t]*'
                   r'\r+'
                   r'[ \t]*'
                   r'(?P<pulse>[\\|/-])?'
                   r'[ \t]*'
                   r'((?P<percentage>\d{1,3}(\.\d*)?)[ \t]*%)?'
                   r'[ \t]*'
                   r'((?P<text>.*?)(?=\r|\n))')
        self._progress_pattern = re.compile(pattern)

        self._text_patterns = {
            'error': re.compile('error', re.IGNORECASE),
            'warning': re.compile('warning', re.IGNORECASE),
        }

    def reset(self):
        '''Reset the handler instance.

        Loses all unprocessed data. This is called implicitly at
        instantiation time.

        '''

        self._buffer.close()
        self._buffer = StringIO()
        self._wpos = self._buffer.tell()

    def close(self):
        '''Force processing of all buffered data and reset the instance'''

        self._parse()
        data = self._buffer.read()
        if data:
            if not data.endswith('\n'):
                data += '\n'
            self.handle_line(data)
        self.reset()

    def feed(self, data):
        '''Feed some data to the parser.

        It is processed insofar as it consists of complete elements;
        incomplete data is buffered until more data is fed or close()
        is called.

        '''

        rpos = self._buffer.tell()
        self._buffer.seek(self._wpos)
        self._buffer.write(data)
        self._wpos = self._buffer.tell()
        self._buffer.seek(rpos)
        self._parse()

    def get_progress(self):
        '''Search and decode progress patterns'''

        pos = self._buffer.tell()
        data = self._buffer.read()
        match = self._progress_pattern.match(data)
        if match:
            result = {
                'pulse': match.group('pulse'),
                'percentage': match.group('percentage'),
                'text': match.group('text'),
                'rawdata': data,
            }
        else:
            result = None

        if result:
            self._buffer.seek(pos + match.end())
        else:
            self._buffer.seek(pos)
            return None

        if result['percentage'] is not None:
            result['percentage'] = float(result['percentage'])

        return result

    def get_line(self):
        '''Extract complete lines'''

        pos = self._buffer.tell()
        data = self._buffer.readline()
        if data and (data[-1] == '\n'):
            return data[:-1]  # remove '\n'
        self._buffer.seek(pos)
        return None

    def _parse(self):
        while True:
            for name in self.handlers:
                data = getattr(self, 'get_' + name)()
                if data not in (None, ''):
                    getattr(self, 'handle_' + name)(data)
                    break
            else:
                # no pattern matches
                break

    def handle_progress(self, data):
        '''Handle progress data.

        This method is not meant to be called by the user.

        The user, anyway, can provide a custom implementation in
        derived classes.

        :param data:
            a dict with an entry for each named group in the
            "progress" regular expression: (pulse, percentage, text)
            for the default implementation.
            Each value can be None.

        '''

        pulse = data.get('pulse')
        percentage = data.get('percentage')
        text = data.get('text')

        result = []
        if pulse:
            result.append(pulse)
        if percentage is not None:
            result.append(self.percentage_fmt % percentage)
        if text:
            result.append(text)

        extra = {
            'tag': 'progress',
            'pulse': pulse,
            'percentage': percentage,
            'text': text,
        }

        self.logger.log(PROGRESS, ' '.join(result), extra=extra)

    def handle_line(self, data):
        '''Handle output lines.

        This method is not meant to be directly called by the user.

        The user, anyway, can provide a custom implementation in
        derived classes.

        :param data:
            an entire output line (including the trailing "end of line"
            character).

        '''

        for tag_name, pattern in self._text_patterns.items():
            match = pattern.search(data)
            if match:
                self.logger.info(data, extra={'tag': tag_name})
                break
        else:
            self.logger.info(data)
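A minimal sketch of feeding mixed output to the handler above; PROGRESS is assumed to be the custom log level this module defines for handle_progress(), so a stand-in value is declared here:

import logging

PROGRESS = 15  # stand-in for the module's custom progress log level
logging.addLevelName(PROGRESS, 'PROGRESS')
logging.basicConfig(level=PROGRESS)

handler = BaseOutputHandler()
handler.feed('\r 10.0 % scanning\n')       # decoded by the progress regexp
handler.feed('warning: low disk space\n')  # logged with the "warning" tag
handler.feed('partial lin')                # buffered: no newline yet
handler.feed('e\n')                        # completes the pending line
handler.close()                            # flush anything left over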
Example #55
class DummySpinner(object):
    def __init__(self, text="", **kwargs):
        colorama.init()
        from .misc import decode_for_output
        self.text = to_native_string(decode_for_output(text)) if text else ""
        self.stdout = kwargs.get("stdout", sys.stdout)
        self.stderr = kwargs.get("stderr", sys.stderr)
        self.out_buff = StringIO()
        self.write_to_stdout = kwargs.get("write_to_stdout", False)

    def __enter__(self):
        if self.text and self.text != "None":
            if self.write_to_stdout:
                self.write(self.text)
        return self

    def __exit__(self, exc_type, exc_val, traceback):
        if exc_type:
            import traceback
            from .misc import decode_for_output
            self.write_err(
                decode_for_output(traceback.format_exception(*sys.exc_info())))
        self._close_output_buffer()
        return False

    def __getattr__(self, k):
        try:
            retval = super(DummySpinner, self).__getattribute__(k)
        except AttributeError:
            if k in COLOR_MAP.keys() or k.upper() in COLORS:
                return self
            raise
        else:
            return retval

    def _close_output_buffer(self):
        if self.out_buff and not self.out_buff.closed:
            try:
                self.out_buff.close()
            except Exception:
                pass

    def fail(self, exitcode=1, text="FAIL"):
        from .misc import decode_for_output
        if text and text != "None":
            if self.write_to_stdout:
                self.write(decode_for_output(text))
            else:
                self.write_err(decode_for_output(text))
        self._close_output_buffer()

    def ok(self, text="OK"):
        from .misc import decode_for_output
        if text and text != "None":
            if self.write_to_stdout:
                self.stdout.write(decode_for_output(text))
            else:
                self.stderr.write(decode_for_output(text))
        self._close_output_buffer()
        return 0

    def hide_and_write(self, text, target=None):
        if not target:
            target = self.stdout
        from .misc import decode_for_output
        if text is None or isinstance(text, six.string_types) and text == "None":
            return
        target.write(decode_for_output("\r"))
        self._hide_cursor(target=target)
        target.write(decode_for_output("{0}\n".format(text)))
        target.write(CLEAR_LINE)
        self._show_cursor(target=target)

    def write(self, text=None):
        if not self.write_to_stdout:
            return self.write_err(text)
        from .misc import decode_for_output
        if text is None or isinstance(text, six.string_types) and text == "None":
            return
        text = decode_for_output(text)
        self.stdout.write(decode_for_output("\r"))
        line = decode_for_output("{0}\n".format(text))
        self.stdout.write(line)
        self.stdout.write(CLEAR_LINE)

    def write_err(self, text=None):
        from .misc import decode_for_output
        if text is None or isinstance(text, six.string_types) and text == "None":
            return
        text = decode_for_output(text)
        self.stderr.write(decode_for_output("\r"))
        line = decode_for_output("{0}\n".format(text))
        self.stderr.write(line)
        self.stderr.write(CLEAR_LINE)

    @staticmethod
    def _hide_cursor(target=None):
        pass

    @staticmethod
    def _show_cursor(target=None):
        pass
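A minimal usage sketch for the no-op spinner above (the surrounding module is assumed to supply COLOR_MAP, COLORS, CLEAR_LINE and the .misc helpers it imports):

with DummySpinner("Resolving dependencies...", write_to_stdout=True) as sp:
    sp.write("step 1 done")  # rendered through decode_for_output()
    sp.ok("All done")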
Example #56
class TestParser(unittest.TestCase):
    def setUp(self):
        self.io = StringIO()

    def tearDown(self):
        self.io.close()

    def test_register_simple_commands(self):
        def test_handler1():
            pass

        def test_handler2():
            pass

        cli = DummyCli()
        cli.loader = mock.MagicMock()
        cli.loader.cli_ctx = cli

        command = AzCliCommand(cli.loader, 'command the-name', test_handler1)
        command2 = AzCliCommand(cli.loader, 'sub-command the-second-name',
                                test_handler2)
        cmd_table = {
            'command the-name': command,
            'sub-command the-second-name': command2
        }
        cli.commands_loader.command_table = cmd_table

        parser = AzCliCommandParser(cli)
        parser.load_command_table(cli.commands_loader)
        args = parser.parse_args('command the-name'.split())
        self.assertIs(args.func, command)

        args = parser.parse_args('sub-command the-second-name'.split())
        self.assertIs(args.func, command2)

        with mock.patch('azure.cli.core.parser.AzCliCommandParser.error',
                        new=VerifyError(self)):
            parser.parse_args('sub-command'.split())
            self.assertTrue(AzCliCommandParser.error.called)

    def test_required_parameter(self):
        def test_handler(args):  # pylint: disable=unused-argument
            pass

        cli = DummyCli()
        cli.loader = mock.MagicMock()
        cli.loader.cli_ctx = cli

        command = AzCliCommand(cli.loader, 'test command', test_handler)
        command.add_argument('req', '--req', required=True)
        cmd_table = {'test command': command}
        cli.commands_loader.command_table = cmd_table

        parser = AzCliCommandParser(cli)
        parser.load_command_table(cli.commands_loader)

        args = parser.parse_args('test command --req yep'.split())
        self.assertIs(args.func, command)

        with mock.patch('azure.cli.core.parser.AzCliCommandParser.error',
                        new=VerifyError(self)):
            parser.parse_args('test command'.split())
            self.assertTrue(AzCliCommandParser.error.called)

    def test_nargs_parameter(self):
        def test_handler():
            pass

        cli = DummyCli()
        cli.loader = mock.MagicMock()
        cli.loader.cli_ctx = cli

        command = AzCliCommand(cli.loader, 'test command', test_handler)
        command.add_argument('req', '--req', required=True, nargs=2)
        cmd_table = {'test command': command}
        cli.commands_loader.command_table = cmd_table

        parser = AzCliCommandParser(cli)
        parser.load_command_table(cli.commands_loader)

        args = parser.parse_args('test command --req yep nope'.split())
        self.assertIs(args.func, command)

        with mock.patch('azure.cli.core.parser.AzCliCommandParser.error',
                        new=VerifyError(self)):
            parser.parse_args('test command -req yep'.split())
            self.assertTrue(AzCliCommandParser.error.called)

    def test_case_insensitive_enum_choices(self):
        from enum import Enum

        class TestEnum(Enum):  # pylint: disable=too-few-public-methods

            opt1 = "ALL_CAPS"
            opt2 = "camelCase"
            opt3 = "snake_case"

        def test_handler():
            pass

        cli = DummyCli()
        cli.loader = mock.MagicMock()
        cli.loader.cli_ctx = cli

        command = AzCliCommand(cli.loader, 'test command', test_handler)
        command.add_argument('opt',
                             '--opt',
                             required=True,
                             **enum_choice_list(TestEnum))
        cmd_table = {'test command': command}
        cli.commands_loader.command_table = cmd_table

        parser = AzCliCommandParser(cli)
        parser.load_command_table(cli.commands_loader)

        args = parser.parse_args('test command --opt alL_cAps'.split())
        self.assertEqual(args.opt, 'ALL_CAPS')

        args = parser.parse_args('test command --opt CAMELCASE'.split())
        self.assertEqual(args.opt, 'camelCase')

        args = parser.parse_args('test command --opt sNake_CASE'.split())
        self.assertEqual(args.opt, 'snake_case')

    def _mock_import_lib(_):
        mock_obj = mock.MagicMock()
        mock_obj.__path__ = __name__
        return mock_obj

    def _mock_iter_modules(_):
        return [(None, __name__, None)]

    def _mock_extension_modname(ext_name, ext_dir):
        return ext_name

    def _mock_get_extensions(**kwargs):
        MockExtension = namedtuple(
            'Extension',
            ['name', 'preview', 'experimental', 'path', 'get_metadata'])
        return [
            MockExtension(name=__name__ + '.ExtCommandsLoader',
                          preview=False,
                          experimental=False,
                          path=None,
                          get_metadata=lambda: {}),
            MockExtension(name=__name__ + '.Ext2CommandsLoader',
                          preview=False,
                          experimental=False,
                          path=None,
                          get_metadata=lambda: {})
        ]

    def _mock_load_command_loader(loader, args, name, prefix):
        from enum import Enum

        class TestEnum(Enum):  # pylint: disable=too-few-public-methods
            enum_1 = 'enum_1'
            enum_2 = 'enum_2'

        def test_handler():
            pass

        class TestCommandsLoader(AzCommandsLoader):
            def load_command_table(self, args):
                super(TestCommandsLoader, self).load_command_table(args)
                command = AzCliCommand(loader, 'test module', test_handler)
                command.add_argument('opt',
                                     '--opt',
                                     required=True,
                                     **enum_choice_list(TestEnum))
                self.command_table['test module'] = command
                return self.command_table

        # A command from an extension
        class ExtCommandsLoader(AzCommandsLoader):
            def load_command_table(self, args):
                super(ExtCommandsLoader, self).load_command_table(args)
                command = AzCliCommand(loader, 'test extension', test_handler)
                command.add_argument('opt',
                                     '--opt',
                                     required=True,
                                     **enum_choice_list(TestEnum))
                self.command_table['test extension'] = command
                return self.command_table

        if prefix == 'azure.cli.command_modules.':
            command_loaders = {'TestCommandsLoader': TestCommandsLoader}
        else:
            command_loaders = {'ExtCommandsLoader': ExtCommandsLoader}

        module_command_table = {}
        for _, loader_cls in command_loaders.items():
            command_loader = loader_cls(cli_ctx=loader.cli_ctx)
            command_table = command_loader.load_command_table(args)
            if command_table:
                module_command_table.update(command_table)
                # this will be used later by the load_arguments method
                loader.loaders.append(command_loader)
        return module_command_table, command_loader.command_group_table

    @mock.patch('importlib.import_module', _mock_import_lib)
    @mock.patch('pkgutil.iter_modules', _mock_iter_modules)
    @mock.patch('azure.cli.core.commands._load_command_loader',
                _mock_load_command_loader)
    @mock.patch('azure.cli.core.extension.get_extension_modname',
                _mock_extension_modname)
    @mock.patch('azure.cli.core.extension.get_extensions',
                _mock_get_extensions)
    def test_parser_error_spellchecker(self):
        cli = DummyCli()
        main_loader = MainCommandsLoader(cli)
        cli.loader = main_loader

        cli.loader.load_command_table(None)

        parser = cli.parser_cls(cli)
        parser.load_command_table(cli.loader)

        logger_msgs = []
        choice_lists = []
        original_get_close_matches = difflib.get_close_matches

        def mock_log_error(logger_self, msg):
            # Only intercept 'cli.azure.cli.core.azclierror' logger and ignore 'az_command_data_logger'
            if logger_self.name.startswith('cli'):
                logger_msgs.append(msg)

        def mock_get_close_matches(*args, **kwargs):
            matches = original_get_close_matches(*args, **kwargs)
            choice_lists.append(matches)
            return matches  # keep the real return value so patched callers still get a list

        def mock_ext_cmd_tree_load(*args, **kwargs):
            return {
                "test": {
                    "new-ext": {
                        "create": "new-ext-name",
                        "reset": "another-ext-name"
                    }
                }
            }

        def mock_add_extension(*args, **kwargs):
            pass

        # run multiple faulty commands and save error logs, as well as close matches
        with mock.patch('logging.Logger.error', mock_log_error), \
                mock.patch('difflib.get_close_matches', mock_get_close_matches):
            faulty_cmd_args = [
                'test module1 --opt enum_1', 'test extension1 --opt enum_1',
                'test foo_bar --opt enum_3', 'test module --opt enum_3',
                'test extension --opt enum_3'
            ]
            for text in faulty_cmd_args:
                with self.assertRaises(SystemExit):
                    parser.parse_args(text.split())
        parser.parse_args('test module --opt enum_1'.split())

        # assert the right type of error msg is logged for command vs argument parsing
        self.assertEqual(len(logger_msgs), 5)
        for msg in logger_msgs[:3]:
            self.assertIn("misspelled or not recognized by the system", msg)
        for msg in logger_msgs[3:]:
            self.assertIn("not a valid value for '--opt'.", msg)

        # assert the right choices are matched as "close".
        # If these don't hold, the matching algorithm should be deemed flawed.
        for choices in choice_lists[:2]:
            self.assertEqual(len(choices), 1)
        self.assertEqual(len(choice_lists[2]), 0)
        for choices in choice_lists[3:]:
            self.assertEqual(len(choices), 2)
            for choice in ['enum_1', 'enum_2']:
                self.assertIn(choice, choices)

        # test dynamic extension install
        with mock.patch('logging.Logger.error', mock_log_error), \
                mock.patch('azure.cli.core.extension.operations.add_extension', mock_add_extension), \
                mock.patch('azure.cli.core.parser.AzCliCommandParser._get_extension_command_tree', mock_ext_cmd_tree_load), \
                mock.patch('azure.cli.core.parser.AzCliCommandParser._get_extension_use_dynamic_install_config', return_value='yes_without_prompt'), \
                mock.patch('azure.cli.core.parser.AzCliCommandParser._get_extension_run_after_dynamic_install_config', return_value=False):
            with self.assertRaises(SystemExit):
                parser.parse_args('test new-ext create --opt enum_2'.split())
            self.assertIn(
                "Extension new-ext-name installed. Please rerun your command.",
                logger_msgs[5])
            with self.assertRaises(SystemExit):
                # test positional args
                parser.parse_args('test new-ext reset pos1 pos2'.split())
            self.assertIn(
                "Extension another-ext-name installed. Please rerun your command.",
                logger_msgs[6])
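
# --- Illustrative standalone sketch (not part of the test above): the
# spell-checking behavior exercised by test_parser_error_spellchecker rests
# on difflib.get_close_matches. 'suggest_commands' and the command names
# below are hypothetical.
import difflib

def suggest_commands(typed, known_commands, cutoff=0.7):
    """Return up to three close matches for a mistyped command, best first."""
    return difflib.get_close_matches(typed, known_commands, n=3, cutoff=cutoff)

# suggest_commands('test module1', ['test module', 'test extension'])
# -> ['test module']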
Example #57
	if filename != 'README.md':
		out.write('<div class="source">[<a href="' + get_file_link(sundered) + '" target="_blank">view source</a>]</div>')
	out.write('</div><div id="embedded_nav"></div>')

	if value.description is not None:
		out.write(to_html(value.description))

	# generate the summary table
	nav = StringIO()
	nav.write('<div id="toc"><div id="toc_title"><b>Table of contents</b></div><ul>')
	if filename != 'README.md':
		generate_member_table(out, None, nav, value.objects, 'Classes, functions, and variables in this file')

	# generate detailed entries for each object in the file
	generate_member_list(out, nav, value.objects)

	out.write('</div>')
	nav.write('</ul></div>')
	generate_right_nav(out, nav, value.objects)
	if filename == 'README.md':
		# for README.md files, hide the table of contents
		out.write('<style>#embedded_nav {width:0;height:0;padding:0;overflow:hidden;} #rightnav {width:0;height:0;padding:0;}</style>')
	out.write(footer)
	nav.close()
	out.close()
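
# --- Minimal standalone sketch of the two-buffer pattern used above
# (function and variable names here are illustrative, not from the source):
# the page body and the table of contents are accumulated in separate
# StringIO streams and stitched together once both are complete.
from io import StringIO

def render_page(sections):
    body, nav = StringIO(), StringIO()
    nav.write('<div id="toc"><ul>')
    for title, html in sections:
        nav.write('<li><a href="#{0}">{0}</a></li>'.format(title))
        body.write('<h2 id="{0}">{0}</h2>{1}'.format(title, html))
    nav.write('</ul></div>')
    page = nav.getvalue() + body.getvalue()
    nav.close()
    body.close()
    return page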

copyfile(style_file, os.path.join('Docs', 'html', style_file))
copyfile(script_file, os.path.join('Docs', 'html', script_file))
copyfile(hamburger_file, os.path.join('Docs', 'html', hamburger_file))
copyfile('critical.css', os.path.join('Docs', 'html', 'critical.css'))
Example #58
class TestResourceGroupTransform(unittest.TestCase):

    @classmethod
    def setUpClass(cls):
        pass

    @classmethod
    def tearDownClass(cls):
        pass

    def setUp(self):
        self.io = StringIO()

    def tearDown(self):
        self.io.close()

    # CORRECT_ID should match 'resourceGroups' in the path in a case insensitive way
    CORRECT_ID = "/subscriptions/00000000-0000-0000-0000-0000000000000/resOurcegroUps/REsourceGROUPname/providers/Microsoft.Compute/virtualMachines/vMName"  # pylint: disable=line-too-long
    NON_RG_ID = "/subscriptions/00000000-0000-0000-0000-0000000000000/somethingElse/REsourceGROUPname/providers/Microsoft.Compute/virtualMachines/vMName"  # pylint: disable=line-too-long
    BOGUS_ID = "|completely-bogus-id|"
    DICT_ID = {'value': "/subscriptions/00000000-0000-0000-0000-0000000000000/resourceGroups/REsourceGROUPname/providers/Microsoft.Compute/virtualMachines/vMName"}  # pylint: disable=line-too-long

    def test_split_correct_id(self):
        result = _parse_id(TestResourceGroupTransform.CORRECT_ID)
        self.assertDictEqual(result, {
            'resource-group': 'REsourceGROUPname',
            'name': 'vMName'
        })

    def test_split_non_resourcegroup_id(self):
        with self.assertRaises(KeyError):
            _parse_id(TestResourceGroupTransform.NON_RG_ID)

    def test_split_bogus_resourcegroup_id(self):
        with self.assertRaises(IndexError):
            _parse_id(TestResourceGroupTransform.BOGUS_ID)

    def test_split_dict_id(self):
        with self.assertRaises(TypeError):
            _parse_id(TestResourceGroupTransform.DICT_ID)

    def test_add_valid_resourcegroup_id(self):
        instance = {
            'id': TestResourceGroupTransform.CORRECT_ID,
            'name': 'A name'
        }
        _add_resource_group(instance)
        self.assertDictEqual(instance, {
            'id': TestResourceGroupTransform.CORRECT_ID,
            'resourceGroup': 'REsourceGROUPname',
            'name': 'A name'
        })

    def test_dont_add_invalid_resourcegroup_id(self):
        instance = {
            'id': TestResourceGroupTransform.BOGUS_ID,
            'name': 'A name'
        }
        _add_resource_group(instance)
        self.assertDictEqual(instance, {
            'id': TestResourceGroupTransform.BOGUS_ID,
            'name': 'A name'
        })

    def test_dont_stomp_on_existing_resourcegroup_id(self):
        instance = {
            'id': TestResourceGroupTransform.CORRECT_ID,
            'resourceGroup': 'SomethingElse',
            'name': 'A name'
        }
        _add_resource_group(instance)
        self.assertDictEqual(instance, {
            'id': TestResourceGroupTransform.CORRECT_ID,
            'resourceGroup': 'SomethingElse',
            'name': 'A name'
        })
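
# --- Hedged sketch, consistent in spirit with the tests above (the real
# _parse_id lives in azure.cli and may raise different exception types for
# malformed input). 'parse_arm_id' is a hypothetical name.
def parse_arm_id(resource_id):
    """Extract the resource group and resource name from an ARM resource id."""
    parts = resource_id.strip('/').split('/')
    # Pair up path segments: .../resourceGroups/<rg>/.../<type>/<name>
    pairs = {parts[i].lower(): parts[i + 1] for i in range(0, len(parts) - 1, 2)}
    return {
        'resource-group': pairs['resourcegroups'],  # KeyError if segment absent
        'name': parts[-1],
    }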
Example #59
import csv
from io import StringIO

def parse_csv(data):
    """Parse a CSV string into a list of dictionaries (one per row)."""
    buf = StringIO(data)
    result = list(csv.DictReader(buf))
    buf.close()
    return result
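
# --- Usage sketch for parse_csv above: csv.DictReader treats the first row
# as the header, so each following row becomes a dict keyed by column name.
rows = parse_csv("name,age\nalice,30\nbob,25\n")
assert rows == [{'name': 'alice', 'age': '30'}, {'name': 'bob', 'age': '25'}]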
Example #60
class VistirSpinner(base_obj):
    "A spinner class for handling spinners on windows and posix."

    def __init__(self, *args, **kwargs):
        """
        Get a spinner object or a dummy spinner to wrap a context.

        Keyword Arguments:
        :param str spinner_name: A spinner type e.g. "dots" or "bouncingBar" (default: {"bouncingBar"})
        :param str start_text: Text to start off the spinner with (default: {None})
        :param dict handler_map: Handler map for signals to be handled gracefully (default: {None})
        :param bool nospin: If true, use the dummy spinner (default: {False})
        :param bool write_to_stdout: Writes to stdout if true, otherwise writes to stderr (default: True)
        """

        handler = kwargs.pop("handler", None)  # assumed kwarg so 'handler' is bound below
        self.handler = handler
        colorama.init()
        sigmap = {}
        if handler:
            sigmap.update({signal.SIGINT: handler, signal.SIGTERM: handler})
        handler_map = kwargs.pop("handler_map", {})
        if os.name == "nt":
            sigmap[signal.SIGBREAK] = handler
        else:
            sigmap[signal.SIGALRM] = handler
        if handler_map:
            sigmap.update(handler_map)
        spinner_name = kwargs.pop("spinner_name", "bouncingBar")
        start_text = kwargs.pop("start_text", None)
        _text = kwargs.pop("text", "Running...")
        kwargs["text"] = start_text if start_text is not None else _text
        kwargs["sigmap"] = sigmap
        kwargs["spinner"] = getattr(Spinners, spinner_name, "")
        write_to_stdout = kwargs.pop("write_to_stdout", True)
        self.stdout = kwargs.pop("stdout", sys.stdout)
        self.stderr = kwargs.pop("stderr", sys.stderr)
        self.out_buff = StringIO()
        self.write_to_stdout = write_to_stdout
        self.is_dummy = bool(yaspin is None)
        super(VistirSpinner, self).__init__(*args, **kwargs)

    def ok(self, text="OK", err=False):
        """Set Ok (success) finalizer to a spinner."""
        # Do not display spin text for ok state
        self._text = None

        _text = text if text else "OK"
        err = err or not self.write_to_stdout
        self._freeze(_text, err=err)

    def fail(self, text="FAIL", err=False):
        """Set fail finalizer to a spinner."""
        # Do not display spin text for fail state
        self._text = None

        _text = text if text else "FAIL"
        err = err or not self.write_to_stdout
        self._freeze(_text, err=err)

    def hide_and_write(self, text, target=None):
        if not target:
            target = self.stdout
        from .misc import decode_for_output
        if text is None or (isinstance(text, six.string_types) and text == "None"):
            return  # nothing meaningful to write
        target.write(decode_for_output("\r"))
        self._hide_cursor(target=target)
        target.write(decode_for_output("{0}\n".format(text)))
        target.write(CLEAR_LINE)
        self._show_cursor(target=target)

    def write(self, text):
        if not self.write_to_stdout:
            return self.write_err(text)
        from .misc import to_text
        sys.stdout.write("\r")
        self.stdout.write(CLEAR_LINE)
        if text is None:
            text = ""
        text = to_native_string("{0}\n".format(text))
        self.stdout.write(text)
        self.out_buff.write(to_text(text))

    def write_err(self, text):
        """Write error text in the terminal without breaking the spinner."""
        from .misc import to_text

        self.stderr.write("\r")
        self.stderr.write(CLEAR_LINE)
        if text is None:
            text = ""
        text = to_native_string("{0}\n".format(text))
        self.stderr.write(text)
        self.out_buff.write(to_text(text))

    def start(self):
        if self._sigmap:
            self._register_signal_handlers()

        target = self.stdout if self.write_to_stdout else self.stderr
        if target.isatty():
            self._hide_cursor(target=target)

        self._stop_spin = threading.Event()
        self._hide_spin = threading.Event()
        self._spin_thread = threading.Thread(target=self._spin)
        self._spin_thread.start()

    def stop(self):
        if self._dfl_sigmap:
            # Reset registered signal handlers to default ones
            self._reset_signal_handlers()

        if self._spin_thread:
            self._stop_spin.set()
            self._spin_thread.join()

        target = self.stdout if self.write_to_stdout else self.stderr
        if target.isatty():
            target.write("\r")

        if self.write_to_stdout:
            self._clear_line()
        else:
            self._clear_err()

        if target.isatty():
            self._show_cursor(target=target)
        if self.stderr and self.stderr != sys.stderr:
            self.stderr.close()
        if self.stdout and self.stdout != sys.stdout:
            self.stdout.close()
        self.out_buff.close()

    def _freeze(self, final_text, err=False):
        """Stop spinner, compose last frame and 'freeze' it."""
        if not final_text:
            final_text = ""
        text = to_native_string(final_text)
        self._last_frame = self._compose_out(text, mode="last")

        # Should be stopped here, otherwise prints after
        # self._freeze call will mess up the spinner
        self.stop()
        if err or not self.write_to_stdout:
            self.stderr.write(self._last_frame)
        else:
            self.stdout.write(self._last_frame)

    def _compose_color_func(self):
        fn = functools.partial(
            colored,
            color=self._color,
            on_color=self._on_color,
            attrs=list(self._attrs),
        )
        return fn

    def _compose_out(self, frame, mode=None):
        # Ensure Unicode input

        frame = to_native_string(frame)
        if self._text is None:
            self._text = ""
        text = to_native_string(self._text)
        if self._color_func is not None:
            frame = self._color_func(frame)
        if self._side == "right":
            frame, text = text, frame
        # Mode
        if not mode:
            out = to_native_string("\r{0} {1}".format(frame, text))
        else:
            out = to_native_string("{0} {1}\n".format(frame, text))
        return out

    def _spin(self):
        target = self.stdout if self.write_to_stdout else self.stderr
        clear_fn = self._clear_line if self.write_to_stdout else self._clear_err
        while not self._stop_spin.is_set():

            if self._hide_spin.is_set():
                # Wait a bit to avoid wasting cycles
                time.sleep(self._interval)
                continue

            # Compose output
            spin_phase = next(self._cycle)
            out = self._compose_out(spin_phase)

            # Write
            target.write(out)
            clear_fn()
            target.flush()

            # Wait
            time.sleep(self._interval)
            target.write("\b")

    def _register_signal_handlers(self):
        # SIGKILL cannot be caught or ignored, and the receiving
        # process cannot perform any clean-up upon receiving this
        # signal.
        try:
            if signal.SIGKILL in self._sigmap:
                raise ValueError(
                    "Trying to set handler for SIGKILL signal. "
                    "SIGKILL cannot be caught or ignored in POSIX systems.")
        except AttributeError:
            pass

        for sig, sig_handler in self._sigmap.items():
            # A handler for a particular signal, once set, remains
            # installed until it is explicitly reset. Store default
            # signal handlers for subsequent reset at cleanup phase.
            dfl_handler = signal.getsignal(sig)
            self._dfl_sigmap[sig] = dfl_handler

            # ``signal.SIG_DFL`` and ``signal.SIG_IGN`` are also valid
            # signal handlers and are not callables.
            if callable(sig_handler):
                # ``signal.signal`` accepts handler function which is
                # called with two arguments: signal number and the
                # interrupted stack frame. ``functools.partial`` solves
                # the problem of passing spinner instance into the handler
                # function.
                sig_handler = functools.partial(sig_handler, spinner=self)

            signal.signal(sig, sig_handler)

    def _reset_signal_handlers(self):
        for sig, sig_handler in self._dfl_sigmap.items():
            signal.signal(sig, sig_handler)

    @staticmethod
    def _hide_cursor(target=None):
        if not target:
            target = sys.stdout
        cursor.hide(stream=target)

    @staticmethod
    def _show_cursor(target=None):
        if not target:
            target = sys.stdout
        cursor.show(stream=target)

    @staticmethod
    def _clear_err():
        sys.stderr.write(CLEAR_LINE)

    @staticmethod
    def _clear_line():
        sys.stdout.write(CLEAR_LINE)
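
# --- Standalone sketch of the save/restore pattern used by
# _register_signal_handlers/_reset_signal_handlers above (the helper names
# here are illustrative, not part of vistir): stash the previous handler
# with signal.getsignal before installing a new one, then put it back.
import signal

def install_handler(sig, handler, saved):
    saved[sig] = signal.getsignal(sig)  # remember whatever was installed
    signal.signal(sig, handler)

def restore_handlers(saved):
    for sig, original in saved.items():
        signal.signal(sig, original)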