Example #1
0
def main():
    """Run ur2ud.py as a command-line tool.

    Wraps the standard streams with codecs for the locale's preferred
    encoding, parses options, and transliterates STDIN to STDOUT.
    """
    import codecs
    import locale
    import optparse
    import sys

    encoding = locale.getpreferredencoding()
    sys.stdin = codecs.getreader(encoding)(sys.stdin)
    sys.stdout = codecs.getwriter(encoding)(sys.stdout)
    sys.stderr = codecs.getwriter(encoding)(sys.stderr)

    opt_parser = optparse.OptionParser(
        prog=__program_name__, version=__version__,
        usage='%prog',
        description=u'Read romanized Indic text from STDIN, '
                    u'and write Devanāgarī to STDOUT.')
    opt_parser.add_option('-i', '--iast', dest='iast', action='store_true',
                          help="Expect IAST input (instead of ISO15919)")
    options, _args = opt_parser.parse_args()

    transliterator = Transliterator(options.iast)
    sys.stdout.write(transliterator.transliterate(sys.stdin.read()))
    def collect_images(self, docname, z):
        '''
        Collect all images referenced by the document, copy them to the
        temporary directory, and add them to the zip archive *z*.

        :param docname: document name (unused here -- kept for the
            caller's interface; TODO confirm with callers).
        :param z: open zipfile.ZipFile to write the images into.
        '''
        # Encoding for the image directory name; fall back to UTF-8
        # when the locale reports none.  (The original shadowed the
        # builtin ``dir`` and called getpreferredencoding() twice.)
        dir_locale = locale.getpreferredencoding() or "UTF-8"
        image_dir = unicode(self.options.image_dir, dir_locale)
        for node in self.document.xpath('//svg:image', namespaces=inkex.NSS):
            xlink = node.get(inkex.addNS('href', u'xlink'))
            if (xlink[:4] != 'data'):  # skip embedded (data: URI) images
                absref = node.get(inkex.addNS('absref', u'sodipodi'))
                url = urlparse.urlparse(xlink)
                href = urllib.url2pathname(url.path)

                # Prefer the path from xlink:href when present.
                if href is not None:
                    absref = os.path.realpath(href)

                absref = unicode(absref, "utf-8")
                image_path = os.path.join(image_dir, os.path.basename(absref))

                if (os.path.isfile(absref)):
                    shutil.copy(absref, self.tmp_dir)
                    z.write(absref, image_path.encode(self.encoding))
                elif (os.path.isfile(os.path.join(self.tmp_dir, absref))):
                    # TODO: please explain why this clause is necessary
                    shutil.copy(os.path.join(self.tmp_dir, absref), self.tmp_dir)
                    z.write(os.path.join(self.tmp_dir, absref),
                            image_path.encode(self.encoding))
                else:
                    inkex.errormsg(_('Could not locate file: %s') % absref)

                node.set(inkex.addNS('href', u'xlink'), image_path)
Example #3
0
 def DetectEncodingAndRead(self, fd):
     """Detect *fd*'s text encoding by trial decoding and return its content.

     Tries utf-8/utf-16 first, then the locale/system encodings, and
     latin-1 last.  On success remembers the winning codec in
     ``self._encoding`` and returns the decoded text; returns None when
     every candidate fails.
     """
     # Candidate encodings, in priority order, without duplicates.
     encodings = ["utf-8", "utf-16"]
     for candidate in (locale.getpreferredencoding(),
                       sys.getdefaultencoding(),
                       locale.getdefaultlocale()[1],
                       sys.getfilesystemencoding(),
                       'latin-1'):
         # getdefaultlocale() may yield None -- codecs cannot use that.
         if candidate and candidate not in encodings:
             encodings.append(candidate)

     for enc in encodings:
         fd.seek(0)
         try:
             reader = codecs.getreader(enc)(fd)
             content = reader.read()
         except Exception:
             # Decode failure (or unknown codec): try the next candidate.
             # Narrowed from a bare except so KeyboardInterrupt/SystemExit
             # are no longer swallowed.
             continue
         else:
             self._encoding = enc
             logger.info("Detect file %s 's encoding is %s" % (self.GetFilename(), self._encoding))
             return content

     logger.error("Fail to detect the encoding for file %s" % self.GetFilename())
     return None
Example #4
0
 def call(self, cmd):
     """Encode *cmd* with the locale's preferred encoding and run it in a shell.

     Logs the active logging handlers and the relevant encodings first
     (debugging aid), then delegates to the module-level ``call``.
     """
     root_logger = logging.getLogger()
     logger.info("logger.handlers:%s", root_logger.handlers)
     logger.info("locale preferredencoding:%s", locale.getpreferredencoding())
     logger.info("sys.getfilesystemencoding:%s", sys.getfilesystemencoding())
     encoded_cmd = cmd.encode(locale.getpreferredencoding())
     return call(encoded_cmd, shell=True)
    def testMenuSelectNotepad_bug(self):
        "In notepad - MenuSelect Edit->Paste did not work"

        # Regression test: type text into Notepad, copy it via the Edit
        # menu, then paste it three times into this dialog's edit box.
        text = b'Here are some unicode characters \xef\xfc\r\n'
        self.app2.UntitledNotepad.Edit.Wait('enabled')
        time.sleep(0.3)
        self.app2.UntitledNotepad.Edit.SetEditText(text)
        time.sleep(0.3)
        # Round trip: what Notepad shows, encoded back with the locale's
        # preferred encoding, must equal what we typed.
        self.assertEquals(self.app2.UntitledNotepad.Edit.TextBlock().encode(locale.getpreferredencoding()), text)

        Timings.after_menu_wait = .7
        self.app2.UntitledNotepad.MenuSelect("Edit->Select All")
        time.sleep(0.3)
        self.app2.UntitledNotepad.MenuSelect("Edit->Copy")
        time.sleep(0.3)
        # The clipboard should now hold the same (encoded) text.
        self.assertEquals(clipboard.GetData().encode(locale.getpreferredencoding()), text)

        self.dlg.SetFocus()
        self.dlg.MenuSelect("Edit->Select All")
        self.dlg.MenuSelect("Edit->Paste")
        self.dlg.MenuSelect("Edit->Paste")
        self.dlg.MenuSelect("Edit->Paste")

        # Close Notepad without saving the scratch document.
        self.app2.UntitledNotepad.MenuSelect("File->Exit")
        self.app2.Window_(title='Notepad', class_name='#32770')["Don't save"].Click()

        # Three pastes -> the text repeated three times.
        self.assertEquals(self.dlg.Edit.TextBlock().encode(locale.getpreferredencoding()), text*3)
Example #6
0
  def StopRemoteWorker(self):
    """Stop the "local remote worker" started by StartRemoteWorker.

    Prints its stdout and stderr out for debug purposes.
    """
    self._worker_proc.terminate()
    self._worker_proc.wait()

    enc = locale.getpreferredencoding()

    self._worker_stdout.seek(0)
    stdout_lines = []
    for raw_line in self._worker_stdout.readlines():
      stdout_lines.append(raw_line.decode(enc).strip())
    if stdout_lines:
      print('Local remote worker stdout')
      print('--------------------------')
      print('\n'.join(stdout_lines))

    self._worker_stderr.seek(0)
    stderr_lines = []
    for raw_line in self._worker_stderr.readlines():
      stderr_lines.append(raw_line.decode(enc).strip())
    if stderr_lines:
      print('Local remote worker stderr')
      print('--------------------------')
      print('\n'.join(stderr_lines))
Example #7
0
def fix_shebang(f, osx_is_app=False):
    """Rewrite the shebang of ``config.build_prefix/f`` to the build python.

    Object files, symlinks, binary files and shebangs that do not
    mention python are left untouched; a rewritten file is chmodded
    back to 755.
    """
    path = join(config.build_prefix, f)
    # Never touch compiled objects or symlinks.
    if is_obj(path) or os.path.islink(path):
        return
    with io.open(path, encoding=locale.getpreferredencoding()) as fi:
        try:
            data = fi.read()
        except UnicodeDecodeError:
            # Binary file -- nothing to rewrite.
            return
    match = SHEBANG_PAT.match(data)
    if match is None or 'python' not in match.group():
        return

    if sys.platform == 'darwin' and osx_is_app:
        py_exec = '/bin/bash ' + config.build_prefix + '/bin/python.app'
    else:
        py_exec = config.build_prefix + '/bin/' + basename(config.build_python)
    new_data = SHEBANG_PAT.sub('#!' + py_exec, data, count=1)
    if new_data == data:
        return
    print("updating shebang:", f)
    with io.open(path, 'w', encoding=locale.getpreferredencoding()) as fo:
        fo.write(new_data)
    os.chmod(path, int('755', 8))
Example #8
0
    def strftime(self, dt, fmt):
        """Format *dt* with *fmt*, working around strftime's refusal of
        years before 1900 (Python 2 ``datetime.strftime`` limitation).

        For years > 1900 this delegates to ``dt.strftime`` directly and
        decodes the result with the locale's preferred encoding.  For
        earlier years the date is shifted into a stand-in year near 2000
        that shares the same calendar layout, formatted there, and the
        real year digits are spliced back in afterwards.
        """
        # Strip unsupported %s directives before formatting.
        fmt = self.illegal_s.sub(r"\1", fmt)
        fmt = fmt.replace("%s", "s")
        if dt.year > 1900:
            return unicode(dt.strftime(fmt), locale.getpreferredencoding())

        year = dt.year
        # For every non-leap year century, advance by
        # 6 years to get into the 28-year repeat cycle
        delta = 2000 - year
        off = 6*(delta // 100 + delta // 400)
        year = year + off

        # Move to around the year 2000
        year = year + ((2000 - year)//28)*28
        timetuple = dt.timetuple()
        # Format with two stand-in years 28 years apart (same calendar);
        # only positions where both renderings show the year are trusted
        # as genuine year fields rather than coincidental digit runs.
        s1 = time.strftime(fmt, (year,) + timetuple[1:])
        sites1 = self._findall(s1, str(year))

        s2 = time.strftime(fmt, (year+28,) + timetuple[1:])
        sites2 = self._findall(s2, str(year+28))

        sites = []
        for site in sites1:
            if site in sites2:
                sites.append(site)

        # Splice the real 4-digit year back into the formatted string.
        s = s1
        syear = "%4d" % (dt.year,)
        for site in sites:
            s = s[:site] + syear + s[site+4:]

        return unicode(s, locale.getpreferredencoding())
Example #9
0
 def display_sound_init_error_message(self, e):
     """Pop up an error dialog explaining why sound initialization failed.

     The message depends on the exception type and, for OS errors, on
     its errno (EACCES and EBUSY get tailored advice).
     """
     def decoded():
         # The exception text may be a locale-encoded byte string.
         return str(e).decode(locale.getpreferredencoding(), 'replace')

     if isinstance(e, soundcard.SoundInitException):
         solfege.win.display_error_message("""%s""" % decoded())
     elif isinstance(e, ImportError):
         solfege.win.display_error_message2(str(e), _("You should configure sound from the preferences window, and try to use an external midi player. Or try to recompile the program and check for error messages to see why the module is not built."))
     elif getattr(e, 'errno', None) == errno.EACCES:
         solfege.win.display_error_message(
             "The sound init failed: %s\n"
             "The errno EACCES indicates that you don't have write "
             "permission to the device."
             % decoded())
     elif getattr(e, 'errno', None) == errno.EBUSY:
         solfege.win.display_error_message(
             "The sound init failed: %s\n"
             "It seems like some other program is using the device. You "
             "should try to quit that other program and restart Solfege."
             % decoded())
     else:
         solfege.win.display_error_message(
             "The sound init failed: %s\n"
             "You should configure sound from the 'Sound' page of "
             "the preferences window.\n\n"
             "It is also possible that the OS sound setup is incorrect."
             % decoded())
Example #10
0
def get_commandline_args():
    """ Simply returns sys.argv, converted to Unicode objects on UNIX.
    Does a bit more work on win32 since Python 2.x' handling of
    command line strings is broken. It only passes ASCII characters
    while replacing all chars that cannot be converted with the current
    encoding to "?".
    So we'll just bypass Python and get an unicode argument vector from
    native win32 library functions."""

    if sys.platform == 'win32':
        # Set up function prototypes
        ctypes.windll.kernel32.GetCommandLineW.restype = ctypes.c_wchar_p
        ctypes.windll.shell32.CommandLineToArgvW.restype = ctypes.POINTER(ctypes.c_wchar_p)
        args_length = ctypes.c_int(0)
        # Convert argument string from GetCommandLineW to array
        args_pointer = ctypes.windll.shell32.CommandLineToArgvW(
            ctypes.windll.kernel32.GetCommandLineW(),
            ctypes.byref(args_length))

        if args_pointer:
            # Copy the strings out BEFORE freeing the Windows-allocated
            # argv array with LocalFree.
            args = [args_pointer[i] for i in range(args_length.value)]
            ctypes.windll.kernel32.LocalFree(args_pointer)
            # The first argument is either the python interpreter, or MComix.exe
            # in case of being called as py2exe wrapper. If called by Python, the
            # second argument will be a Python script, which needs to be removed.
            if hasattr(sys, 'frozen'):
                return args[1:]
            else:
                return args[2:]
        else:
            # For some reason CommandLineToArgvW failed and returned NULL
            # Fall back to sys.argv
            return [arg.decode(locale.getpreferredencoding(), 'replace') for arg in sys.argv[1:]]
    else:
        # Non-Windows: decode byte args with the locale's encoding,
        # replacing undecodable bytes.
        return [arg.decode(locale.getpreferredencoding(), 'replace') for arg in sys.argv[1:]]
Example #11
0
    def push(self, s, insert_into_history=True):
        """Push a line of code onto the buffer so it can process it all at once
        when a code block ends.

        Returns whatever ``runsource`` returns: truthy while the buffered
        source is still an incomplete block, falsy once it executed.
        """
        s = s.rstrip('\n')
        self.buffer.append(s)

        if insert_into_history:
            if self.config.hist_length:
                histfilename = os.path.expanduser(self.config.hist_file)
                # Reload the history file before appending -- presumably
                # to pick up entries written by other sessions; TODO
                # confirm.  The previous in-memory entries are kept so
                # they can be restored if saving fails.
                oldhistory = self.rl_history.entries
                self.rl_history.entries = []
                if os.path.exists(histfilename):
                    self.rl_history.load(histfilename, getpreferredencoding())
                self.rl_history.append(s)
                try:
                    self.rl_history.save(
                        histfilename,
                        getpreferredencoding(),
                        self.config.hist_length)
                except EnvironmentError as err:
                    # Saving failed: notify the user and fall back to the
                    # in-memory history we had before reloading.
                    self.interact.notify(
                        'Error occurred while writing to file %s (%s) ' %
                        (histfilename, err.strerror))
                    self.rl_history.entries = oldhistory
                    self.rl_history.append(s)
            else:
                # No history length configured: keep history in memory only.
                self.rl_history.append(s)

        more = self.interp.runsource('\n'.join(self.buffer))

        # Once the block ran (or failed to compile), start a fresh buffer.
        if not more:
            self.buffer = []

        return more
Example #12
0
    def __init__(self, report, description, send_database):
        """Build and asynchronously upload a crash report to BOGON_URL.

        :param report: crash log text (byte or unicode string).
        :param description: user-entered description (byte or unicode string).
        :param send_database: when true, attach a zipped backup of the
            support directory to the upload.
        """
        signals.SignalEmitter.__init__(self)
        self.create_signal('finished')

        self.is_done = False

        backupfile = None
        if send_database:
            try:
                logging.info("Sending entire database")
                backupfile = self._backup_support_dir()
            except StandardError:
                # Best effort: the report is still sent without the backup.
                logging.exception("Failed to backup database")

        # Normalize both texts to UTF-8 bytes; plain byte strings are
        # assumed to be in the locale's preferred encoding.
        if isinstance(report, str):
            report = report.decode(locale.getpreferredencoding())
        report = report.encode("utf-8", "ignore")
        if isinstance(description, str):
            description = description.decode(locale.getpreferredencoding())
        description = description.encode("utf-8", "ignore")
        post_vars = {"description": description,
                     "app_name": app.config.get(prefs.LONG_APP_NAME),
                     "log": report}
        if backupfile:
            post_files = {"databasebackup":
                              {"filename": "databasebackup.zip",
                               "mimetype": "application/octet-stream",
                               "handle": backupfile,
                               }}
        else:
            post_files = None
        logging.info("Sending crash report....")
        # grab_url runs the HTTP POST and invokes callback/errback later.
        self.client = httpclient.grab_url(BOGON_URL,
                           self.callback, self.errback,
                           post_vars=post_vars, post_files=post_files)
Example #13
0
    def _run_esqloc(self, file_path):
        """
        Run the esqloc process and return its output, exit code and
        destination file (stderr is merged into stdout).

        :param file_path: .sqb path.
        :return: str, int, str -- output, exit status, destination file.
        """
        path = os.path.dirname(file_path)
        source = os.path.split(file_path)[1]
        destination = os.path.splitext(source)[0] + '.cob'
        pgm, options = self.make_command(source, destination)
        process = QtCore.QProcess()
        process.setWorkingDirectory(path)
        # Merge stderr into stdout so one read captures everything.
        process.setProcessChannelMode(QtCore.QProcess.MergedChannels)
        cmd = '%s %s' % (pgm, ' '.join(options))
        _logger().info('command: %s', cmd)
        _logger().debug('working directory: %s', path)
        _logger().debug('system environment: %s', process.systemEnvironment())
        process.start(pgm, options)
        self.started.emit(cmd)
        process.waitForFinished()
        status = process.exitCode()
        try:
            output = process.readAllStandardOutput().data().decode(
                locale.getpreferredencoding())
        except UnicodeDecodeError:
            # BUGFIX: the original message had no %s placeholder, so the
            # '%' formatting here raised TypeError instead of reporting.
            output = 'Failed to decode esqloc output with encoding: %s' % \
                locale.getpreferredencoding()
        self.output_available.emit(output)
        return output, status, destination
Example #14
0
def format_datetime(dt, format=None):
    if format:
        enc = locale.getpreferredencoding(False)
        if enc is None or len(enc) == 0:
            enc = "UTF8"
        return dt.strftime(format).decode(enc)

    now = datetime.datetime.now()
    delta = now - dt

    returner = ""
    if delta.days == 0:
        if delta.seconds < 60:
            returner = _("just now")
        elif delta.seconds >= 60 and delta.seconds < 600:
            returner = _("%d minute(s) ago") % (delta.seconds / 60)
        elif delta.seconds >= 600 and delta.seconds < 43200:
            returner = dt.strftime("%I:%M%P")
        else:
            returner = dt.strftime(DT_FORMAT_THISWEEK)
    elif delta.days > 0 and delta.days < 7:
        returner = dt.strftime(DT_FORMAT_THISWEEK)
    elif delta.days >= 7 and delta.days < 365:
        returner = dt.strftime(DT_FORMAT_THISYEAR)
    else:
        returner = dt.strftime(DT_FORMAT_ALL)

    enc = locale.getpreferredencoding(False)
    if enc is None or len(enc) == 0:
        enc = "UTF8"
    return returner.decode(enc)
Example #15
0
    def _run_command(pgm, args):
        """Run *pgm* with *args* through QProcess and capture its output.

        Returns a (status, output) tuple.  A crashed process reports
        status 139; output that cannot be decoded with the locale's
        preferred encoding is replaced by an error string.
        """
        if ' ' in pgm:
            pgm = '"%s"' % pgm

        process = QtCore.QProcess()
        process.setProcessChannelMode(QtCore.QProcess.MergedChannels)
        process.setProcessEnvironment(
            GnuCobolCompiler.setup_process_environment())
        process.start(pgm, args)
        process.waitForFinished()

        # Crashed processes have no meaningful exit code; report 139.
        if process.exitStatus() == process.Crashed:
            status = 139
        else:
            status = process.exitCode()

        raw = process.readAllStandardOutput().data()
        try:
            output = raw.decode(
                locale.getpreferredencoding()).replace('\r\n', '\n')
        except UnicodeDecodeError:
            output = 'Failed to decode compiler output with encoding %s' % \
                     locale.getpreferredencoding()

        return status, output
def call(command, quiet = True):
    '''
    Run an external process and return its decoded output.

    *command* is either a shell-style string (split with shlex) or an
    argument list; anything else raises EBSCliException.  When *quiet*
    is true, stderr is folded into stdout.
    '''
    if isinstance(command, str):
        command_line = shlex.split(command)
    elif isinstance(command, list):
        command_line = command
    else:
        raise EBSCliException('Parameter must be instance of list or string.')

    log.debug('Running external commands "{0}".'.\
              format(misc.collection_to_string(command_line)))
    # Encode each argument with the OS native code page.
    native_encoding = locale.getpreferredencoding()
    command_line = [arg.encode(native_encoding) for arg in command_line]
    kwargs = {'args': command_line}
    if misc.is_os_windows():
        # TODO: set shell to True will allow Windows translate "git" to "git.cmd",
        # but might introduce other issues.
        kwargs['shell'] = True
    if quiet:
        kwargs['stderr'] = subprocess.STDOUT

    return misc.to_unicode(subprocess.check_output(**kwargs), False,
                           locale.getpreferredencoding())
Example #17
0
def fix_shebang(f, osx_is_app=False):
    """Point the shebang of ``build_prefix/f`` at the build's python.

    Object files, symlinks, binary files and shebangs that do not
    mention python are skipped; rewritten files are chmodded to 755.
    """
    path = join(build_prefix, f)
    if is_obj(path):
        return
    if os.path.islink(path):
        return
    try:
        with open(path, encoding=locale.getpreferredencoding()) as fi:
            data = fi.read()
    except UnicodeDecodeError:  # file is binary
        return
    m = SHEBANG_PAT.match(data)
    if m is None or "python" not in m.group():
        return

    if sys.platform == "darwin" and osx_is_app:
        py_exec = build_prefix + "/python.app/Contents/MacOS/python"
    else:
        py_exec = build_prefix + "/bin/" + basename(build_python)
    new_data = SHEBANG_PAT.sub("#!" + py_exec, data, count=1)
    if new_data != data:
        print("updating shebang:", f)
        with open(path, "w", encoding=locale.getpreferredencoding()) as fo:
            fo.write(new_data)
        os.chmod(path, int("755", 8))
Example #18
0
  def RunProgram(self, args, env_remove=None, env_add=None):
    """Runs a program (args[0]), waits for it to exit.

    Args:
      args: [string]; the args to run; args[0] should be the program itself
      env_remove: set(string); optional; environment variables to NOT pass to
        the program
      env_add: set(string); optional; environment variables to pass to
        the program, won't be removed by env_remove.
    Returns:
      (int, [string], [string]) tuple: exit code, stdout lines, stderr lines
    """
    def read_lines(captured):
      # Rewind the temp file and decode each captured line.
      captured.seek(0)
      enc = locale.getpreferredencoding()
      return [line.decode(enc).strip() for line in captured.readlines()]

    with tempfile.TemporaryFile(dir=self._test_cwd) as stdout:
      with tempfile.TemporaryFile(dir=self._test_cwd) as stderr:
        proc = subprocess.Popen(
            args,
            stdout=stdout,
            stderr=stderr,
            cwd=self._test_cwd,
            env=self._EnvMap(env_remove, env_add))
        exit_code = proc.wait()
        return exit_code, read_lines(stdout), read_lines(stderr)
Example #19
0
File: i18n.py Project: Gosha/mcomix
def to_unicode(string):
    """Convert <string> to unicode. First try the default filesystem
    encoding, and then fall back on some common encodings.
    """
    if isinstance(string, unicode):
        return string

    # chardet's guess takes priority; fall back to the locale encoding
    # when chardet is unavailable or cannot decide.
    probable_encoding = None
    if chardet:
        probable_encoding = chardet.detect(string)['encoding']
    if not probable_encoding:
        probable_encoding = locale.getpreferredencoding()

    candidates = (probable_encoding,
                  sys.getfilesystemencoding(),
                  'utf-8',
                  'latin-1')
    for encoding in candidates:
        try:
            return unicode(string, encoding)
        except (UnicodeError, LookupError):
            pass

    # Nothing decoded cleanly: replace undecodable bytes.
    return string.decode('utf-8', 'replace')
Example #20
0
def main():
    """
    The entry point of the `dhnbackup` child process. 
    Use command line arguments to get the command from `dhnmain`. 
    """
    # Best effort (Python 2 only): reload sys to restore
    # setdefaultencoding, then switch the default encoding to the
    # locale's preferred one.  Any failure is deliberately ignored.
    try:
        import sys
        reload(sys)
        if hasattr(sys, "setdefaultencoding"):
            import locale
            denc = locale.getpreferredencoding()
            if denc != '':
                sys.setdefaultencoding(denc)
    except:
        pass

#    if len(sys.argv) < 4:
#        printlog('sys.argv: %s\n' % str(sys.argv))
#        printlog('dhnbackup ["subdirs"/"nosubdirs"/"extract"] ["none"/"bz2"/"gz"] [folder path]\n')
#        return 2

    # printlog(str(sys.argv) + '\n')

    # Dispatch on argv[1]; see the commented usage line above for the
    # expected argument layout.
    try:
        cmd = sys.argv[1].strip().lower()
        if cmd == 'extract':
            readtar(sys.argv[2], sys.argv[3])
        else:
            writetar(sys.argv[3], cmd == 'subdirs', sys.argv[2], encoding=locale.getpreferredencoding())
    except:
        # Log the full traceback and signal failure to the parent.
        import traceback
        printlog('\n'+traceback.format_exc()+'\n')
        return 1
    
    return 0
Example #21
0
 def read(self, file):
     """Extract *file* from the archive via `7z x -so` and return its data."""
     encoding = locale.getpreferredencoding()
     cmd = [SZ_TOOL, 'x', '-so',
            self.archive.encode(encoding),
            file.encode(encoding)]
     proc = custom_popen(cmd)
     stdout, _stderr = proc.communicate()
     return stdout
Example #22
0
    def load_doge(self):
        """
        Return pretty ASCII Shibe as a list of text lines.

        wow

        """
        # --no-shibe: a single empty line instead of the dog.
        if self.ns.no_shibe:
            return ['']

        with open(self.doge_path) as f:
            if sys.version_info >= (3, 0):
                # Python 3: the file object already yields decoded text.
                return [line for line in f.readlines()]
            if locale.getpreferredencoding() == 'UTF-8':
                return [line.decode('utf-8') for line in f.xreadlines()]
            # encode to printable characters, leaving a space in place
            # of untranslatable characters, resulting in a slightly
            # blockier doge on non-UTF8 terminals
            return [
                line.decode('utf-8')
                .encode(locale.getpreferredencoding(), 'replace')
                .replace('?', ' ')
                for line in f.xreadlines()
            ]
Example #23
0
def main():
  """Minify HTML read from --input-file (or stdin) and write the result
  to --output-file (or stdout)."""
  args = parser.parse_args()
  minifier = Minifier(
    remove_comments=args.remove_comments,
    remove_empty_space=args.remove_empty_space,
    remove_optional_attribute_quotes=not args.keep_optional_attribute_quotes,
    pre_tags=args.pre_tags,
    keep_pre=args.keep_pre_attr,
    pre_attr=args.pre_attr,
    )
  default_encoding = args.encoding or 'utf-8'

  if args.input_file:
    # BUGFIX: close the input file when done (the original leaked it).
    with codecs.open(args.input_file, encoding=default_encoding) as inp:
      for line in inp:
        minifier.input(line)
  else:
    encoding = args.encoding or sys.stdin.encoding \
      or locale.getpreferredencoding() or default_encoding
    # Wrapper around fd 0; deliberately not closed so stdin stays usable.
    inp = io.open(sys.stdin.fileno(), encoding=encoding)
    for line in inp:
      minifier.input(line)

  if args.output_file:
    # BUGFIX: use a context manager so the output is flushed and closed
    # even on non-refcounting interpreters.
    with codecs.open(args.output_file, 'w', encoding=default_encoding) as out:
      out.write(minifier.output)
  else:
    encoding = args.encoding or sys.stdout.encoding \
      or locale.getpreferredencoding() or default_encoding
    io.open(sys.stdout.fileno(), 'w', encoding=encoding).write(minifier.output)
Example #24
0
        def run_command(self, exe, param):
            """Run *exe* against *param* (plus configured extra params)
            and show the tool's output in a Sublime message dialog."""
            settings = sublime.load_settings("delphi-ide.sublime-settings")
            other_params = settings.get("other_params", '')

            command = ('"' + exe + '" ' + other_params + ' "' + param + '"')

            # Hide the child's console window on Windows.
            startup_info = subprocess.STARTUPINFO()
            startup_info.dwFlags |= subprocess.STARTF_USESHOWWINDOW
            p = subprocess.Popen(
                command,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
                stdin=subprocess.PIPE,
                startupinfo=startup_info
            )
            (out, err) = p.communicate()

            # communicate() yields bytes; decode with the locale encoding.
            # (Fixed `not out is None` -> `out is not None` idiom.)
            out = (out.decode(locale.getpreferredencoding())
                   if out is not None else None)
            err = (err.decode(locale.getpreferredencoding())
                   if err is not None else None)

            # Lines that echo back *param* get that prefix stripped.
            # (The original reused `s` for the loop variable, shadowing
            # the settings object, and used a no-op for/else.)
            msglist = []
            for line in out.split('\r\n'):
                if line.startswith(param):
                    msglist.append(line[len(param):].lstrip())
                else:
                    msglist.append(line.lstrip())
            out_msg = '\n'.join(msglist)

            if out_msg:
                sublime.message_dialog(out_msg)
Example #25
0
    def SetEditText(self, text, pos_start = None, pos_end = None):
        """Set the text of the edit control.

        Replaces the selection given by *pos_start*/*pos_end* (or the
        whole content when both are None) with *text*, then returns
        ``self`` so actions can be chained.
        """
        self.VerifyActionable()

        # allow one or both of pos_start and pos_end to be None
        if pos_start is not None or pos_end is not None:

            # if only one has been specified - then set the other
            # to the current selection start or end
            start, end = self.SelectionIndices()
            if pos_start is None:
                pos_start = start
            if pos_end is None:
                pos_end = end

            # set the selection if either start or end has
            # been specified
            self.Select(pos_start, pos_end)
        else:
            self.Select()

        # replace the selection with
        #buffer = ctypes.c_wchar_p(six.text_type(text))
        
        # Build a ctypes buffer of the right flavor: unicode buffers on
        # Python 3, locale-encoded byte buffers on Python 2.
        if isinstance(text, six.text_type):
            if six.PY3:
                buffer = ctypes.create_unicode_buffer(text, size=len(text) + 1)
            else:
                buffer = ctypes.create_string_buffer(text.encode(locale.getpreferredencoding(), 'ignore'), size=len(text) + 1)
        else:
            if six.PY3:
                buffer = ctypes.create_unicode_buffer(text.decode(locale.getpreferredencoding()), size=len(text) + 1)
            else:
                buffer = ctypes.create_string_buffer(text, size=len(text) + 1)
        #buffer = ctypes.create_unicode_buffer(text, size=len(text) + 1)
        '''
        remote_mem = RemoteMemoryBlock(self)
        _setTextExStruct = win32structures.SETTEXTEX()
        _setTextExStruct.flags = win32defines.ST_SELECTION #| win32defines.ST_UNICODE
        _setTextExStruct.codepage = win32defines.CP_WINUNICODE
        
        remote_mem.Write(_setTextExStruct)
        
        self.SendMessage(win32defines.EM_SETTEXTEX, remote_mem, ctypes.byref(buffer))
        '''
        # EM_REPLACESEL replaces the current selection with the buffer.
        self.SendMessage(win32defines.EM_REPLACESEL, True, ctypes.byref(buffer))

        #win32functions.WaitGuiThreadIdle(self)
        #time.sleep(Timings.after_editsetedittext_wait)

        # Log what was set; on Python 2 the unicode text is encoded so
        # the logger receives a byte string.
        if isinstance(text, six.text_type):
            if six.PY3:
                self.actions.log('Set text to the edit box: ' + text)
            else:
                self.actions.log('Set text to the edit box: ' + text.encode(locale.getpreferredencoding(), 'ignore'))
        elif isinstance(text, six.binary_type):
            self.actions.log(b'Set text to the edit box: ' + text)

        # return this control so that actions can be chained.
        return self
Example #26
0
    def paste(self, s):
        """Upload *s* via the external helper program and return its URL.

        Raises PasteFailed when the helper is missing, cannot run, exits
        non-zero, prints nothing, or prints something that is not a URL.
        """
        try:
            helper = subprocess.Popen('',
                                      executable=self.executable,
                                      stdin=subprocess.PIPE,
                                      stdout=subprocess.PIPE)
            helper.stdin.write(s.encode(getpreferredencoding()))
            output = helper.communicate()[0].decode(getpreferredencoding())
            paste_url = output.split()[0]
        except OSError as e:
            if e.errno == errno.ENOENT:
                raise PasteFailed(_('Helper program not found.'))
            raise PasteFailed(_('Helper program could not be run.'))

        if helper.returncode != 0:
            raise PasteFailed(_('Helper program returned non-zero exit '
                                'status %d.' % (helper.returncode, )))

        if not paste_url:
            raise PasteFailed(_('No output from helper program.'))

        # A URL must have a scheme and contain no control characters.
        parsed_url = urlparse(paste_url)
        has_control_chars = any(unicodedata.category(c) == 'Cc'
                                for c in paste_url)
        if not parsed_url.scheme or has_control_chars:
            raise PasteFailed(_('Failed to recognize the helper '
                                'program\'s output as an URL.'))

        return paste_url,
    def _run_dbpre(self, file_path):
        """Run dbpre on *file_path* and return its output and exit code.

        stderr is merged into stdout before reading.

        :param file_path: .scb path.
        :return: (str, int) -- decoded output and process exit code.
        """
        working_dir = os.path.dirname(file_path)
        pgm, options = self.make_command(file_path)
        process = QtCore.QProcess()
        process.setWorkingDirectory(working_dir)
        process.setProcessChannelMode(QtCore.QProcess.MergedChannels)
        cmd = "%s %s" % (pgm, " ".join(options))
        _logger().info("command: %s", cmd)
        _logger().debug("working directory: %s", working_dir)
        _logger().debug("system environment: %s", process.systemEnvironment())
        process.start(pgm, options)
        self.started.emit(cmd)
        process.waitForFinished()
        status = process.exitCode()
        raw = process.readAllStandardOutput().data()
        try:
            output = raw.decode(locale.getpreferredencoding())
        except UnicodeDecodeError:
            output = "Failed to decode dbpre output with encoding: %s" % locale.getpreferredencoding()
        self.output_available.emit(output)
        return output, status
Example #28
0
def getGPG():
    """Lazily create the module-global ``lunch_gpg`` GPG wrapper.

    Locates the gpg binary, points it at our own gnupg home directory
    and caches the instance in ``lunch_gpg``.  Raises Exception when the
    binary is missing or GPG cannot be started.
    """
    global lunch_gpg
    if not lunch_gpg:
        from gnupg import GPG
        gbinary = getBinary("gpg", "bin")
        if not gbinary:
            raise Exception("GPG not found")

        ghome = os.path.join(get_settings().get_main_config_dir(), "gnupg")

        # BUGFIX: this fallback block appeared twice verbatim; once is enough.
        if not locale.getpreferredencoding():
            # Fix for GnuPG on Mac
            # TODO will this work on systems without English locale?
            os.putenv("LANG", "en_US.UTF-8")

        try:
            if getPlatform() == PLATFORM_WINDOWS:
                lunch_gpg = GPG("\"" + gbinary + "\"", ghome)
            else:
                lunch_gpg = GPG(gbinary, ghome)
            if not lunch_gpg.encoding:
                lunch_gpg.encoding = 'utf-8'
        # `except Exception, e` is Python-2-only syntax; `as e` works on both.
        except Exception as e:
            raise Exception("GPG not working: " + str(e))
Example #29
0
def cat(lang):
    """
        Translate any text into Greeklish by [typing + enter]

        @param lang: a string to check which mode to activate {'greek', anything}
    """

    if lang == 'greek':
        while True:
            try:
                text = raw_input().decode(stdin.encoding
                                          or locale.getpreferredencoding(True))

                new_data = to_greeklish(list(text))
                print ''.join(new_data)
            except KeyboardInterrupt:
                print '\nexit'
                exit()
    else:
        while True:
            try:
                text = raw_input().decode(stdin.encoding
                                          or locale.getpreferredencoding(True))

                translated = Translator().translate(''.join(text), dest='el')
                new_data = to_greeklish(list(unicode(translated.text)))
                print ''.join(new_data)
            except KeyboardInterrupt:
                print '\nexit'
                exit()
Example #30
0
def main():
    """Repair latin1/utf-8 double-encoded text in Cerebrum code tables.

    Scans every code table referenced by the Constants and CLConstants
    containers, finds text columns whose value changes when re-decoded as
    latin1->utf-8 (i.e. mojibake), and issues UPDATE statements to fix them.
    With --commit the changes are committed; otherwise each one is rolled
    back (dry-run).
    """
    # Presumably called for its locale-initialisation side effect -- the
    # return value is deliberately discarded. TODO confirm it is needed.
    locale.getpreferredencoding()
    p = argparse.ArgumentParser(__doc__)
    p.add_argument('-c', '--commit', action='store_true', help='commit')
    config = p.parse_args()
    db = Factory.get('Database')(client_encoding='UTF-8')
    tables = set()

    def search_for_tables(consts):
        # Yield (lookup_table, lookup_code_column) for every code constant
        # defined on the given constants container.
        for name in dir(consts):
            const = getattr(consts, name)
            if isinstance(const, _CerebrumCode):
                yield (const._lookup_table, const._lookup_code_column)

    # BUG FIX: the original passed a *list* of containers to
    # search_for_tables, so dir() inspected the list object itself and no
    # code tables were ever discovered. Inspect each container individually.
    for consts in (Factory.get('Constants'), Factory.get('CLConstants')):
        for e in search_for_tables(consts):
            tables.add(e)

    for table, code in tables:
        print('fixing table {}'.format(table))
        rows = db.query('SELECT * FROM {}'.format(table))
        fixes = []
        for row in rows:
            fix = dict()
            for key in row.keys():
                if isinstance(row[key], six.text_type):
                    try:
                        # Text that changes when re-decoded latin1->utf-8 was
                        # double-encoded; schedule the corrected value.
                        new = row[key].encode('latin1').decode('utf-8')
                        if new != row[key]:
                            fix[key] = new
                    except UnicodeError:
                        pass
            if fix:
                fixes.append((row[code], fix))
        for key, fix in fixes:
            # Warn when a corrected value collides with an existing row, as
            # the UPDATE could then violate uniqueness constraints.
            for row in rows:
                if any(row[k] == v for k, v in fix.items()):
                    print('Manual intervention may be needed.'
                          'Suspicious:\n\told = {}\n\tnew = {}={},'
                          '{}'.format(row, code, key, fix))
            sql = ('UPDATE {table} SET {fix} WHERE {code} = :fixargcode'
                   .format(table=table,
                           code=code,
                           fix=', '.join('{}=:{}'.format(x, x)
                                         for x in fix.keys())))
            print('Will update for {} = {}: {}'.format(
                code, key, ', '.join('{} = {}'.format(k, v) for k, v in
                                     fix.items())))
            fix['fixargcode'] = key
            try:
                db.execute(sql, fix)
                if config.commit:
                    db.commit()
                else:
                    db.rollback()
            except Exception as e:
                print('Manual intervention needed:')
                print(e)
                db.rollback()
Example #31
0
def decode_output_cmd(output):
    """Return *output* (raw command output bytes) decoded as text using the
    system's preferred locale encoding."""
    encoding = locale.getpreferredencoding()
    return output.decode(encoding)
Example #32
0
def main(args):
	"""Entry point for the xpybuild command line.

	Parses *args* (the process arguments excluding the program name),
	configures console and file logging, loads the root build file and then
	performs the requested task: a build/clean/rebuild, or one of the
	listing tasks (targets/properties/options/search).

	@param args: list of command line argument strings.
	@return: the process exit code (0 on success, non-zero on failure).
	"""
	
	try:
		usage = [
###############################################################################
'',
'eXtensible Python-based Build System %s on Python %s.%s.%s'% (XPYBUILD_VERSION, sys.version_info[0], sys.version_info[1], sys.version_info[2]),
'',
'xpybuild.py [operation]? [options]* [property=value]* [-x] [target|tag|regex]* ', 
'',
'A regex containing * can be used instead of a target, but only if it uniquely ', 
'identifies a single target. ',
'',
'Special pseudo-tags:',
'  full                       Include all targets for a full build (the default)',
'',
'Special properties:',
'  OUTPUT_DIR=output          The main directory output will be written to',
'  BUILD_MODE=release         Specifies release, debug (or user-defined) mode',
'  BUILD_NUMBER=n             Build number string, for reporting and use by build',
'',
'Operations: ',
###############################################################################

'  (if none is specified, the default operation is a normal build)',
'      --clean                Clean specified targets incl all deps',
'      --rebuild              Clean specified targets incl all deps then build',
'      --rebuild-ignore-deps  Clean only the specified targets (not deps) then ',
'        (or --rid)           build those targets and any missing dependencies, ',
'                             but not any out-of-date dependencies. This is a ',
'                             fast but less correct way to get a quick ',
'                             incremental build, so use with care. ',
'',
'   -s --search <str>         Show info on targets/tags/properties/options ',
'                             containing the specified substring or regex', 
# hide these from usage (though they still work), as superceded by the more useful "-s" option
#' --ft --find-targets <str>   List targets containing the specified substring', 
#' --ti --target-info <str>    Print details including build file location for ',
#'                             targets containing the specified substring',
'      --targets              List available targets and tags (filtered by any ', 
'                             target or tag names specified on the command line)',
'      --properties           List properties that can be set and their ',
'                             defaults in this build file',
'      --options              List the target options available to build rules ',
'                             and their default values in this build file',
'',
'Options:',
'   -x --exclude <target>     Specifies a target or tag to exclude (unless ',
'                             needed as a dependency of an included target) ',
'',
'   -J --parallel             Build in parallel (this is the default). ',
'                             The number of workers is determined from the ',
'                             `build.workers` build file option or else the ',
'                             number of CPUs and the XPYBUILD_WORKERS_PER_CPU ',
'                             environment varable (default is currently 1.0), ',
'                             with an upper limit for this machine from the ',
'                             XPYBUILD_MAX_WORKERS variable. ',
'   -j --workers <number>     Override the number of worker threads to use for ',
'                             building. Use -j1 for single-threaded. ',
'                             (ignores any environment variables)',
'',
'   -k --keep-going           Continue rather than aborting on errors',
'',
'   -n --dry-run              Don\'t actually build anything, just print',
'                             what would be done (finds missing dependencies)',
'',
' --id --ignore-deps          Skip all dependency/up-to-date checking: only ', 
'                             clean/build targets that do not exist at all ',
'                             (faster builds, but no guarantee of correctness)', 
'',
'   -f --buildfile <file>     Specify the root build file to import ',
'                             (default is ./root.xpybuild.py)',
'',
'   -l --log-level LEVEL      Set the log level to debug/info/critical',
'   -L --logfile <file>       Set the log file location',
'      --timefile <file>      Dump the time for each target in <file> at the',
'                             end of the run',
'      --depgraph <file>      Just resolve dependencies and dump them to <file>',
'      --cpu-stats            Log CPU utilisation stats',
'      --random-priority      Randomizes build order',
'      --verify               Performs additional verifications during the ',
'                             build to to help detect bugs in the build files. ',
'                             [verify is currently an experimental feature]',
'      --profile              Profiles all the worker threads',
'   -F --format               Message output format.',
'                             Options:',
] + [
'                                - '+ h for h in _registeredConsoleFormatters
] + [

]
		if reduce(max, list(map(len, usage))) > 80:
			raise Exception('Invalid usage string - all lines must be less than 80 characters')

		# set up defaults
		properties = {} 
		buildOptions = { "keep-going":False, "workers":0, "dry-run":False, 
			"ignore-deps":False, "logCPUUtilisation":False, "profile":False, "verify":False } 
		includedTargets = []
		excludedTargets = []
		task = _TASK_BUILD
		buildFile = os.path.abspath('root.xpybuild.py')
		logLevel = None
		logFile = None
		findTargetsPattern = None
		format = "default"

		# parse the options; unknown options raise getopt.error (handled below)
		opts,targets = getopt.gnu_getopt(args, "knJh?x:j:l:L:f:F:s:", 
			["help","exclude=","parallel","workers=","keep-going",
			"log-level=","logfile=","buildfile=", "dry-run",
			"targets", 'target-info=', 'ti=', "properties", "options", "clean", "rebuild", "rebuild-ignore-deps", "rid", "ignore-deps", "id",
			"format=", "timefile=", "ft=", "find-targets=", "search=", "depgraph=", 'cpu-stats', 'random-priority', 'profile', 'verify'])
		
		for o, a in opts: # option arguments
			o = o.strip('-')
			if o in ["?", "h", "help"]:
				print('\n'.join(usage))
				return 0
			elif o in ["x", "exclude"]:
				excludedTargets.append(a)
			elif o in ["f", "buildfile"]:
				buildFile = os.path.abspath(a)
			elif o in ['targets']:
				task = _TASK_LIST_TARGETS
			elif o in ['find-targets', 'ft']:
				task = _TASK_LIST_FIND_TARGETS
				findTargetsPattern = BaseTarget._normalizeTargetName(a)
			elif o in ['target-info', 'ti']:
				task = _TASK_LIST_TARGET_INFO
				findTargetsPattern = BaseTarget._normalizeTargetName(a)
			elif o in ['search', 's']:
				task = _TASK_LIST_SEARCH
				searchPattern = BaseTarget._normalizeTargetName(a)
			elif o in ['properties']:
				task = _TASK_LIST_PROPERTIES
			elif o in ['options']:
				task = _TASK_LIST_OPTIONS
			elif o in ['J', 'parallel']:
				buildOptions['workers'] = 0
			elif o in ['j', 'workers']:
				buildOptions['workers'] = int(a)
			elif o in ['l', 'log-level']:
				logLevel = getattr(logging, a.upper(), None)
			elif o in ['cpu-stats']:
				buildOptions["logCPUUtilisation"] = True
			elif o in ['random-priority']:
				buildOptions["randomizePriorities"] = True
			elif o in ['L', 'logfile']:
				logFile = a
			elif o in ['F', 'format']:
				format = None
				if a =='xpybuild': a = 'default' # for compatibility
				for h in _registeredConsoleFormatters:
					if h.upper() == a.upper():
						format = h
				if not format:
					print('invalid format "%s"; valid formatters are: %s'%(a, ', '.join(_registeredConsoleFormatters.keys())))
					print('\n'.join(usage))
					return 1
			elif o in ['clean']:
				task = _TASK_CLEAN
				buildOptions['keep-going'] = True
			elif o in ['rebuild']:
				task = _TASK_REBUILD
			elif o in ['rebuild-ignore-deps', 'rid']:
				task = _TASK_REBUILD
				buildOptions['ignore-deps'] = True
			elif o in ['id', 'ignore-deps']:
				buildOptions['ignore-deps'] = True
			elif o in ['k', 'keep-going']:
				buildOptions['keep-going'] = True
			elif o in ['n', 'dry-run']:
				buildOptions['dry-run'] = True
			elif o in ['timefile']:
				buildOptions['timeFile'] = a
			elif o in ['verify']:
				buildOptions['verify'] = True
			elif o in ['profile']:
				buildOptions['profile'] = True
			elif o in ['depgraph']:
				buildOptions['depGraphFile'] = a
			else:
				assert False, "unhandled option: '%s'" % o

		for o in targets: # non-option arguments (i.e. no -- prefix)
			arg = o.strip()
			if arg:
				if '=' in arg:
					properties[arg.split('=')[0].upper()] = arg.split('=')[1]
				elif arg=='all': # pre-4.0 alias for all
					includedTargets.append('full')
				else:
					includedTargets.append(BaseTarget._normalizeTargetName(arg))
			
		# default is all
		if (not includedTargets) or includedTargets==['']:
			includedTargets = ['full']
		
	except getopt.error as msg:
		print(msg)
		print("For help use --help")
		return 2
	
	threading.currentThread().setName('main')
	logging.getLogger().setLevel(logLevel or logging.INFO)

	if buildOptions["workers"] < 0: buildOptions["workers"] = 0 # means there's no override
	
	outputBufferingDisabled = buildOptions['workers']==1 # nb: this also affects the .log handler below
	
	# nb: it's possible workers=0 (auto) and will later be set to 1 but doesn't really matter much

	# initialize logging to stdout - minimal output to avoid clutter, but indicate progress
	hdlr = _registeredConsoleFormatters.get(format, None)
	assert hdlr # shouldn't happen
	wrapper = OutputBufferingStreamWrapper(sys.stdout, bufferingDisabled=outputBufferingDisabled)
	# actually instantiate it
	hdlr = hdlr(
		wrapper, 
		buildOptions=buildOptions) 
	# Handler can override wrapper with a preference in either direction
	if hdlr.bufferingDisabled: wrapper.bufferingDisabled = True
	if hdlr.bufferingRequired: wrapper.bufferingDisabled = False
		
	hdlr.setLevel(logLevel or logging.WARNING)
	logging.getLogger().addHandler(hdlr)
	log.info('Build options: %s'%{k:buildOptions[k] for k in buildOptions if k != 'workers'})
	
	stdout = sys.stdout
	
	# redirect to None, to prevent any target code from doing 'print' statements - should always use the logger
	sys.stdout = None

	listen_for_stack_signal() # make USR1 print a python stack trace

	# True only when every target is in scope with no exclusions; used to
	# special-case full cleans and default log file naming below
	allTargets = ('full' in includedTargets) and not excludedTargets

	try:
		def loadBuildFile():
			# Load and initialize the build file, resolving the final worker
			# count from build file options, env vars and command line.
			init = BuildInitializationContext(properties)
			isRealBuild = (task in [_TASK_BUILD, _TASK_CLEAN, _TASK_REBUILD])
			init._defineOption("process.timeout", 600)
			init._defineOption("build.keepGoing", buildOptions["keep-going"])
			
			# 0 means default behaviour
			init._defineOption("build.workers", 0)
			
			init.initializeFromBuildFile(buildFile, isRealBuild=isRealBuild)
			
			# now handle setting real value of workers, starting with value from build file
			workers = int(init._globalOptions.get("build.workers", 0))
			# default value if not specified in build file
			if workers <= 0: 
				workers = multiprocessing.cpu_count() 
			if os.getenv('XPYBUILD_WORKERS_PER_CPU'):
				workers = min(workers, int(round(multiprocessing.cpu_count()  * float(os.getenv('XPYBUILD_WORKERS_PER_CPU')))))
			
			# machine/user-specific env var can cap it
			if os.getenv('XPYBUILD_MAX_WORKERS'):
				workers = min(workers, int(os.getenv('XPYBUILD_MAX_WORKERS')))
			
			# finally an explicit command line --workers take precedence
			if buildOptions['workers']: workers = buildOptions['workers']
			
			if workers < 1: workers = 1
			
			# finally write the final number of workers where it's available to both scheduler and targets
			buildOptions['workers'] = workers
			init._globalOptions['build.workers'] = workers
			
			init._finalizeGlobalOptions()
			
			return init

		if buildOptions['profile']:
			import cProfile, pstats
			profiler = cProfile.Profile()
			profiler.enable()

		init = loadBuildFile()

		# nb: don't import any modules that might define options (including outputhandler) 
		# until build file is loaded
		# or we may not have a build context in place yet#
		from xpybuild.internal.scheduler import BuildScheduler, logTargetTimes


		if buildOptions['profile']:
			profilepath = 'xpybuild-profile-%s.txt'%'parsing'
			with open(profilepath, 'w') as f:
				p = pstats.Stats(profiler, stream=f)
				p.sort_stats('cumtime').print_stats(f)
				p.dump_stats(profilepath.replace('.txt', '')) # also in binary format
				log.critical('=== Wrote Python profiling output to: %s', profilepath)

		def lookupTarget(s):
			# Resolve a target name or '*' pattern to a single BaseTarget;
			# raises BuildException on unknown names or ambiguous patterns.
			tfound = init.targets().get(s,None)
			if not tfound and '*' in s: 
				
				matchregex = s.rstrip('$')+'$'
				try:
					matchregex = re.compile(matchregex, re.IGNORECASE)
				except Exception as e:
					raise BuildException('Invalid target regular expression "%s": %s'%(matchregex, e))
				matches = [t for t in init.targets().values() if matchregex.match(t.name)]
				if len(matches) > 1:
					print('Found multiple targets matching pattern %s:'%(s), file=stdout)
					print(file=stdout)
					for m in matches:
						print(m.name, file=stdout)
					print(file=stdout)
					raise BuildException('Target regex must uniquely identify a single target: %s (use tags to specify multiple related targets)'%s)
				if matches: return matches[0]
				
			if not tfound: raise BuildException('Unknown target name, target regex or tag name: %s'%s)
			return tfound

		# expand tags to targets here, and do include/exclude calculations
		selectedTargets = set() # contains BaseTarget objects
		for t in includedTargets:
			tlist = init.tags().get(t,None)
			if tlist:
				selectedTargets.update(tlist)
			else:
				selectedTargets.add(lookupTarget(t))
		for t in excludedTargets:
			tlist = init.tags().get(t,None)
			if tlist:
				selectedTargets.difference_update(tlist)
			else:
				selectedTargets.discard(lookupTarget(t))

		# convert findTargetsPattern to list
		if findTargetsPattern:
			findTargetsPattern = findTargetsPattern.lower()
			# sort matches at start of path first, then anywhere in name, finally anywhere in type
			# make 'all'/'full' into a special case that maps to all *selected* targets 
			# (could be different to 'all' tag if extra args were specified, but this is unlikely and kindof useful)
			findTargetsList = [t for t in sorted(
				 init.targets().values() if allTargets else selectedTargets, key=lambda t:(
					'/'+findTargetsPattern.lower() not in t.name.lower(), 
					findTargetsPattern.lower() not in t.name.lower(), 
					findTargetsPattern.lower() not in t.type.lower(), 
					t.name
					)) if findTargetsPattern in t.name.lower() or findTargetsPattern in t.type.lower() 
						or findTargetsPattern == 'full' or findTargetsPattern == 'all']

		if task == _TASK_LIST_PROPERTIES:
			p = init.getProperties()
			print("Properties: ", file=stdout)
			pad = max(list(map(len, p.keys())))
			if pad > 30: pad = 0
			for k in sorted(p.keys()):
				print(('%'+str(pad)+'s = %s') % (k, p[k]), file=stdout)
				
		elif task == _TASK_LIST_OPTIONS:
			options = init.mergeOptions(None)
			pad = max(list(map(len, options.keys())))
			if pad > 30: pad = 0
			for k in sorted(options.keys()):
				print(("%"+str(pad)+"s = %s") % (k, options[k]), file=stdout)

		elif task == _TASK_LIST_TARGETS:
			if len(init.targets())-len(selectedTargets) > 0:
				print("%d target(s) excluded (unless required as dependencies): "%(len(init.targets())-len(selectedTargets)), file=stdout)
				for t in sorted(['   %-15s %s'%('<'+t.type+'>', t.name) for t in init.targets().values() if t not in selectedTargets]):
					print(t, file=stdout)
				print(file=stdout)
				
			print("%d target(s) included: "%(len(selectedTargets)), file=stdout)
			for t in sorted(['   %-15s %s'%('<'+t.type+'>', t.name) for t in selectedTargets]):
				print(t, file=stdout)
			print(file=stdout)

			if allTargets:
				print("%d tag(s) are defined: "%(len(init.tags())), file=stdout)
				for t in sorted(['   %-15s (%d targets)'%(t, len(init.tags()[t])) for t in init.tags()]):
					print(t, file=stdout)

		elif task == _TASK_LIST_TARGET_INFO:
			if findTargetsList == '*': findTargetsList = init.targets().values()
			for t in sorted(findTargetsList, key=lambda t:(t.type+' '+t.name)):
				print('- %s priority: %s, tags: %s, location: \n   %s'%(t, t.getPriority(), t.getTags(), t.location), file=stdout)

		elif task == _TASK_LIST_FIND_TARGETS:
			# sort matches at start of path first, then anywhere in name, finally anywhere in type
			for t in findTargetsList:
				# this must be very easy to copy+paste, so don't put anything else on the line at all
				print('%s'%(t.name), file=stdout)

		elif task == _TASK_LIST_SEARCH:
			def showPatternMatches(x): # case sensitive is fine (and probably useful)
				if searchPattern.replace('\\', '/') in x.replace('\\','/'): return True # basic substring check (with path normalization)
				if '*' in searchPattern or '?' in searchPattern or '[' in searchPattern: # probably a regex
					if re.search(searchPattern, x): return True
				return False

			for t in init.targets().values():
				t._resolveTargetPath(init)

			print('', file=stdout)

			tagMatches = [t for t in init.tags() if showPatternMatches(t)]
			if tagMatches: 
				print ('%d matching tags:'%len(tagMatches), file=stdout)
				for t in sorted(tagMatches):
					print(t, file=stdout)
				print('', file=stdout)
				
			targetMatches = [t for t in init.targets().values() if showPatternMatches(t.name) or showPatternMatches(t.path)]
			if targetMatches: 
				print ('%d matching targets:'%len(targetMatches), file=stdout)
				for t in sorted(targetMatches, key=lambda t:(t.type+' '+t.name)):
					print('- %s priority: %s, tags: [%s]\n   output:  %s\n   defined:  %s'%(t, t.getPriority(), ' '.join(sorted(t.getTags())) or 'none', os.path.relpath(t.path), t.location), file=stdout)
				print('', file=stdout)

			propMatches = {key:value for (key,value) in init.getProperties().items() if showPatternMatches(key)}
			if propMatches:
				print('%d matching properties:'%len(propMatches), file=stdout)
				pad = max(list(map(len, propMatches.keys())))
				for k in sorted(propMatches.keys()):
					print(('%'+str(pad)+'s = %s') % (k, propMatches[k]), file=stdout)
					if init._propertyLocations[k]: # don't do this for built-in property like BUILD_MODE
						print(('%'+str(pad)+'s   (defined: %s)') % ('', init._propertyLocations[k]), file=stdout)
					
			options = init.mergeOptions(None)
			optionMatches = {key:value for (key,value) in options.items() if showPatternMatches(key)}
			if optionMatches:
				print('%d matching options:'%len(optionMatches), file=stdout)
				pad = max(list(map(len, optionMatches.keys())))
				for k in sorted(optionMatches.keys()):
					print(('%'+str(pad)+'s = %s') % (k, optionMatches[k]), file=stdout)

				
		elif task in [_TASK_BUILD, _TASK_CLEAN, _TASK_REBUILD]:
			
			if not logFile:
				if allTargets:
					buildtag = None
				else:
					buildtag = 'custom'
				logFile = _maybeCustomizeLogFilename(init.getPropertyValue('LOG_FILE'), 
					buildtag,
					task==_TASK_CLEAN)
			logFile = os.path.abspath(logFile)

			logdir = os.path.dirname(logFile)
			if logdir and not os.path.exists(logdir): mkdir(logdir)
			log.critical('Writing build log to: %s', os.path.abspath(logFile))
			
			# also buffer the .log file, since it's just a lot harder to read when multiple target lines are all jumbled up; 
			# we have an undocumented env var for disabling this in case of debugging
			if os.getenv('XPYBUILD_LOGFILE_OUTPUT_BUFFERING_DISABLED','')=='true': outputBufferingDisabled = True
			logFileStream = OutputBufferingStreamWrapper(open(logFile, 'w', encoding='UTF-8'), bufferingDisabled=outputBufferingDisabled)
			hdlr = logging.StreamHandler(logFileStream)
			hdlr.setFormatter(logging.Formatter('%(asctime)s %(relativeCreated)05d %(levelname)-8s [%(threadName)s %(thread)5d] %(name)-10s - %(message)s', None))
			hdlr.setLevel(logLevel or logging.INFO)
			logging.getLogger().addHandler(hdlr)

			log.info('Using xpybuild %s from %s on Python %s.%s.%s', XPYBUILD_VERSION, os.path.normpath(os.path.dirname(__file__)), sys.version_info[0], sys.version_info[1], sys.version_info[2])
			log.info('Using build options: %s (logfile target outputBuffering=%s, stdout target outputBuffering=%s)', buildOptions, not outputBufferingDisabled, not wrapper.bufferingDisabled)
			
			try:
				# sometimes useful to have this info available
				import socket, getpass
				log.info('Build running on %s as user %s', socket.gethostname(), getpass.getuser())
			except Exception as e:
				log.info('Failed to get host/user: %s', e)

			log.info('Default encoding for subprocesses assumed to be: %s (stdout=%s, preferred=%s)', 
				DEFAULT_PROCESS_ENCODING, stdout.encoding, locale.getpreferredencoding())
			
			# Lower this process's priority so builds don't starve interactive
			# processes (see the call site below for the opt-out env var).
			def lowerCurrentProcessPriority():
				if xpybuild.buildcommon.IS_WINDOWS:
					import win32process, win32api,win32con
					win32process.SetPriorityClass(win32api.GetCurrentProcess(), win32process.BELOW_NORMAL_PRIORITY_CLASS)
				else:
					# on unix, people may run nice before executing the process, so 
					# only change the priority unilaterally if it's currently at its 
					# default value
					if os.nice(0) == 0:
						os.nice(1) # change to 1 below the current level

			try:
				# if possible, set priority of builds to below normal by default, 
				# to avoid starving machines (e.g. on windows) of resources 
				# that should be used for interactive processes
				if os.getenv('XPYBUILD_DISABLE_PRIORITY_CHANGE','') != 'true':
					lowerCurrentProcessPriority()
					log.info('Successfully changed process priority to below normal')
			except Exception as e:
				log.warning('Failed to lower current process priority: %s'%e)
			
			if buildOptions['ignore-deps']:
				log.warning('The ignore-deps option is enabled: dependency graph will be ignored for all targets that already exist on disk, so correctness is not guaranteed')
			
			for (k,v) in sorted(init.getProperties().items()):
				log.info('Setting property %s=%s', k, v)

			try:
				DATE_TIME_FORMAT = "%a %Y-%m-%d %H:%M:%S %Z"
				
				errorsList = []
				if task in [_TASK_CLEAN, _TASK_REBUILD]:
					startTime = time.time()
					log.critical('Starting "%s" clean "%s" at %s', init.getPropertyValue('BUILD_MODE'), init.getPropertyValue('BUILD_NUMBER'), 
						time.strftime(DATE_TIME_FORMAT, time.localtime( startTime )))
					
					cleanBuildOptions = buildOptions.copy()
					cleanBuildOptions['clean'] = True
					if allTargets: cleanBuildOptions['ignore-deps'] = True
					scheduler = BuildScheduler(init, selectedTargets, cleanBuildOptions)
					errorsList, targetsBuilt, targetsCompleted, totalTargets = scheduler.run()
		
					if allTargets and not cleanBuildOptions['dry-run']: # special-case this common case
						for dir in init.getOutputDirs():
							deleteDir(dir)
		
					log.critical('Completed "%s" clean "%s" at %s after %s\n', init.getPropertyValue('BUILD_MODE'), init.getPropertyValue('BUILD_NUMBER'), 
						time.strftime(DATE_TIME_FORMAT, time.localtime( startTime )), formatTimePeriod(time.time()-startTime))
						
					if errorsList: 
						log.critical('XPYBUILD FAILED: %d error(s): \n   %s', len(errorsList), '\n   '.join(sorted(errorsList)))
						return 3
				
				if task == _TASK_REBUILD:
					# we must reload the build file here, as it's the only way of flushing out 
					# cached data (especially in PathSets) that may have changed as a 
					# result of the clean
					init = loadBuildFile()
				
				if task in [_TASK_BUILD, _TASK_REBUILD] and not errorsList:

					for cb in init.getPreBuildCallbacks():
						try:
							cb(BuildContext(init))
						except BuildException as be:
							log.error("Pre-build check failed: %s", be)
							return 7

					buildtype = 'incremental' if any(os.path.exists(dir) for dir in init.getOutputDirs()) else 'fresh'
					if not buildOptions['dry-run']:
						for dir in init.getOutputDirs():
							log.info('Creating output directory: %s', dir)
							mkdir(dir)
					
					startTime = time.time()
					log.critical('Starting %s "%s" build "%s" at %s using %d workers', buildtype, 
						init.getPropertyValue('BUILD_MODE'), init.getPropertyValue('BUILD_NUMBER'), 
						time.strftime(DATE_TIME_FORMAT, time.localtime( startTime )), 
						buildOptions['workers']
						)
					
					buildOptions['clean'] = False
					scheduler = BuildScheduler(init, selectedTargets, buildOptions)
					errorsList, targetsBuilt, targetsCompleted, totalTargets = scheduler.run()
					log.critical('Completed %s "%s" build "%s" at %s after %s\n', buildtype, init.getPropertyValue('BUILD_MODE'), init.getPropertyValue('BUILD_NUMBER'), 
						time.strftime(DATE_TIME_FORMAT, time.localtime( startTime )), formatTimePeriod(time.time()-startTime))
					if 'timeFile' in buildOptions:
						logTargetTimes(buildOptions['timeFile'], scheduler, init)
	
				if errorsList: 
					# heuristically: it's useful to have them in order of failure when a small number, but if there are 
					# lots then it's too hard to read and better to sort, so similar ones are together
					if len(errorsList)>=10: errorsList.sort()
						
					log.critical('*** XPYBUILD FAILED: %d error(s) (aborted with %d targets outstanding): \n   %s', len(errorsList), totalTargets-targetsCompleted, '\n   '.join(errorsList))
					return 4
				else:
					# using *** here means we get a valid final progress message
					log.critical('*** XPYBUILD SUCCEEDED: %s built (%d up-to-date)', targetsBuilt if targetsBuilt else '<NO TARGETS>', (totalTargets-targetsBuilt))
					return 0
			finally:
				publishArtifact('Xpybuild logfile', logFile)
		else:
			raise Exception('Task type not implemented yet - '+task) # should not happen
		
	except BuildException as e:
		# hopefully we don't end up here very often
		log.error('*** XPYBUILD FAILED: %s', e.toMultiLineString(None))
		return 5

	except Exception as e:
		log.exception('*** XPYBUILD FAILED: ')
		return 6
Example #33
0
        if verbose:
            print "yes"


try:
    # On Solaris 10, the thousands_sep is the empty string
    sep = locale.localeconv()['thousands_sep']
    testformat("%f", 1024, grouping=1, output='1%s024.000000' % sep)
    testformat("%f", 102, grouping=1, output='102.000000')
    testformat("%f", -42, grouping=1, output='-42.000000')
    testformat("%+f", -42, grouping=1, output='-42.000000')
    testformat("%20.f", -42, grouping=1, output='                 -42')
    testformat("%+10.f", -4200, grouping=1, output='    -4%s200' % sep)
    testformat("%-10.f", 4200, grouping=1, output='4%s200     ' % sep)
    # Invoke getpreferredencoding to make sure it does not cause exceptions,
    locale.getpreferredencoding()

    # === Test format() with more complex formatting strings
    # test if grouping is independent from other characters in formatting string
    testformat("One million is %i",
               1000000,
               grouping=1,
               output='One million is 1%s000%s000' % (sep, sep),
               func=locale.format_string)
    testformat("One  million is %i",
               1000000,
               grouping=1,
               output='One  million is 1%s000%s000' % (sep, sep),
               func=locale.format_string)
    # test dots in formatting string
    testformat(".%f.",
Example #34
0
    def start(self, args):  # pylint: disable=too-many-branches,too-many-statements
        """
        Start Application

        Parses command-line options from `args`, prepares the runtime
        environment (system encoding, PID file, data/config directories),
        loads the configuration, starts the web server and worker threads,
        and finally blocks in the main sleep loop.

        :param args: list of command-line argument strings (sys.argv[1:])
        """
        # do some preliminary stuff
        app.MY_FULLNAME = os.path.normpath(os.path.abspath(__file__))
        app.MY_NAME = os.path.basename(app.MY_FULLNAME)
        app.PROG_DIR = os.path.dirname(app.MY_FULLNAME)
        app.DATA_DIR = app.PROG_DIR
        app.MY_ARGS = args

        # Determine the system encoding from the locale; fall back to
        # UTF-8 when the locale cannot be initialized.
        try:
            locale.setlocale(locale.LC_ALL, '')
            app.SYS_ENCODING = locale.getpreferredencoding()
        except (locale.Error, IOError):
            app.SYS_ENCODING = 'UTF-8'

        # Force UTF-8 when the detected encoding is ASCII-ish or a known
        # problematic Windows console codepage.
        # pylint: disable=no-member
        if not app.SYS_ENCODING or app.SYS_ENCODING.lower() in ('ansi_x3.4-1968', 'us-ascii', 'ascii', 'charmap') or \
                (sys.platform.startswith('win') and sys.getwindowsversion()[0] >= 6 and str(getattr(sys.stdout, 'device', sys.stdout).encoding).lower() in ('cp65001', 'charmap')):
            app.SYS_ENCODING = 'UTF-8'

        # TODO: Continue working on making this unnecessary, this hack creates all sorts of hellish problems
        # reload(sys) restores the setdefaultencoding attribute that
        # site.py removes on Python 2.
        if not hasattr(sys, 'setdefaultencoding'):
            reload(sys)

        try:
            # On non-unicode builds this will raise an AttributeError, if encoding type is not valid it throws a LookupError
            sys.setdefaultencoding(app.SYS_ENCODING)  # pylint: disable=no-member
        except (AttributeError, LookupError):
            sys.exit(
                'Sorry, you MUST add the Medusa folder to the PYTHONPATH environment variable\n'
                'or find another way to force Python to use %s for string encoding.'
                % app.SYS_ENCODING)

        # Need console logging for SickBeard.py and SickBeard-console.exe
        self.console_logging = (not hasattr(
            sys, 'frozen')) or (app.MY_NAME.lower().find('-console') > 0)

        # Rename the main thread
        threading.currentThread().name = 'MAIN'

        try:
            opts, _ = getopt.getopt(args, 'hqdp::', [
                'help', 'quiet', 'nolaunch', 'daemon', 'pidfile=', 'port=',
                'datadir=', 'config=', 'noresize'
            ])
        except getopt.GetoptError:
            sys.exit(self.help_message())

        for option, value in opts:
            # Prints help message
            if option in ('-h', '--help'):
                sys.exit(self.help_message())

            # For now we'll just silence the logging
            if option in ('-q', '--quiet'):
                self.console_logging = False

            # Suppress launching web browser
            # Needed for OSes without default browser assigned
            # Prevent duplicate browser window when restarting in the app
            if option in ('--nolaunch', ):
                self.no_launch = True

            # Override default/configured port
            if option in ('-p', '--port'):
                try:
                    self.forced_port = int(value)
                except ValueError:
                    sys.exit('Port: %s is not a number. Exiting.' % value)

            # Run as a double forked daemon
            if option in ('-d', '--daemon'):
                self.run_as_daemon = True
                # When running as daemon disable console_logging and don't start browser
                self.console_logging = False
                self.no_launch = True

                # Daemonizing is not supported on Windows or macOS.
                if sys.platform == 'win32' or sys.platform == 'darwin':
                    self.run_as_daemon = False

            # Write a pid file if requested
            if option in ('--pidfile', ):
                self.create_pid = True
                self.pid_file = str(value)

                # If the pid file already exists, Medusa may still be running, so exit
                if os.path.exists(self.pid_file):
                    sys.exit('PID file: %s already exists. Exiting.' %
                             self.pid_file)

            # Specify folder to load the config file from
            if option in ('--config', ):
                app.CONFIG_FILE = os.path.abspath(value)

            # Specify folder to use as the data directory
            if option in ('--datadir', ):
                app.DATA_DIR = os.path.abspath(value)

            # Prevent resizing of the banner/posters even if PIL is installed
            if option in ('--noresize', ):
                app.NO_RESIZE = True

        # Keep backwards compatibility
        Application.backwards_compatibility()

        # The pid file is only useful in daemon mode, make sure we can write the file properly
        if self.create_pid:
            if self.run_as_daemon:
                pid_dir = os.path.dirname(self.pid_file)
                if not os.access(pid_dir, os.F_OK):
                    sys.exit('PID dir: %s doesn\'t exist. Exiting.' % pid_dir)
                if not os.access(pid_dir, os.W_OK):
                    sys.exit(
                        'PID dir: %s must be writable (write permissions). Exiting.'
                        % pid_dir)

            else:
                if self.console_logging:
                    sys.stdout.write(
                        'Not running in daemon mode. PID file creation disabled.\n'
                    )

                self.create_pid = False

        # If they don't specify a config file then put it in the data dir
        if not app.CONFIG_FILE:
            app.CONFIG_FILE = os.path.join(app.DATA_DIR, 'config.ini')

        # Make sure that we can create the data dir
        if not os.access(app.DATA_DIR, os.F_OK):
            try:
                os.makedirs(app.DATA_DIR, 0o744)
            except os.error:
                raise SystemExit('Unable to create data directory: %s' %
                                 app.DATA_DIR)

        # Make sure we can write to the data dir
        if not os.access(app.DATA_DIR, os.W_OK):
            raise SystemExit('Data directory must be writeable: %s' %
                             app.DATA_DIR)

        # Make sure we can write to the config file
        if not os.access(app.CONFIG_FILE, os.W_OK):
            if os.path.isfile(app.CONFIG_FILE):
                raise SystemExit('Config file must be writeable: %s' %
                                 app.CONFIG_FILE)
            elif not os.access(os.path.dirname(app.CONFIG_FILE), os.W_OK):
                raise SystemExit('Config file root dir must be writeable: %s' %
                                 os.path.dirname(app.CONFIG_FILE))

        os.chdir(app.DATA_DIR)

        # Check if we need to perform a restore first
        restore_dir = os.path.join(app.DATA_DIR, 'restore')
        if os.path.exists(restore_dir):
            success = self.restore_db(restore_dir, app.DATA_DIR)
            if self.console_logging:
                sys.stdout.write('Restore: restoring DB and config.ini %s!\n' %
                                 ('FAILED', 'SUCCESSFUL')[success])

        # Load the config and publish it to the application package
        if self.console_logging and not os.path.isfile(app.CONFIG_FILE):
            sys.stdout.write(
                'Unable to find %s, all settings will be default!\n' %
                app.CONFIG_FILE)

        app.CFG = ConfigObj(app.CONFIG_FILE)

        # Initialize the config and our threads
        app.initialize(consoleLogging=self.console_logging)

        if self.run_as_daemon:
            self.daemonize()

        # Get PID
        app.PID = os.getpid()

        # Build from the DB to start with
        self.load_shows_from_db()

        logger.log('Starting Medusa [{branch}] using \'{config}\''.format(
            branch=app.BRANCH, config=app.CONFIG_FILE))

        self.clear_cache()

        # A port forced with -p/--port wins over the configured one.
        if self.forced_port:
            logger.log('Forcing web server to port {port}'.format(
                port=self.forced_port))
            self.start_port = self.forced_port
        else:
            self.start_port = app.WEB_PORT

        if app.WEB_LOG:
            self.log_dir = app.LOG_DIR
        else:
            self.log_dir = None

        # app.WEB_HOST is available as a configuration value in various
        # places but is not configurable. It is supported here for historic reasons.
        if app.WEB_HOST and app.WEB_HOST != '0.0.0.0':
            self.web_host = app.WEB_HOST
        else:
            self.web_host = '' if app.WEB_IPV6 else '0.0.0.0'

        # web server options
        self.web_options = {
            'port': int(self.start_port),
            'host': self.web_host,
            'data_root': os.path.join(app.PROG_DIR, 'static'),
            'web_root': app.WEB_ROOT,
            'log_dir': self.log_dir,
            'username': app.WEB_USERNAME,
            'password': app.WEB_PASSWORD,
            'enable_https': app.ENABLE_HTTPS,
            'handle_reverse_proxy': app.HANDLE_REVERSE_PROXY,
            'https_cert': os.path.join(app.PROG_DIR, app.HTTPS_CERT),
            'https_key': os.path.join(app.PROG_DIR, app.HTTPS_KEY),
        }

        # start web server
        self.web_server = AppWebServer(self.web_options)
        self.web_server.start()

        # Fire up all our threads
        app.start()

        # Build internal name cache
        name_cache.buildNameCache()

        # Pre-populate network timezones, it isn't thread safe
        network_timezones.update_network_dict()

        # sure, why not?
        if app.USE_FAILED_DOWNLOADS:
            failed_history.trimHistory()

        # # Check for metadata indexer updates for shows (Disabled until we use api)
        # app.showUpdateScheduler.forceRun()

        # Launch browser
        if app.LAUNCH_BROWSER and not (self.no_launch or self.run_as_daemon):
            app.launchBrowser('https' if app.ENABLE_HTTPS else 'http',
                              self.start_port, app.WEB_ROOT)

        # main loop
        while True:
            time.sleep(1)
Example #35
0
 def read_text(self, encoding=None, errors=None):
     """Read the file as bytes and decode them to a string.

     Falls back to the locale's preferred encoding when *encoding* is
     not given. Custom error handlers are not supported.
     """
     codec = encoding or locale.getpreferredencoding(False)
     assert errors is None
     return self.read_bytes().decode(codec)
Example #36
0
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
"""
sabnzbd.encoding - Unicode/byte translation functions
"""

import locale
import chardet
from xml.sax.saxutils import escape
from typing import AnyStr

CODEPAGE = locale.getpreferredencoding()


def utob(str_in: AnyStr) -> bytes:
    """Return *str_in* as bytes, UTF-8 encoding it when it is a string."""
    return str_in if isinstance(str_in, bytes) else str_in.encode("utf-8")


def ubtou(str_in: AnyStr) -> str:
    """Return *str_in* as a string, UTF-8 decoding it when it is bytes."""
    return str_in.decode("utf-8") if isinstance(str_in, bytes) else str_in
Example #37
0
    except OSError as e:
        sys.exit('Response file {!r} could not be read: {}'
                 .format(rspfile, e.strerror))
    return shlex.split(cmdline)


def write_output(output):
    """Write *output* followed by a newline to the module's output stream."""
    global output_stream
    stream = output_stream
    stream.write(str(output))
    stream.write("\n")


# Python 2 defaults to ASCII in case stdout is redirected.
# This should make it match Python 3, which uses the locale encoding.
if sys.stdout.encoding is None:
    # Redirected stdout on Python 2 reports encoding None; wrap it in a
    # writer that encodes with the locale's preferred encoding.
    output_stream = codecs.getwriter(
        locale.getpreferredencoding())(sys.stdout)
else:
    # stdout already knows its encoding (e.g. an interactive terminal).
    output_stream = sys.stdout


# Some source files aren't UTF-8 and the old perl version didn't care.
# Replace invalid data with a replacement character to keep things working.
# https://bugzilla.gnome.org/show_bug.cgi?id=785113#c20
def replace_and_warn(err):
    """Codec error handler: warn about the bad data and substitute '?'.

    The warning includes up to 7 characters of context on either side of
    the offending range; decoding resumes after the bad span.
    """
    context = err.object[err.start - 7:err.end + 7]
    print_warning('UnicodeWarning: {} at {} ({})'.format(
        err.reason, err.start, context))
    return ('?', err.end)

codecs.register_error('replace_and_warn', replace_and_warn)
Example #38
0
    def get_arg_parser(cls, settings = None, option_prefix = u'--',
                                                            add_help = False):
        """Make a command-line option parser.

        The returned parser may be used as a parent parser for application
        argument parser.

        :Parameters:
            - `settings`: list of PyXMPP2 settings to consider. By default
              all 'basic' settings are provided.
            - `option_prefix`: custom prefix for PyXMPP2 options. E.g. 
              ``'--xmpp'`` to differentiate them from not xmpp-related
              application options.
            - `add_help`: when `True` a '--help' option will be included
              (probably already added in the application parser object)
        :Types:
            - `settings`: list of `unicode`
            - `option_prefix`: `str`
            - `add_help`: `bool`

        :return: an argument parser object.
        :returntype: :std:`argparse.ArgumentParser`
        """
        # pylint: disable-msg=R0914,R0912
        parser = argparse.ArgumentParser(add_help = add_help, 
                                            prefix_chars = option_prefix[0])
        if settings is None:
            settings = cls.list_all(basic = True)

        if sys.version_info.major < 3:
            # pylint: disable-msg=W0404
            # On Python 2 command-line values arrive as bytes and must be
            # decoded with the locale encoding.
            from locale import getpreferredencoding
            encoding = getpreferredencoding()
            def decode_string_option(value):
                """Decode a string option."""
                return value.decode(encoding)

        for name in settings:
            if name not in cls._defs:
                logger.debug("get_arg_parser: ignoring unknown option {0}"
                                                                .format(name))
                # FIX: was 'return', which aborted the whole parser build
                # (returning None) on the first unknown option instead of
                # skipping just that option as the log message promises.
                continue
            setting = cls._defs[name]
            if not setting.cmdline_help:
                logger.debug("get_arg_parser: option {0} has no cmdline"
                                                                .format(name))
                # FIX: likewise, skip options without command-line help
                # rather than returning early.
                continue
            if sys.version_info.major < 3:
                name = name.encode(encoding, "replace")
            option = option_prefix + name.replace("_", "-")
            dest = "pyxmpp2_" + name
            # Pick the type converter: explicit validator wins, then the
            # Python 2 unicode decoder, then the declared setting type.
            if setting.validator:
                opt_type = setting.validator
            elif setting.type is unicode and sys.version_info.major < 3:
                opt_type = decode_string_option
            else:
                opt_type = setting.type
            # Human-readable default shown in the help text.
            if setting.default_d:
                default_s = setting.default_d
                if sys.version_info.major < 3:
                    default_s = default_s.encode(encoding, "replace")
            elif setting.default is not None:
                default_s = repr(setting.default)
            else:
                default_s = None
            opt_help = setting.cmdline_help
            if sys.version_info.major < 3:
                opt_help = opt_help.encode(encoding, "replace")
            if default_s:
                opt_help += " (Default: {0})".format(default_s)
            # Boolean settings get a yes/no action instead of a value.
            if opt_type is bool:
                opt_action = _YesNoAction
            else:
                opt_action = "store"
            parser.add_argument(option,
                                action = opt_action,
                                default = setting.default,
                                type = opt_type,
                                help = opt_help,
                                metavar = name.upper(),
                                dest = dest)
        return parser
Example #39
0
from .binarywave import MAXDIMS as _MAXDIMS
from .packed import load as _load
from .record.base import UnknownRecord as _UnknownRecord
from .record.folder import FolderStartRecord as _FolderStartRecord
from .record.folder import FolderEndRecord as _FolderEndRecord
from .record.history import HistoryRecord as _HistoryRecord
from .record.history import GetHistoryRecord as _GetHistoryRecord
from .record.history import RecreationRecord as _RecreationRecord
from .record.packedfile import PackedFileRecord as _PackedFileRecord
from .record.procedure import ProcedureRecord as _ProcedureRecord
from .record.wave import WaveRecord as _WaveRecord
from .record.variables import VariablesRecord as _VariablesRecord

__version__ = '0.10'

# Text encoding to use: the locale's preferred encoding, or the
# interpreter default when the locale reports none.
ENCODING = _locale.getpreferredencoding() or _sys.getdefaultencoding()
# Python reserved words (Python 2 era: includes 'exec' and 'print');
# names colliding with these are rejected by valid_identifier().
PYKEYWORDS = set(('and', 'as', 'assert', 'break', 'class', 'continue', 'def',
                  'elif', 'else', 'except', 'exec', 'finally', 'for', 'global',
                  'if', 'import', 'in', 'is', 'lambda', 'or', 'pass', 'print',
                  'raise', 'return', 'try', 'with', 'yield'))
# Unicode-aware regex matching a syntactically valid Python identifier.
PYID = _re.compile(r"^[^\d\W]\w*$", _re.UNICODE)


def valid_identifier(s):
    """Check if a name is a valid (non-keyword) Python identifier."""
    match = PYID.match(s)
    return match and s not in PYKEYWORDS


class IgorObject(object):
    """Base class shared by all objects the parser can return."""
Example #40
0
 def write_text(self, contents, encoding=None, errors=None):
     """Encode *contents* and write the resulting bytes to the file.

     Falls back to the locale's preferred encoding when *encoding* is
     not given. Custom error handlers are not supported.
     """
     codec = encoding or locale.getpreferredencoding(False)
     assert errors is None
     self.write_bytes(contents.encode(codec))
Example #41
0
    def load(self, filename=None):
        """Parse the config file into ``self.dict``.

        Lines are ``key = value`` pairs. Lines starting with '#' are
        comments, a trailing backslash continues a line, ';' separates
        list values, and a ``[comment]`` suffix on a key stores a comment
        for that key. Existing entries are merged, not replaced.

        :param filename: optional path overriding ``self.fileName``.
        """
        if filename:
            self.fileName = filename
        if self.fileName is None:  # FIX: was '== None'
            return
        if not os.access(self.fileName, os.R_OK):
            # Best-effort load: an unreadable file is silently ignored.
            #            print("warning: can't access %s" % self.fileName)
            return

        # FIX: use a context manager so the handle is closed even when
        # parsing raises (the original leaked the file on error).
        with open(self.fileName, "r") as f:
            multiline = ''
            for line in f:
                # strip comments
                if line.find('#') == 0:
                    continue
                line = multiline + line.strip()
                if not line:
                    continue

                # if line ends in '\', append the next line before parsing
                if line[-1] == '\\':
                    multiline = line[:-1].strip()
                    continue
                else:
                    multiline = ''

                split = line.split('=', 1)
                if len(split) != 2:
                    # not in 'a = b' format. we should log this
                    # or maybe error.
                    continue
                key = split[0].strip()
                value = ustr(split[1].strip())

                # decode a comment line
                comment = None
                pos = key.find("[comment]")
                if pos != -1:
                    key = key[:pos]
                    comment = value
                    value = None

                # figure out if we need to parse the value further
                if value:
                    # possibly split value into a list
                    values = value.split(";")
                    if key in ['proxyUser', 'proxyPassword']:
                        # credentials stay as locale-encoded strings
                        value = str(value.encode(locale.getpreferredencoding()))
                    elif len(values) == 1:
                        try:
                            value = int(value)
                        except ValueError:
                            pass
                    elif values[0] == "":
                        value = []
                    else:
                        # there could be whitespace between the values on
                        # one line, let's strip it out
                        value = [val.strip() for val in values if val.strip()]

                # now insert the (comment, value) in the dictionary
                newval = (comment, value)
                if key in self.dict:  # do we need to update
                    newval = self.dict[key]
                    if comment is not None:  # override comment
                        newval = (comment, newval[1])
                    if value is not None:  # override value
                        newval = (newval[0], value)
                self.dict[key] = newval
Example #42
0
def dispatch(parser,
             argv=None,
             add_help_command=True,
             encoding=None,
             intercept=False):
    """Parses given list of arguments using given parser, calls the relevant
    function and prints the result.

    The target function should expect one positional argument: the
    :class:`argparse.Namespace` object. However, if the function is decorated with
    :func:`plain_signature`, the positional and named arguments from the
    namespace object are passed to the function instead of the object itself.

    :param parser:
        the ArgumentParser instance.
    :param argv:
        a list of strings representing the arguments. If `None`, ``sys.argv``
        is used instead. Default is `None`.
    :param add_help_command:
        if `True`, converts first positional argument "help" to a keyword
        argument so that ``help foo`` becomes ``foo --help`` and displays usage
        information for "foo". Default is `True`.
    :param encoding:
        output encoding used for iterable results. If `None`, the
        terminal's encoding is used when stdout is a tty, otherwise the
        locale's preferred encoding.
    :param intercept:
        if `True`, an iterable result is joined and returned as a single
        encoded string (and a `CommandError` is returned as its string
        representation) instead of being printed.

    Exceptions are not wrapped and will propagate. The only exception among the
    exceptions is :class:`CommandError` which is interpreted as an expected
    event so the traceback is hidden.
    """
    if argv is None:
        argv = sys.argv[1:]
    if add_help_command:
        # Rewrite 'prog help foo' into 'prog foo --help'.
        if argv and argv[0] == 'help':
            argv.pop(0)
            argv.append('--help')

    # this will raise SystemExit if parsing fails
    args = parser.parse_args(argv)

    if not hasattr(args, 'function'):
        # FIXME: "./prog.py" hits this error while "./prog.py foo" doesn't
        # if there were no commands defined for the parser (a possible case)
        raise NotImplementedError('Cannot dispatch without commands')

    # try different ways of calling the command; if meanwhile it raises
    # CommandError, return the string representation of that error
    try:
        if getattr(args.function, 'argh_no_namespace', False):
            # filter the namespace variables so that only those expected by the
            # actual function will pass
            f = args.function
            # func_code/co_varnames: Python 2 introspection of the
            # function's accepted argument names.
            expected_args = f.func_code.co_varnames[:f.func_code.co_argcount]
            ok_args = [x for x in args._get_args() if x in expected_args]
            ok_kwargs = dict(
                (k, v) for k, v in args._get_kwargs() if k in expected_args)
            result = args.function(*ok_args, **ok_kwargs)
        else:
            result = args.function(args)
        if isinstance(result, (GeneratorType, list, tuple)):
            # handle iterable results (function marked with @generator decorator)
            if not encoding:
                # choose between terminal's and system's preferred encodings
                if sys.stdout.isatty():
                    encoding = sys.stdout.encoding
                else:
                    encoding = locale.getpreferredencoding()
            if intercept:
                return '\n'.join([line.encode(encoding) for line in result])
            else:
                # we must print each line as soon as it is generated to ensure that
                # it is displayed to the user before anything else happens, e.g.
                # raw_input() is called
                for line in result:
                    if not isinstance(line, unicode):
                        try:
                            line = unicode(line)
                        except UnicodeDecodeError:
                            line = str(line).decode('utf-8')
                    print(line.encode(encoding))
        else:
            return result
    except CommandError, e:
        if intercept:
            return str(e)
        else:
            print(e)
Example #43
0
#!/usr/bin/env python3
"""
High-level tests for the class InfoGroup
"""
import os
import sys
import unittest
import tempfile
import shutil
import stat
import machinestate
from locale import getpreferredencoding

ENCODING = getpreferredencoding()


class TestToStrList(unittest.TestCase):
    # Tests for tostrlist
    def test_tostrlistNone(self):
        out = machinestate.tostrlist(None)
        self.assertEqual(out, None)
    def test_tostrlistInt(self):
        out = machinestate.tostrlist(1)
        self.assertEqual(out, ["1"])
    def test_tostrlistValidSpaces(self):
        out = machinestate.tostrlist("a b c")
        self.assertEqual(out, ["a", "b", "c"])
    def test_tostrlistValidTabs(self):
        out = machinestate.tostrlist("a\tb\tc")
        self.assertEqual(out, ["a", "b", "c"])
    def test_tostrlistValidComma(self):
Example #44
0
File: ntlm.py Project: vitan/ldap3
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.

# NTLMv2 authentication as per [MS-NLMP] (https://msdn.microsoft.com/en-us/library/cc236621.aspx)

from struct import pack, unpack
from platform import system, version
from socket import gethostname
from time import time
import hmac
import hashlib
from os import urandom

# OEM-encoded fields use the host's locale encoding; fall back to UTF-8
# when the locale cannot be determined.
try:
    from locale import getpreferredencoding
    oem_encoding = getpreferredencoding()
except Exception:
    oem_encoding = 'utf-8'

from ..protocol.formatters.formatters import format_ad_timestamp

NTLM_SIGNATURE = b'NTLMSSP\x00'
NTLM_MESSAGE_TYPE_NTLM_NEGOTIATE = 1
NTLM_MESSAGE_TYPE_NTLM_CHALLENGE = 2
NTLM_MESSAGE_TYPE_NTLM_AUTHENTICATE = 3

FLAG_NEGOTIATE_56 = 31  # W
FLAG_NEGOTIATE_KEY_EXCH = 30  # V
FLAG_NEGOTIATE_128 = 29  # U
FLAG_NEGOTIATE_VERSION = 25  # T
FLAG_NEGOTIATE_TARGET_INFO = 23  # S
Example #45
0
class Config(object):
    _instance = None
    _parsed_files = []
    _doc = {}
    access_key = u""
    secret_key = u""
    access_token = u""
    _access_token_refresh = True
    host_base = u"s3.amazonaws.com"
    host_bucket = u"%(bucket)s.s3.amazonaws.com"
    kms_key = u""  #can't set this and Server Side Encryption at the same time
    # simpledb_host looks useless, legacy? to remove?
    simpledb_host = u"sdb.amazonaws.com"
    cloudfront_host = u"cloudfront.amazonaws.com"
    verbosity = logging.WARNING
    progress_meter = sys.stdout.isatty()
    progress_class = Progress.ProgressCR
    send_chunk = 64 * 1024
    recv_chunk = 64 * 1024
    list_md5 = False
    long_listing = False
    human_readable_sizes = False
    extra_headers = SortedDict(ignore_case=True)
    force = False
    server_side_encryption = False
    enable = None
    get_continue = False
    put_continue = False
    upload_id = u""
    skip_existing = False
    recursive = False
    restore_days = 1
    restore_priority = u"Standard"
    acl_public = None
    acl_grants = []
    acl_revokes = []
    proxy_host = u""
    proxy_port = 3128
    encrypt = False
    dry_run = False
    add_encoding_exts = u""
    preserve_attrs = True
    preserve_attrs_list = [
        u'uname',  # Verbose owner Name (e.g. 'root')
        u'uid',  # Numeric user ID (e.g. 0)
        u'gname',  # Group name (e.g. 'users')
        u'gid',  # Numeric group ID (e.g. 100)
        u'atime',  # Last access timestamp
        u'mtime',  # Modification timestamp
        u'ctime',  # Creation timestamp
        u'mode',  # File mode (e.g. rwxr-xr-x = 755)
        u'md5',  # File MD5 (if known)
        #u'acl',     # Full ACL (not yet supported)
    ]
    delete_removed = False
    delete_after = False
    delete_after_fetch = False
    max_delete = -1
    limit = -1
    _doc[
        'delete_removed'] = u"[sync] Remove remote S3 objects when local file has been deleted"
    delay_updates = False  # OBSOLETE
    gpg_passphrase = u""
    gpg_command = u""
    gpg_encrypt = u"%(gpg_command)s -c --verbose --no-use-agent --batch --yes --passphrase-fd %(passphrase_fd)s -o %(output_file)s %(input_file)s"
    gpg_decrypt = u"%(gpg_command)s -d --verbose --no-use-agent --batch --yes --passphrase-fd %(passphrase_fd)s -o %(output_file)s %(input_file)s"
    use_https = True
    ca_certs_file = u""
    ssl_client_key_file = u""
    ssl_client_cert_file = u""
    check_ssl_certificate = True
    check_ssl_hostname = True
    bucket_location = u"US"
    default_mime_type = u"binary/octet-stream"
    guess_mime_type = True
    use_mime_magic = True
    mime_type = u""
    enable_multipart = True
    # Chunk size is at the same time the chunk size and the threshold
    multipart_chunk_size_mb = 15  # MiB
    # Maximum chunk size for s3-to-s3 copy is 5 GiB.
    # But, use a lot lower value by default (1GiB)
    multipart_copy_chunk_size_mb = 1 * 1024
    # Maximum chunks on AWS S3, could be different on other S3-compatible APIs
    multipart_max_chunks = 10000
    # List of checks to be performed for 'sync'
    sync_checks = ['size', 'md5']  # 'weak-timestamp'
    # List of compiled REGEXPs
    exclude = []
    include = []
    # Dict mapping compiled REGEXPs back to their textual form
    debug_exclude = {}
    debug_include = {}
    encoding = locale.getpreferredencoding() or "UTF-8"
    urlencoding_mode = u"normal"
    log_target_prefix = u""
    reduced_redundancy = False
    storage_class = u""
    follow_symlinks = False
    # If too big, this value can be overriden by the OS socket timeouts max values.
    # For example, on Linux, a connection attempt will automatically timeout after 120s.
    socket_timeout = 300
    invalidate_on_cf = False
    # joseprio: new flags for default index invalidation
    invalidate_default_index_on_cf = False
    invalidate_default_index_root_on_cf = True
    website_index = u"index.html"
    website_error = u""
    website_endpoint = u"http://%(bucket)s.s3-website-%(location)s.amazonaws.com/"
    additional_destinations = []
    files_from = []
    cache_file = u""
    add_headers = u""
    remove_headers = []
    expiry_days = u""
    expiry_date = u""
    expiry_prefix = u""
    signature_v2 = False
    limitrate = 0
    requester_pays = False
    stop_on_error = False
    content_disposition = u""
    content_type = u""
    stats = False
    # Disabled by default because can create a latency with a CONTINUE status reply
    # expected for every send file requests.
    use_http_expect = False
    signurl_use_https = False
    # Maximum sleep duration for throtte / limitrate.
    # s3 will timeout if a request/transfer is stuck for more than a short time
    throttle_max = 100
    public_url_use_https = False
    connection_pooling = True

    ## Creating a singleton
    def __new__(self,
                configfile=None,
                access_key=None,
                secret_key=None,
                access_token=None):
        # Singleton: always return the single shared instance. The
        # constructor arguments are accepted here only so the signature
        # matches __init__, which does the actual work.
        if self._instance is None:
            self._instance = object.__new__(self)
        return self._instance

    def __init__(self,
                 configfile=None,
                 access_key=None,
                 secret_key=None,
                 access_token=None):
        # NOTE: because Config is a singleton, __init__ runs on every
        # Config() call, but it only does work when a configfile is given.
        if configfile:
            try:
                self.read_config_file(configfile)
            except IOError:
                # Config file unreadable: fall back to the AWS credential
                # file if the environment points at one.
                if 'AWS_CREDENTIAL_FILE' in os.environ or 'AWS_PROFILE' in os.environ:
                    self.aws_credential_file()

            # override these if passed on the command-line
            if access_key and secret_key:
                self.access_key = access_key
                self.secret_key = secret_key
            if access_token:
                self.access_token = access_token
                # Do not refresh the IAM role when an access token is provided.
                self._access_token_refresh = False

            # Still no keys: try the environment, then IAM role metadata.
            if len(self.access_key) == 0:
                env_access_key = os.getenv('AWS_ACCESS_KEY') or os.getenv(
                    'AWS_ACCESS_KEY_ID')
                env_secret_key = os.getenv('AWS_SECRET_KEY') or os.getenv(
                    'AWS_SECRET_ACCESS_KEY')
                env_access_token = os.getenv('AWS_SESSION_TOKEN') or os.getenv(
                    'AWS_SECURITY_TOKEN')
                if env_access_key:
                    # py3 getenv returns unicode and py2 returns bytes.
                    self.access_key = config_unicodise(env_access_key)
                    self.secret_key = config_unicodise(env_secret_key)
                    if env_access_token:
                        # Do not refresh the IAM role when an access token is provided.
                        self._access_token_refresh = False
                        self.access_token = config_unicodise(env_access_token)
                else:
                    self.role_config()

            #TODO check KMS key is valid
            if self.kms_key and self.server_side_encryption == True:
                warning(
                    'Cannot have server_side_encryption (S3 SSE) and KMS_key set (S3 KMS). KMS encryption will be used. Please set server_side_encryption to False'
                )
            if self.kms_key and self.signature_v2 == True:
                raise Exception(
                    'KMS encryption requires signature v4. Please set signature_v2 to False'
                )

    def role_config(self):
        """
        Get credentials from IAM authentication.

        When AWS_WEB_IDENTITY_TOKEN_FILE is set, posts an
        AssumeRoleWithWebIdentity request to the public STS endpoint;
        otherwise queries the EC2 instance metadata service at
        169.254.169.254.  On success, stores access_key / secret_key /
        access_token via Config().update_option().

        Raises:
            IOError: if either service does not return usable credentials.
        """
        # NOTE: the original body was wrapped in a pointless
        # "try: ... except: raise" which only re-raised every exception;
        # the wrapper has been removed with no behaviour change.
        role_arn = os.environ.get('AWS_ROLE_ARN')
        web_identity_token_file = os.environ.get(
            'AWS_WEB_IDENTITY_TOKEN_FILE')
        # Session names only need to be unique per caller; second
        # resolution is sufficient here.
        role_session_name = 'role-session-%s' % (int(time.time()))
        if web_identity_token_file:
            with open(web_identity_token_file) as f:
                web_identity_token = f.read()
            params = {
                "Action": "AssumeRoleWithWebIdentity",
                "Version": "2011-06-15",
                "RoleArn": role_arn,
                "RoleSessionName": role_session_name,
                "WebIdentityToken": web_identity_token
            }
            conn = httplib.HTTPSConnection(host='sts.amazonaws.com',
                                           timeout=2)
            conn.request('POST', '/?' + urllib.parse.urlencode(params))
            resp = conn.getresponse()
            files = resp.read()
            if resp.status == 200 and len(files) > 1:
                creds = parse_xml_to_dict(files)
                Config().update_option(
                    'access_key', creds['AssumeRoleWithWebIdentityResult']
                    ['Credentials']['AccessKeyId'])
                Config().update_option(
                    'secret_key', creds['AssumeRoleWithWebIdentityResult']
                    ['Credentials']['SecretAccessKey'])
                Config().update_option(
                    'access_token',
                    creds['AssumeRoleWithWebIdentityResult']['Credentials']
                    ['SessionToken'])
            else:
                raise IOError("Failed to get web identity credentials "
                              "from STS")
        else:
            conn = httplib.HTTPConnection(host='169.254.169.254',
                                          timeout=2)
            conn.request('GET',
                         "/latest/meta-data/iam/security-credentials/")
            resp = conn.getresponse()
            files = resp.read()
            if resp.status == 200 and len(files) > 1:
                # The listing endpoint returned a role name; fetch the
                # credentials document for that role.
                conn.request(
                    'GET',
                    "/latest/meta-data/iam/security-credentials/%s" %
                    files.decode('utf-8'))
                resp = conn.getresponse()
                if resp.status == 200:
                    resp_content = config_unicodise(resp.read())
                    creds = json.loads(resp_content)
                    Config().update_option(
                        'access_key',
                        config_unicodise(creds['AccessKeyId']))
                    Config().update_option(
                        'secret_key',
                        config_unicodise(creds['SecretAccessKey']))
                    Config().update_option(
                        'access_token', config_unicodise(creds['Token']))
                else:
                    raise IOError("Failed to read IAM role credentials")
            else:
                raise IOError("Failed to list IAM role names")

    def role_refresh(self):
        """Re-fetch IAM role credentials, unless refresh was disabled
        because an explicit session token was supplied."""
        if not self._access_token_refresh:
            return
        try:
            self.role_config()
        except Exception:
            warning("Could not refresh role")

    def aws_credential_file(self):
        """
        Import credentials from the AWS CLI shared credentials file.

        Reads ~/.aws/credentials (or the file named by the
        AWS_CREDENTIAL_FILE environment variable) and, for the profile
        selected by AWS_PROFILE (default "default"), updates the
        access_key / secret_key / access_token options.  A missing file
        is silently ignored; an unparsable file raises ValueError.
        """
        try:
            aws_credential_file = os.path.expanduser('~/.aws/credentials')
            credential_file_from_env = os.environ.get('AWS_CREDENTIAL_FILE')
            if credential_file_from_env and \
               os.path.isfile(credential_file_from_env):
                aws_credential_file = config_unicodise(
                    credential_file_from_env)
            elif not os.path.isfile(aws_credential_file):
                return

            config = PyConfigParser()

            debug("Reading AWS credentials from %s" % (aws_credential_file))
            with io.open(aws_credential_file,
                         "r",
                         encoding=getattr(self, 'encoding', 'UTF-8')) as fp:
                config_string = fp.read()
            try:
                try:
                    # readfp is replaced by read_file in python3,
                    # but so far readfp it is still available.
                    config.readfp(io.StringIO(config_string))
                except MissingSectionHeaderError:
                    # if header is missing, this could be deprecated
                    # credentials file format as described here:
                    # https://blog.csanchez.org/2011/05/
                    # then do the hacky-hack and add default header
                    # to be able to read the file with PyConfigParser()
                    config_string = u'[default]\n' + config_string
                    config.readfp(io.StringIO(config_string))
            except ParsingError as exc:
                raise ValueError("Error reading aws_credential_file "
                                 "(%s): %s" % (aws_credential_file, str(exc)))

            profile = config_unicodise(os.environ.get('AWS_PROFILE',
                                                      "default"))
            debug("Using AWS profile '%s'" % (profile))

            # get_key - helper function to read the aws profile credentials
            # including the legacy ones as described here:
            # https://blog.csanchez.org/2011/05/
            def get_key(profile, key, legacy_key, print_warning=True):
                result = None

                try:
                    result = config.get(profile, key)
                except NoOptionError as e:
                    # we may want to skip warning message for optional keys
                    if print_warning:
                        warning(
                            "Couldn't find key '%s' for the AWS Profile "
                            "'%s' in the credentials file '%s'", e.option,
                            e.section, aws_credential_file)
                    # if the legacy_key defined and original one wasn't found,
                    # try read the legacy_key
                    if legacy_key:
                        try:
                            key = legacy_key
                            profile = "default"
                            result = config.get(profile, key)
                            # Typo fix: message used to read "configuratin".
                            warning(
                                "Legacy configuration key '%s' used, please use"
                                " the standardized config format as described "
                                "here: https://aws.amazon.com/blogs/security/a-new-and-standardized-way-to-manage-credentials-in-the-aws-sdks/",
                                key)
                        except NoOptionError:
                            # legacy key absent as well -> leave result None
                            pass

                if result:
                    debug(
                        "Found the configuration option '%s' for the AWS "
                        "Profile '%s' in the credentials file %s", key,
                        profile, aws_credential_file)
                return result

            profile_access_key = get_key(profile, "aws_access_key_id",
                                         "AWSAccessKeyId")
            if profile_access_key:
                Config().update_option('access_key',
                                       config_unicodise(profile_access_key))

            profile_secret_key = get_key(profile, "aws_secret_access_key",
                                         "AWSSecretKey")
            if profile_secret_key:
                Config().update_option('secret_key',
                                       config_unicodise(profile_secret_key))

            # The session token is optional, so no warning when absent.
            profile_access_token = get_key(profile, "aws_session_token", None,
                                           False)
            if profile_access_token:
                Config().update_option('access_token',
                                       config_unicodise(profile_access_token))

        except IOError as e:
            warning("Errno %d accessing credentials file %s", e.errno,
                    aws_credential_file)
        except NoSectionError as e:
            warning(
                "Couldn't find AWS Profile '%s' in the credentials file "
                "'%s'", profile, aws_credential_file)

    def option_list(self):
        """Return the names of all public scalar config attributes.

        An attribute qualifies when it does not start with an underscore
        and its value on the Config class is a str, int or bool.
        """
        scalar_types = (
            type(u"string"),  # str
            type(42),         # int
            type(True),       # bool
        )
        return [name for name in dir(self)
                if not name.startswith("_")
                and type(getattr(Config, name)) in scalar_types]

    def read_config_file(self, configfile):
        """Load all known options from *configfile* and record the file
        as parsed."""
        cp = ConfigParser(configfile)
        for option in self.option_list():
            raw = cp.get(option)
            if raw is not None:
                raw = raw.strip()
            self.update_option(option, raw)

        # acl_public defaults to None and is normally absent from the
        # config file, but honour it when the user did set it.
        acl_public = cp.get('acl_public')
        if acl_public:
            self.update_option('acl_public', acl_public)

        # add_headers is a comma separated list of "Header: value" pairs.
        add_headers = cp.get('add_headers')
        if add_headers:
            for header_spec in add_headers.split(","):
                key, value = header_spec.split(':', 1)
                self.extra_headers[key.strip()] = value.strip()

        self._parsed_files.append(configfile)

    def dump_config(self, stream):
        """Write the current configuration to *stream* as a "default"
        section."""
        dumper = ConfigDumper(stream)
        dumper.dump(u"default", self)

    def update_option(self, option, value):
        """
        Validate *value* for *option* and store it as a Config class attribute.

        Special cases:
        - values starting with "$" are resolved from the environment;
        - "verbosity" accepts an int or a logging level name;
        - "limitrate" accepts an optional k/K or m/M suffix (KiB/MiB);
        - bool-typed options accept yes/no, true/false, on/off, 1/0;
        - host options get their http(s):// scheme stripped.

        Raises ValueError for values that fail validation.  A None value
        is ignored (the option keeps its current setting).
        """
        if value is None:
            return

        #### Handle environment reference
        if unicode(value).startswith("$"):
            return self.update_option(option, os.getenv(value[1:]))

        #### Special treatment of some options
        ## verbosity must be known to "logging" module
        if option == "verbosity":
            # support integer verbosities
            try:
                value = int(value)
            except ValueError:
                try:
                    # otherwise it must be a key known to the logging module
                    try:
                        # logging._levelNames only exists on python 2
                        value = logging._levelNames[value]
                    except AttributeError:
                        # python 3 renamed the mapping to _nameToLevel
                        value = logging._nameToLevel[value]
                except KeyError:
                    raise ValueError(
                        "Config: verbosity level '%s' is not valid" % value)

        elif option == "limitrate":
            # convert kb,mb to bytes
            if value.endswith("k") or value.endswith("K"):
                shift = 10
            elif value.endswith("m") or value.endswith("M"):
                shift = 20
            else:
                shift = 0
            try:
                value = shift and int(value[:-1]) << shift or int(value)
            except Exception:
                raise ValueError(
                    "Config: value of option %s must have suffix m, k, or nothing, not '%s'"
                    % (option, value))

        ## allow yes/no, true/false, on/off and 1/0 for boolean options
        ## Some options default to None, if that's the case check the value to see if it is bool
        elif (type(getattr(Config, option)) is type(True) or  # Config is bool
              (getattr(Config, option) is None
               and is_bool(value))):  # Config is None and value is bool
            if is_bool_true(value):
                value = True
            elif is_bool_false(value):
                value = False
            else:
                raise ValueError(
                    "Config: value of option '%s' must be Yes or No, not '%s'"
                    % (option, value))

        elif type(getattr(Config, option)) is type(42):  # int
            try:
                value = int(value)
            except ValueError:
                raise ValueError(
                    "Config: value of option '%s' must be an integer, not '%s'"
                    % (option, value))

        elif option in ["host_base", "host_bucket", "cloudfront_host"]:
            # these options hold bare host names; strip any URL scheme
            if value.startswith("http://"):
                value = value[7:]
            elif value.startswith("https://"):
                value = value[8:]

        setattr(Config, option, value)
Example #46
0
 def __str__(self):
     """Return this object's unicode rendering, encoded to the locale's
     preferred encoding on Python 2 (where str is bytes)."""
     text = self.__unicode__()
     if PY2:
         return text.encode(locale.getpreferredencoding())
     return text
Example #47
0
# -*- coding: utf-8 -*-
# @File  : shutdownjava.py
# @Author: Feng
# @Date  : 2019/1/23
# @Desc  :

import re
import codecs
import locale
import subprocess

# Scan the netstat listing and kill every process that owns a java socket.
ps = subprocess.Popen('netstat -nltup',
                      stdin=subprocess.PIPE,
                      stdout=subprocess.PIPE,
                      shell=True)

while True:
    data = ps.stdout.readline()
    if data == b'':
        # EOF: netstat has produced all of its output.
        break
    # netstat output is bytes; decode with the console's preferred encoding.
    data = data.decode(codecs.lookup(locale.getpreferredencoding()).name)
    # Bug fix: the original pattern r'(\d*)?/java' could capture an empty
    # string (e.g. on a line containing "-/java"), which produced a
    # malformed "kill -9 " command.  \d+ requires at least one digit.
    search = re.search(r'(\d+)/java', data)
    if search is not None:
        pid = search.group(1)
        subprocess.Popen('kill -9 ' + pid,
                         stdin=subprocess.PIPE,
                         stdout=subprocess.PIPE,
                         shell=True)
Example #48
0
def _encoding(key, config):
    """Ensure config[key] names a valid codec; when the key is absent,
    fill it in with the locale's preferred encoding."""
    try:
        encoding = config[key]
    except KeyError:
        config[key] = locale.getpreferredencoding()
    else:
        # raises LookupError for an unknown encoding name
        codecs.lookup(encoding)
Example #49
0
def getpreferredencoding():
    """Return the locale's preferred encoding without calling setlocale().

    Borrowed from Invoke
    (see https://github.com/pyinvoke/invoke/blob/93af29d/invoke/runners.py#L881)
    """
    import locale
    do_setlocale = False
    return locale.getpreferredencoding(do_setlocale)
Example #50
0
class SysModuleTest(unittest.TestCase):
    def setUp(self):
        """Snapshot the sys hooks that individual tests overwrite."""
        self.orig_stdout = sys.stdout
        self.orig_stderr = sys.stderr
        self.orig_displayhook = sys.displayhook

    def tearDown(self):
        """Restore the sys hooks and reap any child processes a test left."""
        sys.stdout = self.orig_stdout
        sys.stderr = self.orig_stderr
        sys.displayhook = self.orig_displayhook
        test.support.reap_children()

    def test_original_displayhook(self):
        """sys.__displayhook__: prints non-None values, binds builtins._,
        and fails without sys.stdout."""
        import builtins
        out = io.StringIO()
        sys.stdout = out

        dh = sys.__displayhook__

        # called with no argument -> TypeError
        self.assertRaises(TypeError, dh)
        if hasattr(builtins, "_"):
            del builtins._

        # None is neither printed nor bound to builtins._
        dh(None)
        self.assertEqual(out.getvalue(), "")
        self.assertTrue(not hasattr(builtins, "_"))
        dh(42)
        self.assertEqual(out.getvalue(), "42\n")
        self.assertEqual(builtins._, 42)

        # with sys.stdout missing the hook must raise RuntimeError
        del sys.stdout
        self.assertRaises(RuntimeError, dh, 42)

    def test_lost_displayhook(self):
        """Evaluating in 'single' mode fails cleanly when sys.displayhook
        has been deleted."""
        del sys.displayhook
        code = compile("42", "<string>", "single")
        self.assertRaises(RuntimeError, eval, code)

    def test_custom_displayhook(self):
        """An exception raised by a user-installed displayhook propagates
        out of eval()."""
        def baddisplayhook(obj):
            raise ValueError

        sys.displayhook = baddisplayhook
        code = compile("42", "<string>", "single")
        self.assertRaises(ValueError, eval, code)

    def test_original_excepthook(self):
        """sys.__excepthook__ writes the traceback to sys.stderr and
        rejects a call without arguments."""
        err = io.StringIO()
        sys.stderr = err

        eh = sys.__excepthook__

        self.assertRaises(TypeError, eh)
        try:
            raise ValueError(42)
        except ValueError as exc:
            eh(*sys.exc_info())

        self.assertTrue(err.getvalue().endswith("ValueError: 42\n"))

    def test_excepthook(self):
        """sys.excepthook reports a mismatched (type, value) pair on stderr
        instead of raising."""
        with test.support.captured_output("stderr") as stderr:
            sys.excepthook(1, '1', 1)
        self.assertTrue("TypeError: print_exception(): Exception expected for " \
                         "value, str found" in stderr.getvalue())

    # FIXME: testing the code for a lost or replaced excepthook in
    # Python/pythonrun.c::PyErr_PrintEx() is tricky.

    def test_exit(self):
        """sys.exit(): argument handling, SystemExit codes, and the exit
        message written by the interpreter shutdown machinery."""
        # call with two arguments
        self.assertRaises(TypeError, sys.exit, 42, 42)

        # call without argument
        with self.assertRaises(SystemExit) as cm:
            sys.exit()
        self.assertIsNone(cm.exception.code)

        rc, out, err = assert_python_ok('-c', 'import sys; sys.exit()')
        self.assertEqual(rc, 0)
        self.assertEqual(out, b'')
        self.assertEqual(err, b'')

        # call with integer argument
        with self.assertRaises(SystemExit) as cm:
            sys.exit(42)
        self.assertEqual(cm.exception.code, 42)

        # call with tuple argument with one entry
        # entry will be unpacked
        with self.assertRaises(SystemExit) as cm:
            sys.exit((42, ))
        self.assertEqual(cm.exception.code, 42)

        # call with string argument
        with self.assertRaises(SystemExit) as cm:
            sys.exit("exit")
        self.assertEqual(cm.exception.code, "exit")

        # call with tuple argument with two entries
        with self.assertRaises(SystemExit) as cm:
            sys.exit((17, 23))
        self.assertEqual(cm.exception.code, (17, 23))

        # test that the exit machinery handles SystemExits properly
        rc, out, err = assert_python_failure('-c', 'raise SystemExit(47)')
        self.assertEqual(rc, 47)
        self.assertEqual(out, b'')
        self.assertEqual(err, b'')

        def check_exit_message(code, expected, **env_vars):
            # helper: run *code* in a child interpreter and check that the
            # exit message written to stderr starts with *expected*
            rc, out, err = assert_python_failure('-c', code, **env_vars)
            self.assertEqual(rc, 1)
            self.assertEqual(out, b'')
            self.assertTrue(
                err.startswith(expected),
                "%s doesn't start with %s" % (ascii(err), ascii(expected)))

        # test that stderr buffer is flushed before the exit message is written
        # into stderr
        check_exit_message(
            r'import sys; sys.stderr.write("unflushed,"); sys.exit("message")',
            b"unflushed,message")

        # test that the exit message is written with backslashreplace error
        # handler to stderr
        check_exit_message(r'import sys; sys.exit("surrogates:\uDCFF")',
                           b"surrogates:\\udcff")

        # test that the unicode message is encoded to the stderr encoding
        # instead of the default encoding (utf8)
        check_exit_message(r'import sys; sys.exit("h\xe9")',
                           b"h\xe9",
                           PYTHONIOENCODING='latin-1')

    def test_getdefaultencoding(self):
        """sys.getdefaultencoding(): takes no arguments and returns a str."""
        self.assertRaises(TypeError, sys.getdefaultencoding, 42)
        # can't check more than the type, as the user might have changed it
        self.assertIsInstance(sys.getdefaultencoding(), str)

    # testing sys.settrace() is done in test_sys_settrace.py
    # testing sys.setprofile() is done in test_sys_setprofile.py

    def test_setcheckinterval(self):
        """sys.setcheckinterval()/getcheckinterval() round-trip; warnings
        are suppressed for the whole test (these calls may warn --
        NOTE(review): presumably deprecation warnings, confirm)."""
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            self.assertRaises(TypeError, sys.setcheckinterval)
            orig = sys.getcheckinterval()
            for n in 0, 100, 120, orig:  # orig last to restore starting state
                sys.setcheckinterval(n)
                self.assertEqual(sys.getcheckinterval(), n)

    def test_switchinterval(self):
        """sys.setswitchinterval() rejects bad arguments and round-trips
        through getswitchinterval(); the original value is restored."""
        self.assertRaises(TypeError, sys.setswitchinterval)
        self.assertRaises(TypeError, sys.setswitchinterval, "a")
        self.assertRaises(ValueError, sys.setswitchinterval, -1.0)
        self.assertRaises(ValueError, sys.setswitchinterval, 0.0)
        orig = sys.getswitchinterval()
        # sanity check
        self.assertTrue(orig < 0.5, orig)
        try:
            for n in 0.00001, 0.05, 3.0, orig:
                sys.setswitchinterval(n)
                self.assertAlmostEqual(sys.getswitchinterval(), n)
        finally:
            sys.setswitchinterval(orig)

    def test_recursionlimit(self):
        """sys.setrecursionlimit()/getrecursionlimit() argument checking
        and round-trip; the original limit is restored."""
        self.assertRaises(TypeError, sys.getrecursionlimit, 42)
        oldlimit = sys.getrecursionlimit()
        self.assertRaises(TypeError, sys.setrecursionlimit)
        self.assertRaises(ValueError, sys.setrecursionlimit, -42)
        sys.setrecursionlimit(10000)
        self.assertEqual(sys.getrecursionlimit(), 10000)
        sys.setrecursionlimit(oldlimit)

    def test_recursionlimit_recovery(self):
        """After a RecursionError, hitting the limit again must raise
        RecursionError again (not crash)."""
        if hasattr(sys, 'gettrace') and sys.gettrace():
            self.skipTest('fatal error if run with a trace function')

        oldlimit = sys.getrecursionlimit()

        def f():
            # unbounded recursion to trip the limit
            f()

        try:
            for depth in (10, 25, 50, 75, 100, 250, 1000):
                try:
                    sys.setrecursionlimit(depth)
                except RecursionError:
                    # Issue #25274: The recursion limit is too low at the
                    # current recursion depth
                    continue

                # Issue #5392: test stack overflow after hitting recursion
                # limit twice
                self.assertRaises(RecursionError, f)
                self.assertRaises(RecursionError, f)
        finally:
            sys.setrecursionlimit(oldlimit)

    @test.support.cpython_only
    def test_setrecursionlimit_recursion_depth(self):
        """setrecursionlimit() must refuse a limit lower than the current
        recursion depth allows."""
        # Issue #25274: Setting a low recursion limit must be blocked if the
        # current recursion depth is already higher than the "lower-water
        # mark". Otherwise, it may not be possible anymore to
        # reset the overflowed flag to 0.

        from _testcapi import get_recursion_depth

        def set_recursion_limit_at_depth(depth, limit):
            # recurse until the interpreter is *depth* frames deep, then
            # attempt to set the limit and check the expected error
            recursion_depth = get_recursion_depth()
            if recursion_depth >= depth:
                with self.assertRaises(RecursionError) as cm:
                    sys.setrecursionlimit(limit)
                self.assertRegex(
                    str(cm.exception),
                    "cannot set the recursion limit to [0-9]+ "
                    "at the recursion depth [0-9]+: "
                    "the limit is too low")
            else:
                set_recursion_limit_at_depth(depth, limit)

        oldlimit = sys.getrecursionlimit()
        try:
            sys.setrecursionlimit(1000)

            for limit in (10, 25, 50, 75, 100, 150, 200):
                # formula extracted from _Py_RecursionLimitLowerWaterMark()
                if limit > 200:
                    depth = limit - 50
                else:
                    depth = limit * 3 // 4
                set_recursion_limit_at_depth(depth, limit)
        finally:
            sys.setrecursionlimit(oldlimit)

    def test_recursionlimit_fatalerror(self):
        """Hitting the recursion limit while already recovering from it is
        a fatal error; verify the child interpreter aborts with the
        expected message."""
        # A fatal error occurs if a second recursion limit is hit when recovering
        # from a first one.
        code = textwrap.dedent("""
            import sys

            def f():
                try:
                    f()
                except RecursionError:
                    f()

            sys.setrecursionlimit(%d)
            f()""")
        with test.support.SuppressCrashReport():
            for i in (50, 1000):
                sub = subprocess.Popen([sys.executable, '-c', code % i],
                                       stderr=subprocess.PIPE)
                err = sub.communicate()[1]
                self.assertTrue(sub.returncode, sub.returncode)
                self.assertIn(
                    b"Fatal Python error: Cannot recover from stack overflow",
                    err)

    def test_getwindowsversion(self):
        """sys.getwindowsversion(): 5-element tuple, typed named fields,
        and agreement between positional and named access."""
        # Raise SkipTest if sys doesn't have getwindowsversion attribute
        test.support.get_attribute(sys, "getwindowsversion")
        v = sys.getwindowsversion()
        self.assertEqual(len(v), 5)
        self.assertIsInstance(v[0], int)
        self.assertIsInstance(v[1], int)
        self.assertIsInstance(v[2], int)
        self.assertIsInstance(v[3], int)
        self.assertIsInstance(v[4], str)
        self.assertRaises(IndexError, operator.getitem, v, 5)
        self.assertIsInstance(v.major, int)
        self.assertIsInstance(v.minor, int)
        self.assertIsInstance(v.build, int)
        self.assertIsInstance(v.platform, int)
        self.assertIsInstance(v.service_pack, str)
        self.assertIsInstance(v.service_pack_minor, int)
        self.assertIsInstance(v.service_pack_major, int)
        self.assertIsInstance(v.suite_mask, int)
        self.assertIsInstance(v.product_type, int)
        self.assertEqual(v[0], v.major)
        self.assertEqual(v[1], v.minor)
        self.assertEqual(v[2], v.build)
        self.assertEqual(v[3], v.platform)
        self.assertEqual(v[4], v.service_pack)

        # This is how platform.py calls it. Make sure tuple
        #  still has 5 elements
        maj, min, buildno, plat, csd = sys.getwindowsversion()

    def test_call_tracing(self):
        """sys.call_tracing() rejects a non-tuple args argument."""
        self.assertRaises(TypeError, sys.call_tracing, type, 2)

    @unittest.skipUnless(hasattr(sys, "setdlopenflags"),
                         'test needs sys.setdlopenflags()')
    def test_dlopenflags(self):
        """sys.setdlopenflags()/getdlopenflags() round-trip; the original
        flags are restored afterwards."""
        self.assertTrue(hasattr(sys, "getdlopenflags"))
        self.assertRaises(TypeError, sys.getdlopenflags, 42)
        oldflags = sys.getdlopenflags()
        self.assertRaises(TypeError, sys.setdlopenflags)
        sys.setdlopenflags(oldflags + 1)
        self.assertEqual(sys.getdlopenflags(), oldflags + 1)
        sys.setdlopenflags(oldflags)

    @test.support.refcount_test
    def test_refcount(self):
        """sys.getrefcount(): binding/unbinding a global changes None's
        refcount by exactly one."""
        # n here must be a global in order for this test to pass while
        # tracing with a python function.  Tracing calls PyFrame_FastToLocals
        # which will add a copy of any locals to the frame object, causing
        # the reference count to increase by 2 instead of 1.
        global n
        self.assertRaises(TypeError, sys.getrefcount)
        c = sys.getrefcount(None)
        n = None
        self.assertEqual(sys.getrefcount(None), c + 1)
        del n
        self.assertEqual(sys.getrefcount(None), c)
        if hasattr(sys, "gettotalrefcount"):
            self.assertIsInstance(sys.gettotalrefcount(), int)

    def test_getframe(self):
        """sys._getframe(): argument checking, and the zero-depth frame is
        this very method's code object."""
        self.assertRaises(TypeError, sys._getframe, 42, 42)
        self.assertRaises(ValueError, sys._getframe, 2000000000)
        self.assertTrue(
            SysModuleTest.test_getframe.__code__ \
            is sys._getframe().f_code
        )

    # sys._current_frames() is a CPython-only gimmick.
    @test.support.reap_threads
    def test_current_frames(self):
        """sys._current_frames() maps thread ids to the frames those
        threads are currently executing."""
        import threading
        import traceback

        # Spawn a thread that blocks at a known place.  Then the main
        # thread does sys._current_frames(), and verifies that the frames
        # returned make sense.
        entered_g = threading.Event()
        leave_g = threading.Event()
        thread_info = []  # the thread's id

        def f123():
            g456()

        def g456():
            thread_info.append(threading.get_ident())
            entered_g.set()
            leave_g.wait()

        t = threading.Thread(target=f123)
        t.start()
        entered_g.wait()

        # At this point, t has finished its entered_g.set(), although it's
        # impossible to guess whether it's still on that line or has moved on
        # to its leave_g.wait().
        self.assertEqual(len(thread_info), 1)
        thread_id = thread_info[0]

        d = sys._current_frames()
        for tid in d:
            self.assertIsInstance(tid, int)
            self.assertGreater(tid, 0)

        main_id = threading.get_ident()
        self.assertIn(main_id, d)
        self.assertIn(thread_id, d)

        # Verify that the captured main-thread frame is _this_ frame.
        frame = d.pop(main_id)
        self.assertTrue(frame is sys._getframe())

        # Verify that the captured thread frame is blocked in g456, called
        # from f123.  This is a litte tricky, since various bits of
        # threading.py are also in the thread's call stack.
        frame = d.pop(thread_id)
        stack = traceback.extract_stack(frame)
        for i, (filename, lineno, funcname, sourceline) in enumerate(stack):
            if funcname == "f123":
                break
        else:
            self.fail("didn't find f123() on thread's call stack")

        self.assertEqual(sourceline, "g456()")

        # And the next record must be for g456().
        filename, lineno, funcname, sourceline = stack[i + 1]
        self.assertEqual(funcname, "g456")
        self.assertIn(sourceline, ["leave_g.wait()", "entered_g.set()"])

        # Reap the spawned thread.
        leave_g.set()
        t.join()

    def test_attributes(self):
        """Type and value sanity checks for the documented sys attributes
        (float_info, int_info, hash_info, version_info, maxunicode, ...)."""
        self.assertIsInstance(sys.api_version, int)
        self.assertIsInstance(sys.argv, list)
        self.assertIn(sys.byteorder, ("little", "big"))
        self.assertIsInstance(sys.builtin_module_names, tuple)
        self.assertIsInstance(sys.copyright, str)
        self.assertIsInstance(sys.exec_prefix, str)
        self.assertIsInstance(sys.base_exec_prefix, str)
        self.assertIsInstance(sys.executable, str)
        self.assertEqual(len(sys.float_info), 11)
        self.assertEqual(sys.float_info.radix, 2)
        self.assertEqual(len(sys.int_info), 2)
        self.assertTrue(sys.int_info.bits_per_digit % 5 == 0)
        self.assertTrue(sys.int_info.sizeof_digit >= 1)
        self.assertEqual(type(sys.int_info.bits_per_digit), int)
        self.assertEqual(type(sys.int_info.sizeof_digit), int)
        self.assertIsInstance(sys.hexversion, int)

        self.assertEqual(len(sys.hash_info), 9)
        self.assertLess(sys.hash_info.modulus, 2**sys.hash_info.width)
        # sys.hash_info.modulus should be a prime; we do a quick
        # probable primality test (doesn't exclude the possibility of
        # a Carmichael number)
        for x in range(1, 100):
            self.assertEqual(
                pow(x, sys.hash_info.modulus - 1, sys.hash_info.modulus), 1,
                "sys.hash_info.modulus {} is a non-prime".format(
                    sys.hash_info.modulus))
        self.assertIsInstance(sys.hash_info.inf, int)
        self.assertIsInstance(sys.hash_info.nan, int)
        self.assertIsInstance(sys.hash_info.imag, int)
        algo = sysconfig.get_config_var("Py_HASH_ALGORITHM")
        if sys.hash_info.algorithm in {"fnv", "siphash24"}:
            self.assertIn(sys.hash_info.hash_bits, {32, 64})
            self.assertIn(sys.hash_info.seed_bits, {32, 64, 128})

            if algo == 1:
                self.assertEqual(sys.hash_info.algorithm, "siphash24")
            elif algo == 2:
                self.assertEqual(sys.hash_info.algorithm, "fnv")
            else:
                self.assertIn(sys.hash_info.algorithm, {"fnv", "siphash24"})
        else:
            # PY_HASH_EXTERNAL
            self.assertEqual(algo, 0)
        self.assertGreaterEqual(sys.hash_info.cutoff, 0)
        self.assertLess(sys.hash_info.cutoff, 8)

        self.assertIsInstance(sys.maxsize, int)
        self.assertIsInstance(sys.maxunicode, int)
        self.assertEqual(sys.maxunicode, 0x10FFFF)
        self.assertIsInstance(sys.platform, str)
        self.assertIsInstance(sys.prefix, str)
        self.assertIsInstance(sys.base_prefix, str)
        self.assertIsInstance(sys.version, str)
        vi = sys.version_info
        self.assertIsInstance(vi[:], tuple)
        self.assertEqual(len(vi), 5)
        self.assertIsInstance(vi[0], int)
        self.assertIsInstance(vi[1], int)
        self.assertIsInstance(vi[2], int)
        self.assertIn(vi[3], ("alpha", "beta", "candidate", "final"))
        self.assertIsInstance(vi[4], int)
        self.assertIsInstance(vi.major, int)
        self.assertIsInstance(vi.minor, int)
        self.assertIsInstance(vi.micro, int)
        self.assertIn(vi.releaselevel, ("alpha", "beta", "candidate", "final"))
        self.assertIsInstance(vi.serial, int)
        self.assertEqual(vi[0], vi.major)
        self.assertEqual(vi[1], vi.minor)
        self.assertEqual(vi[2], vi.micro)
        self.assertEqual(vi[3], vi.releaselevel)
        self.assertEqual(vi[4], vi.serial)
        self.assertTrue(vi > (1, 0, 0))
        self.assertIsInstance(sys.float_repr_style, str)
        self.assertIn(sys.float_repr_style, ('short', 'legacy'))
        if not sys.platform.startswith('win'):
            self.assertIsInstance(sys.abiflags, str)

    def test_thread_info(self):
        """sys.thread_info: a 3-tuple with known name and lock values."""
        info = sys.thread_info
        self.assertEqual(len(info), 3)
        self.assertIn(info.name, ('nt', 'pthread', 'solaris', None))
        self.assertIn(info.lock, ('semaphore', 'mutex+cond', None))

    def test_43581(self):
        """stdout and stderr must share one encoding (issue #43581)."""
        # sys.stdout may be a StringIO replacement under regrtest, so the
        # original streams are inspected instead.
        stdout_encoding = sys.__stdout__.encoding
        stderr_encoding = sys.__stderr__.encoding
        self.assertEqual(stdout_encoding, stderr_encoding)

    def test_intern(self):
        """sys.intern returns one canonical object for equal strings."""
        global numruns
        numruns += 1
        self.assertRaises(TypeError, sys.intern)
        # Build a string that has never been interned in this process.
        fresh = "never interned before" + str(numruns)
        self.assertIs(sys.intern(fresh), fresh)
        # An equal-but-distinct string maps back to the first object.
        duplicate = fresh.swapcase().swapcase()
        self.assertIs(sys.intern(duplicate), fresh)

        # str subclasses are rejected: interning them would let arbitrary
        # __hash__/__eq__ overrides poison the intern dictionary, so
        # intern() must refuse them rather than pretend they are interned.
        class Substr(str):
            def __hash__(self):
                return 123

        self.assertRaises(TypeError, sys.intern, Substr("abc"))

    def test_sys_flags(self):
        """sys.flags exposes every documented attribute with the right type."""
        self.assertTrue(sys.flags)
        expected_attrs = ("debug", "inspect", "interactive", "optimize",
                          "dont_write_bytecode", "no_user_site", "no_site",
                          "ignore_environment", "verbose", "bytes_warning",
                          "quiet", "hash_randomization", "isolated",
                          "dev_mode", "utf8_mode")
        for name in expected_attrs:
            self.assertTrue(hasattr(sys.flags, name), name)
            # dev_mode is the only boolean flag; all the others are ints.
            expected_type = bool if name == "dev_mode" else int
            self.assertEqual(type(getattr(sys.flags, name)), expected_type,
                             name)
        self.assertTrue(repr(sys.flags))
        self.assertEqual(len(sys.flags), len(expected_attrs))

        # utf8_mode only takes one of these three values.
        self.assertIn(sys.flags.utf8_mode, {0, 1, 2})

    def assert_raise_on_new_sys_type(self, sys_attr):
        """Assert that the struct-sequence type behind *sys_attr* cannot be
        instantiated from Python code, neither by calling it nor via
        __new__ (applies to sys.flags, sys.version_info, ...)."""
        struct_type = type(sys_attr)
        with self.assertRaises(TypeError):
            struct_type()
        with self.assertRaises(TypeError):
            struct_type.__new__(struct_type)

    def test_sys_flags_no_instantiation(self):
        """The type of sys.flags must not be user-instantiable."""
        self.assert_raise_on_new_sys_type(sys.flags)

    def test_sys_version_info_no_instantiation(self):
        """The type of sys.version_info must not be user-instantiable."""
        self.assert_raise_on_new_sys_type(sys.version_info)

    def test_sys_getwindowsversion_no_instantiation(self):
        """The type of sys.getwindowsversion() must not be instantiable."""
        # get_attribute() raises SkipTest when the attribute is missing,
        # which skips this test on non-Windows platforms.
        test.support.get_attribute(sys, "getwindowsversion")
        self.assert_raise_on_new_sys_type(sys.getwindowsversion())

    @test.support.cpython_only
    def test_clear_type_cache(self):
        """sys._clear_type_cache() runs without error (CPython only)."""
        sys._clear_type_cache()

    def test_ioencoding(self):
        """PYTHONIOENCODING must control the encoding and error handler of
        a child interpreter's standard streams.

        Accepted spellings: "codec", "codec:handler", "codec:" and
        ":handler" — each section below exercises one of them.
        """
        env = dict(os.environ)

        # Test character: cent sign, encoded as 0x4A (ASCII J) in CP424,
        # not representable in ASCII.

        # A bare codec name: output is re-encoded in that codec.
        env["PYTHONIOENCODING"] = "cp424"
        p = subprocess.Popen([sys.executable, "-c", 'print(chr(0xa2))'],
                             stdout=subprocess.PIPE,
                             env=env)
        out = p.communicate()[0].strip()
        expected = ("\xa2" + os.linesep).encode("cp424")
        self.assertEqual(out, expected)

        # "codec:handler": the error handler applies to unencodable chars.
        env["PYTHONIOENCODING"] = "ascii:replace"
        p = subprocess.Popen([sys.executable, "-c", 'print(chr(0xa2))'],
                             stdout=subprocess.PIPE,
                             env=env)
        out = p.communicate()[0].strip()
        self.assertEqual(out, b'?')

        # Codec without a handler: an unencodable character is an error.
        env["PYTHONIOENCODING"] = "ascii"
        p = subprocess.Popen([sys.executable, "-c", 'print(chr(0xa2))'],
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE,
                             env=env)
        out, err = p.communicate()
        self.assertEqual(out, b'')
        self.assertIn(b'UnicodeEncodeError:', err)
        self.assertIn(rb"'\xa2'", err)

        # A trailing colon with an empty handler behaves like no handler.
        env["PYTHONIOENCODING"] = "ascii:"
        p = subprocess.Popen([sys.executable, "-c", 'print(chr(0xa2))'],
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE,
                             env=env)
        out, err = p.communicate()
        self.assertEqual(out, b'')
        self.assertIn(b'UnicodeEncodeError:', err)
        self.assertIn(rb"'\xa2'", err)

        # ":handler" alone keeps the default codec but swaps the handler;
        # surrogateescape turns the lone surrogate back into byte 0xbd.
        env["PYTHONIOENCODING"] = ":surrogateescape"
        p = subprocess.Popen([sys.executable, "-c", 'print(chr(0xdcbd))'],
                             stdout=subprocess.PIPE,
                             env=env)
        out = p.communicate()[0].strip()
        self.assertEqual(out, b'\xbd')

    @unittest.skipUnless(test.support.FS_NONASCII,
                         'requires OS support of non-ASCII encodings')
    @unittest.skipUnless(
        sys.getfilesystemencoding() == locale.getpreferredencoding(False),
        'requires FS encoding to match locale')
    def test_ioencoding_nonascii(self):
        """An empty PYTHONIOENCODING leaves stdout in the locale encoding."""
        child_env = dict(os.environ)
        child_env["PYTHONIOENCODING"] = ""
        command = [sys.executable, "-c",
                   'print(%a)' % test.support.FS_NONASCII]
        child = subprocess.Popen(command,
                                 stdout=subprocess.PIPE,
                                 env=child_env)
        output = child.communicate()[0].strip()
        self.assertEqual(output, os.fsencode(test.support.FS_NONASCII))

    @unittest.skipIf(sys.base_prefix != sys.prefix,
                     'Test is not venv-compatible')
    def test_executable(self):
        """sys.executable must be an absolute path, and must be empty in a
        child whose argv[0] points at a nonexistent program."""
        # sys.executable should be absolute
        self.assertEqual(os.path.abspath(sys.executable), sys.executable)

        # Issue #7774: Ensure that sys.executable is an empty string if argv[0]
        # has been set to a non existent program name and Python is unable to
        # retrieve the real program name

        # For a normal installation, it should work without 'cwd'
        # argument. For test runs in the build directory, see #7774.
        python_dir = os.path.dirname(os.path.realpath(sys.executable))
        # argv[0] is the bogus "nonexistent", but the real interpreter is
        # still launched via the `executable` keyword argument.
        p = subprocess.Popen([
            "nonexistent", "-c",
            'import sys; print(sys.executable.encode("ascii", "backslashreplace"))'
        ],
                             executable=sys.executable,
                             stdout=subprocess.PIPE,
                             cwd=python_dir)
        stdout = p.communicate()[0]
        executable = stdout.strip().decode("ASCII")
        p.wait()
        # Either the child could not resolve itself (b'') or it recovered
        # the real path anyway; both outcomes are accepted.
        self.assertIn(
            executable,
            ["b''",
             repr(sys.executable.encode("ascii", "backslashreplace"))])

    def check_fsencoding(self, fs_encoding, expected=None):
        """Validate that *fs_encoding* names a real codec and, when
        *expected* is given, that it matches it exactly."""
        self.assertIsNotNone(fs_encoding)
        # codecs.lookup() raises LookupError for unknown codec names.
        codecs.lookup(fs_encoding)
        if expected:
            self.assertEqual(fs_encoding, expected)

    def test_getfilesystemencoding(self):
        """macOS must always report utf-8; elsewhere any valid codec passes."""
        expected = 'utf-8' if sys.platform == 'darwin' else None
        self.check_fsencoding(sys.getfilesystemencoding(), expected)

    def c_locale_get_error_handler(self,
                                   locale,
                                   isolated=False,
                                   encoding=None):
        """Run a child interpreter under LC_ALL=*locale* (with UTF-8 mode
        and C-locale coercion disabled) and return a textual report of the
        error handlers its standard streams ended up with."""
        child_env = dict(os.environ)
        child_env["LC_ALL"] = locale
        probe = '\n'.join((
            'import sys',
            'def dump(name):',
            '    std = getattr(sys, name)',
            '    print("%s: %s" % (name, std.errors))',
            'dump("stdin")',
            'dump("stdout")',
            'dump("stderr")',
        ))
        cmd = [
            sys.executable, "-X", "utf8=0", "-X", "coerce_c_locale=0", "-c",
            probe
        ]
        if isolated:
            cmd.append("-I")
        # PYTHONIOENCODING must be either explicitly set or fully absent.
        if encoding is not None:
            child_env['PYTHONIOENCODING'] = encoding
        else:
            child_env.pop('PYTHONIOENCODING', None)
        proc = subprocess.Popen(cmd,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT,
                                env=child_env,
                                universal_newlines=True)
        return proc.communicate()[0]

    def check_locale_surrogateescape(self, locale):
        """Under the C/POSIX locale the std streams must default to
        surrogateescape (backslashreplace on stderr); PYTHONIOENCODING may
        override the handler, the codec, or neither."""
        surrogate_report = ('stdin: surrogateescape\n'
                            'stdout: surrogateescape\n'
                            'stderr: backslashreplace\n')
        strict_report = ('stdin: strict\n'
                         'stdout: strict\n'
                         'stderr: backslashreplace\n')

        # Default error handlers in an isolated interpreter.
        out = self.c_locale_get_error_handler(locale, isolated=True)
        self.assertEqual(out, surrogate_report)

        # ":handler" replaces the default error handler.
        out = self.c_locale_get_error_handler(locale, encoding=':ignore')
        self.assertEqual(out, ('stdin: ignore\n'
                               'stdout: ignore\n'
                               'stderr: backslashreplace\n'))

        # Forcing a codec switches stdin/stdout to the strict handler.
        for forced in ('iso8859-1', 'iso8859-1:'):
            out = self.c_locale_get_error_handler(locale, encoding=forced)
            self.assertEqual(out, strict_report)

        # Empty specs have no effect at all.
        for empty in (':', ''):
            out = self.c_locale_get_error_handler(locale, encoding=empty)
            self.assertEqual(out, surrogate_report)

    def test_c_locale_surrogateescape(self):
        """Std stream error handlers under LC_ALL=C."""
        self.check_locale_surrogateescape('C')

    def test_posix_locale_surrogateescape(self):
        """Std stream error handlers under LC_ALL=POSIX."""
        self.check_locale_surrogateescape('POSIX')

    def test_implementation(self):
        """sys.implementation carries the attributes required by PEP 421."""
        # Release-level nibble used in the hexversion encoding.
        levels = {'alpha': 0xA, 'beta': 0xB, 'candidate': 0xC, 'final': 0xF}

        for attr in ('name', 'version', 'hexversion', 'cache_tag'):
            self.assertTrue(hasattr(sys.implementation, attr))

        version = sys.implementation.version
        self.assertEqual(version[:2], (version.major, version.minor))

        # hexversion packs major.minor.micro, release level and serial
        # into a single integer, one byte/nibble per field.
        expected_hex = (version.major << 24 | version.minor << 16
                        | version.micro << 8
                        | levels[version.releaselevel] << 4
                        | version.serial << 0)
        self.assertEqual(sys.implementation.hexversion, expected_hex)

        # PEP 421 requires that .name be lower case.
        name = sys.implementation.name
        self.assertEqual(name, name.lower())

    @test.support.cpython_only
    def test_debugmallocstats(self):
        """sys._debugmallocstats() dumps allocator stats to stderr."""
        from test.support.script_helper import assert_python_ok
        rc, out, err = assert_python_ok(
            '-c', 'import sys; sys._debugmallocstats()')
        self.assertIn(b"free PyDictObjects", err)

        # The function takes no arguments.
        self.assertRaises(TypeError, sys._debugmallocstats, True)

    @unittest.skipUnless(hasattr(sys, "getallocatedblocks"),
                         "sys.getallocatedblocks unavailable on this build")
    def test_getallocatedblocks(self):
        """sys.getallocatedblocks() returns a plausible int that does not
        grow across garbage collections."""
        # Determine whether this build uses pymalloc: ask _testcapi for the
        # active allocator name when available, otherwise fall back to the
        # compile-time flag reported by test.support.
        try:
            import _testcapi
        except ImportError:
            with_pymalloc = support.with_pymalloc()
        else:
            alloc_name = _testcapi.pymem_getallocatorsname()
            with_pymalloc = (alloc_name in ('pymalloc', 'pymalloc_debug'))

        # Some sanity checks
        a = sys.getallocatedblocks()
        self.assertIs(type(a), int)
        if with_pymalloc:
            self.assertGreater(a, 0)
        else:
            # When WITH_PYMALLOC isn't available, we don't know anything
            # about the underlying implementation: the function might
            # return 0 or something greater.
            self.assertGreaterEqual(a, 0)
        try:
            # While we could imagine a Python session where the number of
            # multiple buffer objects would exceed the sharing of references,
            # it is unlikely to happen in a normal test run.
            self.assertLess(a, sys.gettotalrefcount())
        except AttributeError:
            # gettotalrefcount() not available
            pass
        # Collecting garbage should never increase the block count, and a
        # second collection should leave it roughly stable (+/- 50 blocks).
        gc.collect()
        b = sys.getallocatedblocks()
        self.assertLessEqual(b, a)
        gc.collect()
        c = sys.getallocatedblocks()
        self.assertIn(c, range(b - 50, b + 50))

    @test.support.requires_type_collecting
    def test_is_finalizing(self):
        """sys.is_finalizing() is False normally and True during shutdown."""
        self.assertIs(sys.is_finalizing(), False)
        # Don't use the atexit module because _Py_Finalizing is only set
        # after calling atexit callbacks
        code = """if 1:
            import sys

            class AtExit:
                is_finalizing = sys.is_finalizing
                print = print

                def __del__(self):
                    self.print(self.is_finalizing(), flush=True)

            # Keep a reference in the __main__ module namespace, so the
            # AtExit destructor will be called at Python exit
            ref = AtExit()
        """
        exitcode, out, err = assert_python_ok('-c', code)
        self.assertEqual(out.rstrip(), b'True')

    @unittest.skipUnless(hasattr(sys, 'getandroidapilevel'),
                         'need sys.getandroidapilevel()')
    def test_getandroidapilevel(self):
        """On Android builds, getandroidapilevel() is a positive int."""
        api_level = sys.getandroidapilevel()
        self.assertIsInstance(api_level, int)
        self.assertGreater(api_level, 0)

    def test_sys_tracebacklimit(self):
        """sys.tracebacklimit trims frames from the top of the traceback."""
        code = """if 1:
            import sys
            def f1():
                1 / 0
            def f2():
                f1()
            sys.tracebacklimit = %r
            f2()
        """

        full_tb = [
            b'Traceback (most recent call last):',
            b'  File "<string>", line 8, in <module>',
            b'  File "<string>", line 6, in f2',
            b'  File "<string>", line 4, in f1',
            b'ZeroDivisionError: division by zero'
        ]

        def check(tracebacklimit, expected):
            # Run the snippet in a child and compare its stderr lines.
            proc = subprocess.Popen(
                [sys.executable, '-c', code % tracebacklimit],
                stderr=subprocess.PIPE)
            stderr = proc.communicate()[1]
            self.assertEqual(stderr.splitlines(), expected)

        # Limits at or above the call depth keep the whole traceback.
        check(10, full_tb)
        check(3, full_tb)
        # Smaller limits drop the outermost frames first.
        check(2, full_tb[:1] + full_tb[2:])
        check(1, full_tb[:1] + full_tb[3:])
        # Zero and negative limits leave only the exception line.
        check(0, [full_tb[-1]])
        check(-1, [full_tb[-1]])
        check(1 << 1000, full_tb)
        check(-1 << 1000, [full_tb[-1]])
        # None restores the default (unlimited) behaviour.
        check(None, full_tb)

    def test_no_duplicates_in_meta_path(self):
        """Every finder on sys.meta_path must appear exactly once."""
        finders = sys.meta_path
        self.assertEqual(len(finders), len(set(finders)))

    @unittest.skipUnless(hasattr(sys, "_enablelegacywindowsfsencoding"),
                         'needs sys._enablelegacywindowsfsencoding()')
    def test__enablelegacywindowsfsencoding(self):
        """Enabling the legacy Windows FS encoding yields mbcs/replace."""
        code = (
            'import sys', 'sys._enablelegacywindowsfsencoding()',
            'print(sys.getfilesystemencoding(), sys.getfilesystemencodeerrors())'
        )
        rc, out, err = assert_python_ok('-c', '; '.join(code))
        decoded = out.decode('ascii', 'replace').rstrip()
        self.assertEqual(decoded, 'mbcs replace')
    def __init__(self, configFile=None, logger=None):
        """Locate, load and (if needed) create/migrate autoProcess.ini.

        Config path resolution order: the SMA_CONFIG environment variable,
        then the explicit *configFile* argument, then the legacy
        ../autoProcess.ini location, then the default
        ../config/autoProcess.ini. Missing sections/keys are filled in
        from self.defaults and written back to disk.
        """
        self.log = logger or logging.getLogger(__name__)

        self.log.info(sys.executable)
        if sys.version_info.major == 2:
            self.log.warning(
                "Python 2 is no longer officially supported. Use with caution."
            )

        # Candidate config locations relative to this module.
        defaultConfigFile = os.path.normpath(
            os.path.join(os.path.dirname(os.path.realpath(__file__)),
                         "../config/autoProcess.ini"))
        oldConfigFile = os.path.normpath(
            os.path.join(os.path.dirname(os.path.realpath(__file__)),
                         "../autoProcess.ini"))
        envConfigFile = os.environ.get("SMA_CONFIG")

        # The environment variable wins over any explicit argument.
        if envConfigFile and os.path.exists(os.path.realpath(envConfigFile)):
            configFile = os.path.realpath(envConfigFile)
            self.log.debug("SMACONFIG environment variable override found.")
        elif not configFile:
            # Prefer the legacy location only when the new one is absent.
            if not os.path.exists(defaultConfigFile) and os.path.exists(
                    oldConfigFile):
                configFile = oldConfigFile
            else:
                configFile = defaultConfigFile
            self.log.debug("Loading default config file.")

        # A directory means "use autoProcess.ini inside that directory".
        if os.path.isdir(configFile):
            configFile = os.path.realpath(
                os.path.join(configFile, "autoProcess.ini"))
            self.log.debug(
                "ConfigFile specified is a directory, joining with autoProcess.ini."
            )

        self.log.info("Loading config file %s." % configFile)

        # Setup encoding to avoid UTF-8 errors
        # Python 2 only: the reload(sys)/setdefaultencoding() trick below
        # re-exposes the setdefaultencoding API that site.py removes.
        if sys.version[0] == '2':
            SYS_ENCODING = None
            try:
                locale.setlocale(locale.LC_ALL, "")
                SYS_ENCODING = locale.getpreferredencoding()
            except (locale.Error, IOError):
                pass

            # For OSes that are poorly configured just force UTF-8
            if not SYS_ENCODING or SYS_ENCODING in ('ANSI_X3.4-1968',
                                                    'US-ASCII', 'ASCII'):
                SYS_ENCODING = 'UTF-8'

            if not hasattr(sys, "setdefaultencoding"):
                reload(sys)

            try:
                # pylint: disable=E1101
                # On non-unicode builds this will raise an AttributeError, if encoding type is not valid it throws a LookupError
                sys.setdefaultencoding(SYS_ENCODING)
            except:
                self.log.exception(
                    "Sorry, your environment is not setup correctly for utf-8 support. Please fix your setup and try again"
                )
                sys.exit(
                    "Sorry, your environment is not setup correctly for utf-8 support. Please fix your setup and try again"
                )

        write = False  # Will be changed to true if a value is missing from the config file and needs to be written

        config = SMAConfigParser()
        if os.path.isfile(configFile):
            config.read(configFile)
        else:
            self.log.error("Config file not found, creating %s." % configFile)
            # config.filename = filename
            write = True

        config = self.migrateFromOld(config, configFile)

        # Make sure all sections and all keys for each section are present
        for s in self.defaults:
            if not config.has_section(s):
                config.add_section(s)
                write = True
            for k in self.defaults[s]:
                if not config.has_option(s, k):
                    config.set(s, k, str(self.defaults[s][k]))
                    write = True

        # If any keys are missing from the config file, write them
        if write:
            self.writeConfig(config, configFile)

        self.readConfig(config)
Example #52
0
    def __init__(self, stdscr=None):
        """Start canto (Python 2 code): parse CLI arguments, load the
        config, handle OPML/URL import flags, fetch missing feeds, read
        feed state from disk via a worker process, and validate the config.

        *stdscr* is an existing curses screen passed through on restart.
        """
        signal.signal(signal.SIGUSR2, self.debug_out)

        # Let locale figure itself out
        locale.setlocale(locale.LC_ALL, "")
        enc = locale.getpreferredencoding()

        # If we're canto-fetch, jump to that main function
        if sys.argv[0].endswith("canto-fetch"):
            canto_fetch.main(enc)

        # Parse arguments that canto shares with canto-fetch, return
        # a lot of file locations and an optlist that will contain the
        # parsed, but yet unused canto specific arguments.

        conf_dir, log_file, conf_file, feed_dir, script_dir, optlist =\
                args.parse_common_args(enc,
                    "hvulaor:t:i:n:",
                    ["help","version","update","list","checkall","opml",
                        "import=","url=","checknew=","tag="])

        # Instantiate the config and start the log.
        try:
            self.cfg = get_cfg(conf_file, log_file, feed_dir, script_dir)
            self.cfg.parse()
        except:
            traceback.print_exc()
            upgrade_help()
            sys.exit(-1)

        self.cfg.log("Canto v %s (%s)" % \
                ("%d.%d.%d" % VERSION_TUPLE, GIT_SHA), "w")
        self.cfg.log("Time: %s" % time.asctime())
        self.cfg.log("Config parsed successfully.")

        # If we were passed an existing curses screen (i.e. restart)
        # pass it through to the config.

        self.cfg.stdscr = stdscr
        if self.cfg.stdscr:
            self.restarting = True
        else:
            self.restarting = False
        self.restart = False

        # Default arguments.
        flags = 0
        feed_ct = None
        opml_file = None
        url = None
        newtag = None

        # Note that every single flag that takes an argument has its
        # argument converted to unicode. Saves a lot of bullshit later.

        # NOTE(review): feed_ct is assigned below but never used in this
        # method — presumably consumed elsewhere; confirm before removing.
        for opt, arg in optlist:
            if opt in ["-u", "--update"]:
                flags |= UPDATE_FIRST
            elif opt in ["-n", "--checknew"]:
                flags |= CHECK_NEW
                feed_ct = unicode(arg, enc, "ignore")
            elif opt in ["-a", "--checkall"]:
                flags |= CHECK_NEW
            elif opt in ["-l", "--list"]:
                flags |= FEED_LIST
            elif opt in ["-o", "--opml"]:
                flags |= OUT_OPML
            elif opt in ["-i", "--import"]:
                flags |= IN_OPML
                opml_file = unicode(arg, enc, "ignore")
            elif opt in ["-r", "--url"]:
                flags |= IN_URL
                url = unicode(arg, enc, "ignore")
            elif opt in ["-t", "--tag"]:
                newtag = unicode(arg, enc, "ignore")

        # Import flags harness the same functions as their config
        # based counterparts, source_opml and source_url.

        if flags & IN_OPML:
            self.cfg.locals['source_opml'](opml_file, append=True)
            print "OPML imported."

        if flags & IN_URL:
            self.cfg.locals['source_url'](url, append=True, tag=newtag)
            print "URL added."

        # All import options should terminate.

        if flags & (IN_OPML + IN_URL):
            sys.exit(0)

        # If self.cfg had to generate a config, make sure we
        # update first.

        if self.cfg.no_conf:
            self.cfg.log("Conf was auto-generated, adding -u")
            flags |= UPDATE_FIRST

        if flags & UPDATE_FIRST:
            self.cfg.log("Pausing to update...")
            canto_fetch.run(self.cfg, True, True)

        # Detect if there are any new feeds by whether their
        # set path exists. If not, run canto-fetch but don't
        # force it, so canto-fetch intelligently updates.

        for i, f in enumerate(self.cfg.feeds):
            if not os.path.exists(f.path):
                self.cfg.log("Detected unfetched feed: %s." % f.URL)
                canto_fetch.run(self.cfg, True, False)

                #Still no go?
                if not os.path.exists(f.path):
                    self.cfg.log("Failed to fetch %s, removing" % f.URL)
                    self.cfg.feeds[i] = None
                else:
                    self.cfg.log("Fetched.\n")
                break

        # Collapse the feed array, if we had to remove some unfetchables.
        self.cfg.feeds = filter(lambda x: x != None, self.cfg.feeds)

        self.new = []
        self.old = []
        self.ph = ProcessHandler(self.cfg)

        # Force an update from disk by queueing a work item for each thread.
        # At this point, we just want to do the portion of the update where the
        # disk is read, so PROC_UPDATE is used.

        self.cfg.log("Populating feeds...")
        for f in self.cfg.feeds:
            self.ph.send((PROC_UPDATE, f.URL, []))
        for f in self.cfg.feeds:
            f.merge(self.ph.recv()[1])

        self.ph.send((PROC_GETTAGS, ))
        fixedtags = self.ph.recv()

        self.ph.kill_process()

        for i, f in enumerate(self.cfg.feeds):
            self.cfg.feeds[i].tags = fixedtags[i]

        # Now that the tags have all been straightened out, validate the config.
        # Making sure the tags are unique before validation is important because
        # part of validation is the actual creation of Tag() objects.

        try:
            self.cfg.validate()
        except Exception, err:
            print err
            upgrade_help()
            sys.exit(0)
Example #53
0
                BIN_PYTHON = os.path.join(PREFIX, 'bin', 'python')
            BIN_LORE = os.path.join(PREFIX, 'bin', 'lore')
            BIN_JUPYTER = os.path.join(PREFIX, 'bin', 'jupyter')
            BIN_FLASK = os.path.join(PREFIX, 'bin', 'flask')
            FLASK_APP = os.path.join(PREFIX, 'lib', python_minor,
                                     'site-packages', 'lore', 'www',
                                     '__init__.py')


# -- Check Local -------------------------------------------------------------
# It's critical to check locale.getpreferredencoding() before changing os.environ, to see what python actually has configured.
UNICODE_LOCALE = True  #: does the current python locale support unicode?
UNICODE_UPGRADED = False  #: did lore change current system locale for unicode support?

if platform.system() != 'Windows':
    if 'utf' not in locale.getpreferredencoding().lower():
        # Non-UTF locale: respect an explicit LANG setting, otherwise try
        # to upgrade the process locale to en_US.UTF-8 ourselves.
        if os.environ.get('LANG', None):
            UNICODE_LOCALE = False
        else:
            try:
                locale.setlocale(locale.LC_ALL, 'en_US.UTF-8')
                UNICODE_UPGRADED = True
            # NOTE(review): StandardError exists only on Python 2; on
            # Python 3 this name raises NameError. Presumably this module
            # targets Python 2 — confirm before porting.
            except StandardError:
                UNICODE_LOCALE = False

# -- Load Environment --------------------------------------------------------
ENV_FILE = os.environ.get(
    'ENV_FILE',
    '.env')  #: environment variables will be loaded from this file first
load_env_file()
Example #54
0
            sanitized_str = re.sub(r'[\x00-\x08\x0a-\x1f]', '?', s)
            raw_str = self.get_raw_string(sanitized_str)
            self.screen.addnstr(y, x, raw_str, bytes_count_to_display, style)
            return True
        except curses.error:
            return False


if __name__ == "__main__":
    # Demo / manual test for the Display class defined earlier in this
    # module: renders markup strings onto a curses screen.
    import locale

    # Initialize the locale before initscr() so the curses library uses
    # the user's configured character encoding.
    locale.setlocale(locale.LC_ALL, '')

    screen = curses.initscr()

    display = Display(screen, locale.getpreferredencoding())

    # Horizontal rule across the full width of the display.
    display.add_string("-" * display.WIDTH, pos_y=2)

    # Nested color/attribute markup, centered.
    display.add_aligned_string_markup(
        "<underline><bold><red>foo</red> <blue>bar</blue> <green>baz<green/> <cyan>qux</cyan></bold></underline>",
        x_align="center",
        y_offset=3)

    # Mixed-width (Japanese) text with overlapping markup, offset and
    # padded with '*' fill characters.
    display.add_aligned_string_markup(
        "ああ,<on_green>なんて<red>赤くて<bold>太くて</on_green>太い,</bold>そして赤い</red>リンゴ",
        y_offset=4,
        x_offset=-20,
        x_align="center",
        fill=True,
        fill_char="*")
Example #55
0
 def __init__(self, encoding):
     """Store *encoding*, falling back to the locale's preferred encoding
     when None is given."""
     self.encoding = locale.getpreferredencoding() if encoding is None else encoding
Example #56
0
File: io.py Project: qeryq/SFECOMLA
    def read(self):
        """Parse self.filename as CSV, trying progressively looser text
        encodings, and return the resulting data table.

        Each candidate encoding is tried in turn; decode/parse failures are
        remembered in ``error`` and the next candidate is attempted. If the
        successful attempt followed a UnicodeDecodeError, that earlier error
        is surfaced as a warning. Raises ValueError when every candidate
        fails.
        """
        for encoding in (
                lambda: ('us-ascii', None),  # fast
                lambda: (detect_encoding(self.filename), None),  # precise
                lambda: (locale.getpreferredencoding(False), None),
                lambda: (sys.getdefaultencoding(), None),  # desperate
                lambda: ('utf-8', None),  # ...
                lambda: ('utf-8', 'ignore')):  # fallback
            encoding, errors = encoding()
            # Clear the error flag for all except the last check, because
            # the error of second-to-last check is stored and shown as warning in owfile
            if errors != 'ignore':
                error = ''
            with self.open(self.filename,
                           mode='rt',
                           newline='',
                           encoding=encoding,
                           errors=errors) as file:
                # Sniff the CSV dialect (delimiter, quotes, ...)
                try:
                    dialect = csv.Sniffer().sniff(
                        # Take first couple of *complete* lines as sample
                        ''.join(file.readline() for _ in range(10)),
                        self.DELIMITERS)
                    delimiter = dialect.delimiter
                    quotechar = dialect.quotechar
                except UnicodeDecodeError as e:
                    error = e
                    continue
                except csv.Error:
                    # Sniffing failed on valid text: fall back to defaults.
                    delimiter = self.DELIMITERS[0]
                    quotechar = csv.excel.quotechar

                # Rewind so parsing starts from the first line again.
                file.seek(0)
                try:
                    reader = csv.reader(
                        file,
                        delimiter=delimiter,
                        quotechar=quotechar,
                        skipinitialspace=True,
                    )
                    data = self.data_table(reader)

                    # TODO: Name can be set unconditionally when/if
                    # self.filename will always be a string with the file name.
                    # Currently, some tests pass StringIO instead of
                    # the file name to a reader.
                    if isinstance(self.filename, str):
                        data.name = path.splitext(
                            path.split(self.filename)[-1])[0]
                    # Surface a decode error from an earlier attempt as a
                    # warning now that parsing ultimately succeeded.
                    if error and isinstance(error, UnicodeDecodeError):
                        pos, endpos = error.args[2], error.args[3]
                        warning = ('Skipped invalid byte(s) in position '
                                   '{}{}').format(pos, ('-' + str(endpos)) if
                                                  (endpos - pos) > 1 else '')
                        warnings.warn(warning)
                    self.set_table_metadata(self.filename, data)
                    return data
                except Exception as e:
                    error = e
                    continue
        raise ValueError('Cannot parse dataset {}: {}'.format(
            self.filename, error)) from error
Example #57
0
    'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
    'Accept-Charset':
    'UTF-8,*;q=0.5',
    'Accept-Encoding':
    'gzip,deflate,sdch',
    'Accept-Language':
    'en-US,en;q=0.8',
    'User-Agent':
    'Mozilla/5.0 (X11; Linux x86_64; rv:13.0) Gecko/20100101 Firefox/13.0'
}

# Temporarily restore the real stdout so its properties can be inspected.
sys.stdout = sys.__stdout__
if sys.stdout.isatty():
    # Interactive terminal: trust the stream's own encoding.
    default_encoding = sys.stdout.encoding.lower()
else:
    # Redirected output: fall back to the locale's preferred encoding.
    default_encoding = locale.getpreferredencoding().lower()
# r_obj is defined elsewhere in this module — presumably a wrapper object
# that re-captures stdout; TODO confirm against its definition.
sys.stdout = r_obj


def maybe_print(*s):
    """Best-effort print(): forward *s* to print() and swallow failures.

    Output errors (e.g. UnicodeEncodeError on a misconfigured console or a
    broken pipe) are intentionally ignored rather than crashing the caller.
    """
    try:
        print(*s)
    except Exception:
        # Was a bare `except:`, which also swallowed KeyboardInterrupt and
        # SystemExit; catching Exception keeps the best-effort intent
        # without hiding interpreter-control exceptions.
        pass


def tr(s):
    """Return *s* unchanged for display.

    Both branches of the original ``if default_encoding == 'utf-8'``
    returned *s* as-is, so the conditional was dead code — the non-UTF-8
    branch was presumably a placeholder for a transcoding step that was
    never written. The behavior (identity) is preserved here without the
    misleading branch.
    """
    return s
Example #58
0
# Licensed under the terms of the MIT License
# (see spyderlib/__init__.py for details)
"""
Encoding utilities
"""

import re
import os
import locale
import sys

# Local imports
from anaconda_navigator.utils.py3compat import (is_string, to_text_string,
                                                is_binary_string, is_unicode)

# Locale encoding captured once at import time; used below as a fallback
# when the filesystem encoding cannot be determined.
PREFERRED_ENCODING = locale.getpreferredencoding()


# The default encoding for file paths and environment variables should be set
# to match the default encoding that the OS is using.
def getfilesystemencoding():
    """
    Return the codec name used to encode filenames and environment
    variables, falling back to the locale's preferred encoding when the
    interpreter cannot determine one (Linux/Unix builds where
    nl_langinfo(CODESET) failed).
    """
    fs_encoding = sys.getfilesystemencoding()
    return PREFERRED_ENCODING if fs_encoding is None else fs_encoding
Example #59
0
import locale

origin_str = ['сетевое программирование', 'сокет', 'декоратор']

# Write each term on its own line, then rewind to the start of the file.
with open('resurs.txt', 'w+') as f_n:
    f_n.writelines(term + '\n' for term in origin_str)
    f_n.seek(0)

# The file is closed once the with-block exits, so this prints its repr,
# e.g. <_io.TextIOWrapper name='resurs.txt' mode='w+' encoding='UTF-8'>
print(f_n)

file_coding = locale.getpreferredencoding()

# Re-open using the locale's preferred encoding and echo every line
# (each line keeps its trailing newline, so output is double-spaced).
with open('resurs.txt', 'r', encoding=file_coding) as f_n:
    for line in f_n:
        print(line)



Example #60
0
    # NOTE(review): the [] defaults below are mutable and shared across
    # calls; the win32 branch further down calls preargs.remove() BEFORE
    # preargs is copied, so it can mutate the shared default — confirm.
    def compile_str(module_name,
                    src_code,
                    location=None,
                    include_dirs=[],
                    lib_dirs=[],
                    libs=[],
                    preargs=[],
                    rpaths=rpath_defaults,
                    py_module=True,
                    hide_symbols=True):
        """

        Parameters
        ----------
        module_name: str
             This has been embedded in the src_code.
        src_code
            A complete c or c++ source listing for the module.
        location
            A pre-existing filesystem directory where the
            cpp file and .so will be written.
        include_dirs
            A list of include directory names (each gets prefixed with -I).
        lib_dirs
            A list of library search path directory names (each gets
            prefixed with -L).
        libs
            A list of libraries to link with (each gets prefixed with -l).
        preargs
            A list of extra compiler arguments.
        rpaths
            List of rpaths to use with Xlinker. Defaults to `rpath_defaults`.
        py_module
            If False, compile to a shared library, but
            do not import as a Python module.
        hide_symbols
            If True (the default), hide all symbols from the library symbol
            table unless explicitely exported.

        Returns
        -------
        module
            Dynamically-imported python module of the compiled code.
            (unless py_module is False, in that case returns None.)

        Notes
        -----
        On Windows 7 with nvcc 3.1 we need to compile in the real directory
        Otherwise nvcc never finish.

        """
        # Remove empty string directory
        include_dirs = [d for d in include_dirs if d]
        lib_dirs = [d for d in lib_dirs if d]

        # Private copy so appends below don't leak back to the caller.
        rpaths = list(rpaths)

        if sys.platform == "win32":
            # Remove some compilation args that cl.exe does not understand.
            # cl.exe is the compiler used by nvcc on Windows.
            for a in [
                    "-Wno-write-strings", "-Wno-unused-label",
                    "-Wno-unused-variable", "-fno-math-errno"
            ]:
                if a in preargs:
                    preargs.remove(a)
        # NOTE(review): this None-check runs AFTER the win32 loop above;
        # preargs=None on win32 would already have raised TypeError there
        # — confirm the intended ordering.
        if preargs is None:
            preargs = []
        else:
            # Copy so subsequent edits stay local to this call.
            preargs = list(preargs)
        if sys.platform != 'win32':
            preargs.append('-fPIC')
        if config.cmodule.remove_gxx_opt:
            # Strip optimization flags when configured to do so.
            preargs = [p for p in preargs if not p.startswith('-O')]

        cuda_root = config.cuda.root

        # The include dirs gived by the user should have precedence over
        # the standards ones.
        include_dirs = include_dirs + std_include_dirs()
        if os.path.abspath(os.path.split(__file__)[0]) not in include_dirs:
            include_dirs.append(os.path.abspath(os.path.split(__file__)[0]))

        libs = libs + std_libs()
        if 'cudart' not in libs:
            libs.append('cudart')

        lib_dirs = lib_dirs + std_lib_dirs()

        if sys.platform != 'darwin':
            # config.dnn.include_path add this by default for cudnn in the
            # new back-end. This should not be used in this back-end. So
            # just remove them.
            lib_dirs = [
                ld for ld in lib_dirs
                if not (ld == os.path.join(cuda_root, 'lib')
                        or ld == os.path.join(cuda_root, 'lib64'))
            ]

        if sys.platform != 'darwin':
            # sometimes, the linker cannot find -lpython so we need to tell it
            # explicitly where it is located
            # this returns somepath/lib/python2.x
            python_lib = distutils.sysconfig.get_python_lib(plat_specific=1,
                                                            standard_lib=1)
            python_lib = os.path.dirname(python_lib)
            if python_lib not in lib_dirs:
                lib_dirs.append(python_lib)

        # Despite the .cu name this is where the full module source goes.
        cppfilename = os.path.join(location, 'mod.cu')
        with open(cppfilename, 'w') as cppfile:

            _logger.debug('Writing module C++ code to %s', cppfilename)
            cppfile.write(src_code)

        lib_filename = os.path.join(
            location, '%s.%s' % (module_name, get_lib_extension()))

        _logger.debug('Generating shared lib %s', lib_filename)
        # TODO: Why do these args cause failure on gtx285 that has 1.3
        # compute capability? '--gpu-architecture=compute_13',
        # '--gpu-code=compute_13',
        # nvcc argument
        # Split the extra args: preargs1 is understood by nvcc directly
        # (linker options are rewritten through -Xlinker); preargs2 is
        # forwarded to the host compiler via a single -Xcompiler below.
        preargs1 = []
        preargs2 = []
        for pa in preargs:
            if pa.startswith('-Wl,'):
                # the -rpath option is not understood by the Microsoft linker
                if sys.platform != 'win32' or not pa.startswith('-Wl,-rpath'):
                    preargs1.append('-Xlinker')
                    preargs1.append(pa[4:])
                continue
            for pattern in [
                    '-O', '-arch=', '-ccbin=', '-G', '-g', '-I', '-L',
                    '--fmad', '--ftz', '--maxrregcount', '--prec-div',
                    '--prec-sqrt', '--use_fast_math', '-fmad', '-ftz',
                    '-maxrregcount', '-prec-div', '-prec-sqrt',
                    '-use_fast_math', '--use-local-env', '--cl-version=',
                    '-std='
            ]:

                if pa.startswith(pattern):
                    preargs1.append(pa)
                    break
            else:
                # Not recognized by nvcc: pass through to the host compiler.
                preargs2.append(pa)

        # Don't put -G by default, as it slow things down.
        # We aren't sure if -g slow things down, so we don't put it by default.
        cmd = [nvcc_path, '-shared'] + preargs1
        if config.nvcc.compiler_bindir:
            cmd.extend(['--compiler-bindir', config.nvcc.compiler_bindir])

        if sys.platform == 'win32':
            # add flags for Microsoft compiler to create .pdb files
            preargs2.extend(['/Zi', '/MD'])
            cmd.extend(['-Xlinker', '/DEBUG'])
            # remove the complaints for the duplication of `double round(double)`
            # in both math_functions.h and pymath.h,
            # by not including the one in pymath.h
            cmd.extend(['-D HAVE_ROUND'])
        else:
            if hide_symbols:
                preargs2.append('-fvisibility=hidden')

        # Match the shared library's bitwidth to the running Python.
        if local_bitwidth() == 64:
            cmd.append('-m64')
        else:
            cmd.append('-m32')

        if len(preargs2) > 0:
            cmd.extend(['-Xcompiler', ','.join(preargs2)])

        # We should not use rpath if possible. If the user provided
        # provided an cuda.root flag, we need to add one, but
        # otherwise, we don't add it. See gh-1540 and
        # https://wiki.debian.org/RpathIssue for details.

        if (not type(config.cuda).root.is_default
                and os.path.exists(os.path.join(config.cuda.root, 'lib'))):

            rpaths.append(os.path.join(config.cuda.root, 'lib'))
            if sys.platform != 'darwin':
                # the CUDA libs are universal (contain both 32-bit and 64-bit)
                rpaths.append(os.path.join(config.cuda.root, 'lib64'))
        if sys.platform != 'win32':
            # the -rpath option is not understood by the Microsoft linker
            for rpath in rpaths:
                cmd.extend(['-Xlinker', ','.join(['-rpath', rpath])])
        # to support path that includes spaces, we need to wrap it with double quotes on Windows
        path_wrapper = "\"" if os.name == 'nt' else ""
        cmd.extend([
            '-I%s%s%s' % (path_wrapper, idir, path_wrapper)
            for idir in include_dirs
        ])
        cmd.extend([
            '-L%s%s%s' % (path_wrapper, ldir, path_wrapper)
            for ldir in lib_dirs
        ])
        cmd.extend(['-o', lib_filename])
        # Use the bare filename; we chdir into `location` before running
        # (see the Windows note in the docstring).
        cmd.append(os.path.split(cppfilename)[-1])
        cmd.extend(['-l%s' % l for l in libs])
        if sys.platform == 'darwin':
            # This tells the compiler to use the already-loaded python
            # symbols (which should always be the right ones).
            cmd.extend(['-Xcompiler', '-undefined,dynamic_lookup'])

        # Remove "-u Symbol" arguments, since they are usually not
        # relevant for the new compilation, even if they were used for
        # compiling python.  If they are necessary, the nvcc syntax is
        # "-U Symbol" with a capital U.
        done = False
        while not done:
            try:
                indexof = cmd.index('-u')
                cmd.pop(indexof)  # Remove -u
                cmd.pop(indexof)  # Remove argument to -u
            except ValueError:
                done = True

        # CUDA Toolkit v4.1 Known Issues:
        # Host linker on Mac OS 10.7 (and 10.6 for me) passes -no_pie option
        # to nvcc this option is not recognized and generates an error
        # http://stackoverflow.com/questions/9327265/nvcc-unknown-option-no-pie
        # Passing -Xlinker -pie stops -no_pie from getting passed
        if sys.platform == 'darwin' and nvcc_version >= '4.1':
            cmd.extend(['-Xlinker', '-pie'])

        # cmd.append("--ptxas-options=-v") #uncomment this to see
        # register and shared-mem requirements
        _logger.debug('Running cmd %s', ' '.join(cmd))
        orig_dir = os.getcwd()
        try:
            os.chdir(location)
            p = subprocess.Popen(cmd,
                                 stdout=subprocess.PIPE,
                                 stderr=subprocess.PIPE)
            nvcc_stdout_raw, nvcc_stderr_raw = p.communicate()[:2]
            # Decode compiler output with the console's preferred encoding:
            # nvcc/cl.exe emit locale-encoded bytes, not necessarily UTF-8.
            console_encoding = getpreferredencoding()
            nvcc_stdout = decode_with(nvcc_stdout_raw, console_encoding)
            nvcc_stderr = decode_with(nvcc_stderr_raw, console_encoding)
        finally:
            # Always restore the working directory, even on failure.
            os.chdir(orig_dir)

        # Log stderr lines, skipping known-harmless warnings.
        for eline in nvcc_stderr.split('\n'):
            if not eline:
                continue
            if 'skipping incompatible' in eline:
                # ld is skipping an incompatible library
                continue
            if 'declared but never referenced' in eline:
                continue
            if 'statement is unreachable' in eline:
                continue
            _logger.info("NVCC: %s", eline)

        if p.returncode:
            # Compilation failed: dump the numbered source to stderr to make
            # the compiler's line references actionable.
            for i, l in enumerate(src_code.split('\n')):
                print(i + 1, l, file=sys.stderr)
            print('===============================', file=sys.stderr)
            # filter the output from the compiler
            for l in nvcc_stderr.split('\n'):
                if not l:
                    continue
                # filter out the annoying declaration warnings

                try:
                    if l[l.index(':'):].startswith(': warning: variable'):
                        continue
                    if l[l.index(':'):].startswith(': warning: label'):
                        continue
                except Exception:
                    pass
                print(l, file=sys.stderr)
            print(nvcc_stdout)
            print(cmd)
            raise Exception('nvcc return status', p.returncode, 'for cmd',
                            ' '.join(cmd))
        elif config.cmodule.compilation_warning and nvcc_stdout:
            print(nvcc_stdout)

        # On Windows, nvcc print useless stuff by default
        if sys.platform != 'win32' and nvcc_stdout:
            # this doesn't happen to my knowledge
            print("DEBUG: nvcc STDOUT", nvcc_stdout, file=sys.stderr)

        if py_module:
            # touch the __init__ file
            open(os.path.join(location, "__init__.py"), 'w').close()
            return dlimport(lib_filename)