Example #1
def _copy(src, dst):
    """
    Copy a file src to dst (directory).
    
    If dst already exists and has the same content, nothing is done.

    :param src: source file or directory
    :type src: path
    :param dst: destination file or directory
    :type dst: path
    """
    logger = logging.getLogger(__name__)
    src = text_to_native_str(src)
    dst = text_to_native_str(dst)
    if os.path.exists(dst) and os.path.isdir(dst):
        dst = os.path.join(dst, os.path.basename(src))
    if os.path.exists(dst):
        if not filecmp.cmp(src, dst):
            logger.debug('updating %s', get_unicode(dst, enc))
            shutil.copy2(src, dst)
        else:
            logger.debug('%s already up to date', get_unicode(dst, enc))
    else:
        logger.debug('copy %s to %s', get_unicode(src, enc),
                     get_unicode(dst, enc))
        shutil.copy2(src, dst)
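For orientation, since every example on this page centers on it: a minimal sketch of what future.utils.text_to_native_str does, assuming only the future package is installed. On Python 3 it returns the text unchanged as str; on Python 2 it encodes the text to a native byte string with the given encoding.

from future.utils import text_to_native_str

s = text_to_native_str(u'caf\xe9', encoding='utf-8')
# Python 3: a no-op, s is the str 'café'
# Python 2: s is the encoded native str 'caf\xc3\xa9'
assert isinstance(s, str)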
Example #2
    def connect_to_smtp_server(self, config):
        self.host = config['smtp_host']
        self.port = config['smtp_port']
        self.ssl = config['smtp_ssl']
        self.tls = config['smtp_tls']
        self.username = config.get('smtp_username')
        self.password = config.get('smtp_password')
        try:
            log.debug('connecting to smtp server %s:%s', self.host, self.port)
            self.mail_server = smtplib.SMTP_SSL if self.ssl else smtplib.SMTP
            self.mail_server = self.mail_server(self.host, self.port)
            if self.tls:
                self.mail_server.ehlo()
                self.mail_server.starttls()
                self.mail_server.ehlo()
        except (socket.error, OSError) as e:
            raise PluginWarning(str(e))

        try:
            if self.username:
                # Forcing to use `str` type
                log.debug('logging in to smtp server using username: %s', self.username)
                self.mail_server.login(text_to_native_str(self.username), text_to_native_str(self.password))
        except (IOError, SMTPAuthenticationError) as e:
            raise PluginWarning(str(e))
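A hypothetical config dict for the method above, using the keys the code reads (all values are placeholders):

config = {
    'smtp_host': 'smtp.example.com',
    'smtp_port': 587,
    'smtp_ssl': False,
    'smtp_tls': True,
    'smtp_username': 'user@example.com',
    'smtp_password': 'secret',
}
# notifier.connect_to_smtp_server(config)   # `notifier` being whatever plugin instance owns the method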
Example #3
    def _importtestmodule(self):
        # We have to remove the __future__ statements *before* parsing
        # with compile, otherwise the flags are ignored.
        content = re.sub(_RE_FUTURE_IMPORTS, b'\n', self.content)

        new_mod = types.ModuleType(self.mod_name)
        new_mod.__file__ = text_to_native_str(self.fspath)

        if hasattr(self, '_transform_ast'):
            # ast.parse doesn't let us hand-select the __future__
            # statements, but built-in compile, with the PyCF_ONLY_AST
            # flag does.
            tree = compile(
                content, text_to_native_str(self.fspath), 'exec',
                self.flags | ast.PyCF_ONLY_AST, True)
            tree = self._transform_ast(tree)
            # Now that we've transformed the tree, recompile it
            code = compile(
                tree, text_to_native_str(self.fspath), 'exec')
        else:
            # If we don't need to transform the AST, we can skip
            # parsing/compiling in two steps
            code = compile(
                content, text_to_native_str(self.fspath), 'exec',
                self.flags, True)

        pwd = os.getcwd()
        try:
            os.chdir(os.path.dirname(text_to_native_str(self.fspath)))
            exec_(code, new_mod.__dict__)
        finally:
            os.chdir(pwd)
        self.config.pluginmanager.consider_module(new_mod)
        return new_mod
Example #4
    def _applyToSolver(self, solver, matrix):
        if matrix.NumGlobalNonzeros() <= matrix.NumGlobalRows():
            return

        self.Prec = ML.MultiLevelPreconditioner(matrix, False)
        self.Prec.SetParameterList({text_to_native_str("output"): 0, text_to_native_str("smoother: type") : text_to_native_str("Aztec"), text_to_native_str("smoother: Aztec as solver") : True})
        self.Prec.ComputePreconditioner()
        solver.SetPrecOperator(self.Prec)
Example #5
def set_wsgi_header(headers, name, value):
    """Replace a wsgi header, ensuring correct encoding"""
    native_name = text_to_native_str(name)
    for i, (k, v) in enumerate(headers):
        if native_name == k:
            headers[i] = (native_name, text_to_native_str(value))
            return

    headers.append((native_name, text_to_native_str(value)))
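A short usage sketch of set_wsgi_header with an illustrative PEP 3333 header list; an existing header is replaced in place, a new one is appended:

headers = [('Content-Type', 'text/html')]
set_wsgi_header(headers, u'Content-Type', u'text/plain; charset=utf-8')
set_wsgi_header(headers, u'X-Request-Id', u'abc123')
# headers == [('Content-Type', 'text/plain; charset=utf-8'), ('X-Request-Id', 'abc123')]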
Example #6
 def save_results(self, filename, append=False):
     if self.cached_tasklist:
         savekey = text_to_native_str('processed_tasks')
         db = shelve.open(text_to_native_str(filename), writeback=True)
         if not append or savekey not in db:
             db[savekey] = self.cached_tasklist
         else:
             db[savekey].extend(self.cached_tasklist)
         db.close()
     else:
         raise ValueError('Nothing to save')
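The text_to_native_str calls matter on Python 2, where shelve expects native (byte) string keys and filenames; the same pattern in a stand-alone sketch (the path is a placeholder):

import shelve
from future.utils import text_to_native_str

savekey = text_to_native_str('processed_tasks')
db = shelve.open(text_to_native_str('/tmp/results.shelf'), writeback=True)
db[savekey] = [1, 2, 3]
db.close()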
Example #7
def _copytree(src, dst, excludes=[], includes=[], recursive=True):
    """ 
    Copy a directory structure src to destination
    
    :param src: source file or directory
    :type src: path
    :param dst: destination file or directory
    :type dst: path
    :param excludes: list of patterns to exclude
    :type excludes: ngomodel.validators.List
    :param includes: list of patterns to include
    :type includes: ngomodel.validators.List
    :param recursive: recursive copy
    """
    logger = logging.getLogger(__name__)
    # make sure to convert string from ngopath
    # convert everything to ngopath and back to string
    src = text_to_native_str(str(src))
    dst = text_to_native_str(str(dst))

    incl = r'|'.join([fnmatch.translate(x) for x in includes])
    excl = r'|'.join([fnmatch.translate(x) for x in excludes]) or r'$.'

    allnames = os.listdir(src)
    names = [name for name in allnames if not re.match(excl, name)]

    errors = []

    if not os.path.exists(dst):
        logger.debug('making dir %s', get_unicode(dst, enc))
        os.makedirs(dst)

    for name in names:
        srcname = os.path.join(src, name)
        dstname = os.path.join(dst, name)
        try:
            if os.path.isdir(srcname):
                if recursive:
                    _copytree(srcname, dstname, excludes, includes, recursive)
            elif re.match(incl, name):
                _copy(srcname, dstname)
        except (IOError, os.error) as why:
            errors.append((srcname, dstname, str(why)))
    try:
        shutil.copystat(src, dst)
    except WindowsError:
        # can't copy file access times on Windows
        pass
    except OSError as why:
        errors.append((src, dst, str(why)))
    if errors:
        raise CopyException(errors)
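An illustrative call of _copytree with fnmatch-style patterns (paths and patterns are placeholders):

_copytree('templates', '/tmp/site',
          excludes=['*.pyc', '.git*'],
          includes=['*.html', '*.css'],
          recursive=True)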
Example #8
 def load_results(self, cls, filename):
     loadkey = text_to_native_str('processed_tasks')
     db = shelve.open(text_to_native_str(filename))
     loaded_data = db[loadkey]
     db.close()
     # Hack to help Enrico convert data to the new structure
     if not isinstance(loaded_data[0].output, AnyPyProcessOutput):
         for task in loaded_data:
             task.output = AnyPyProcessOutput(task.output)
     # Check if the function is called as an instance method.
     if self is not None:
         self.cached_tasklist = loaded_data
     results = [task.get_output(True) for task in loaded_data]
     return AnyPyProcessOutputList(results)
Example #9
    def _solve_(self, L, x, b):

        for iteration in range(self.iterations):
            # errorVector = L*x - b
            errorVector = Epetra.Vector(L.RangeMap())
            L.Multiply(False, x, errorVector)
            # If A is an Epetra.Vector with map M
            # and B is an Epetra.Vector with map M
            # and C = A - B
            # then C is an Epetra.Vector with *no map* !!!?!?!
            errorVector -= b

            tol = errorVector.Norm1()

            if iteration == 0:
                tol0 = tol

            if (tol / tol0) <= self.tolerance:
                break

            xError = Epetra.Vector(L.RowMap())

            Problem = Epetra.LinearProblem(L, xError, errorVector)
            Solver = self.Factory.Create(text_to_native_str("Klu"), Problem)
            Solver.Solve()

            x[:] = x - xError

        if 'FIPY_VERBOSE_SOLVER' in os.environ:
            from fipy.tools.debug import PRINT
            PRINT('iterations: %d / %d' % (iteration + 1, self.iterations))
            PRINT('residual:', errorVector.Norm2())
Example #10
    def _solve_(self, L, x, b):

        for iteration in range(self.iterations):
            # errorVector = L*x - b
            errorVector = Epetra.Vector(L.RangeMap())
            L.Multiply(False, x, errorVector)
            # If A is an Epetra.Vector with map M
            # and B is an Epetra.Vector with map M
            # and C = A - B
            # then C is an Epetra.Vector with *no map* !!!?!?!
            errorVector -= b

            tol = errorVector.Norm1()

            if iteration == 0:
                tol0 = tol

            if (tol / tol0) <= self.tolerance:
                break

            xError = Epetra.Vector(L.RowMap())

            Problem = Epetra.LinearProblem(L, xError, errorVector)
            Solver = self.Factory.Create(text_to_native_str("Klu"), Problem)
            Solver.Solve()

            x[:] = x - xError

        if 'FIPY_VERBOSE_SOLVER' in os.environ:
            from fipy.tools.debug import PRINT
            PRINT('iterations: %d / %d' % (iteration + 1, self.iterations))
            PRINT('residual:', errorVector.Norm2())
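The loop above is plain iterative refinement (defect correction): form the residual L*x - b, solve for a correction with a direct solver, and subtract it from x. The same idea in a few lines of NumPy, as a sketch only, not the Trilinos/Epetra API:

import numpy as np

def refine(L, x, b, iterations=10, tolerance=1e-10):
    tol0 = None
    for _ in range(iterations):
        error = L @ x - b                 # residual, like L.Multiply() followed by the subtraction
        tol = np.abs(error).sum()         # 1-norm, like errorVector.Norm1()
        tol0 = tol if tol0 is None else tol0
        if tol / tol0 <= tolerance:
            break
        x -= np.linalg.solve(L, error)    # direct solve, like the KLU step
    return x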
Example #11
 def exportMmf(self, filename):
     """
     Exports the matrix to a Matrix Market file of the given `filename`.
     """
     self.fillComplete()
     EpetraExt.RowMatrixToMatrixMarketFile(text_to_native_str(filename),
                                           self.matrix)
Example #12
    def test_basic(self):
        cases = [
            {'data': 'chips=ahoy; vienna=finger',
             'dict': {'chips':'ahoy', 'vienna':'finger'},
             'repr': "<SimpleCookie: chips='ahoy' vienna='finger'>",
             'output': 'Set-Cookie: chips=ahoy\nSet-Cookie: vienna=finger'},

            {'data': 'keebler="E=mc2; L=\\"Loves\\"; fudge=\\012;"',
             'dict': {'keebler' : 'E=mc2; L="Loves"; fudge=\012;'},
             'repr': '''<SimpleCookie: keebler='E=mc2; L="Loves"; fudge=\\n;'>''',
             'output': 'Set-Cookie: keebler="E=mc2; L=\\"Loves\\"; fudge=\\012;"'},

            # Check illegal cookies that have an '=' char in an unquoted value
            {'data': 'keebler=E=mc2',
             'dict': {'keebler' : 'E=mc2'},
             'repr': "<SimpleCookie: keebler='E=mc2'>",
             'output': 'Set-Cookie: keebler=E=mc2'},

            # Cookies with ':' character in their name. Though not mentioned in
            # RFC, servers / browsers allow it.

             {'data': 'key:term=value:term',
             'dict': {'key:term' : 'value:term'},
             'repr': "<SimpleCookie: key:term='value:term'>",
             'output': 'Set-Cookie: key:term=value:term'},

        ]

        for case in cases:
            C = cookies.SimpleCookie()
            C.load(case['data'])
            self.assertEqual(repr(C), text_to_native_str(case['repr']))
            self.assertEqual(C.output(sep='\n'), case['output'])
            for k, v in sorted(case['dict'].items()):
                self.assertEqual(C[k].value, v)
Example #13
def filter_formatdate(val, format):
    """Returns a string representation of a datetime object according to format string."""
    encoding = locale.getpreferredencoding()
    if not isinstance(val, (datetime, date, time)):
        return val
    return native_str_to_text(
        val.strftime(text_to_native_str(format, encoding=encoding)), encoding=encoding
    )
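Illustrative inputs and outputs for the filter above (format strings follow strftime; non-date values pass through unchanged):

from datetime import datetime
filter_formatdate(datetime(2020, 5, 17, 9, 30), '%Y-%m-%d %H:%M')   # -> '2020-05-17 09:30'
filter_formatdate('not a date', '%Y')                               # -> 'not a date'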
Example #14
 def __str__(self):
     format_str = self.format_str
     if isinstance(format_str, bytes):
         format_str = format_str.decode('utf-8')
     fstr = TimeUtils.strftime(self,
                               text_to_native_str(format_str, "utf-8"))
     if isinstance(fstr, bytes):
         fstr = fstr.decode("utf-8")
     return fstr
Example #15
    def set_source_pipeline_playing(self):
        if (self.source_pipeline.set_state(
                Gst.State.PAUSED) == Gst.StateChangeReturn.NO_PREROLL):
            # This is a live source, drop frames if we get behind
            self.source_pipeline.get_by_name('_stbt_raw_frames_queue') \
                .set_property('leaky', text_to_native_str('downstream'))
            self.source_pipeline.get_by_name('appsink') \
                .set_property('sync', False)

        self.source_pipeline.set_state(Gst.State.PLAYING)
Example #16
    def notify(self, title, message, config):
        """
        Send an email notification

        :param str message: message body
        :param str title: message subject
        :param dict config: email plugin config
        """

        if not isinstance(config['to'], list):
            config['to'] = [config['to']]

        email = MIMEMultipart('alternative')
        email['To'] = ','.join(config['to'])
        email['From'] = config['from']
        email['Subject'] = title
        email['Date'] = formatdate(localtime=True)
        content_type = 'html' if config['html'] else 'plain'
        email.attach(MIMEText(message.encode('utf-8'), content_type, _charset='utf-8'))

        try:
            log.debug('sending email notification to %s:%s', config['smtp_host'], config['smtp_port'])
            mail_server = smtplib.SMTP_SSL if config['smtp_ssl'] else smtplib.SMTP
            mail_server = mail_server(config['smtp_host'], config['smtp_port'])
            if config['smtp_tls']:
                mail_server.ehlo()
                mail_server.starttls()
                mail_server.ehlo()
        except (socket.error, OSError) as e:
            raise PluginWarning(str(e))

        try:
            if config.get('smtp_username'):
                # Forcing to use `str` type
                log.debug('logging in to smtp server using username: %s', config['smtp_username'])
                mail_server.login(text_to_native_str(config['smtp_username']),
                                  text_to_native_str(config['smtp_password']))
            mail_server.sendmail(email['From'], config['to'], email.as_string())
        except IOError as e:
            raise PluginWarning(str(e))

        mail_server.quit()
Example #17
 def execute_cmd(self, cmd, allow_background, encoding):
     log.verbose('Executing: %s', cmd)
     p = subprocess.Popen(text_to_native_str(cmd, encoding=io_encoding), shell=True, stdin=subprocess.PIPE,
                          stdout=subprocess.PIPE, stderr=subprocess.STDOUT, close_fds=False)
     if not allow_background:
         r, w = (p.stdout, p.stdin)
         response = r.read().decode(io_encoding)
         r.close()
         w.close()
         if response:
             log.info('Stdout: %s', response.rstrip())  # rstrip to get rid of newlines
     return p.wait()
Example #18
 def execute(self, func, line, cell, magic_cls):
     formatter = DollarFormatter()
     cell = formatter.vformat(cell, args=[], kwargs=magic_cls.shell.user_ns.copy())
     _, args = self.arguments(func, line)
     result = relational.query(text_to_native_str(cell))
     if args.result:
         magic_cls.shell.user_ns[args.result] = result
     else:
         result = list(result)
         table = Table()
         if result:
             table.append(list(viewkeys(result[0])))
         for line in result:
             table.append(list(viewvalues(line)))
         return table
Example #19
 def execute(self, func, line, cell, magic_cls):
     formatter = DollarFormatter()
     cell = formatter.vformat(cell, args=[],
                              kwargs=magic_cls.shell.user_ns.copy())
     _, args = self.arguments(func, line)
     result = relational.query(text_to_native_str(cell))
     if args.result:
         magic_cls.shell.user_ns[args.result] = result
     else:
         result = list(result)
         table = Table()
         if result:
             table.append(list(viewkeys(result[0])))
         for line in result:
             table.append(list(viewvalues(line)))
         return table
Example #20
	def __repr__(self):
		s      = 'Dataset\n'
		s     += '   Name      : "%s"\n' %self.__class__.__name__
		s     += '   Design    :  %s\n' %self.design
		s     += '   Data dim  :  %d\n' %self.dim
		if self.cite:
			s += '   Reference :  %s\n' %self.cite
		if self.www:
			s += '   Web       :  %s\n' %self.www
		if self.datafile:
			s += '   Data file :  %s\n' %self.datafile
		if self.note:
			s += '   %s :  %s\n' %tuple(self.note)
		ss     = self.get_expected_results_as_string()
		s     += ss 
		return text_to_native_str(s, encoding='utf-8')
Example #21
def _wrap_urlopen(url, timeout=None):
    """
    Handles alternate schemes using urllib, wraps the response in a requests.Response

    This is not installed as an adapter in requests, since urls without network locations
    (e.g. file:///somewhere) will cause errors

    """
    try:
        raw = urlopen(text_to_native_str(url, encoding='utf-8'), timeout=timeout)
    except IOError as e:
        msg = 'Error getting %s: %s' % (url, e)
        log.error(msg)
        raise RequestException(msg)
    resp = requests.Response()
    resp.raw = raw
    # requests passes the `decode_content` kwarg to read
    orig_read = raw.read
    resp.raw.read = lambda size, **kwargs: orig_read(size)
    resp.status_code = raw.code or 200
    resp.headers = requests.structures.CaseInsensitiveDict(raw.headers)
    return resp
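An illustrative call for a scheme that requests itself will not handle (the path is a placeholder; file:// responses typically carry no status code, so the code falls back to 200):

resp = _wrap_urlopen('file:///tmp/example.txt', timeout=5)
print(resp.status_code)      # 200
print(resp.raw.read(64))     # first bytes of the file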
Example #22
    def collect(self):
        # First, just do the regular import of the module to make
        # sure it's sane and valid.  This block is copied directly
        # from py.test
        try:
            mod = self.fspath.pyimport(ensuresyspath=True)
        except SyntaxError:
            import py
            excinfo = py.code.ExceptionInfo()
            raise self.CollectError(excinfo.getrepr(style="short"))
        except self.fspath.ImportMismatchError:
            e = sys.exc_info()[1]
            raise self.CollectError(
                "import file mismatch:\n"
                "imported module %r has this __file__ attribute:\n"
                "  %s\n"
                "which is not the same as the test file we want to collect:\n"
                "  %s\n"
                "HINT: remove __pycache__ / .pyc files and/or use a "
                "unique basename for your test file modules"
                % e.args
            )

        # Now get the file's content.
        with io.open(text_to_native_str(self.fspath), 'rb') as fd:
            content = fd.read()

        # If the file contains the special marker, test it both ways.
        if b'TEST_UNICODE_LITERALS' in content:
            # Return the file in both unicode_literal-enabled and disabled forms
            return [
                UnicodeLiteralsModule(mod.__name__, content, self.fspath, self),
                NoUnicodeLiteralsModule(mod.__name__, content, self.fspath, self)
            ]
        else:
            return [pytest.Module(self.fspath, self)]
Example #23
    def _make_torrent_options_dict(self, config, entry):

        opt_dic = {}

        for opt_key in (
            "path",
            "addpaused",
            "honourlimits",
            "bandwidthpriority",
            "maxconnections",
            "maxupspeed",
            "maxdownspeed",
            "ratio",
            "main_file_only",
            "main_file_ratio",
            "magnetization_timeout",
            "include_subs",
            "content_filename",
            "include_files",
            "skip_files",
            "rename_like_files",
            "queue_position",
        ):
            # Values are not merged between task and config;
            # the task entry takes priority, then config is used
            if opt_key in entry:
                opt_dic[opt_key] = entry[opt_key]
            elif opt_key in config:
                opt_dic[opt_key] = config[opt_key]

        options = {"add": {}, "change": {}, "post": {}}

        add = options["add"]
        if opt_dic.get("path"):
            try:
                path = os.path.expanduser(entry.render(opt_dic["path"]))
                add["download_dir"] = text_to_native_str(pathscrub(path), "utf-8")
            except RenderError as e:
                log.error("Error setting path for %s: %s" % (entry["title"], e))
        if "bandwidthpriority" in opt_dic:
            add["bandwidthPriority"] = opt_dic["bandwidthpriority"]
        if "maxconnections" in opt_dic:
            add["peer_limit"] = opt_dic["maxconnections"]
        # make sure we add it paused, will modify status after adding
        add["paused"] = True

        change = options["change"]
        if "honourlimits" in opt_dic and not opt_dic["honourlimits"]:
            change["honorsSessionLimits"] = False
        if "maxupspeed" in opt_dic:
            change["uploadLimit"] = opt_dic["maxupspeed"]
            change["uploadLimited"] = True
        if "maxdownspeed" in opt_dic:
            change["downloadLimit"] = opt_dic["maxdownspeed"]
            change["downloadLimited"] = True

        if "ratio" in opt_dic:
            change["seedRatioLimit"] = opt_dic["ratio"]
            if opt_dic["ratio"] == -1:
                # seedRatioMode:
                # 0 follow the global settings
                # 1 override the global settings, seeding until a certain ratio
                # 2 override the global settings, seeding regardless of ratio
                change["seedRatioMode"] = 2
            else:
                change["seedRatioMode"] = 1

        if "queue_position" in opt_dic:
            change["queuePosition"] = opt_dic["queue_position"]

        post = options["post"]
        # set to modify paused status after
        if "addpaused" in opt_dic:
            post["paused"] = opt_dic["addpaused"]
        if "main_file_only" in opt_dic:
            post["main_file_only"] = opt_dic["main_file_only"]
        if "main_file_ratio" in opt_dic:
            post["main_file_ratio"] = opt_dic["main_file_ratio"]
        if "magnetization_timeout" in opt_dic:
            post["magnetization_timeout"] = opt_dic["magnetization_timeout"]
        if "include_subs" in opt_dic:
            post["include_subs"] = opt_dic["include_subs"]
        if "content_filename" in opt_dic:
            try:
                post["content_filename"] = entry.render(opt_dic["content_filename"])
            except RenderError as e:
                log.error("Unable to render content_filename %s: %s" % (entry["title"], e))
        if "skip_files" in opt_dic:
            post["skip_files"] = opt_dic["skip_files"]
        if "include_files" in opt_dic:
            post["include_files"] = opt_dic["include_files"]
        if "rename_like_files" in opt_dic:
            post["rename_like_files"] = opt_dic["rename_like_files"]
        return options
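For reference, the returned dict groups Transmission RPC arguments by when they are applied (at add time, as a change after adding, or by the plugin's own post-processing); an illustrative, non-exhaustive shape:

options = {
    'add':    {'download_dir': '/data/tv', 'paused': True, 'peer_limit': 20},
    'change': {'uploadLimit': 500, 'uploadLimited': True, 'seedRatioMode': 1},
    'post':   {'paused': False, 'main_file_only': True},
}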
Example #24
from __future__ import print_function
from __future__ import unicode_literals
from builtins import input
import os
from distutils.core import Command
from future.utils import text_to_native_str

from ._nativize import nativize_all

__all__ = [text_to_native_str("copy_script")]

class copy_script(Command):
    description = "copy an example script into a new editable file"

    # List of option tuples: long name, short name (None if no short
    # name), and help string.
    user_options = [
        # Select installation scheme and set base director(y|ies)
        ('From=', None,
         "path and file name containing script to copy"),
        ('To=', None,
         "path and file name to save script to")
     ]
    user_options = [nativize_all(u) for u in user_options]

    def initialize_options(self):
        self.From = None
        self.To = None

    def finalize_options(self):
        if self.From == None:
Example #25
import os
import zipfile
from builtins import str
from contextlib import contextmanager
from datetime import datetime

import requests
import sarge
import xml.etree.ElementTree as ET

from future.utils import text_to_native_str

CUMULUSCI_PATH = os.path.realpath(
    os.path.join(os.path.dirname(os.path.realpath(__file__)), "..")
)
META_XML_CLEAN_DIRS = ("classes/", "triggers/", "pages/", "aura/", "components/")
API_DATE_FORMAT = "%Y-%m-%dT%H:%M:%S.%f"
DATETIME_LEN = len("2018-08-07T16:00:56.000")
UTF8 = text_to_native_str("UTF-8")

BREW_UPDATE_CMD = "brew upgrade cumulusci"
PIP_UPDATE_CMD = "pip install --upgrade cumulusci"
PIPX_UPDATE_CMD = "pipx upgrade cumulusci"


def parse_api_datetime(value):
    """ parse a datetime returned from the salesforce API.

    in python 3 we should just use a strptime %z, but until then we're just going
    to assert that it's a fixed offset of +0000 since that's the observed behavior. getting
    python 2 to support fixed offset parsing is too complicated for what we need imo."""
    dt = datetime.strptime(value[0:DATETIME_LEN], API_DATE_FORMAT)
    offset_str = value[DATETIME_LEN:]
    assert offset_str in ["+0000", "Z"], "The Salesforce API returned a weird timezone."
Example #26
def _reconstruct_header(cls_name, bases, value):
    return type(text_to_native_str(cls_name), bases, {})._reconstruct(value)
Example #27
 def id_header(self, raw_name):
     """Header containing request id. Defaults to X-Request-Id."""
     return text_to_native_str(self[raw_name])
Example #28
    def _make_torrent_options_dict(self, config, entry):

        opt_dic = {}

        for opt_key in ('path', 'addpaused', 'honourlimits', 'bandwidthpriority', 'maxconnections', 'maxupspeed',
                        'maxdownspeed', 'ratio', 'main_file_only', 'main_file_ratio', 'magnetization_timeout',
                        'include_subs', 'content_filename', 'include_files', 'skip_files', 'rename_like_files',
                        'queue_position'):
            # Values are not merged between task and config;
            # the task entry takes priority, then config is used
            if opt_key in entry:
                opt_dic[opt_key] = entry[opt_key]
            elif opt_key in config:
                opt_dic[opt_key] = config[opt_key]

        options = {'add': {}, 'change': {}, 'post': {}}

        add = options['add']
        if opt_dic.get('path'):
            try:
                path = os.path.expanduser(entry.render(opt_dic['path']))
                add['download_dir'] = text_to_native_str(pathscrub(path), 'utf-8')
            except RenderError as e:
                log.error('Error setting path for %s: %s' % (entry['title'], e))
        if 'bandwidthpriority' in opt_dic:
            add['bandwidthPriority'] = opt_dic['bandwidthpriority']
        if 'maxconnections' in opt_dic:
            add['peer_limit'] = opt_dic['maxconnections']
        # make sure we add it paused, will modify status after adding
        add['paused'] = True

        change = options['change']
        if 'honourlimits' in opt_dic and not opt_dic['honourlimits']:
            change['honorsSessionLimits'] = False
        if 'maxupspeed' in opt_dic:
            change['uploadLimit'] = opt_dic['maxupspeed']
            change['uploadLimited'] = True
        if 'maxdownspeed' in opt_dic:
            change['downloadLimit'] = opt_dic['maxdownspeed']
            change['downloadLimited'] = True

        if 'ratio' in opt_dic:
            change['seedRatioLimit'] = opt_dic['ratio']
            if opt_dic['ratio'] == -1:
                # seedRatioMode:
                # 0 follow the global settings
                # 1 override the global settings, seeding until a certain ratio
                # 2 override the global settings, seeding regardless of ratio
                change['seedRatioMode'] = 2
            else:
                change['seedRatioMode'] = 1

        if 'queue_position' in opt_dic:
            change['queuePosition'] = opt_dic['queue_position']

        post = options['post']
        # set to modify paused status after
        if 'addpaused' in opt_dic:
            post['paused'] = opt_dic['addpaused']
        if 'main_file_only' in opt_dic:
            post['main_file_only'] = opt_dic['main_file_only']
        if 'main_file_ratio' in opt_dic:
            post['main_file_ratio'] = opt_dic['main_file_ratio']
        if 'magnetization_timeout' in opt_dic:
            post['magnetization_timeout'] = opt_dic['magnetization_timeout']
        if 'include_subs' in opt_dic:
            post['include_subs'] = opt_dic['include_subs']
        if 'content_filename' in opt_dic:
            try:
                post['content_filename'] = entry.render(opt_dic['content_filename'])
            except RenderError as e:
                log.error('Unable to render content_filename %s: %s' % (entry['title'], e))
        if 'skip_files' in opt_dic:
            post['skip_files'] = opt_dic['skip_files']
        if 'include_files' in opt_dic:
            post['include_files'] = opt_dic['include_files']
        if 'rename_like_files' in opt_dic:
            post['rename_like_files'] = opt_dic['rename_like_files']
        return options
Example #29
 def __getitem__(self, name):
     cls = self.registry.get(name.lower(), self.default_class)
     return type(text_to_native_str('_'+cls.__name__), (cls, self.base_class), {})
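The text_to_native_str wrapper is needed because type() rejects unicode class names on Python 2; a stand-alone illustration of the same dynamic subclassing (the class names here are made up):

from future.utils import text_to_native_str

class BaseHeader(object):
    pass

class DateHeader(object):
    pass

Combined = type(text_to_native_str('_DateHeader'), (DateHeader, BaseHeader), {})
print(Combined.__name__, [c.__name__ for c in Combined.__mro__])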
Example #30
    def wsgi_app(self, environ, start_response):
        """Execute this instance as a WSGI application.

        See the PEP for the meaning of parameters. The separation of
        __call__ and wsgi_app eases the insertion of middlewares.

        """
        request = Request(environ)
        request.encoding_errors = "strict"

        # The problem here is that we'd like to send an infinite stream
        # of events, but WSGI has been designed to handle only finite
        # responses. Hence, to do this we will have to "abuse" the API
        # a little. This works well with gevent's pywsgi implementation
        # but it may not with others (still PEP-compliant). Therefore,
        # just to be extra-safe, we will terminate the response anyway,
        # after a long timeout, to make it finite.

        # The first such "hack" is the mechanism to trigger the chunked
        # transfer-encoding. The PEP states just that "the server *may*
        # use chunked encoding" to send each piece of data we give it,
        # if we don't specify a Content-Length header and if both the
        # client and the server support it. According to the HTTP spec.
        # all (and only) HTTP/1.1 compliant clients have to support it.
        # We'll assume that the server software supports it too, and
        # actually uses it (gevent does!) even if we have no way to
        # check it. We cannot try to force such behavior as the PEP
        # doesn't even allow us to set the Transfer-Encoding header.

        # The second abuse is the use of the write() callable, returned
        # by start_response, even if the PEP strongly discourages its
        # use in new applications. We do it because we need a way to
        # detect when the client disconnects, and we hope to achieve
        # this by seeing when a call to write() fails, i.e. raises an
        # exception. This behavior isn't documented by the PEP, but it
        # seems reasonable and it's present in gevent (which raises a
        # socket.error).

        # The third non-standard behavior that we expect (related to
        # the previous one) is that no one in the application-to-client
        # chain does response buffering: neither any middleware nor the
        # server (gevent doesn't!). This should also hold outside the
        # server realm (i.e. no proxy buffering) but that's definitely
        # not our responsibility.

        # The fourth "hack" is to avoid an error to be printed on the
        # logs. If the client terminates the connection, we catch and
        # silently ignore the exception and return gracefully making
        # the server try to write the last zero-sized chunk (used to
        # mark the end of the stream). This will fail and produce an
        # error. To avoid this we detect if we're running on a gevent
        # server and make it "forget" this was a chunked response.

        # Check if the client will understand what we will produce.
        if request.accept_mimetypes.quality("text/event-stream") <= 0:
            return NotAcceptable()(environ, start_response)

        # Initialize the response and get the write() callback. The
        # Cache-Control header is useless for conforming clients, as
        # the spec. already imposes that behavior on them, but we set
        # it explicitly to avoid unwanted caching by unaware proxies and
        # middlewares.
        write = start_response(
            text_to_native_str("200 OK"),
            [(text_to_native_str("Content-Type"),
              text_to_native_str("text/event-stream; charset=utf-8")),
             (text_to_native_str("Cache-Control"),
              text_to_native_str("no-cache"))])

        # This is a part of the fourth hack (see above).
        if hasattr(start_response, "__self__") and \
                isinstance(start_response.__self__, WSGIHandler):
            handler = start_response.__self__
        else:
            handler = None

        # One-shot means that we will terminate the request after the
        # first batch of sent events. We do this when we believe the
        # client doesn't support chunked transfer. As this encoding has
        # been introduced in HTTP/1.1 (as mandatory!) we restrict to
        # requests in that HTTP version. Also, if it comes from an
        # XMLHttpRequest it has probably been sent from a polyfill (not
        # from the native browser implementation) which will be able to
        # read the response body only when it has been fully received.
        if environ["SERVER_PROTOCOL"] != "HTTP/1.1" or request.is_xhr:
            one_shot = True
        else:
            one_shot = False

        # As for the Server-Sent Events [1] spec., this is the way for
        # the client to tell us the ID of the last event it received
        # and to ask us to send it the ones that happened since then.
        # [1] http://www.w3.org/TR/eventsource/
        # The spec. requires implementations to retry the connection
        # when it fails, adding the "Last-Event-ID" HTTP header. But in
        # case of an error they stop, and we have to (manually) delete
        # the EventSource and create a new one. To obtain that behavior
        # again we give the "last_event_id" as a URL query parameter
        # (with lower priority, to have the header override it).
        last_event_id = request.headers.get("Last-Event-ID")
        if last_event_id is None:
            last_event_id = request.args.get("last_event_id")

        # We subscribe to the publisher to receive events.
        sub = self._pub.get_subscriber(last_event_id)

        # Send some data down the pipe. We need that to make the user
        # agent announce the connection (see the spec.). Since it's a
        # comment it will be ignored.
        write(b":\n")

        # XXX We could make the client change its reconnection timeout
        # by sending a "retry:" line.

        # As a last line of defence from very bad-behaving servers we
        # don't want the request to last longer than _GLOBAL_TIMEOUT
        # seconds (see above). We use "False" to just cause the control
        # to exit the with block, instead of raising an exception.
        with Timeout(self._GLOBAL_TIMEOUT, False):
            # Repeat indefinitely.
            while True:
                # Proxies often have a read timeout. We try not to hit
                # it by not being idle for more than _PING_TIMEOUT
                # seconds, sending a ping (i.e. a comment) if there's
                # no real data.
                try:
                    with Timeout(self._PING_TIMEOUT):
                        data = b"".join(sub.get())
                        got_sth = True
                except Timeout:
                    data = b":\n"
                    got_sth = False

                try:
                    with Timeout(self._WRITE_TIMEOUT):
                        write(data)
                # The PEP doesn't tell what has to happen when a write
                # fails. We're conservative, and allow any unexpected
                # event to interrupt the request. We hope it's enough
                # to detect when the client disconnects. It is with
                # gevent, which raises a socket.error. The timeout (we
                # catch that too) is just an extra precaution.
                except Exception:
                    # This is part of the fourth hack (see above).
                    if handler is not None:
                        handler.response_use_chunked = False
                    break

                # If we decided this is one-shot, stop the long-poll as
                # soon as we sent the client some real data.
                if one_shot and got_sth:
                    break

        # An empty iterable tells the server not to send anything.
        return []
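For reference, the bytes pushed through the write() callable above follow the Server-Sent Events wire format: lines starting with ':' are comments (used here as keep-alive pings), and an event is a block of 'field: value' lines terminated by a blank line. Illustrative payloads only:

write(b":\n")                        # comment / keep-alive ping, ignored by the client
write(b"id: 42\ndata: hello\n\n")    # one event carrying an id and a data payload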
Example #31
    def _applyToSolver(self, solver, matrix):
        if matrix.NumGlobalNonzeros() <= matrix.NumGlobalRows():
            return

        self.Prec = ML.MultiLevelPreconditioner(matrix, False)

        self.Prec.SetParameterList({text_to_native_str("output"): 0,
                                    text_to_native_str("max levels") : 2,
                                    text_to_native_str("prec type") : text_to_native_str("MGV"),
                                    text_to_native_str("increasing or decreasing") : text_to_native_str("increasing"),
                                    text_to_native_str("aggregation: type") : text_to_native_str("METIS"),
                                    text_to_native_str("aggregation: local aggregates") : 1,
                                    text_to_native_str("aggregation: damping factor") : 4. / 3.,
                                    text_to_native_str("eigen-analysis: type") : text_to_native_str("power-method"),
                                    text_to_native_str("eigen-analysis: iterations") : 20,
                                    text_to_native_str("smoother: sweeps") : 1,
                                    text_to_native_str("smoother: pre or post") : text_to_native_str("both"),
                                    text_to_native_str("smoother: type") : text_to_native_str("Aztec"),
                                    text_to_native_str("smoother: Aztec as solver") : False,
                                    text_to_native_str("coarse: type") : text_to_native_str("Amesos-KLU"),
                                    text_to_native_str("coarse: max size") : 128
                                    })

        self.Prec.ComputePreconditioner()

        solver.SetPrecOperator(self.Prec)
Example #32
from future.utils import text_to_native_str
from future.utils import string_types

__all__ = [text_to_native_str("nativize_all")]

def nativize_all(t):
    def _nativize(s):
        if isinstance(s, string_types):
            s = text_to_native_str(s)
        return s
        
    return tuple([_nativize(s) for s in t])
    
Example #33
"""Uses basic authentication (Github username + password) to retrieve issues
from a repository that username has access to. Supports Github API v3.
Adapted from: https://gist.github.com/patrickfuller/e2ea8a94badc5b6967ef3ca0a9452a43
"""
from __future__ import print_function
from __future__ import unicode_literals

import os
import textwrap
from distutils.core import Command
from future.utils import text_to_native_str

from ._nativize import nativize_all

__all__ = [text_to_native_str("changelog")]

class changelog(Command):
    description = "Generate ReST change log from github issues and pull requests"

    # List of option tuples: long name, short name (None if no short
    # name), and help string.
    user_options = [
        ('repository=', None,
         "GitHub repository to obtain issues from (default: 'usnistgov/fipy')"),
        ('tokenvar=', None,
         "Environment variable holding GitHub personal access token "
         "with 'repo' scope (default: 'FIPY_GITHUB_TOKEN')"),
        ('username=', None,
         "GitHub username to authenticate as (default: None). "
         "Supersedes `tokenvar`. "
         "Note: GitHub limits the rate of unauthenticated queries: "
Example #34
 def _nativize(s):
     if isinstance(s, string_types):
         s = text_to_native_str(s)
     return s
Example #35
    def notify(
        self,
        to,
        message,
        title,
        smtp_host,
        smtp_port,
        smtp_username=None,
        smtp_password=None,
        smtp_tls=None,
        smtp_ssl=None,
        html=None,
        **kwargs
    ):
        """
        Send an email notification

        :param str to: email `to` address
        :param str message: message body
        :param str title: message subject
        :param str smtp_host: smtp_host to use
        :param int smtp_port: port to use
        :param str smtp_username: smtp username if authentication is required
        :param str smtp_password: smtp password if authentication is required
        :param bool smtp_tls: enable tls
        :param bool smtp_ssl: enable ssl
        :param bool html: set content type to `html`
        :param kwargs: contains the `from` attribute since that is a reserved keyword
        """

        if not isinstance(to, list):
            to = [to]

        email = MIMEMultipart("alternative")
        email["To"] = ",".join(to)
        email["From"] = kwargs["from"]
        email["Subject"] = title
        email["Date"] = formatdate(localtime=True)
        content_type = "html" if html else "plain"
        email.attach(MIMEText(message.encode("utf-8"), content_type, _charset="utf-8"))

        try:
            log.debug("sending email notification to %s:%s", smtp_host, smtp_port)
            mailServer = smtplib.SMTP_SSL if smtp_ssl else smtplib.SMTP
            mailServer = mailServer(smtp_host, smtp_port)
            if smtp_tls:
                mailServer.ehlo()
                mailServer.starttls()
                mailServer.ehlo()
        except (socket.error, OSError) as e:
            raise PluginWarning(str(e))

        try:
            if smtp_username:
                log.debug("logging in to smtp server using username: %s", smtp_username)
                mailServer.login(text_to_native_str(smtp_username), text_to_native_str(smtp_password))
            mailServer.sendmail(email["From"], to, email.as_string())
        except IOError as e:
            raise PluginWarning(str(e))

        mailServer.quit()
Example #36
from __future__ import unicode_literals
__docformat__ = 'restructuredtext'

import optparse
import sys, os

__all__ = ["parse"]
from future.utils import text_to_native_str
__all__ = [text_to_native_str(n) for n in __all__]

def parse(larg, action = None, type = None, default = None):
    """
    This is a wrapper function for the python `optparse` module.
    Unfortunately `optparse` does not allow command line arguments to
    be ignored. See the documentation for `optparse` for more
    details. Returns the argument value.

    :Parameters:
      - `larg`: The argument to be parsed.
      - `action`: `store` or `store_true` are possibilities
      - `type`: Type of the argument. `int` or `float` are possibilities.
      - `default`: Default value.

    """
    sarg = None
    tmpparser = optparse.OptionParser(option_list = [
        optparse.make_option(sarg, larg, action = action, type = type, dest = 'dest', default = default)],
                                      conflict_handler = 'resolve')

##    optparse.make_option('-e', '--numberOfElements', action = 'store', type = 'int', dest = 'Nele', default = numberOfElements),
##    optparse.make_option('-n', '--numberOfSteps', action = 'store', type = 'int', dest = 'steps', default = numberOfSteps),
Example #37
from __future__ import print_function
from __future__ import unicode_literals
from distutils.core import Command
import os
from future.utils import text_to_native_str

from ._nativize import nativize_all

__all__ = [text_to_native_str("upload_products")]

class upload_products(Command):
    description = "upload FiPy compressed archives to website(s)"
    
    user_options = [('pdf', None, "upload the PDF variant of the documentation"),
                    ('html', None, "upload the HTML variant of the documentation"),
                    ('tarball', None, "upload the .tar.gz source distribution"),
                    ('winzip', None, "upload the .win32.zip distribution"),
                   ]
    user_options = [nativize_all(u) for u in user_options]

    def initialize_options (self):
        self.pdf = 0
        self.html = 0
        self.tarball = 0
        self.winzip = 0

    def finalize_options (self):
        pass

    def run(self):
        if self.pdf: