Example #1
0
 def test_same_as_repr(self) -> None:
     # Simple objects, small containers and classes that overwrite __repr__
     # must pformat/saferepr exactly the way repr() does.
     # Ahem.  The docs don't say anything about that -- this appears to
     # be testing an implementation quirk.  Starting in Python 2.5, it's
     # not true for dicts:  pprint always sorts dicts by key now; before,
     # it sorted a dict display if and only if the display required
     # multiple lines.  For that reason, dicts with more than one element
     # aren't tested here.
     samples = [
         0, 0, complex(0.0), 0.0, "", b"",
         (), tuple2(), tuple3(),
         [], list2(), list3(),
         {}, dict2(), dict3(),
         self.assertTrue, pprint,
         -6, -6, complex(-6., -6.), -1.5, "x", b"x",
         (3, ), [3], {3: 6},
         (1, 2), [3, 4], {5: 6},
         tuple2((1, 2)), tuple3((1, 2)), tuple3(range(100)),
         [3, 4],
         list2(Any([3, 4])), list3(Any([3, 4])), list3(Any(range(100))),
         dict2(Any({5: 6})),
         dict3(Any({5: 6})),  # JLe: work around mypy issue #233
         range(10, -11, -1),
     ]
     for simple in samples:
         native = repr(simple)
         for function in ("pformat", "saferepr"):
             got = getattr(pprint, function)(simple)
             self.assertEqual(
                 native, got,
                 "expected %s got %s from pprint.%s" % (native, got, function))
Example #2
0
 def wrap(*args: Any, **kwargs: Any) -> Any:
     # Temporarily patch shutil.rename with the fake, call through to
     # func, and restore the real implementation no matter what happens.
     try:
         saved_rename = shutil.rename
         shutil.rename = Any(_fake_rename)
         return func(*args, **kwargs)
     finally:
         shutil.rename = Any(saved_rename)
Example #3
0
def main() -> None:
    """Small main program"""
    import sys, getopt
    try:
        opts, args = getopt.getopt(sys.argv[1:], 'deut')
    except getopt.error as msg:
        sys.stdout = sys.stderr
        print(msg)
        print("""usage: %s [-d|-e|-u|-t] [file|-]
        -d, -u: decode
        -e: encode (default)
        -t: encode and decode string 'Aladdin:open sesame'""" % sys.argv[0])
        sys.exit(2)
    # Encoding is the default; the last decode/encode flag seen wins,
    # exactly as with the original if-chain.
    func = encode
    for opt, _arg in opts:
        if opt == '-t':
            test()
            return
        if opt in ('-d', '-u'):
            func = decode
        elif opt == '-e':
            func = encode
    if args and args[0] != '-':
        with open(args[0], 'rb') as infile:
            func(infile, Any(sys.stdout).buffer)
    else:
        func(Any(sys.stdin).buffer, Any(sys.stdout).buffer)
Example #4
0
def relpath(path: AnyStr, start: AnyStr = None) -> AnyStr:
    """Return a relative version of a path.

    Works for both str and bytes paths; *start* defaults to the current
    directory.  Raises ValueError for an empty *path*.
    """
    if not path:
        raise ValueError("no path specified")

    # Pick separator/constant literals matching the argument type
    # (bytes inputs must be processed with bytes constants).
    if isinstance(path, bytes):
        curdir = b'.'
        sep = b'/'
        pardir = b'..'
    else:
        curdir = '.'
        sep = '/'
        pardir = '..'

    if start is None:
        start = curdir

    start_list = [x for x in abspath(start).split(sep) if x]
    path_list = [x for x in abspath(path).split(sep) if x]

    # Work out how much of the filepath is shared by start and path.
    # BUG FIX: dropped the `Any(...)` dynamic-cast wrapper -- with
    # typing.Any it raises at runtime and it was always a no-op cast.
    i = len(commonprefix([start_list, path_list]))

    # Climb out of the unshared part of start, then descend into path.
    rel_list = [pardir] * (len(start_list) - i) + path_list[i:]
    if not rel_list:
        return curdir
    return join(*rel_list)
Example #5
0
 def visit_assignment_stmt(self, o: AssignmentStmt) -> None:
     """Record the declared or inferred types involved in an assignment.

     Type-variable definitions are skipped entirely; otherwise either the
     explicit annotation or the inferred type of each defined lvalue is
     fed to self.type() for counting.
     """
     self.line = o.line
     if (isinstance(o.rvalue, nodes.CallExpr) and isinstance(
             cast(nodes.CallExpr, o.rvalue).analyzed, nodes.TypeVarExpr)):
         # Type variable definition -- not a real assignment.
         return
     if o.type:
         # Explicit annotation present: count the declared type.
         self.type(o.type)
     elif self.inferred:
         for lvalue in o.lvalues:
             lvalue_ref = lvalue
             if isinstance(lvalue_ref, nodes.ParenExpr):
                 # Unwrap a parenthesized lvalue before dispatching on kind.
                 lvalue = lvalue_ref.expr
             if isinstance(lvalue, nodes.TupleExpr):
                 items = lvalue.items
             elif isinstance(lvalue, nodes.ListExpr):
                 items = lvalue.items
             else:
                 items = [lvalue]
             for item in items:
                 # Only count names being *defined* here, not re-assignments.
                 if hasattr(item, 'is_def') and Any(item).is_def:
                     t = self.typemap.get(item)
                     if t:
                         self.type(t)
                     else:
                         self.log('  !! No inferred type on line %d' %
                                  self.line)
                         self.record_line(self.line, TYPE_ANY)
     super().visit_assignment_stmt(o)
Example #6
0
    def do_create(self, pre: str = "", suf: str = "") -> mktemped:
        """Create a temp file via mktemped and sanity-check its name."""
        try:
            handle = Any(self).mktemped(self.dir, pre, suf)  # see #259
        except:
            self.failOnException("mktemp")

        self.nameCheck(handle.name, self.dir, pre, suf)
        return handle
Example #7
0
    def transform_class_def(self, tdef: ClassDef) -> List[Node]:
        """Transform a type definition.

        The result may be one or two definitions.  The first is the
        transformation of the original ClassDef. The second is a
        wrapper type, which is generated for generic types only.
        """
        defs = [] # type: List[Node]

        if tdef.info.type_vars:
            # This is a generic type. Insert type variable slots in
            # the class definition for new type variables, i.e. type
            # variables not mapped to superclass type variables.
            defs.extend(self.make_tvar_representation(tdef.info))

        # Iterate over definitions and transform each of them.
        vars = set() # type: Set[Var]
        for d in tdef.defs.body:
            if isinstance(d, FuncDef):
                # Implicit cast from FuncDef[] to Node[] is safe below.
                defs.extend(Any(self.func_tf.transform_method(d)))
            elif isinstance(d, VarDef):
                defs.extend(self.transform_var_def(d))
                # Remember explicitly defined attributes so accessors are
                # not generated twice below.
                for n in d.items:
                    vars.add(n)
            elif isinstance(d, AssignmentStmt):
                self.transform_assignment(d)
                defs.append(d)

        # Add accessors for implicitly defined attributes.
        for node in tdef.info.names.values():
            if isinstance(node.node, Var):
                v = cast(Var, node.node)
                if v.info == tdef.info and v not in vars:
                    defs.extend(self.make_accessors(v))

        # For generic classes, add an implicit __init__ wrapper.
        defs.extend(self.make_init_wrapper(tdef))

        if tdef.is_generic() or (tdef.info.bases and
                                 tdef.info.mro[1].is_generic()):
            # Generic class (or subclass of one): __init__ must also set up
            # the instance's type-variable slots.
            self.make_instance_tvar_initializer(
                cast(FuncDef, tdef.info.get_method('__init__')))

        # A class body must not be empty; pad with a pass statement.
        if not defs:
            defs.append(PassStmt())

        if tdef.is_generic():
            gen_wrapper = self.generic_class_wrapper(tdef)

        tdef.defs = Block(defs)

        dyn_wrapper = self.make_type_object_wrapper(tdef)

        if not tdef.is_generic():
            return [tdef, dyn_wrapper]
        else:
            return [tdef, dyn_wrapper, gen_wrapper]
Example #8
0
 def do_convert(input):
     """Normalize *input* into a rule object.

     '*' becomes a wildcard Any(), any other string an exact Eq() match,
     an existing Rule passes through unchanged, anything else gives None.
     """
     if isinstance(input, Rule):
         return input
     if isinstance(input, str):
         return Any() if input == "*" else Eq(input)
     return None
Example #9
0
 def test_setstate_middle_arg(self) -> None:
     """setstate() must reject malformed state tuples."""
     bad_states = [
         (TypeError, (2, None, None)),           # middle item s/b a tuple
         (ValueError, (2, (1, 2, 3), None)),     # tuple s/b length 625
         (TypeError, (2, tuple(['a', ] * 625), None)),  # s/b 625 ints
         (TypeError, (2, Any((0,)) * 624 + ('a',), None)),  # last s/b int too
     ]
     for exc, state in bad_states:
         self.assertRaises(exc, self.gen.setstate, state)
Example #10
0
 def match(
         self, val: Any, get: Callable[[str], Any],
         exe: Callable[[dict[str, Any], U],
                       bool]) -> tuple[T, dict[str, Any]]:
     """Try each registered case in order against *val*.

     Returns (label, bindings) for the first case whose pattern matches
     and whose guard -- if present -- passes when evaluated via *exe*;
     falls back to (self.otherwise, {}) when nothing matches.
     """
     for p, l, g in self.cases:
         res = p.match(val, get)
         if res is not None:
             # Guard g is only evaluated after a successful pattern match.
             if g is None or exe(res, g):
                 return l, res
     return self.otherwise, {}
def any_instance_of(cls: Type[RequiredInstanceT]) -> RequiredInstanceT:
    """Return a sentinel that compares equal to any instance of *cls*."""
    class Any:
        # Equality is the whole contract: this object == anything that
        # is an instance of cls.
        def __eq__(self, other):
            return isinstance(other, cls)

        def __repr__(self):
            return f"[Any {cls.__name__}]"

        __str__ = __repr__

    return cast(RequiredInstanceT, Any())
Example #12
0
    def do_create(self,
                  dir: str = None,
                  pre: str = "",
                  suf: str = "",
                  bin: int = 1) -> mkstemped:
        """Create a temp file via mkstemped and sanity-check its name.

        Defaults to the system temp directory when *dir* is omitted.
        """
        if dir is None:
            dir = tempfile.gettempdir()
        try:
            handle = Any(self).mkstemped(dir, pre, suf, bin)  # see #259
        except:
            self.failOnException("_mkstemp_inner")

        self.nameCheck(handle.name, dir, pre, suf)
        return handle
Example #13
0
    def test_samestat_on_links(self) -> None:
        """samestat() must see a symlink and its target as the same file."""
        link_target = support.TESTFN + "1"
        link_name = support.TESTFN + "2"
        self._create_file(link_target)
        test_fns = [link_target, link_name]
        Any(os.symlink)(*test_fns)
        self.assertTrue(posixpath.samestat(*map(os.stat, test_fns)))
        os.remove(link_name)

        # Two independent regular files must NOT be samestat.
        self._create_file(link_name)
        self.assertFalse(posixpath.samestat(*map(os.stat, test_fns)))

        self.assertRaises(TypeError, posixpath.samestat)
Example #14
0
from typing import Any, Dict, List, Optional, Union
from dataclasses import dataclass, field

# noinspection PyUnreachableCode
if False:
    # Dead branch: never executed at runtime.  It exists only so that IDEs
    # and type checkers can resolve the TouchDesigner stub names used below.
    # noinspection PyUnresolvedReferences
    from _stubs import *
    from _stubs.ArcBallExt import ArcBallExt
    ext.Inspector = Inspector()
    ipar = Any()
    ipar.inspectorCore = Any()

# the drop function takes the following arguments and according to the dropped type
# calls a function in the /sys attached DragDrop extension
#
# dropName: dropped node name or filename
# [x/y]Pos: position in network pane
# index: index of dragged item
# totalDragged: total amount of dragged items
# dropExt: operator type or file extension of dragged item
# baseName: dragged node parent network or parent directory of dragged file
# destPath: dropped network


def onDrop(dropName, xPos, yPos, index, totalDragged, dropExt, baseName,
           destPath):
    """Resolve the dropped operator and attach the Inspector to it."""
    parent_network = op(baseName)
    dropped_op = parent_network.op(dropName)
    ext.Inspector.AttachTo(dropped_op)
class ShowTcpDetailPcbAllSchema(MetaParser):
    """Schema for show tcp detail pcb all

    """
    # Keys wrapped in Any() match arbitrary parsed values (PCB addresses,
    # event-timer names); Optional() keys may be absent from device output.
    schema = {
        'pcb_address': {
            Any(): {
                'connection_state': str,
                'io_status': int,
                'socket_status': int,
                'established_datetime': str,
                'tcp_connection_data': {
                    'pcb': str,
                    'so': str,
                    'tcpcb': str,
                    'vrfid': str,
                    'pak_prio': str,
                    'tos': int,
                    'ttl': int,
                    'hash_index': int,
                    'local_host': str,
                    'local_port': int,
                    'local_app_pid': int,
                    'foreign_host': str,
                    'foreign_port': int,
                    'local_app': {
                        'pid': int,
                        'instance': int,
                        'spl_id': int
                    }
                },
                'current_queue': {
                    'send': {
                        'send_size_bytes': int,
                        'max_send_size_bytes': int
                    },
                    'receive': {
                        Optional('receive_size_bytes'): int,
                        Optional('max_receive_size_bytes'): int,
                        Optional('mis_ordered_bytes'): int,
                        Optional('receive_size_packages'): int,
                        Optional('max_receive_size_packages'): int
                    }
                },
                # One sub-dict per timer name (e.g. retransmit, keepalive).
                'event_timers': {
                    Any(): {
                        'starts': int,
                        'wakeups': int,
                        'next_msec': int
                    }
                },
                'sequences': {
                    'iss': int,
                    'snduna': int,
                    'sndnxt': int,
                    'sndmax': int,
                    'sndwnd': int,
                    'sndcwnd': int,
                    'irs': int,
                    'rcvnxt': int,
                    'rcvwnd': int,
                    'rcvadv': int
                },
                'round_trip_delay': {
                    'srtt_ms': int,
                    'rtto_ms': int,
                    'rtv_ms': int,
                    'krtt_ms': int,
                    'min_rtt_ms': int,
                    'max_rtt_ms': int
                },
                'times': {
                    'ack_hold_ms': int,
                    'keepalive_sec': int,
                    'syn_waittime_sec': int,
                    'giveup_ms': int,
                    'retransmission_retries': int,
                    'retransmit_forever': str,
                    'connect_retries_remaining': int,
                    'connect_retry_interval_sec': int
                },
                'flags': {
                    'state': str,
                    'feature': str,
                    'request': str
                },
                'datagrams': {
                    'mss_bytes': int,
                    'peer_mss_bytes': int,
                    'min_mss_bytes': int,
                    'max_mss_bytes': int
                },
                'window_scales': {
                    'rcv': int,
                    'snd': int,
                    'request_rcv': int,
                    'request_snd': int
                },
                'timestamp_option': {
                    'recent': int,
                    'recent_age': int,
                    'last_ack_sent': int
                },
                'sack_blocks': {
                    'start': str,
                    'end': str
                },
                'sack_holes': {
                    'start': str,
                    'end': str,
                    'dups': str,
                    'rxmit': str
                },
                'socket_options': str,
                'socket_states': str,
                'socket_receive_buffer_states': str,
                'socket_send_buffer_states': str,
                'socket_receive_buffer': {
                    'watermarks': {
                        'low': int,
                        'high': int
                    }
                },
                'socket_send_buffer': {
                    'watermarks': {
                        'low': int,
                        'high': int
                    },
                    Optional('notify_threshold'): int
                },
                'socket_misc_info': {
                    'rcv_data_size': int,
                    'so_qlen': int,
                    'so_q0len': int,
                    'so_qlimit': int,
                    'so_error': int,
                    'so_auto_rearm': int
                },
                'pdu_information': {
                    'pdu_buffer': int,
                    'fib_lookup_cache': {
                        'ifh': str,
                        'pd_ctx': {
                            'size': int,
                            'data': str
                        }
                    },
                    Optional('num_label'): int,
                    Optional('label_stack'): int,
                    Optional('num_peers_with_authentication'): int
                }
            }

        }
    }
Example #16
0
 def test_Any(self):
     """Any() used as a cast must return its argument unchanged."""
     for value in (object(), 'x'):
         self.assertIs(Any(value), value)
Example #17
0
 def assertError(self, *args: Any, **kwargs: Any) -> None:
     """Assert that the given call raises getopt.GetoptError."""
     # JLe: work around mypy bug #229
     raises = Any(self.assertRaises)
     raises(getopt.GetoptError, *args, **kwargs)
Example #18
0
class TestShutil(unittest.TestCase):
    def setUp(self) -> None:
        """Start each test with an empty list of temp dirs to clean up."""
        super().setUp()
        # FIX: was `List[str]()` -- instantiating through the typing generic
        # alias; a plain list literal is the idiomatic equivalent.
        self.tempdirs = []  # type: List[str]

    def tearDown(self) -> None:
        """Remove every temp dir registered during the test."""
        super().tearDown()
        while self.tempdirs:
            victim = self.tempdirs.pop()
            shutil.rmtree(victim, os.name in ('nt', 'cygwin'))

    @overload
    def write_file(self, path: str, content: str = 'xxx') -> None:
        """Write *content* to the file at *path* (string form).

        path can be a string or a sequence.
        """
        with open(path, 'w') as handle:
            handle.write(content)

    @overload
    def write_file(self, path: Sequence[str], content: str = 'xxx') -> None:
        """Sequence form: join the path components, then delegate."""
        # JLe: work around mypy issue #238
        joined = os.path.join(*list(path))
        self.write_file(joined, content)

    def mkdtemp(self) -> str:
        """Create a temporary directory registered for automatic cleanup.

        Returns the path of the directory.
        """
        path = tempfile.mkdtemp()
        self.tempdirs.append(path)
        return path

    def test_rmtree_errors(self) -> None:
        """rmtree on a nonexistent path must raise OSError."""
        missing = tempfile.mktemp()  # filename guaranteed not to exist
        self.assertRaises(OSError, shutil.rmtree, missing)

    # See bug #1071513 for why we don't run this on cygwin
    # and bug #1076467 for why we don't run this as root.
    if (hasattr(os, 'chmod') and sys.platform[:6] != 'cygwin'
            and not (hasattr(os, 'geteuid') and (Any(os)).geteuid() == 0)):

        def test_on_error(self) -> None:
            # Defined only where chmod-based permission denial is meaningful:
            # not on cygwin, and not as root (root ignores file permissions,
            # so rmtree would simply succeed).
            self.errorState = 0
            os.mkdir(TESTFN)
            self.childpath = os.path.join(TESTFN, 'a')
            f = open(self.childpath, 'w')
            f.close()
            old_dir_mode = os.stat(TESTFN).st_mode
            old_child_mode = os.stat(self.childpath).st_mode
            # Make unwritable.
            os.chmod(self.childpath, stat.S_IREAD)
            os.chmod(TESTFN, stat.S_IREAD)

            shutil.rmtree(TESTFN, onerror=self.check_args_to_onerror)
            # Test whether onerror has actually been called.
            self.assertEqual(
                self.errorState, 2,
                "Expected call to onerror function did not happen.")

            # Make writable again.
            os.chmod(TESTFN, old_dir_mode)
            os.chmod(self.childpath, old_child_mode)

            # Clean up.
            shutil.rmtree(TESTFN)

    def check_args_to_onerror(self, func: Any, arg: str, exc: Any) -> None:
        """onerror callback for rmtree: advance self.errorState as the
        expected failure sequence (remove/listdir, then rmdir) is seen."""
        # test_rmtree_errors deliberately runs rmtree
        # on a directory that is chmod 400, which will fail.
        # This function is run when shutil.rmtree fails.
        # 99.9% of the time it initially fails to remove
        # a file in the directory, so the first time through
        # func is os.remove.
        # However, some Linux machines running ZFS on
        # FUSE experienced a failure earlier in the process
        # at os.listdir.  The first failure may legally
        # be either.
        if self.errorState == 0:
            if func is os.remove:
                self.assertEqual(arg, self.childpath)
            else:
                self.assertIs(func, os.listdir,
                              "func must be either os.remove or os.listdir")
                self.assertEqual(arg, TESTFN)
            self.assertTrue(issubclass(exc[0], OSError))
            self.errorState = 1
        else:
            # Second failure: rmdir on the (still non-empty) directory.
            self.assertEqual(func, os.rmdir)
            self.assertEqual(arg, TESTFN)
            self.assertTrue(issubclass(exc[0], OSError))
            self.errorState = 2

    def test_rmtree_dont_delete_file(self) -> None:
        """rmtree must refuse to operate on a plain file."""
        # When called on a file instead of a directory, don't delete it.
        fd, path = tempfile.mkstemp()
        os.fdopen(fd).close()
        self.assertRaises(OSError, shutil.rmtree, path)
        os.remove(path)

    def _write_data(self, path: str, data: str) -> None:
        """Write *data* to a new text file at *path*."""
        # FIX: use a context manager so the handle is closed even if
        # write() raises (the original leaked the file object on error).
        with open(path, "w") as f:
            f.write(data)

    def test_copytree_simple(self) -> None:
        """copytree must recreate files and nested directories verbatim."""
        def read_data(path: str) -> str:
            # Small helper: slurp a text file.
            f = open(path)
            data = f.read()
            f.close()
            return data

        src_dir = tempfile.mkdtemp()
        dst_dir = os.path.join(tempfile.mkdtemp(), 'destination')
        self._write_data(os.path.join(src_dir, 'test.txt'), '123')
        os.mkdir(os.path.join(src_dir, 'test_dir'))
        self._write_data(os.path.join(src_dir, 'test_dir', 'test.txt'), '456')

        try:
            shutil.copytree(src_dir, dst_dir)
            self.assertTrue(os.path.isfile(os.path.join(dst_dir, 'test.txt')))
            self.assertTrue(os.path.isdir(os.path.join(dst_dir, 'test_dir')))
            self.assertTrue(
                os.path.isfile(os.path.join(dst_dir, 'test_dir', 'test.txt')))
            actual = read_data(os.path.join(dst_dir, 'test.txt'))
            self.assertEqual(actual, '123')
            actual = read_data(os.path.join(dst_dir, 'test_dir', 'test.txt'))
            self.assertEqual(actual, '456')
        finally:
            # Manual cleanup: these dirs were made with tempfile.mkdtemp()
            # directly, so self.tempdirs does not track them.
            for path in (
                    os.path.join(src_dir, 'test.txt'),
                    os.path.join(dst_dir, 'test.txt'),
                    os.path.join(src_dir, 'test_dir', 'test.txt'),
                    os.path.join(dst_dir, 'test_dir', 'test.txt'),
            ):
                if os.path.exists(path):
                    os.remove(path)
            for path in (src_dir, os.path.dirname(dst_dir)):
                if os.path.exists(path):
                    shutil.rmtree(path)

    def test_copytree_with_exclude(self) -> None:
        """copytree must honor both glob-style and callable ignore filters."""
        def read_data(path: str) -> str:
            # Small helper: slurp a text file.
            f = open(path)
            data = f.read()
            f.close()
            return data

        # creating data
        join = os.path.join
        exists = os.path.exists
        src_dir = tempfile.mkdtemp()
        try:
            dst_dir = join(tempfile.mkdtemp(), 'destination')
            self._write_data(join(src_dir, 'test.txt'), '123')
            self._write_data(join(src_dir, 'test.tmp'), '123')
            os.mkdir(join(src_dir, 'test_dir'))
            self._write_data(join(src_dir, 'test_dir', 'test.txt'), '456')
            os.mkdir(join(src_dir, 'test_dir2'))
            self._write_data(join(src_dir, 'test_dir2', 'test.txt'), '456')
            os.mkdir(join(src_dir, 'test_dir2', 'subdir'))
            os.mkdir(join(src_dir, 'test_dir2', 'subdir2'))
            self._write_data(join(src_dir, 'test_dir2', 'subdir', 'test.txt'),
                             '456')
            self._write_data(join(src_dir, 'test_dir2', 'subdir2', 'test.py'),
                             '456')

            # testing glob-like patterns
            try:
                patterns = shutil.ignore_patterns('*.tmp', 'test_dir2')
                shutil.copytree(src_dir, dst_dir, ignore=patterns)
                # checking the result: some elements should not be copied
                self.assertTrue(exists(join(dst_dir, 'test.txt')))
                self.assertTrue(not exists(join(dst_dir, 'test.tmp')))
                self.assertTrue(not exists(join(dst_dir, 'test_dir2')))
            finally:
                if os.path.exists(dst_dir):
                    shutil.rmtree(dst_dir)
            try:
                patterns = shutil.ignore_patterns('*.tmp', 'subdir*')
                shutil.copytree(src_dir, dst_dir, ignore=patterns)
                # checking the result: some elements should not be copied
                self.assertTrue(not exists(join(dst_dir, 'test.tmp')))
                self.assertTrue(
                    not exists(join(dst_dir, 'test_dir2', 'subdir2')))
                self.assertTrue(
                    not exists(join(dst_dir, 'test_dir2', 'subdir')))
            finally:
                if os.path.exists(dst_dir):
                    shutil.rmtree(dst_dir)

            # testing callable-style
            try:

                def _filter(src: str, names: Sequence[str]) -> List[str]:
                    """Ignore dirs named 'subdir' and files ending in .py."""
                    res = []  # type: List[str]
                    for name in names:
                        path = os.path.join(src, name)

                        # FIX: was `path.split()[-1]` (splits on whitespace,
                        # never matched); split on the path separator.
                        if (os.path.isdir(path)
                                and path.split(os.sep)[-1] == 'subdir'):
                            res.append(name)
                        # FIX: was `in ('.py')` -- a string-membership test
                        # ('' matched every directory); use a real tuple.
                        elif os.path.splitext(path)[1] in ('.py',):
                            res.append(name)
                    return res

                shutil.copytree(src_dir, dst_dir, ignore=_filter)

                # checking the result: some elements should not be copied
                self.assertTrue(not exists(
                    join(dst_dir, 'test_dir2', 'subdir2', 'test.py')))
                self.assertTrue(
                    not exists(join(dst_dir, 'test_dir2', 'subdir')))

            finally:
                if os.path.exists(dst_dir):
                    shutil.rmtree(dst_dir)
        finally:
            shutil.rmtree(src_dir)
            shutil.rmtree(os.path.dirname(dst_dir))

    @unittest.skipUnless(hasattr(os, 'link'), 'requires os.link')
    def test_dont_copy_file_onto_link_to_itself(self) -> None:
        """copyfile must raise rather than clobber a hard link to itself."""
        # Temporarily disable test on Windows.
        if os.name == 'nt':
            return
        # bug 851123.
        os.mkdir(TESTFN)
        src = os.path.join(TESTFN, 'cheese')
        dst = os.path.join(TESTFN, 'shop')
        try:
            with open(src, 'w') as f:
                f.write('cheddar')
            os.link(src, dst)
            self.assertRaises(shutil.Error, shutil.copyfile, src, dst)
            # Content must be untouched after the failed copy.
            with open(src, 'r') as f:
                self.assertEqual(f.read(), 'cheddar')
            os.remove(dst)
        finally:
            shutil.rmtree(TESTFN, ignore_errors=True)

    @support.skip_unless_symlink
    def test_dont_copy_file_onto_symlink_to_itself(self) -> None:
        """copyfile must raise rather than clobber a symlink to itself."""
        # bug 851123.
        os.mkdir(TESTFN)
        src = os.path.join(TESTFN, 'cheese')
        dst = os.path.join(TESTFN, 'shop')
        try:
            with open(src, 'w') as f:
                f.write('cheddar')
            # Using `src` here would mean we end up with a symlink pointing
            # to TESTFN/TESTFN/cheese, while it should point at
            # TESTFN/cheese.
            os.symlink('cheese', dst)
            self.assertRaises(shutil.Error, shutil.copyfile, src, dst)
            # Content must be untouched after the failed copy.
            with open(src, 'r') as f:
                self.assertEqual(f.read(), 'cheddar')
            os.remove(dst)
        finally:
            shutil.rmtree(TESTFN, ignore_errors=True)

    @support.skip_unless_symlink
    def test_rmtree_on_symlink(self) -> None:
        """rmtree must raise instead of following a symlink (bug 1669)."""
        os.mkdir(TESTFN)
        try:
            real_dir = os.path.join(TESTFN, 'cheese')
            link = os.path.join(TESTFN, 'shop')
            os.mkdir(real_dir)
            os.symlink(real_dir, link)
            self.assertRaises(OSError, shutil.rmtree, link)
        finally:
            shutil.rmtree(TESTFN, ignore_errors=True)

    if hasattr(os, "mkfifo"):
        # Issue #3002: copyfile and copytree block indefinitely on named pipes
        def test_copyfile_named_pipe(self) -> None:
            # copyfile must refuse a special file in either position.
            os.mkfifo(TESTFN)
            try:
                self.assertRaises(shutil.SpecialFileError, shutil.copyfile,
                                  TESTFN, TESTFN2)
                self.assertRaises(shutil.SpecialFileError, shutil.copyfile,
                                  __file__, TESTFN)
            finally:
                os.remove(TESTFN)

        @support.skip_unless_symlink
        def test_copytree_named_pipe(self) -> None:
            # copytree must report (not hang on) a named pipe in the tree.
            os.mkdir(TESTFN)
            try:
                subdir = os.path.join(TESTFN, "subdir")
                os.mkdir(subdir)
                pipe = os.path.join(subdir, "mypipe")
                os.mkfifo(pipe)
                try:
                    shutil.copytree(TESTFN, TESTFN2)
                except shutil.Error as e:
                    # Exactly one error, naming the pipe, is expected.
                    errors = e.args[0]
                    self.assertEqual(len(errors), 1)
                    src, dst, error_msg = errors[0]
                    self.assertEqual("`%s` is a named pipe" % pipe, error_msg)
                else:
                    self.fail("shutil.Error should have been raised")
            finally:
                shutil.rmtree(TESTFN, ignore_errors=True)
                shutil.rmtree(TESTFN2, ignore_errors=True)

    def test_copytree_special_func(self) -> None:
        """copytree must route every file copy through copy_function."""
        src_dir = self.mkdtemp()
        dst_dir = os.path.join(self.mkdtemp(), 'destination')
        self._write_data(os.path.join(src_dir, 'test.txt'), '123')
        os.mkdir(os.path.join(src_dir, 'test_dir'))
        self._write_data(os.path.join(src_dir, 'test_dir', 'test.txt'), '456')

        # FIX: was `List[Tuple[str, str]]()` -- instantiating through the
        # typing generic alias; a plain list literal is the idiomatic form.
        copied = []  # type: List[Tuple[str, str]]

        def _copy(src: str, dst: str) -> None:
            # Record the call instead of copying anything.
            copied.append((src, dst))

        shutil.copytree(src_dir, dst_dir, copy_function=_copy)
        # Two regular files in the tree -> two copy_function calls.
        self.assertEqual(len(copied), 2)

    @support.skip_unless_symlink
    def test_copytree_dangling_symlinks(self) -> None:
        """copytree's handling of dangling symlinks depends on its flags."""
        # a dangling symlink raises an error at the end
        src_dir = self.mkdtemp()
        dst_dir = os.path.join(self.mkdtemp(), 'destination')
        os.symlink('IDONTEXIST', os.path.join(src_dir, 'test.txt'))
        os.mkdir(os.path.join(src_dir, 'test_dir'))
        self._write_data(os.path.join(src_dir, 'test_dir', 'test.txt'), '456')
        self.assertRaises(Error, shutil.copytree, src_dir, dst_dir)

        # a dangling symlink is ignored with the proper flag
        dst_dir = os.path.join(self.mkdtemp(), 'destination2')
        shutil.copytree(src_dir, dst_dir, ignore_dangling_symlinks=True)
        self.assertNotIn('test.txt', os.listdir(dst_dir))

        # a dangling symlink is copied if symlinks=True
        dst_dir = os.path.join(self.mkdtemp(), 'destination3')
        shutil.copytree(src_dir, dst_dir, symlinks=True)
        self.assertIn('test.txt', os.listdir(dst_dir))

    def _copy_file(self, method: Function[[str, str],
                                          None]) -> Tuple[str, str]:
        # Helper: write a fresh temp file, copy it into a second temp dir
        # with *method*, and return (source_path, destination_path).
        # NOTE(review): `Function` appears to be a project alias for
        # typing.Callable -- confirm against the file header.
        fname = 'test.txt'
        tmpdir = self.mkdtemp()
        self.write_file([tmpdir, fname])
        file1 = os.path.join(tmpdir, fname)
        tmpdir2 = self.mkdtemp()
        method(file1, tmpdir2)
        file2 = os.path.join(tmpdir2, fname)
        return (file1, file2)

    @unittest.skipUnless(hasattr(os, 'chmod'), 'requires os.chmod')
    def test_copy(self) -> None:
        """shutil.copy must create the target and preserve mode bits."""
        src, dst = self._copy_file(shutil.copy)
        self.assertTrue(os.path.exists(dst))
        self.assertEqual(os.stat(src).st_mode, os.stat(dst).st_mode)

    @unittest.skipUnless(hasattr(os, 'chmod'), 'requires os.chmod')
    @unittest.skipUnless(hasattr(os, 'utime'), 'requires os.utime')
    def test_copy2(self) -> None:
        """shutil.copy2 must preserve mode bits and timestamps."""
        # Ensure that the copied file exists and has the same mode and
        # modification time bits.
        file1, file2 = self._copy_file(shutil.copy2)
        self.assertTrue(os.path.exists(file2))
        file1_stat = os.stat(file1)
        file2_stat = os.stat(file2)
        self.assertEqual(file1_stat.st_mode, file2_stat.st_mode)
        for attr in 'st_atime', 'st_mtime':
            # The modification times may be truncated in the new file.
            self.assertLessEqual(getattr(file1_stat, attr),
                                 getattr(file2_stat, attr) + 1)
        if hasattr(os, 'chflags') and hasattr(file1_stat, 'st_flags'):
            # Platform-specific file flags must survive the copy too.
            self.assertEqual(getattr(file1_stat, 'st_flags'),
                             getattr(file2_stat, 'st_flags'))

    @unittest.skipUnless(zlib, "requires zlib")
    def test_make_tarball(self) -> None:
        """_make_tarball must create both .tar.gz and plain .tar archives."""
        # creating something to tar
        tmpdir = self.mkdtemp()
        self.write_file([tmpdir, 'file1'], 'xxx')
        self.write_file([tmpdir, 'file2'], 'xxx')
        os.mkdir(os.path.join(tmpdir, 'sub'))
        self.write_file([tmpdir, 'sub', 'file3'], 'xxx')

        tmpdir2 = self.mkdtemp()
        # force shutil to create the directory
        os.rmdir(tmpdir2)
        # BUG FIX: the original built a unittest.skipUnless decorator here
        # and discarded it, so the same-drive check never ran.  Skip the
        # test explicitly instead of silently doing nothing.
        if splitdrive(tmpdir)[0] != splitdrive(tmpdir2)[0]:
            self.skipTest("source and target should be on same drive")

        base_name = os.path.join(tmpdir2, 'archive')

        def _tar_from(directory, *args, **kwargs):
            # Run _make_tarball with a relative cwd to avoid tar warnings,
            # always restoring the original working directory.
            old_dir = os.getcwd()
            os.chdir(directory)
            try:
                _make_tarball(*args, **kwargs)
            finally:
                os.chdir(old_dir)

        # compressed tarball
        _tar_from(tmpdir, splitdrive(base_name)[1], '.')
        self.assertTrue(os.path.exists(base_name + '.tar.gz'))

        # trying an uncompressed one
        _tar_from(tmpdir, splitdrive(base_name)[1], '.', compress=None)
        self.assertTrue(os.path.exists(base_name + '.tar'))

    def _tarinfo(self, path: str) -> tuple:
        tar = tarfile.open(path)
        try:
            names = tar.getnames()
            names.sort()
            return tuple(names)
        finally:
            tar.close()

    def _create_files(self) -> Tuple[str, str, str]:
        """Build a small 'dist' tree to archive.

        Returns (source_root, target_dir, archive_base_name).
        """
        root = self.mkdtemp()
        dist = os.path.join(root, 'dist')
        os.mkdir(dist)
        for name in ('file1', 'file2'):
            self.write_file([dist, name], 'xxx')
        os.mkdir(os.path.join(dist, 'sub'))
        self.write_file([dist, 'sub', 'file3'], 'xxx')
        os.mkdir(os.path.join(dist, 'sub2'))
        target = self.mkdtemp()
        return root, target, os.path.join(target, 'archive')

    @unittest.skipUnless(zlib, "Requires zlib")
    @unittest.skipUnless(
        find_executable('tar') and find_executable('gzip'),
        'Need the tar command to run')
    def test_tarfile_vs_tar(self) -> None:
        # Archive the same tree with _make_tarball and with the external
        # tar/gzip commands, then compare the resulting member lists.
        tmpdir, tmpdir2, base_name = self._create_files()
        old_dir = os.getcwd()
        os.chdir(tmpdir)
        try:
            _make_tarball(base_name, 'dist')
        finally:
            # always restore the cwd, even if archiving fails
            os.chdir(old_dir)

        # check if the compressed tarball was created
        tarball = base_name + '.tar.gz'
        self.assertTrue(os.path.exists(tarball))

        # now create another tarball using `tar`
        tarball2 = os.path.join(tmpdir, 'archive2.tar.gz')
        tar_cmd = ['tar', '-cf', 'archive2.tar', 'dist']
        gzip_cmd = ['gzip', '-f9', 'archive2.tar']
        old_dir = os.getcwd()
        os.chdir(tmpdir)
        try:
            # silence the spawned commands' output
            with captured_stdout() as s:
                spawn(tar_cmd)
                spawn(gzip_cmd)
        finally:
            os.chdir(old_dir)

        self.assertTrue(os.path.exists(tarball2))
        # let's compare both tarballs
        self.assertEqual(self._tarinfo(tarball), self._tarinfo(tarball2))

        # trying an uncompressed one
        base_name = os.path.join(tmpdir2, 'archive')
        old_dir = os.getcwd()
        os.chdir(tmpdir)
        try:
            _make_tarball(base_name, 'dist', compress=None)
        finally:
            os.chdir(old_dir)
        tarball = base_name + '.tar'
        self.assertTrue(os.path.exists(tarball))

        # now for a dry_run
        base_name = os.path.join(tmpdir2, 'archive')
        old_dir = os.getcwd()
        os.chdir(tmpdir)
        try:
            _make_tarball(base_name, 'dist', compress=None, dry_run=True)
        finally:
            os.chdir(old_dir)
        tarball = base_name + '.tar'
        # NOTE(review): this assertion passes even though dry_run created
        # nothing -- the previous non-dry-run step already wrote the same
        # .tar path, so dry_run behaviour is not actually verified here.
        self.assertTrue(os.path.exists(tarball))

    @unittest.skipUnless(zlib, "Requires zlib")
    @unittest.skipUnless(ZIP_SUPPORT, 'Need zip support to run')
    def test_make_zipfile(self) -> None:
        # build a couple of files to archive
        src = self.mkdtemp()
        for name in ('file1', 'file2'):
            self.write_file([src, name], 'xxx')

        dst = self.mkdtemp()
        # force shutil to create the directory
        os.rmdir(dst)
        base_name = os.path.join(dst, 'archive')
        _make_zipfile(base_name, src)

        # the .zip archive must have been written
        self.assertTrue(os.path.exists(base_name + '.zip'))

    def test_make_archive(self) -> None:
        # An unknown archive format name must raise ValueError.
        base_name = os.path.join(self.mkdtemp(), 'archive')
        self.assertRaises(ValueError, make_archive, base_name, 'xxx')

    @unittest.skipUnless(zlib, "Requires zlib")
    def test_make_archive_owner_group(self) -> None:
        # make_archive must succeed for every owner/group combination,
        # including bogus names, even without uid/gid support.
        if UID_GID_SUPPORT:
            group = grp.getgrgid(0)[0]
            owner = pwd.getpwuid(0)[0]
        else:
            group = owner = 'root'

        base_dir, root_dir, base_name = self._create_files()
        base_name = os.path.join(self.mkdtemp(), 'archive')

        for fmt, kwargs in (
                ('zip', {'owner': owner, 'group': group}),
                ('zip', {}),
                ('tar', {'owner': owner, 'group': group}),
                ('tar', {'owner': 'kjhkjhkjg', 'group': 'oihohoh'})):
            res = make_archive(base_name, fmt, root_dir, base_dir, **kwargs)
            self.assertTrue(os.path.exists(res))

    @unittest.skipUnless(zlib, "Requires zlib")
    @unittest.skipUnless(UID_GID_SUPPORT, "Requires grp and pwd support")
    def test_tarfile_root_owner(self) -> None:
        # Archiving with root's owner/group names must record uid/gid 0
        # for every member.
        tmpdir, tmpdir2, base_name = self._create_files()
        group = grp.getgrgid(0)[0]
        owner = pwd.getpwuid(0)[0]
        old_dir = os.getcwd()
        os.chdir(tmpdir)
        try:
            archive_name = _make_tarball(base_name,
                                         'dist',
                                         compress=None,
                                         owner=owner,
                                         group=group)
        finally:
            os.chdir(old_dir)

        # the uncompressed tarball must exist...
        self.assertTrue(os.path.exists(archive_name))

        # ...and every member must be owned by root
        with tarfile.open(archive_name) as archive:
            for member in archive.getmembers():
                self.assertEqual(member.uid, 0)
                self.assertEqual(member.gid, 0)

    def test_make_archive_cwd(self) -> None:
        # make_archive must restore the cwd even if the archiver raises.
        start_dir = os.getcwd()

        def _always_fails(*args: Any, **kw: Any) -> Any:
            raise RuntimeError()

        register_archive_format('xxx', _always_fails, [], 'xxx file')
        try:
            try:
                make_archive('xxx', 'xxx', root_dir=self.mkdtemp())
            except Exception:
                pass
            self.assertEqual(os.getcwd(), start_dir)
        finally:
            # never leave the broken format registered
            unregister_archive_format('xxx')

    def test_register_archive_format(self) -> None:
        # Malformed registrations are rejected with TypeError.
        self.assertRaises(TypeError, register_archive_format, 'xxx', 1)
        self.assertRaises(TypeError, register_archive_format, 'xxx',
                          lambda: 1 / 0, 1)
        self.assertRaises(TypeError, register_archive_format, 'xxx',
                          lambda: 1 / 0, [(1, 2), (1, 2, 3)])

        # A valid registration shows up in get_archive_formats()...
        register_archive_format('xxx', lambda: 1 / 0, [('x', 2)], 'xxx file')
        self.assertIn('xxx', [name for name, _ in get_archive_formats()])

        # ...and disappears again after unregistering.
        unregister_archive_format('xxx')
        self.assertNotIn('xxx', [name for name, _ in get_archive_formats()])

    def _compare_dirs(self, dir1: str, dir2: str) -> List[str]:
        # check that dir1 and dir2 are equivalent,
        # return the diff
        diff = List[str]()
        for root, dirs, files in os.walk(dir1):
            for file_ in files:
                path = os.path.join(root, file_)
                target_path = os.path.join(dir2, os.path.split(path)[1])
                if not os.path.exists(target_path):
                    diff.append(file_)
        return diff

    @unittest.skipUnless(zlib, "Requires zlib")
    def test_unpack_archive(self) -> None:
        # Round-trip every supported format through make/unpack_archive.
        formats = ['tar', 'gztar', 'zip']
        if BZ2_SUPPORTED:
            formats.append('bztar')

        for fmt in formats:
            tmpdir = self.mkdtemp()
            base_dir, root_dir, base_name = self._create_files()
            extract_dir = self.mkdtemp()
            filename = make_archive(base_name, fmt, root_dir, base_dir)

            # unpack with the format guessed from the filename
            unpack_archive(filename, extract_dir)
            self.assertEqual(self._compare_dirs(tmpdir, extract_dir), [])

            # and again, with the format given explicitly
            extract_dir2 = self.mkdtemp()
            unpack_archive(filename, extract_dir2, format=fmt)
            self.assertEqual(self._compare_dirs(tmpdir, extract_dir2), [])

        # unknown file / unknown format must fail loudly
        self.assertRaises(shutil.ReadError, unpack_archive, TESTFN)
        self.assertRaises(ValueError, unpack_archive, TESTFN, format='xxx')

    def test_unpack_registery(self) -> None:
        # register/unregister_unpack_format must round-trip cleanly.
        formats = get_unpack_formats()

        def _boo(filename: str, extract_dir: str, extra: int) -> None:
            # the registered extra args and call args must flow through
            self.assertEqual(extra, 1)
            self.assertEqual(filename, 'stuff.boo')
            self.assertEqual(extract_dir, 'xx')

        register_unpack_format('Boo', ['.boo', '.b2'], _boo, [('extra', 1)])
        unpack_archive('stuff.boo', 'xx')

        # a second unpacker for .boo must be rejected...
        self.assertRaises(RegistryError, register_unpack_format, 'Boo2',
                          ['.boo'], _boo)

        # ...until the first one is unregistered
        unregister_unpack_format('Boo')
        register_unpack_format('Boo2', ['.boo'], _boo)
        self.assertIn(('Boo2', ['.boo'], ''), get_unpack_formats())
        self.assertNotIn(('Boo', ['.boo'], ''), get_unpack_formats())

        # leave the registry exactly as we found it
        unregister_unpack_format('Boo2')
        self.assertEqual(get_unpack_formats(), formats)
Example #19
0
import json

# noinspection PyUnreachableCode
if False:
    # Dead-code block: never executed at runtime.  It gives IDEs and type
    # checkers concrete types for the environment-injected globals
    # (iop, ipar, ext).  NOTE(review): appears to target TouchDesigner's
    # _stubs convention -- confirm against the project's stub package.
    # noinspection PyUnresolvedReferences
    from _stubs import *
    from typing import Any
    iop.hostedComp = COMP()
    ipar.compEditor = Any()
    from _stubs.TDCallbacksExt import CallbacksExt
    ext.Callbacks = CallbacksExt(None)


class Settings:
    def __init__(self, ownerComp):
        # Component that owns this extension instance.
        self.ownerComp = ownerComp  # type: COMP

    @staticmethod
    def BuildSettings(parameters: 'DAT'):
        settings = {}
        for i in range(1, parameters.numRows):
            if parameters[i,
                          'readonly'] == '1' or parameters[i,
                                                           'enabled'] == '0':
                continue
            path = parameters[i, 'path'].val
            if path not in settings:
                settings[path] = {}
            name = parameters[i, 'name'].val
            mode = parameters[i, 'mode']
            if mode == '':
Example #20
0
    def _format(self, object: object, stream: TextIO, indent: int,
                allowance: int, context: Dict[int, int], level: int) -> None:
        """Recursively pretty-print *object* to *stream*.

        indent/allowance track horizontal budget; *context* maps live
        object ids on the current recursion path (cycle detection);
        *level* is the current nesting depth (checked against _depth).
        Sets self._recursive/_readable as side effects.
        """
        level = level + 1
        # Already on the recursion path: emit a recursion marker and bail.
        if objid := None:  # placeholder - see below
            pass
        objid = _id(object)
        if objid in context:
            stream.write(_recursion(object))
            self._recursive = True
            self._readable = False
            return
        rep = self._repr(object, context, level - 1)
        typ = _type(object)
        # Does the one-line repr still fit in the remaining width?
        sepLines = _len(rep) > (self._width - 1 - indent - allowance)
        write = stream.write

        # Depth limit reached: fall back to the (possibly abbreviated) repr.
        if self._depth and level > self._depth:
            write(rep)
            return

        if sepLines:
            r = getattr(typ, "__repr__", None)
            if issubclass(typ, dict):
                dictobj = cast(dict, object)
                write('{')
                if self._indent_per_level > 1:
                    write((self._indent_per_level - 1) * ' ')
                length = _len(dictobj)
                if length:
                    # Mark this object as in-progress for cycle detection.
                    context[objid] = 1
                    indent = indent + self._indent_per_level
                    # OrderedDict keeps insertion order; plain dicts are
                    # sorted by key for deterministic output.
                    if issubclass(typ, _OrderedDict):
                        items = list(dictobj.items())
                    else:
                        items = sorted(dictobj.items(), key=_safe_tuple)
                    key, ent = items[0]
                    rep = self._repr(key, context, level)
                    write(rep)
                    write(': ')
                    # value is indented past "key: "
                    self._format(ent, stream, indent + _len(rep) + 2,
                                 allowance + 1, context, level)
                    if length > 1:
                        for key, ent in items[1:]:
                            rep = self._repr(key, context, level)
                            write(',\n%s%s: ' % (' ' * indent, rep))
                            self._format(ent, stream, indent + _len(rep) + 2,
                                         allowance + 1, context, level)
                    indent = indent - self._indent_per_level
                    del context[objid]
                write('}')
                return

            # Only format list/tuple/set/frozenset specially when they use
            # the builtin __repr__ (a custom __repr__ wins otherwise).
            if ((issubclass(typ, list) and r is list.__repr__)
                    or (issubclass(typ, tuple) and r is tuple.__repr__)
                    or (issubclass(typ, set) and r is set.__repr__) or
                (issubclass(typ, frozenset) and r is frozenset.__repr__)):
                anyobj = Any(object)  # TODO Collection?
                length = _len(anyobj)
                if issubclass(typ, list):
                    write('[')
                    endchar = ']'
                    lst = anyobj
                elif issubclass(typ, set):
                    if not length:
                        write('set()')
                        return
                    write('{')
                    endchar = '}'
                    # sets are unordered: sort for stable output
                    lst = sorted(anyobj, key=_safe_key)
                elif issubclass(typ, frozenset):
                    if not length:
                        write('frozenset()')
                        return
                    write('frozenset({')
                    endchar = '})'
                    lst = sorted(anyobj, key=_safe_key)
                    # account for the len("frozenset(") prefix
                    indent += 10
                else:
                    write('(')
                    endchar = ')'
                    lst = list(anyobj)
                if self._indent_per_level > 1:
                    write((self._indent_per_level - 1) * ' ')
                if length:
                    context[objid] = 1
                    indent = indent + self._indent_per_level
                    self._format(lst[0], stream, indent, allowance + 1,
                                 context, level)
                    if length > 1:
                        for ent in lst[1:]:
                            write(',\n' + ' ' * indent)
                            self._format(ent, stream, indent, allowance + 1,
                                         context, level)
                    indent = indent - self._indent_per_level
                    del context[objid]
                # single-element tuples need the trailing comma
                if issubclass(typ, tuple) and length == 1:
                    write(',')
                write(endchar)
                return

        # fits on one line (or no special handling applies)
        write(rep)
Example #21
0
import re

# noinspection PyUnreachableCode
if False:
    # Dead-code block: never executed at runtime, but gives IDEs and type
    # checkers concrete types for the environment-injected globals
    # (iop, ipar).  NOTE(review): appears to follow the TouchDesigner
    # _stubs convention -- confirm against the project's stub package.
    # noinspection PyUnresolvedReferences
    from _stubs import *
    from typing import Any
    iop.hostedComp = COMP()
    ipar.compEditor = Any()
    ipar.workspace = Any()


class ComponentEditor:
    """Extension managing the component hosted in the editor."""

    def __init__(self, ownerComp):
        # Component that owns this extension instance.
        self.ownerComp = ownerComp

    def LoadComponent(self):
        """Load the tox selected in the editor, or reset if none is set."""
        comp = iop.hostedComp
        tox = ipar.compEditor.Toxfile.eval()
        if tox:
            msg = f'Loading component {tdu.expandPath(tox)}'
            print(msg)
            ui.status = msg
            # comp = comp.loadTox(tox, unwired=True)
            comp.par.externaltox = tox
        else:
            # No file selected: strip the external reference, custom
            # parameters and all children so the host comp is pristine.
            comp.par.externaltox = ''
            comp.destroyCustomPars()
            for child in list(comp.children):
                if child.valid:
                    child.destroy()
Example #22
0
    def setup_system_permissions(self):
        """Create the built-in admin/user roles and permissions.

        Generator-based coroutine: each ``yield`` awaits the result of
        ``self.new`` / ``self._Roles.new``.  Idempotent -- items are only
        created when their deterministic (seed-derived) id is absent.
        """
        Roles = self._Roles

        # Global admin permission: full access to everything.
        permission_id = self._Hash.sha224_compact(f"auto-admin-{self.system_seed}")
        if permission_id not in self.permissions:
            role = Roles.get("admin")
            yield self.new(
                role,
                machine_label="admin",
                label="Administrator",
                description="Grant access to everything for admins.",
                actions=[Any()],
                resources=[{"platform": Any(), "id": Any()}],
                # subjects=[f"role:{role.role_id}"],
                permission_id=permission_id,
                effect=vakt.ALLOW_ACCESS,
                request_by="permissions",
                request_by_type="library",
                request_context="setup_system_permissions",
                load_source="local",
                )

        for platform, data in AUTH_PLATFORMS.items():
            platform_parts = platform.split(".")

            # --- per-platform admin role & permission ---------------------
            platform_machine_label = f"{platform_parts[0]}_{platform_parts[1]}_{platform_parts[2]}_admin".lower()
            platform_label = f"{platform_parts[2]} {platform_parts[1]} {platform_parts[0]}"
            permission_id = self._Hash.sha224_compact(f"admin-{platform_machine_label} {self.system_seed}")
            role_id = self._Hash.sha224_compact(permission_id)
            # BUG FIX: description was previously assigned only inside the
            # KeyError branch, raising NameError (or reusing a stale value)
            # whenever the role already existed.  Compute it up front.
            actions_string = ", ".join(data["actions"]["possible"])
            description = f"Admin access to '{platform_label}', actions: {actions_string}"
            try:
                role = self._Roles.get_advanced({"role_id": role_id, "machine_label": platform_machine_label},
                                                multiple=False)
            except KeyError:
                role = yield self._Roles.new(
                    role_id=role_id,
                    machine_label=platform_machine_label,
                    label=f"{platform_label} admin",
                    description=description,
                    request_by="permissions",
                    request_by_type="library",
                    request_context="setup_system_permissions",
                    load_source="local",
                )
            if permission_id not in self.permissions:
                yield self.new(
                    role,
                    machine_label=platform_machine_label,
                    label=f"{platform_label} admin",
                    description=description,
                    actions=data["actions"]["possible"],
                    resources=[{"platform": Eq(platform), "id": Any()}],
                    # subjects=[Eq(f"role:{role.role_id}")],
                    permission_id=permission_id,
                    effect=vakt.ALLOW_ACCESS,
                    request_by="permissions",
                    request_by_type="library",
                    request_context="setup_system_permissions",
                    load_source="local",
                    )

            # --- fine-grained per-action roles & permissions --------------
            for action in data["actions"]["possible"]:
                platform_machine_label = f"{platform_parts[0]}_{platform_parts[1]}_{platform_parts[2]}_{action}".lower()
                platform_label = f"{platform_parts[0]} {platform_parts[1]} {platform_parts[2]}"
                permission_id = self._Hash.sha224_compact(f"action-{platform_machine_label} {action} {self.system_seed}")
                role_id = self._Hash.sha224_compact(permission_id)
                # BUG FIX: as above, description must exist on both paths.
                description = f"Allow '{platform_label}', action: {action}"
                try:
                    role = self._Roles.get_advanced({"role_id": role_id, "machine_label": platform_machine_label},
                                                    multiple=False)
                except KeyError:
                    role = yield self._Roles.new(
                        role_id=role_id,
                        label=f"{platform_label} {action}",
                        machine_label=platform_machine_label,
                        description=description,
                        request_by="permissions",
                        request_by_type="library",
                        request_context="setup_system_permissions",
                        load_source="local"
                    )
                if permission_id not in self.permissions:
                    yield self.new(
                        role,
                        machine_label=platform_machine_label,
                        label=f"{platform_label} {action}",
                        description=description,
                        actions=[action],
                        resources=[{"platform": Eq(platform), "id": Any()}],
                        # subjects=[f"role:{role.role_id}"],
                        permission_id=permission_id,
                        effect=vakt.ALLOW_ACCESS,
                        request_by="permissions",
                        request_by_type="library",
                        request_context="setup_system_permissions",
                        load_source="local"
                        )

            # --- baseline access for all users ----------------------------
            # Grant all users basic access rights. This can be revoked using
            # new policies to negate this.
            platform_machine_label = f"{platform_parts[0]}_{platform_parts[1]}_{platform_parts[2]}_user".lower()
            platform_label = f"{platform_parts[2]} {platform_parts[1]} {platform_parts[0]}"
            permission_id = self._Hash.sha224_compact(f"user-{platform_machine_label} {self.system_seed}")
            if permission_id not in self.permissions:
                # BUG FIX: the grant below previously ran even when the
                # platform had no user actions, reusing 'role' and
                # 'actions_string' left over from earlier iterations (or
                # raising NameError on the first pass).  Only grant when
                # there is something to grant.
                if len(data["actions"]["user"]):
                    role = Roles.get("users")
                    actions_string = ", ".join(data["actions"]["user"])
                    yield self.new(
                        role,
                        machine_label=platform_machine_label,
                        label=platform_label,
                        description=f"All users access to '{platform_machine_label}', actions: {actions_string}",
                        actions=data["actions"]["user"],
                        resources=[{"platform": Eq(platform), "id": Any()}],
                        # subjects=[Eq(f"role:{role.role_id}")],
                        permission_id=permission_id,
                        effect=vakt.ALLOW_ACCESS,
                        request_by="permissions",
                        request_by_type="library",
                        request_context="setup_system_permissions",
                        load_source="local"
                    )
Example #23
0
 def test_cannot_instantiate(self):
     # Any must reject direct instantiation with TypeError.
     self.assertRaises(TypeError, Any)
Example #24
0
def _safe_repr(object: object, context: Dict[int, int], maxlevels: int,
               level: int) -> Tuple[str, bool, bool]:
    """Return (repr_string, is_readable, is_recursive) for *object*.

    A recursion-safe repr: *context* holds the ids of objects currently
    being formatted on this call path, *maxlevels* bounds nesting depth.
    is_readable means the repr could round-trip through eval();
    is_recursive means a reference cycle was hit.
    """
    typ = _type(object)
    if typ is str:
        s = cast(str, object)
        # Without the locale module loaded, plain repr() is already safe.
        if 'locale' not in _sys.modules:
            return repr(object), True, False
        # Pick the quote style that avoids escaping the more common quote.
        if "'" in s and '"' not in s:
            closure = '"'
            quotes = {'"': '\\"'}
        else:
            closure = "'"
            quotes = {"'": "\\'"}
        qget = quotes.get
        sio = _StringIO()
        write = sio.write
        for char in s:
            if char.isalpha():
                write(char)
            else:
                # non-alpha chars: escape via repr, minus its quotes
                write(qget(char, repr(char)[1:-1]))
        return ("%s%s%s" % (closure, sio.getvalue(), closure)), True, False

    # Only take the fast dict/list/tuple paths when the builtin __repr__
    # is in effect (a user-defined __repr__ must win).
    r = getattr(typ, "__repr__", None)
    if issubclass(typ, dict) and r is dict.__repr__:
        if not object:
            return "{}", True, False
        objid = _id(object)
        if maxlevels and level >= maxlevels:
            return "{...}", False, objid in context
        if objid in context:
            # cycle: already formatting this object higher up the stack
            return _recursion(object), False, True
        context[objid] = 1
        readable = True
        recursive = False
        components = List[str]()
        append = components.append
        level += 1
        saferepr = _safe_repr
        # sort by key for deterministic output
        items = sorted((cast(dict, object)).items(), key=_safe_tuple)
        for k, v in items:
            krepr, kreadable, krecur = saferepr(k, context, maxlevels, level)
            vrepr, vreadable, vrecur = saferepr(v, context, maxlevels, level)
            append("%s: %s" % (krepr, vrepr))
            readable = readable and kreadable and vreadable
            if krecur or vrecur:
                recursive = True
        del context[objid]
        return "{%s}" % _commajoin(components), readable, recursive

    if (issubclass(typ, list) and r is list.__repr__) or \
       (issubclass(typ, tuple) and r is tuple.__repr__):
        anyobj = Any(object)  # TODO Sequence?
        if issubclass(typ, list):
            if not object:
                return "[]", True, False
            format = "[%s]"
        elif _len(anyobj) == 1:
            # single-element tuples need the trailing comma
            format = "(%s,)"
        else:
            if not object:
                return "()", True, False
            format = "(%s)"
        objid = _id(object)
        if maxlevels and level >= maxlevels:
            return format % "...", False, objid in context
        if objid in context:
            return _recursion(object), False, True
        context[objid] = 1
        readable = True
        recursive = False
        components = []
        append = components.append
        level += 1
        for o in anyobj:
            orepr, oreadable, orecur = _safe_repr(o, context, maxlevels, level)
            append(orepr)
            if not oreadable:
                readable = False
            if orecur:
                recursive = True
        del context[objid]
        return format % _commajoin(components), readable, recursive

    # Fallback: plain repr; '<...>' style reprs are not eval()-readable.
    rep = repr(object)
    return rep, bool(rep and not rep.startswith('<')), False
Example #25
0
    def make_init_wrapper(self, tdef: ClassDef) -> List[Node]:
        """Make and return an implicit __init__ if class needs it.

        Otherwise, return an empty list. We include an implicit
        __init__ if the class is generic or if it extends a generic class
        and if it does not define __init__.

        The __init__ of a generic class requires one or more extra type
        variable arguments. The inherited __init__ may not accept these.

        For example, assume these definitions:

        . class A(Generic[T]): pass
        . class B(A[int]): pass

        The constructor for B will be (equivalent to)

        . def __init__(self: B) -> None:
        .     self.__tv = <int>
        .     super().__init__(<int>)
        """

        # FIX overloading, default args / varargs, keyword args

        info = tdef.info

        if '__init__' not in info.names and (
                tdef.is_generic() or (info.bases and
                                      info.mro[1].is_generic())):
            # Generic class with no explicit __init__ method
            # (i.e. __init__ inherited from superclass). Generate a
            # wrapper that initializes type variable slots and calls
            # the superclass __init__ method.

            # mro[1] is the immediate superclass.
            base = info.mro[1]
            selftype = self_type(info)
            # Resolve the inherited __init__'s type as seen from this class.
            callee_type = cast(Callable, analyse_member_access(
                '__init__', selftype, None, False, True, None, None,
                base))

            # Now the callee type may contain the type variables of a
            # grandparent as bound type variables, but we want the
            # type variables of the parent class. Explicitly set the
            # bound type variables.
            callee_type = self.fix_bound_init_tvars(callee_type,
                map_instance_to_supertype(selftype, base))

            super_init = cast(FuncDef, base.get_method('__init__'))

            # Build argument list. Mirror the superclass __init__'s
            # arguments (after self), with the resolved argument types.
            args = [Var('self')]
            for i in range(1, len(super_init.args)):
                args.append(Var(super_init.args[i].name()))
                args[-1].type = callee_type.arg_types[i - 1]

            selft = self_type(self.tf.type_context())
            callee_type = prepend_arg_type(callee_type, selft)

            creat = FuncDef('__init__', args,
                            super_init.arg_kinds, [None] * len(args),
                            Block([]))
            creat.info = tdef.info
            creat.type = callee_type
            creat.is_implicit = False
            # Register the generated __init__ in the class symbol table.
            tdef.info.names['__init__'] = SymbolTableNode(MDEF, creat,
                                                          typ=creat.type)

            # Insert a call to superclass constructor. If the
            # superclass is object, the constructor does nothing =>
            # omit the call.
            if base.fullname() != 'builtins.object':
                creat.body.body.append(
                    self.make_superclass_constructor_call(tdef.info,
                                                          callee_type))

            # Implicit cast from FuncDef[] to Node[] is safe below.
            return Any(self.func_tf.transform_method(creat))
        else:
            return []
Example #26
0
from datetime import datetime
from pathlib import Path
from typing import Any, Callable, Dict, List, Optional

# noinspection PyUnreachableCode
if False:
	# Dead-code block: never executed at runtime, but gives IDEs and type
	# checkers concrete types for the environment-injected globals
	# (iop, ipar).  NOTE(review): appears to follow the TouchDesigner
	# _stubs convention -- confirm against the project's stub package.
	# noinspection PyUnresolvedReferences
	from _stubs import *
	from _stubs.PopDialogExt import PopDialogExt
	from .components.SettingsExt import UserSettings
	from .components.EditorViewsExt import EditorViews
	from .components.EditorToolsExt import EditorTools
	from ui.statusOverlayExt import StatusOverlay
	iop.hostedComp = COMP()
	ipar.editorState = Any()
	ipar.workspace = Any()
	ipar.compPicker = Any()
	ipar.editorUIState = Any()
	iop.libraryLoader = LibraryLoader(None)
	iop.userSettings = UserSettings(None)
	iop.editorViews = EditorViews(None)
	iop.editorTools = EditorTools(None)
	iop.statusOverlay = StatusOverlay(None)

# Prefer the flat module if importable; otherwise fall back to the
# package-relative location.  NOTE(review): presumably the flat form is
# used when modules are loaded into a shared namespace -- confirm.
try:
	from EditorCommon import *
except ImportError:
	from .components.EditorCommon import *

class Editor:
	def __init__(self, ownerComp):
Example #27
0
from pathlib import Path
from typing import List, Optional
from .SettingsExt import SettingsOp, SettingsExtBase

# noinspection PyUnreachableCode
if False:
    # Dead-code block: never executed at runtime, but gives IDEs and type
    # checkers concrete types for the environment-injected globals
    # (iop, ipar).  NOTE(review): appears to follow the TouchDesigner
    # _stubs convention -- confirm against the project's stub package.
    # noinspection PyUnresolvedReferences
    from _stubs import *
    from typing import Any
    iop.workspaceState = COMP()
    ipar.workspaceState = Any()


class Workspace(SettingsExtBase):
    def __init__(self, ownerComp):
        # Component that owns this extension instance.
        self.ownerComp = ownerComp  # type: COMP

    def getSettingsOps(self) -> List['SettingsOp']:
        """Return the workspace SettingsOp plus any rows from the optional
        settings-op table parameter."""
        ops = [self.workspaceSettingsOp()]
        opTable = self.ownerComp.par.Settingsoptable.eval()  # type: DAT
        if opTable:
            ops.extend(
                SettingsOp.fromDatRow(opTable, row)
                for row in range(1, opTable.numRows))
        return ops

    def PromptLoadWorkspaceFile(self):
        path = ui.chooseFile(load=True,
                             fileTypes=['json'],
                             title='Open Workspace File')
        if path:
            self.LoadWorkspaceFile(path)