Example 1
def _load_ips_netifaces():
    """load ip addresses with netifaces"""
    import netifaces
    global LOCALHOST
    local_ips = []
    public_ips = []
    
    # list of iface names, 'lo0', 'eth0', etc.
    for iface in netifaces.interfaces():
        # list of ipv4 addrinfo dicts
        ipv4s = netifaces.ifaddresses(iface).get(netifaces.AF_INET, [])
        for entry in ipv4s:
            addr = entry.get('addr')
            if not addr:
                continue
            if not (iface.startswith('lo') or addr.startswith('127.')):
                public_ips.append(addr)
            elif not LOCALHOST:
                LOCALHOST = addr
            local_ips.append(addr)
    if not LOCALHOST:
        # we never found a loopback interface (can this ever happen?), assume common default
        LOCALHOST = '127.0.0.1'
        local_ips.insert(0, LOCALHOST)
    local_ips.extend(['0.0.0.0', ''])
    LOCAL_IPS[:] = uniq_stable(local_ips)
    PUBLIC_IPS[:] = uniq_stable(public_ips)
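
All of these snippets funnel their address lists through uniq_stable. As a rough standalone sketch of the order-preserving de-duplication they rely on (the real helper is imported from IPython.utils.data; this reimplementation is only illustrative):

def uniq_stable(elems):
    """Return the unique elements of `elems`, keeping first-seen order."""
    seen = set()
    out = []
    for x in elems:
        if x not in seen:
            seen.add(x)
            out.append(x)
    return out

# Duplicate loopback entries collapse; first-seen order is preserved.
assert uniq_stable(['127.0.0.1', '192.168.1.5', '127.0.0.1', '0.0.0.0', '']) == \
    ['127.0.0.1', '192.168.1.5', '0.0.0.0', '']
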
Example 2
def _load_ips_gethostbyname():
    """load ip addresses with socket.gethostbyname_ex
    
    This can be slow.
    """
    global LOCALHOST
    try:
        LOCAL_IPS[:] = socket.gethostbyname_ex('localhost')[2]
    except socket.error:
        # assume common default
        LOCAL_IPS[:] = ['127.0.0.1']
    
    try:
        hostname = socket.gethostname()
        PUBLIC_IPS[:] = socket.gethostbyname_ex(hostname)[2]
        # try hostname.local, in case hostname has been short-circuited to loopback
        if not hostname.endswith('.local') and all(ip.startswith('127') for ip in PUBLIC_IPS):
            PUBLIC_IPS[:] = socket.gethostbyname_ex(socket.gethostname() + '.local')[2]
    except socket.error:
        pass
    finally:
        PUBLIC_IPS[:] = uniq_stable(PUBLIC_IPS)
        LOCAL_IPS.extend(PUBLIC_IPS)
    
    # include all-interface aliases: 0.0.0.0 and ''
    LOCAL_IPS.extend(['0.0.0.0', ''])

    LOCAL_IPS[:] = uniq_stable(LOCAL_IPS)

    LOCALHOST = LOCAL_IPS[0]
Example 3
def _load_ips_gethostbyname():
    """load ip addresses with socket.gethostbyname_ex
    
    This can be slow.
    """
    global LOCALHOST
    try:
        LOCAL_IPS[:] = socket.gethostbyname_ex('localhost')[2]
    except socket.error:
        # assume common default
        LOCAL_IPS[:] = ['127.0.0.1']

    try:
        hostname = socket.gethostname()
        PUBLIC_IPS[:] = socket.gethostbyname_ex(hostname)[2]
        # try hostname.local, in case hostname has been short-circuited to loopback
        if not hostname.endswith('.local') and all(
                ip.startswith('127') for ip in PUBLIC_IPS):
            PUBLIC_IPS[:] = socket.gethostbyname_ex(socket.gethostname() +
                                                    '.local')[2]
    except socket.error:
        pass
    finally:
        PUBLIC_IPS[:] = uniq_stable(PUBLIC_IPS)
        LOCAL_IPS.extend(PUBLIC_IPS)

    # include all-interface aliases: 0.0.0.0 and ''
    LOCAL_IPS.extend(['0.0.0.0', ''])

    LOCAL_IPS[:] = uniq_stable(LOCAL_IPS)

    LOCALHOST = LOCAL_IPS[0]
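
The loaders above report their results by mutating module-level globals rather than returning them. A hedged usage sketch, assuming it runs inside the same module that defines the loaders and the LOCALHOST, LOCAL_IPS and PUBLIC_IPS globals:

# Hypothetical driver; the globals referenced here are the ones the loader mutates.
_load_ips_gethostbyname()      # or _load_ips_netifaces() if netifaces is installed

# The results land in the module globals rather than in a return value, e.g.:
#   LOCALHOST  -> a loopback address such as '127.0.0.1'
#   LOCAL_IPS  -> every local address plus the '0.0.0.0' and '' bind-all aliases
#   PUBLIC_IPS -> only the non-loopback addresses
print(LOCALHOST, LOCAL_IPS, PUBLIC_IPS)
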
Example 4
def _load_ips_netifaces():
    """load ip addresses with netifaces"""
    import netifaces
    global LOCALHOST
    local_ips = []
    public_ips = []

    # list of iface names, 'lo0', 'eth0', etc.
    for iface in netifaces.interfaces():
        # list of ipv4 addrinfo dicts
        ipv4s = netifaces.ifaddresses(iface).get(netifaces.AF_INET, [])
        for entry in ipv4s:
            addr = entry.get('addr')
            if not addr:
                continue
            if not (iface.startswith('lo') or addr.startswith('127.')):
                public_ips.append(addr)
            elif not LOCALHOST:
                LOCALHOST = addr
            local_ips.append(addr)
    if not LOCALHOST:
        # we never found a loopback interface (can this ever happen?), assume common default
        LOCALHOST = '127.0.0.1'
        local_ips.insert(0, LOCALHOST)
    local_ips.extend(['0.0.0.0', ''])
    LOCAL_IPS[:] = uniq_stable(local_ips)
    PUBLIC_IPS[:] = uniq_stable(public_ips)
Example 5
    def _get_public_ip(self):
        """Avoid picking up docker and VM network interfaces in IPython 2.0.

        Adjusts _load_ips_netifaces from IPython.utils.localinterfaces. Changes
        submitted upstream so we can remove this when incorporated into released IPython.

        Prioritizes a set of common interfaces to try to make better decisions
        when choosing among multiple candidates.
        """
        standard_ips = []
        priority_ips = []
        vm_ifaces = set(["docker0", "virbr0", "lxcbr0"])  # VM/container interfaces we do not want
        priority_ifaces = ("eth",)  # Interfaces we prefer to get IPs from

        # list of iface names, 'lo0', 'eth0', etc.
        for iface in netifaces.interfaces():
            if iface not in vm_ifaces:
                # list of ipv4 addrinfo dicts
                ipv4s = netifaces.ifaddresses(iface).get(netifaces.AF_INET, [])
                for entry in ipv4s:
                    addr = entry.get('addr')
                    if not addr:
                        continue
                    if not (iface.startswith('lo') or addr.startswith('127.')):
                        if iface.startswith(priority_ifaces):
                            priority_ips.append(addr)
                        else:
                            standard_ips.append(addr)
        public_ips = uniq_stable(standard_ips + priority_ips)
        return public_ips[-1]
Example 6
    def _get_public_ip(self):
        """Avoid picking up docker and VM network interfaces in IPython 2.0.

        Adjusts _load_ips_netifaces from IPython.utils.localinterfaces. Changes
        submitted upstream so we can remove this when incorporated into released IPython.

        Prioritizes a set of common interfaces to try to make better decisions
        when choosing among multiple candidates.
        """
        standard_ips = []
        priority_ips = []
        vm_ifaces = set(["docker0", "virbr0", "lxcbr0"])  # VM/container interfaces we do not want
        priority_ifaces = ("eth",)  # Interfaces we prefer to get IPs from

        # list of iface names, 'lo0', 'eth0', etc.
        for iface in netifaces.interfaces():
            if iface not in vm_ifaces:
                # list of ipv4 addrinfo dicts
                ipv4s = netifaces.ifaddresses(iface).get(netifaces.AF_INET, [])
                for entry in ipv4s:
                    addr = entry.get('addr')
                    if not addr:
                        continue
                    if not (iface.startswith('lo') or addr.startswith('127.')):
                        if iface.startswith(priority_ifaces):
                            priority_ips.append(addr)
                        else:
                            standard_ips.append(addr)
        public_ips = uniq_stable(standard_ips + priority_ips)
        return public_ips[-1]
Example 7
def _populate_from_list(addrs):
    """populate local and public IPs from flat list of all IPs"""
    if not addrs:
        raise NoIPAddresses
    
    global LOCALHOST
    public_ips = []
    local_ips = []
    
    for ip in addrs:
        local_ips.append(ip)
        if not ip.startswith('127.'):
            public_ips.append(ip)
        elif not LOCALHOST:
            LOCALHOST = ip
    
    if not LOCALHOST:
        LOCALHOST = '127.0.0.1'
        local_ips.insert(0, LOCALHOST)
        
    local_ips.extend(['0.0.0.0', ''])
    
    LOCAL_IPS[:] = uniq_stable(local_ips)
    PUBLIC_IPS[:] = uniq_stable(public_ips)
Example 8
def _populate_from_list(addrs):
    """populate local and public IPs from flat list of all IPs"""
    if not addrs:
        raise NoIPAddresses

    global LOCALHOST
    public_ips = []
    local_ips = []

    for ip in addrs:
        local_ips.append(ip)
        if not ip.startswith('127.'):
            public_ips.append(ip)
        elif not LOCALHOST:
            LOCALHOST = ip

    if not LOCALHOST:
        LOCALHOST = '127.0.0.1'
        local_ips.insert(0, LOCALHOST)

    local_ips.extend(['0.0.0.0', ''])

    LOCAL_IPS[:] = uniq_stable(local_ips)
    PUBLIC_IPS[:] = uniq_stable(public_ips)
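
As a small worked example of the contract, here is a hypothetical driver; it assumes it runs in the module that defines _populate_from_list and its globals, and the addresses are made up:

# Hypothetical driver showing what _populate_from_list leaves in the globals.
LOCALHOST = ''
LOCAL_IPS = []
PUBLIC_IPS = []

_populate_from_list(['127.0.0.1', '192.168.1.20', '10.0.0.7'])

assert LOCALHOST == '127.0.0.1'                    # first loopback address wins
assert PUBLIC_IPS == ['192.168.1.20', '10.0.0.7']  # non-loopback only
# LOCAL_IPS keeps every address plus the all-interface aliases.
assert LOCAL_IPS == ['127.0.0.1', '192.168.1.20', '10.0.0.7', '0.0.0.0', '']
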
Example 9
    def _get_public_ip(self):
        """Avoid picking up docker and VM network interfaces in IPython 2.0.

        Adjusts _load_ips_netifaces from IPython.utils.localinterfaces. Changes
        submitted upstream so we can remove this when incorporated into released IPython.

        Prioritizes a set of common interfaces to try to make better decisions
        when choosing among multiple candidates.
        """
        # First try to get address from domain name
        fqdn_ip = socket.gethostbyname(socket.getfqdn())
        if fqdn_ip and not fqdn_ip.startswith("127."):
            return fqdn_ip
        # otherwise retrieve from interfaces
        standard_ips = []
        priority_ips = []
        vm_ifaces = set(["docker0", "virbr0", "lxcbr0"])  # VM/container interfaces we do not want
        priority_ifaces = ("eth",)  # Interfaces we prefer to get IPs from

        # list of iface names, 'lo0', 'eth0', etc.
        # We get addresses and order them by priority, with the most preferred last
        for iface in netifaces.interfaces():
            if iface not in vm_ifaces:
                # list of ipv4 addrinfo dicts
                ipv4s = netifaces.ifaddresses(iface).get(netifaces.AF_INET, [])
                for entry in ipv4s:
                    addr = entry.get('addr')
                    if not addr:
                        continue
                    if not (iface.startswith('lo') or addr.startswith('127.')):
                        if iface.startswith(priority_ifaces):
                            priority_ips.append((iface, addr))
                        else:
                            standard_ips.append(addr)
        # Prefer earlier interfaces (eth0) over later (eth1)
        priority_ips.sort(reverse=True)
        priority_ips = [xs[1] for xs in priority_ips]
        public_ips = uniq_stable(standard_ips + priority_ips)
        return public_ips[-1]
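
The reverse sort combined with taking public_ips[-1] is what lets lower-numbered priority interfaces win. A tiny standalone illustration of just that ordering step, with made-up addresses:

# Made-up data: one non-priority address (e.g. from wlan0) and two 'eth' entries.
standard_ips = ['192.168.56.1']
priority_ips = [('eth1', '10.0.1.5'), ('eth0', '10.0.0.5')]

priority_ips.sort(reverse=True)                     # eth1 sorts first, eth0 last
priority_ips = [addr for _iface, addr in priority_ips]

public_ips = standard_ips + priority_ips
assert public_ips[-1] == '10.0.0.5'                 # eth0's address is the one chosen
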
Example 10
    def structured_traceback(self, etype, evalue, etb, tb_offset=None,
                             context=5):
        """Return a nice text document describing the traceback."""

        tb_offset = self.tb_offset if tb_offset is None else tb_offset

        # some locals
        try:
            etype = etype.__name__
        except AttributeError:
            pass
        Colors        = self.Colors   # just a shorthand + quicker name lookup
        ColorsNormal  = Colors.Normal  # used a lot
        col_scheme    = self.color_scheme_table.active_scheme_name
        indent        = ' '*INDENT_SIZE
        em_normal     = '%s\n%s%s' % (Colors.valEm, indent,ColorsNormal)
        undefined     = '%sundefined%s' % (Colors.em, ColorsNormal)
        exc = '%s%s%s' % (Colors.excName,etype,ColorsNormal)

        # some internal-use functions
        def text_repr(value):
            """Hopefully pretty robust repr equivalent."""
            # this is pretty horrible but should always return *something*
            try:
                return pydoc.text.repr(value)
            except KeyboardInterrupt:
                raise
            except:
                try:
                    return repr(value)
                except KeyboardInterrupt:
                    raise
                except:
                    try:
                        # all still in an except block so we catch
                        # getattr raising
                        name = getattr(value, '__name__', None)
                        if name:
                            # ick, recursion
                            return text_repr(name)
                        klass = getattr(value, '__class__', None)
                        if klass:
                            return '%s instance' % text_repr(klass)
                    except KeyboardInterrupt:
                        raise
                    except:
                        return 'UNRECOVERABLE REPR FAILURE'
        def eqrepr(value, repr=text_repr): return '=%s' % repr(value)
        def nullrepr(value, repr=text_repr): return ''

        # meat of the code begins
        try:
            etype = etype.__name__
        except AttributeError:
            pass

        if self.long_header:
            # Header with the exception type, python version, and date
            pyver = 'Python ' + sys.version.split()[0] + ': ' + sys.executable
            date = time.ctime(time.time())

            head = '%s%s%s\n%s%s%s\n%s' % (Colors.topline, '-'*75, ColorsNormal,
                                           exc, ' '*(75-len(str(etype))-len(pyver)),
                                           pyver, date.rjust(75) )
            head += "\nA problem occured executing Python code.  Here is the sequence of function"\
                    "\ncalls leading up to the error, with the most recent (innermost) call last."
        else:
            # Simplified header
            head = '%s%s%s\n%s%s' % (Colors.topline, '-'*75, ColorsNormal,exc,
                                     'Traceback (most recent call last)'.\
                                                  rjust(75 - len(str(etype)) ) )
        frames = []
        # Flush cache before calling inspect.  This helps alleviate some of the
        # problems with python 2.3's inspect.py.
        ##self.check_cache()
        # Drop topmost frames if requested
        try:
            # Try the default getinnerframes and Alex's: Alex's fixes some
            # problems, but it generates empty tracebacks for console errors
            # (5 blanks lines) where none should be returned.
            #records = inspect.getinnerframes(etb, context)[tb_offset:]
            #print 'python records:', records # dbg
            records = _fixed_getinnerframes(etb, context, tb_offset)
            #print 'alex   records:', records # dbg
        except:

            # FIXME: I've been getting many crash reports from python 2.3
            # users, traceable to inspect.py.  If I can find a small test-case
            # to reproduce this, I should either write a better workaround or
            # file a bug report against inspect (if that's the real problem).
            # So far, I haven't been able to find an isolated example to
            # reproduce the problem.
            inspect_error()
            traceback.print_exc(file=self.ostream)
            info('\nUnfortunately, your original traceback can not be constructed.\n')
            return ''

        # build some color string templates outside these nested loops
        tpl_link       = '%s%%s%s' % (Colors.filenameEm,ColorsNormal)
        tpl_call       = 'in %s%%s%s%%s%s' % (Colors.vName, Colors.valEm,
                                              ColorsNormal)
        tpl_call_fail  = 'in %s%%s%s(***failed resolving arguments***)%s' % \
                         (Colors.vName, Colors.valEm, ColorsNormal)
        tpl_local_var  = '%s%%s%s' % (Colors.vName, ColorsNormal)
        tpl_global_var = '%sglobal%s %s%%s%s' % (Colors.em, ColorsNormal,
                                                 Colors.vName, ColorsNormal)
        tpl_name_val   = '%%s %s= %%s%s' % (Colors.valEm, ColorsNormal)
        tpl_line       = '%s%%s%s %%s' % (Colors.lineno, ColorsNormal)
        tpl_line_em    = '%s%%s%s %%s%s' % (Colors.linenoEm,Colors.line,
                                            ColorsNormal)

        # now, loop over all records printing context and info
        abspath = os.path.abspath
        for frame, file, lnum, func, lines, index in records:
            #print '*** record:',file,lnum,func,lines,index  # dbg
            try:
                file = file and abspath(file) or '?'
            except OSError:
                # if file is '<console>' or something not in the filesystem,
                # the abspath call will throw an OSError.  Just ignore it and
                # keep the original file string.
                pass
            link = tpl_link % file
            try:
                args, varargs, varkw, locals = inspect.getargvalues(frame)
            except:
                # This can happen due to a bug in python2.3.  We should be
                # able to remove this try/except when 2.4 becomes a
                # requirement.  Bug details at http://python.org/sf/1005466
                inspect_error()
                traceback.print_exc(file=self.ostream)
                info("\nIPython's exception reporting continues...\n")

            if func == '?':
                call = ''
            else:
                # Decide whether to include variable details or not
                var_repr = self.include_vars and eqrepr or nullrepr
                try:
                    call = tpl_call % (func,inspect.formatargvalues(args,
                                                varargs, varkw,
                                                locals,formatvalue=var_repr))
                except KeyError:
                    # This happens in situations like errors inside generator
                    # expressions, where local variables are listed in the
                    # line, but can't be extracted from the frame.  I'm not
                    # 100% sure this isn't actually a bug in inspect itself,
                    # but since there's no info for us to compute with, the
                    # best we can do is report the failure and move on.  Here
                    # we must *not* call any traceback construction again,
                    # because that would mess up use of %debug later on.  So we
                    # simply report the failure and move on.  The only
                    # limitation will be that this frame won't have locals
                    # listed in the call signature.  Quite subtle problem...
                    # I can't think of a good way to validate this in a unit
                    # test, but running a script consisting of:
                    #  dict( (k,v.strip()) for (k,v) in range(10) )
                    # will illustrate the error, if this exception catch is
                    # disabled.
                    call = tpl_call_fail % func

            # Initialize a list of names on the current line, which the
            # tokenizer below will populate.
            names = []

            def tokeneater(token_type, token, start, end, line):
                """Stateful tokeneater which builds dotted names.

                The list of names it appends to (from the enclosing scope) can
                contain repeated composite names.  This is unavoidable, since
                there is no way to disambiguate partial dotted structures until
                the full list is known.  The caller is responsible for pruning
                the final list of duplicates before using it."""

                # build composite names
                if token == '.':
                    try:
                        names[-1] += '.'
                        # store state so the next token is added for x.y.z names
                        tokeneater.name_cont = True
                        return
                    except IndexError:
                        pass
                if token_type == tokenize.NAME and token not in keyword.kwlist:
                    if tokeneater.name_cont:
                        # Dotted names
                        names[-1] += token
                        tokeneater.name_cont = False
                    else:
                        # Regular new names.  We append everything, the caller
                        # will be responsible for pruning the list later.  It's
                        # very tricky to try to prune as we go, b/c composite
                        # names can fool us.  The pruning at the end is easy
                        # to do (or the caller can print a list with repeated
                        # names if so desired).
                        names.append(token)
                elif token_type == tokenize.NEWLINE:
                    raise IndexError
            # we need to store a bit of state in the tokenizer to build
            # dotted names
            tokeneater.name_cont = False

            def linereader(file=file, lnum=[lnum], getline=linecache.getline):
                line = getline(file, lnum[0])
                lnum[0] += 1
                return line

            # Build the list of names on this line of code where the exception
            # occurred.
            try:
                # This builds the names list in-place by capturing it from the
                # enclosing scope.
                for token in generate_tokens(linereader):
                    tokeneater(*token)
            except IndexError:
                # signals exit of tokenizer
                pass
            except tokenize.TokenError,msg:
                _m = ("An unexpected error occurred while tokenizing input\n"
                      "The following traceback may be corrupted or invalid\n"
                      "The error message is: %s\n" % msg)
                error(_m)

            # prune names list of duplicates, but keep the right order
            unique_names = uniq_stable(names)

            # Start loop over vars
            lvals = []
            if self.include_vars:
                for name_full in unique_names:
                    name_base = name_full.split('.',1)[0]
                    if name_base in frame.f_code.co_varnames:
                        if locals.has_key(name_base):
                            try:
                                value = repr(eval(name_full,locals))
                            except:
                                value = undefined
                        else:
                            value = undefined
                        name = tpl_local_var % name_full
                    else:
                        if frame.f_globals.has_key(name_base):
                            try:
                                value = repr(eval(name_full,frame.f_globals))
                            except:
                                value = undefined
                        else:
                            value = undefined
                        name = tpl_global_var % name_full
                    lvals.append(tpl_name_val % (name,value))
            if lvals:
                lvals = '%s%s' % (indent,em_normal.join(lvals))
            else:
                lvals = ''

            level = '%s %s\n' % (link,call)

            if index is None:
                frames.append(level)
            else:
                frames.append('%s%s' % (level,''.join(
                    _format_traceback_lines(lnum,index,lines,Colors,lvals,
                                            col_scheme))))
Example 11
    def structured_traceback(self, etype, evalue, etb, tb_offset=None,
                             context=5):
        """Return a nice text document describing the traceback."""

        tb_offset = self.tb_offset if tb_offset is None else tb_offset

        # some locals
        try:
            etype = etype.__name__
        except AttributeError:
            pass
        Colors        = self.Colors   # just a shorthand + quicker name lookup
        ColorsNormal  = Colors.Normal  # used a lot
        col_scheme    = self.color_scheme_table.active_scheme_name
        indent        = ' '*INDENT_SIZE
        em_normal     = '%s\n%s%s' % (Colors.valEm, indent,ColorsNormal)
        undefined     = '%sundefined%s' % (Colors.em, ColorsNormal)
        exc = '%s%s%s' % (Colors.excName,etype,ColorsNormal)

        # some internal-use functions
        def text_repr(value):
            """Hopefully pretty robust repr equivalent."""
            # this is pretty horrible but should always return *something*
            try:
                return pydoc.text.repr(value)
            except KeyboardInterrupt:
                raise
            except:
                try:
                    return repr(value)
                except KeyboardInterrupt:
                    raise
                except:
                    try:
                        # all still in an except block so we catch
                        # getattr raising
                        name = getattr(value, '__name__', None)
                        if name:
                            # ick, recursion
                            return text_repr(name)
                        klass = getattr(value, '__class__', None)
                        if klass:
                            return '%s instance' % text_repr(klass)
                    except KeyboardInterrupt:
                        raise
                    except:
                        return 'UNRECOVERABLE REPR FAILURE'
        def eqrepr(value, repr=text_repr): return '=%s' % repr(value)
        def nullrepr(value, repr=text_repr): return ''

        # meat of the code begins
        try:
            etype = etype.__name__
        except AttributeError:
            pass

        if self.long_header:
            # Header with the exception type, python version, and date
            pyver = 'Python ' + sys.version.split()[0] + ': ' + sys.executable
            date = time.ctime(time.time())

            head = '%s%s%s\n%s%s%s\n%s' % (Colors.topline, '-'*75, ColorsNormal,
                                           exc, ' '*(75-len(str(etype))-len(pyver)),
                                           pyver, date.rjust(75) )
            head += "\nA problem occured executing Python code.  Here is the sequence of function"\
                    "\ncalls leading up to the error, with the most recent (innermost) call last."
        else:
            # Simplified header
            head = '%s%s%s\n%s%s' % (Colors.topline, '-'*75, ColorsNormal,exc,
                                     'Traceback (most recent call last)'.\
                                                  rjust(75 - len(str(etype)) ) )
        frames = []
        # Flush cache before calling inspect.  This helps alleviate some of the
        # problems with python 2.3's inspect.py.
        ##self.check_cache()
        # Drop topmost frames if requested
        try:
            # Try the default getinnerframes and Alex's: Alex's fixes some
            # problems, but it generates empty tracebacks for console errors
            # (5 blanks lines) where none should be returned.
            #records = inspect.getinnerframes(etb, context)[tb_offset:]
            #print 'python records:', records # dbg
            records = _fixed_getinnerframes(etb, context, tb_offset)
            #print 'alex   records:', records # dbg
        except:

            # FIXME: I've been getting many crash reports from python 2.3
            # users, traceable to inspect.py.  If I can find a small test-case
            # to reproduce this, I should either write a better workaround or
            # file a bug report against inspect (if that's the real problem).
            # So far, I haven't been able to find an isolated example to
            # reproduce the problem.
            inspect_error()
            traceback.print_exc(file=self.ostream)
            info('\nUnfortunately, your original traceback can not be constructed.\n')
            return ''

        # build some color string templates outside these nested loops
        tpl_link       = '%s%%s%s' % (Colors.filenameEm,ColorsNormal)
        tpl_call       = 'in %s%%s%s%%s%s' % (Colors.vName, Colors.valEm,
                                              ColorsNormal)
        tpl_call_fail  = 'in %s%%s%s(***failed resolving arguments***)%s' % \
                         (Colors.vName, Colors.valEm, ColorsNormal)
        tpl_local_var  = '%s%%s%s' % (Colors.vName, ColorsNormal)
        tpl_global_var = '%sglobal%s %s%%s%s' % (Colors.em, ColorsNormal,
                                                 Colors.vName, ColorsNormal)
        tpl_name_val   = '%%s %s= %%s%s' % (Colors.valEm, ColorsNormal)
        tpl_line       = '%s%%s%s %%s' % (Colors.lineno, ColorsNormal)
        tpl_line_em    = '%s%%s%s %%s%s' % (Colors.linenoEm,Colors.line,
                                            ColorsNormal)

        # now, loop over all records printing context and info
        abspath = os.path.abspath
        for frame, file, lnum, func, lines, index in records:
            #print '*** record:',file,lnum,func,lines,index  # dbg
            if not file:
                file = '?'
            elif not(file.startswith(str("<")) and file.endswith(str(">"))):
                # Guess that filenames like <string> aren't real filenames, so
                # don't call abspath on them.                    
                try:
                    file = abspath(file)
                except OSError:
                    # Not sure if this can still happen: abspath now works with
                    # file names like <string>
                    pass
            file = py3compat.cast_unicode(file, util_path.fs_encoding)
            link = tpl_link % file
            args, varargs, varkw, locals = inspect.getargvalues(frame)

            if func == '?':
                call = ''
            else:
                # Decide whether to include variable details or not
                var_repr = self.include_vars and eqrepr or nullrepr
                try:
                    call = tpl_call % (func,inspect.formatargvalues(args,
                                                varargs, varkw,
                                                locals,formatvalue=var_repr))
                except KeyError:
                    # This happens in situations like errors inside generator
                    # expressions, where local variables are listed in the
                    # line, but can't be extracted from the frame.  I'm not
                    # 100% sure this isn't actually a bug in inspect itself,
                    # but since there's no info for us to compute with, the
                    # best we can do is report the failure and move on.  Here
                    # we must *not* call any traceback construction again,
                    # because that would mess up use of %debug later on.  So we
                    # simply report the failure and move on.  The only
                    # limitation will be that this frame won't have locals
                    # listed in the call signature.  Quite subtle problem...
                    # I can't think of a good way to validate this in a unit
                    # test, but running a script consisting of:
                    #  dict( (k,v.strip()) for (k,v) in range(10) )
                    # will illustrate the error, if this exception catch is
                    # disabled.
                    call = tpl_call_fail % func
            
            # Don't attempt to tokenize binary files.
            if file.endswith(('.so', '.pyd', '.dll')):
                frames.append('%s %s\n' % (link,call))
                continue
            elif file.endswith(('.pyc','.pyo')):
                # Look up the corresponding source file.
                file = openpy.source_from_cache(file)

            def linereader(file=file, lnum=[lnum], getline=ulinecache.getline):
                line = getline(file, lnum[0])
                lnum[0] += 1
                return line

            # Build the list of names on this line of code where the exception
            # occurred.
            try:
                names = []
                name_cont = False
                
                for token_type, token, start, end, line in generate_tokens(linereader):
                    # build composite names
                    if token_type == tokenize.NAME and token not in keyword.kwlist:
                        if name_cont:
                            # Continuation of a dotted name
                            try:
                                names[-1].append(token)
                            except IndexError:
                                names.append([token])
                            name_cont = False
                        else:
                            # Regular new names.  We append everything, the caller
                            # will be responsible for pruning the list later.  It's
                            # very tricky to try to prune as we go, b/c composite
                            # names can fool us.  The pruning at the end is easy
                            # to do (or the caller can print a list with repeated
                            # names if so desired).
                            names.append([token])
                    elif token == '.':
                        name_cont = True
                    elif token_type == tokenize.NEWLINE:
                        break
                        
            except (IndexError, UnicodeDecodeError):
                # signals exit of tokenizer
                pass
            except tokenize.TokenError as msg:
                _m = ("An unexpected error occurred while tokenizing input\n"
                      "The following traceback may be corrupted or invalid\n"
                      "The error message is: %s\n" % msg)
                error(_m)

            # Join composite names (e.g. "dict.fromkeys")
            names = ['.'.join(n) for n in names]
            # prune names list of duplicates, but keep the right order
            unique_names = uniq_stable(names)

            # Start loop over vars
            lvals = []
            if self.include_vars:
                for name_full in unique_names:
                    name_base = name_full.split('.',1)[0]
                    if name_base in frame.f_code.co_varnames:
                        if name_base in locals:
                            try:
                                value = repr(eval(name_full,locals))
                            except:
                                value = undefined
                        else:
                            value = undefined
                        name = tpl_local_var % name_full
                    else:
                        if name_base in frame.f_globals:
                            try:
                                value = repr(eval(name_full,frame.f_globals))
                            except:
                                value = undefined
                        else:
                            value = undefined
                        name = tpl_global_var % name_full
                    lvals.append(tpl_name_val % (name,value))
            if lvals:
                lvals = '%s%s' % (indent,em_normal.join(lvals))
            else:
                lvals = ''

            level = '%s %s\n' % (link,call)

            if index is None:
                frames.append(level)
            else:
                frames.append('%s%s' % (level,''.join(
                    _format_traceback_lines(lnum,index,lines,Colors,lvals,
                                            col_scheme))))

        # Get (safely) a string form of the exception info
        try:
            etype_str,evalue_str = map(str,(etype,evalue))
        except:
            # User exception is improperly defined.
            etype,evalue = str,sys.exc_info()[:2]
            etype_str,evalue_str = map(str,(etype,evalue))
        # ... and format it
        exception = ['%s%s%s: %s' % (Colors.excName, etype_str,
                                     ColorsNormal, py3compat.cast_unicode(evalue_str))]
        if (not py3compat.PY3) and type(evalue) is types.InstanceType:
            try:
                names = [w for w in dir(evalue) if isinstance(w, basestring)]
            except:
                # Every now and then, an object with funny internals blows up
                # when dir() is called on it.  We do the best we can to report
                # the problem and continue
                _m = '%sException reporting error (object with broken dir())%s:'
                exception.append(_m % (Colors.excName,ColorsNormal))
                etype_str,evalue_str = map(str,sys.exc_info()[:2])
                exception.append('%s%s%s: %s' % (Colors.excName,etype_str,
                                     ColorsNormal, py3compat.cast_unicode(evalue_str)))
                names = []
            for name in names:
                value = text_repr(getattr(evalue, name))
                exception.append('\n%s%s = %s' % (indent, name, value))

        # vds: >>
        if records:
            filepath, lnum = records[-1][1:3]
            #print "file:", str(file), "linenb", str(lnum) # dbg
            filepath = os.path.abspath(filepath)
            ipinst = get_ipython()
            if ipinst is not None:
                ipinst.hooks.synchronize_with_editor(filepath, lnum, 0)
        # vds: <<

        # return all our info assembled as a single string
        # return '%s\n\n%s\n%s' % (head,'\n'.join(frames),''.join(exception[0]) )
        return [head] + frames + [''.join(exception[0])]
Example 12
    def structured_traceback(self, etype, evalue, etb, tb_offset=None,
                             context=5):
        """Return a nice text document describing the traceback."""

        tb_offset = self.tb_offset if tb_offset is None else tb_offset

        # some locals
        try:
            etype = etype.__name__
        except AttributeError:
            pass
        Colors        = self.Colors   # just a shorthand + quicker name lookup
        ColorsNormal  = Colors.Normal  # used a lot
        col_scheme    = self.color_scheme_table.active_scheme_name
        indent        = ' '*INDENT_SIZE
        em_normal     = '%s\n%s%s' % (Colors.valEm, indent,ColorsNormal)
        undefined     = '%sundefined%s' % (Colors.em, ColorsNormal)
        exc = '%s%s%s' % (Colors.excName,etype,ColorsNormal)

        # some internal-use functions
        def text_repr(value):
            """Hopefully pretty robust repr equivalent."""
            # this is pretty horrible but should always return *something*
            try:
                return pydoc.text.repr(value)
            except KeyboardInterrupt:
                raise
            except:
                try:
                    return repr(value)
                except KeyboardInterrupt:
                    raise
                except:
                    try:
                        # all still in an except block so we catch
                        # getattr raising
                        name = getattr(value, '__name__', None)
                        if name:
                            # ick, recursion
                            return text_repr(name)
                        klass = getattr(value, '__class__', None)
                        if klass:
                            return '%s instance' % text_repr(klass)
                    except KeyboardInterrupt:
                        raise
                    except:
                        return 'UNRECOVERABLE REPR FAILURE'
        def eqrepr(value, repr=text_repr): return '=%s' % repr(value)
        def nullrepr(value, repr=text_repr): return ''

        # meat of the code begins
        try:
            etype = etype.__name__
        except AttributeError:
            pass

        if self.long_header:
            # Header with the exception type, python version, and date
            pyver = 'Python ' + sys.version.split()[0] + ': ' + sys.executable
            date = time.ctime(time.time())

            head = '%s%s%s\n%s%s%s\n%s' % (Colors.topline, '-'*75, ColorsNormal,
                                           exc, ' '*(75-len(str(etype))-len(pyver)),
                                           pyver, date.rjust(75) )
            head += "\nA problem occured executing Python code.  Here is the sequence of function"\
                    "\ncalls leading up to the error, with the most recent (innermost) call last."
        else:
            # Simplified header
            head = '%s%s%s\n%s%s' % (Colors.topline, '-'*75, ColorsNormal,exc,
                                     'Traceback (most recent call last)'.\
                                                  rjust(75 - len(str(etype)) ) )
        frames = []
        # Flush cache before calling inspect.  This helps alleviate some of the
        # problems with python 2.3's inspect.py.
        ##self.check_cache()
        # Drop topmost frames if requested
        try:
            # Try the default getinnerframes and Alex's: Alex's fixes some
            # problems, but it generates empty tracebacks for console errors
            # (5 blanks lines) where none should be returned.
            #records = inspect.getinnerframes(etb, context)[tb_offset:]
            #print 'python records:', records # dbg
            records = _fixed_getinnerframes(etb, context, tb_offset)
            #print 'alex   records:', records # dbg
        except:

            # FIXME: I've been getting many crash reports from python 2.3
            # users, traceable to inspect.py.  If I can find a small test-case
            # to reproduce this, I should either write a better workaround or
            # file a bug report against inspect (if that's the real problem).
            # So far, I haven't been able to find an isolated example to
            # reproduce the problem.
            inspect_error()
            traceback.print_exc(file=self.ostream)
            info('\nUnfortunately, your original traceback can not be constructed.\n')
            return ''

        # build some color string templates outside these nested loops
        tpl_link       = '%s%%s%s' % (Colors.filenameEm,ColorsNormal)
        tpl_call       = 'in %s%%s%s%%s%s' % (Colors.vName, Colors.valEm,
                                              ColorsNormal)
        tpl_call_fail  = 'in %s%%s%s(***failed resolving arguments***)%s' % \
                         (Colors.vName, Colors.valEm, ColorsNormal)
        tpl_local_var  = '%s%%s%s' % (Colors.vName, ColorsNormal)
        tpl_global_var = '%sglobal%s %s%%s%s' % (Colors.em, ColorsNormal,
                                                 Colors.vName, ColorsNormal)
        tpl_name_val   = '%%s %s= %%s%s' % (Colors.valEm, ColorsNormal)
        tpl_line       = '%s%%s%s %%s' % (Colors.lineno, ColorsNormal)
        tpl_line_em    = '%s%%s%s %%s%s' % (Colors.linenoEm,Colors.line,
                                            ColorsNormal)

        # now, loop over all records printing context and info
        abspath = os.path.abspath
        for frame, file, lnum, func, lines, index in records:
            #print '*** record:',file,lnum,func,lines,index  # dbg
            if not file:
                file = '?'
            elif not(file.startswith(str("<")) and file.endswith(str(">"))):
                # Guess that filenames like <string> aren't real filenames, so
                # don't call abspath on them.                    
                try:
                    file = abspath(file)
                except OSError:
                    # Not sure if this can still happen: abspath now works with
                    # file names like <string>
                    pass
            file = py3compat.cast_unicode(file, util_path.fs_encoding)
            link = tpl_link % file
            args, varargs, varkw, locals = inspect.getargvalues(frame)

            if func == '?':
                call = ''
            else:
                # Decide whether to include variable details or not
                var_repr = self.include_vars and eqrepr or nullrepr
                try:
                    call = tpl_call % (func,inspect.formatargvalues(args,
                                                varargs, varkw,
                                                locals,formatvalue=var_repr))
                except KeyError:
                    # This happens in situations like errors inside generator
                    # expressions, where local variables are listed in the
                    # line, but can't be extracted from the frame.  I'm not
                    # 100% sure this isn't actually a bug in inspect itself,
                    # but since there's no info for us to compute with, the
                    # best we can do is report the failure and move on.  Here
                    # we must *not* call any traceback construction again,
                    # because that would mess up use of %debug later on.  So we
                    # simply report the failure and move on.  The only
                    # limitation will be that this frame won't have locals
                    # listed in the call signature.  Quite subtle problem...
                    # I can't think of a good way to validate this in a unit
                    # test, but running a script consisting of:
                    #  dict( (k,v.strip()) for (k,v) in range(10) )
                    # will illustrate the error, if this exception catch is
                    # disabled.
                    call = tpl_call_fail % func
            
            # Don't attempt to tokenize binary files.
            if file.endswith(('.so', '.pyd', '.dll')):
                frames.append('%s %s\n' % (link,call))
                continue
            elif file.endswith(('.pyc','.pyo')):
                # Look up the corresponding source file.
                file = pyfile.source_from_cache(file)

            def linereader(file=file, lnum=[lnum], getline=ulinecache.getline):
                line = getline(file, lnum[0])
                lnum[0] += 1
                return line

            # Build the list of names on this line of code where the exception
            # occurred.
            try:
                names = []
                name_cont = False
                
                for token_type, token, start, end, line in generate_tokens(linereader):
                    # build composite names
                    if token_type == tokenize.NAME and token not in keyword.kwlist:
                        if name_cont:
                            # Continuation of a dotted name
                            try:
                                names[-1].append(token)
                            except IndexError:
                                names.append([token])
                            name_cont = False
                        else:
                            # Regular new names.  We append everything, the caller
                            # will be responsible for pruning the list later.  It's
                            # very tricky to try to prune as we go, b/c composite
                            # names can fool us.  The pruning at the end is easy
                            # to do (or the caller can print a list with repeated
                            # names if so desired).
                            names.append([token])
                    elif token == '.':
                        name_cont = True
                    elif token_type == tokenize.NEWLINE:
                        break
                        
            except (IndexError, UnicodeDecodeError):
                # signals exit of tokenizer
                pass
            except tokenize.TokenError as msg:
                _m = ("An unexpected error occurred while tokenizing input\n"
                      "The following traceback may be corrupted or invalid\n"
                      "The error message is: %s\n" % msg)
                error(_m)

            # Join composite names (e.g. "dict.fromkeys")
            names = ['.'.join(n) for n in names]
            # prune names list of duplicates, but keep the right order
            unique_names = uniq_stable(names)

            # Start loop over vars
            lvals = []
            if self.include_vars:
                for name_full in unique_names:
                    name_base = name_full.split('.',1)[0]
                    if name_base in frame.f_code.co_varnames:
                        if name_base in locals:
                            try:
                                value = repr(eval(name_full,locals))
                            except:
                                value = undefined
                        else:
                            value = undefined
                        name = tpl_local_var % name_full
                    else:
                        if name_base in frame.f_globals:
                            try:
                                value = repr(eval(name_full,frame.f_globals))
                            except:
                                value = undefined
                        else:
                            value = undefined
                        name = tpl_global_var % name_full
                    lvals.append(tpl_name_val % (name,value))
            if lvals:
                lvals = '%s%s' % (indent,em_normal.join(lvals))
            else:
                lvals = ''

            level = '%s %s\n' % (link,call)

            if index is None:
                frames.append(level)
            else:
                frames.append('%s%s' % (level,''.join(
                    _format_traceback_lines(lnum,index,lines,Colors,lvals,
                                            col_scheme))))

        # Get (safely) a string form of the exception info
        try:
            etype_str,evalue_str = map(str,(etype,evalue))
        except:
            # User exception is improperly defined.
            etype,evalue = str,sys.exc_info()[:2]
            etype_str,evalue_str = map(str,(etype,evalue))
        # ... and format it
        exception = ['%s%s%s: %s' % (Colors.excName, etype_str,
                                     ColorsNormal, py3compat.cast_unicode(evalue_str))]
        if (not py3compat.PY3) and type(evalue) is types.InstanceType:
            try:
                names = [w for w in dir(evalue) if isinstance(w, basestring)]
            except:
                # Every now and then, an object with funny internals blows up
                # when dir() is called on it.  We do the best we can to report
                # the problem and continue
                _m = '%sException reporting error (object with broken dir())%s:'
                exception.append(_m % (Colors.excName,ColorsNormal))
                etype_str,evalue_str = map(str,sys.exc_info()[:2])
                exception.append('%s%s%s: %s' % (Colors.excName,etype_str,
                                     ColorsNormal, py3compat.cast_unicode(evalue_str)))
                names = []
            for name in names:
                value = text_repr(getattr(evalue, name))
                exception.append('\n%s%s = %s' % (indent, name, value))

        # vds: >>
        if records:
            filepath, lnum = records[-1][1:3]
            #print "file:", str(file), "linenb", str(lnum) # dbg
            filepath = os.path.abspath(filepath)
            ipinst = ipapi.get()
            if ipinst is not None:
                ipinst.hooks.synchronize_with_editor(filepath, lnum, 0)
        # vds: <<

        # return all our info assembled as a single string
        # return '%s\n\n%s\n%s' % (head,'\n'.join(frames),''.join(exception[0]) )
        return [head] + frames + [''.join(exception[0])]
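
For context, a formatter carrying this structured_traceback is normally driven through IPython's VerboseTB. A hedged usage sketch for the IPython versions these snippets appear to come from (the color scheme and call pattern are assumptions, not the only way to invoke it):

import sys
from IPython.core.ultratb import VerboseTB

try:
    1 / 0
except ZeroDivisionError:
    etype, evalue, etb = sys.exc_info()
    tb = VerboseTB(color_scheme='NoColor')
    # structured_traceback returns a list of strings: the header, one entry
    # per traceback frame, and the formatted exception line.
    print('\n'.join(tb.structured_traceback(etype, evalue, etb)))
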
Example 13
class APITest(NotebookTestBase):
    """Test the kernels web service API"""
    dirs_nbs = [('', 'inroot'),
                ('Directory with spaces in', 'inspace'),
                (u'unicodé', 'innonascii'),
                ('foo', 'a'),
                ('foo', 'b'),
                ('foo', 'name with spaces'),
                ('foo', u'unicodé'),
                ('foo/bar', 'baz'),
                (u'å b', u'ç d')
               ]

    dirs = uniq_stable([d for (d,n) in dirs_nbs])
    del dirs[0]  # remove ''

    def setUp(self):
        nbdir = self.notebook_dir.name

        for d in self.dirs:
            d = d.replace('/', os.sep)
            if not os.path.isdir(pjoin(nbdir, d)):
                os.mkdir(pjoin(nbdir, d))

        for d, name in self.dirs_nbs:
            d = d.replace('/', os.sep)
            with io.open(pjoin(nbdir, d, '%s.ipynb' % name), 'w',
                         encoding='utf-8') as f:
                nb = new_notebook(name=name)
                write(nb, f, format='ipynb')

        self.nb_api = NBAPI(self.base_url())

    def tearDown(self):
        nbdir = self.notebook_dir.name

        for dname in ['foo', 'Directory with spaces in', u'unicodé', u'å b']:
            shutil.rmtree(pjoin(nbdir, dname), ignore_errors=True)

        if os.path.isfile(pjoin(nbdir, 'inroot.ipynb')):
            os.unlink(pjoin(nbdir, 'inroot.ipynb'))

    def test_list_notebooks(self):
        nbs = self.nb_api.list().json()
        self.assertEqual(len(nbs), 1)
        self.assertEqual(nbs[0]['name'], 'inroot.ipynb')

        nbs = self.nb_api.list('/Directory with spaces in/').json()
        self.assertEqual(len(nbs), 1)
        self.assertEqual(nbs[0]['name'], 'inspace.ipynb')

        nbs = self.nb_api.list(u'/unicodé/').json()
        self.assertEqual(len(nbs), 1)
        self.assertEqual(nbs[0]['name'], 'innonascii.ipynb')
        self.assertEqual(nbs[0]['path'], u'unicodé')

        nbs = self.nb_api.list('/foo/bar/').json()
        self.assertEqual(len(nbs), 1)
        self.assertEqual(nbs[0]['name'], 'baz.ipynb')
        self.assertEqual(nbs[0]['path'], 'foo/bar')

        nbs = self.nb_api.list('foo').json()
        self.assertEqual(len(nbs), 4)
        nbnames = { normalize('NFC', n['name']) for n in nbs }
        expected = [ u'a.ipynb', u'b.ipynb', u'name with spaces.ipynb', u'unicodé.ipynb']
        expected = { normalize('NFC', name) for name in expected }
        self.assertEqual(nbnames, expected)

    def test_list_nonexistant_dir(self):
        with assert_http_error(404):
            self.nb_api.list('nonexistant')

    def test_get_contents(self):
        for d, name in self.dirs_nbs:
            nb = self.nb_api.read('%s.ipynb' % name, d+'/').json()
            self.assertEqual(nb['name'], u'%s.ipynb' % name)
            self.assertIn('content', nb)
            self.assertIn('metadata', nb['content'])
            self.assertIsInstance(nb['content']['metadata'], dict)

        # Name that doesn't exist - should be a 404
        with assert_http_error(404):
            self.nb_api.read('q.ipynb', 'foo')

    def _check_nb_created(self, resp, name, path):
        self.assertEqual(resp.status_code, 201)
        location_header = py3compat.str_to_unicode(resp.headers['Location'])
        self.assertEqual(location_header, url_escape(url_path_join(u'/api/notebooks', path, name)))
        self.assertEqual(resp.json()['name'], name)
        assert os.path.isfile(pjoin(
            self.notebook_dir.name,
            path.replace('/', os.sep),
            name,
        ))

    def test_create_untitled(self):
        resp = self.nb_api.create_untitled(path=u'å b')
        self._check_nb_created(resp, 'Untitled0.ipynb', u'å b')

        # Second time
        resp = self.nb_api.create_untitled(path=u'å b')
        self._check_nb_created(resp, 'Untitled1.ipynb', u'å b')

        # And two directories down
        resp = self.nb_api.create_untitled(path='foo/bar')
        self._check_nb_created(resp, 'Untitled0.ipynb', 'foo/bar')

    def test_upload_untitled(self):
        nb = new_notebook(name='Upload test')
        nbmodel = {'content': nb}
        resp = self.nb_api.upload_untitled(path=u'å b',
                                              body=json.dumps(nbmodel))
        self._check_nb_created(resp, 'Untitled0.ipynb', u'å b')

    def test_upload(self):
        nb = new_notebook(name=u'ignored')
        nbmodel = {'content': nb}
        resp = self.nb_api.upload(u'Upload tést.ipynb', path=u'å b',
                                              body=json.dumps(nbmodel))
        self._check_nb_created(resp, u'Upload tést.ipynb', u'å b')

    def test_upload_v2(self):
        nb = v2.new_notebook()
        ws = v2.new_worksheet()
        nb.worksheets.append(ws)
        ws.cells.append(v2.new_code_cell(input='print("hi")'))
        nbmodel = {'content': nb}
        resp = self.nb_api.upload(u'Upload tést.ipynb', path=u'å b',
                                              body=json.dumps(nbmodel))
        self._check_nb_created(resp, u'Upload tést.ipynb', u'å b')
        resp = self.nb_api.read(u'Upload tést.ipynb', u'å b')
        data = resp.json()
        self.assertEqual(data['content']['nbformat'], current.nbformat)
        self.assertEqual(data['content']['orig_nbformat'], 2)

    def test_copy_untitled(self):
        resp = self.nb_api.copy_untitled(u'ç d.ipynb', path=u'å b')
        self._check_nb_created(resp, u'ç d-Copy0.ipynb', u'å b')

    def test_copy(self):
        resp = self.nb_api.copy(u'ç d.ipynb', u'cøpy.ipynb', path=u'å b')
        self._check_nb_created(resp, u'cøpy.ipynb', u'å b')

    def test_delete(self):
        for d, name in self.dirs_nbs:
            resp = self.nb_api.delete('%s.ipynb' % name, d)
            self.assertEqual(resp.status_code, 204)

        for d in self.dirs + ['/']:
            nbs = self.nb_api.list(d).json()
            self.assertEqual(len(nbs), 0)

    def test_rename(self):
        resp = self.nb_api.rename('a.ipynb', 'foo', 'z.ipynb')
        self.assertEqual(resp.headers['Location'].split('/')[-1], 'z.ipynb')
        self.assertEqual(resp.json()['name'], 'z.ipynb')
        assert os.path.isfile(pjoin(self.notebook_dir.name, 'foo', 'z.ipynb'))

        nbs = self.nb_api.list('foo').json()
        nbnames = set(n['name'] for n in nbs)
        self.assertIn('z.ipynb', nbnames)
        self.assertNotIn('a.ipynb', nbnames)

    def test_rename_existing(self):
        with assert_http_error(409):
            self.nb_api.rename('a.ipynb', 'foo', 'b.ipynb')

    def test_save(self):
        resp = self.nb_api.read('a.ipynb', 'foo')
        nbcontent = json.loads(resp.text)['content']
        nb = to_notebook_json(nbcontent)
        ws = new_worksheet()
        nb.worksheets = [ws]
        ws.cells.append(new_heading_cell(u'Created by test ³'))

        nbmodel = {'name': 'a.ipynb', 'path': 'foo', 'content': nb}
        resp = self.nb_api.save('a.ipynb', path='foo', body=json.dumps(nbmodel))

        nbfile = pjoin(self.notebook_dir.name, 'foo', 'a.ipynb')
        with io.open(nbfile, 'r', encoding='utf-8') as f:
            newnb = read(f, format='ipynb')
        self.assertEqual(newnb.worksheets[0].cells[0].source,
                         u'Created by test ³')
        nbcontent = self.nb_api.read('a.ipynb', 'foo').json()['content']
        newnb = to_notebook_json(nbcontent)
        self.assertEqual(newnb.worksheets[0].cells[0].source,
                         u'Created by test ³')

        # Save and rename
        nbmodel = {'name': 'a2.ipynb', 'path': 'foo/bar', 'content': nb}
        resp = self.nb_api.save('a.ipynb', path='foo', body=json.dumps(nbmodel))
        saved = resp.json()
        self.assertEqual(saved['name'], 'a2.ipynb')
        self.assertEqual(saved['path'], 'foo/bar')
        assert os.path.isfile(pjoin(self.notebook_dir.name, 'foo', 'bar', 'a2.ipynb'))
        assert not os.path.isfile(pjoin(self.notebook_dir.name, 'foo', 'a.ipynb'))
        with assert_http_error(404):
            self.nb_api.read('a.ipynb', 'foo')

    def test_checkpoints(self):
        resp = self.nb_api.read('a.ipynb', 'foo')
        r = self.nb_api.new_checkpoint('a.ipynb', 'foo')
        self.assertEqual(r.status_code, 201)
        cp1 = r.json()
        self.assertEqual(set(cp1), {'id', 'last_modified'})
        self.assertEqual(r.headers['Location'].split('/')[-1], cp1['id'])

        # Modify it
        nbcontent = json.loads(resp.text)['content']
        nb = to_notebook_json(nbcontent)
        ws = new_worksheet()
        nb.worksheets = [ws]
        hcell = new_heading_cell('Created by test')
        ws.cells.append(hcell)
        # Save
        nbmodel = {'name': 'a.ipynb', 'path': 'foo', 'content': nb}
        resp = self.nb_api.save('a.ipynb', path='foo', body=json.dumps(nbmodel))

        # List checkpoints
        cps = self.nb_api.get_checkpoints('a.ipynb', 'foo').json()
        self.assertEqual(cps, [cp1])

        nbcontent = self.nb_api.read('a.ipynb', 'foo').json()['content']
        nb = to_notebook_json(nbcontent)
        self.assertEqual(nb.worksheets[0].cells[0].source, 'Created by test')

        # Restore cp1
        r = self.nb_api.restore_checkpoint('a.ipynb', 'foo', cp1['id'])
        self.assertEqual(r.status_code, 204)
        nbcontent = self.nb_api.read('a.ipynb', 'foo').json()['content']
        nb = to_notebook_json(nbcontent)
        self.assertEqual(nb.worksheets, [])

        # Delete cp1
        r = self.nb_api.delete_checkpoint('a.ipynb', 'foo', cp1['id'])
        self.assertEqual(r.status_code, 204)
        cps = self.nb_api.get_checkpoints('a.ipynb', 'foo').json()
        self.assertEqual(cps, [])
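
Note: the tests above rely on an assert_http_error context manager imported from the project's test utilities. The sketch below shows one minimal way such a helper can be built on top of the requests library; it is an illustrative assumption, not necessarily the project's exact implementation.

from contextlib import contextmanager

import requests


@contextmanager
def assert_http_error(status, msg=None):
    """Illustrative sketch: assert the wrapped block raises requests.HTTPError with `status`."""
    try:
        yield
    except requests.HTTPError as e:
        real_status = e.response.status_code
        assert real_status == status, \
            "Expected HTTP status %d, got %d" % (status, real_status)
        if msg:
            # optionally check that the error message mentions `msg`
            assert msg in str(e), e
    else:
        assert False, "Expected an HTTP error with status %d" % status
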
Example no. 14
0
class APITest(NotebookTestBase):
    """Test the kernels web service API"""
    dirs_nbs = [('', 'inroot'),
                ('Directory with spaces in', 'inspace'),
                (u'unicodé', 'innonascii'),
                ('foo', 'a'),
                ('foo', 'b'),
                ('foo', 'name with spaces'),
                ('foo', u'unicodé'),
                ('foo/bar', 'baz'),
                ('ordering', 'A'),
                ('ordering', 'b'),
                ('ordering', 'C'),
                (u'å b', u'ç d'),
               ]
    hidden_dirs = ['.hidden', '__pycache__']

    # Don't include root dir.
    dirs = uniq_stable([py3compat.cast_unicode(d) for (d,n) in dirs_nbs[1:]])
    top_level_dirs = {normalize('NFC', d.split('/')[0]) for d in dirs}

    @staticmethod
    def _blob_for_name(name):
        return name.encode('utf-8') + b'\xFF'

    @staticmethod
    def _txt_for_name(name):
        return u'%s text file' % name
    
    def to_os_path(self, api_path):
        return to_os_path(api_path, root=self.notebook_dir.name)
    
    def make_dir(self, api_path):
        """Create a directory at api_path"""
        os_path = self.to_os_path(api_path)
        try:
            os.makedirs(os_path)
        except OSError:
            print("Directory already exists: %r" % os_path)

    def make_txt(self, api_path, txt):
        """Make a text file at a given api_path"""
        os_path = self.to_os_path(api_path)
        with io.open(os_path, 'w', encoding='utf-8') as f:
            f.write(txt)
    
    def make_blob(self, api_path, blob):
        """Make a binary file at a given api_path"""
        os_path = self.to_os_path(api_path)
        with io.open(os_path, 'wb') as f:
            f.write(blob)
    
    def make_nb(self, api_path, nb):
        """Make a notebook file at a given api_path"""
        os_path = self.to_os_path(api_path)
        
        with io.open(os_path, 'w', encoding='utf-8') as f:
            write(nb, f, version=4)

    def delete_dir(self, api_path):
        """Delete a directory at api_path, removing any contents."""
        os_path = self.to_os_path(api_path)
        shutil.rmtree(os_path, ignore_errors=True)

    def delete_file(self, api_path):
        """Delete a file at the given path if it exists."""
        if self.isfile(api_path):
            os.unlink(self.to_os_path(api_path))
    
    def isfile(self, api_path):
        return os.path.isfile(self.to_os_path(api_path))
    
    def isdir(self, api_path):
        return os.path.isdir(self.to_os_path(api_path))
    
    def setUp(self):

        for d in (self.dirs + self.hidden_dirs):
            self.make_dir(d)

        for d, name in self.dirs_nbs:
            # create a notebook
            nb = new_notebook()
            self.make_nb(u'{}/{}.ipynb'.format(d, name), nb)
            
            # create a text file
            txt = self._txt_for_name(name)
            self.make_txt(u'{}/{}.txt'.format(d, name), txt)
            
            # create a binary file
            blob = self._blob_for_name(name)
            self.make_blob(u'{}/{}.blob'.format(d, name), blob)

        self.api = API(self.base_url())

    def tearDown(self):
        for dname in (list(self.top_level_dirs) + self.hidden_dirs):
            self.delete_dir(dname)
        self.delete_file('inroot.ipynb')

    def test_list_notebooks(self):
        nbs = notebooks_only(self.api.list().json())
        self.assertEqual(len(nbs), 1)
        self.assertEqual(nbs[0]['name'], 'inroot.ipynb')

        nbs = notebooks_only(self.api.list('/Directory with spaces in/').json())
        self.assertEqual(len(nbs), 1)
        self.assertEqual(nbs[0]['name'], 'inspace.ipynb')

        nbs = notebooks_only(self.api.list(u'/unicodé/').json())
        self.assertEqual(len(nbs), 1)
        self.assertEqual(nbs[0]['name'], 'innonascii.ipynb')
        self.assertEqual(nbs[0]['path'], u'unicodé/innonascii.ipynb')

        nbs = notebooks_only(self.api.list('/foo/bar/').json())
        self.assertEqual(len(nbs), 1)
        self.assertEqual(nbs[0]['name'], 'baz.ipynb')
        self.assertEqual(nbs[0]['path'], 'foo/bar/baz.ipynb')

        nbs = notebooks_only(self.api.list('foo').json())
        self.assertEqual(len(nbs), 4)
        nbnames = { normalize('NFC', n['name']) for n in nbs }
        expected = [ u'a.ipynb', u'b.ipynb', u'name with spaces.ipynb', u'unicodé.ipynb']
        expected = { normalize('NFC', name) for name in expected }
        self.assertEqual(nbnames, expected)

        nbs = notebooks_only(self.api.list('ordering').json())
        nbnames = [n['name'] for n in nbs]
        expected = ['A.ipynb', 'b.ipynb', 'C.ipynb']
        self.assertEqual(nbnames, expected)

    def test_list_dirs(self):
        dirs = dirs_only(self.api.list().json())
        dir_names = {normalize('NFC', d['name']) for d in dirs}
        self.assertEqual(dir_names, self.top_level_dirs)  # Excluding hidden dirs

    def test_list_nonexistant_dir(self):
        with assert_http_error(404):
            self.api.list('nonexistant')

    def test_get_nb_contents(self):
        for d, name in self.dirs_nbs:
            path = url_path_join(d, name + '.ipynb')
            nb = self.api.read(path).json()
            self.assertEqual(nb['name'], u'%s.ipynb' % name)
            self.assertEqual(nb['path'], path)
            self.assertEqual(nb['type'], 'notebook')
            self.assertIn('content', nb)
            self.assertEqual(nb['format'], 'json')
            self.assertIn('content', nb)
            self.assertIn('metadata', nb['content'])
            self.assertIsInstance(nb['content']['metadata'], dict)

    def test_get_contents_no_such_file(self):
        # Name that doesn't exist - should be a 404
        with assert_http_error(404):
            self.api.read('foo/q.ipynb')

    def test_get_text_file_contents(self):
        for d, name in self.dirs_nbs:
            path = url_path_join(d, name + '.txt')
            model = self.api.read(path).json()
            self.assertEqual(model['name'], u'%s.txt' % name)
            self.assertEqual(model['path'], path)
            self.assertIn('content', model)
            self.assertEqual(model['format'], 'text')
            self.assertEqual(model['type'], 'file')
            self.assertEqual(model['content'], self._txt_for_name(name))

        # Name that doesn't exist - should be a 404
        with assert_http_error(404):
            self.api.read('foo/q.txt')

        # Specifying format=text should fail on a non-UTF-8 file
        with assert_http_error(400):
            self.api.read('foo/bar/baz.blob', type='file', format='text')

    def test_get_binary_file_contents(self):
        for d, name in self.dirs_nbs:
            path = url_path_join(d, name + '.blob')
            model = self.api.read(path).json()
            self.assertEqual(model['name'], u'%s.blob' % name)
            self.assertEqual(model['path'], path)
            self.assertIn('content', model)
            self.assertEqual(model['format'], 'base64')
            self.assertEqual(model['type'], 'file')
            self.assertEqual(
                base64.decodestring(model['content'].encode('ascii')),
                self._blob_for_name(name),
            )

        # Name that doesn't exist - should be a 404
        with assert_http_error(404):
            self.api.read('foo/q.txt')

    def test_get_bad_type(self):
        with assert_http_error(400):
            self.api.read(u'unicodé', type='file')  # this is a directory

        with assert_http_error(400):
            self.api.read(u'unicodé/innonascii.ipynb', type='directory')

    def _check_created(self, resp, path, type='notebook'):
        self.assertEqual(resp.status_code, 201)
        location_header = py3compat.str_to_unicode(resp.headers['Location'])
        self.assertEqual(location_header, url_escape(url_path_join(u'/api/contents', path)))
        rjson = resp.json()
        self.assertEqual(rjson['name'], path.rsplit('/', 1)[-1])
        self.assertEqual(rjson['path'], path)
        self.assertEqual(rjson['type'], type)
        isright = self.isdir if type == 'directory' else self.isfile
        assert isright(path)

    def test_create_untitled(self):
        resp = self.api.create_untitled(path=u'å b')
        self._check_created(resp, u'å b/Untitled.ipynb')

        # Second time
        resp = self.api.create_untitled(path=u'å b')
        self._check_created(resp, u'å b/Untitled1.ipynb')

        # And two directories down
        resp = self.api.create_untitled(path='foo/bar')
        self._check_created(resp, 'foo/bar/Untitled.ipynb')

    def test_create_untitled_txt(self):
        resp = self.api.create_untitled(path='foo/bar', ext='.txt')
        self._check_created(resp, 'foo/bar/untitled.txt', type='file')

        resp = self.api.read(path='foo/bar/untitled.txt')
        model = resp.json()
        self.assertEqual(model['type'], 'file')
        self.assertEqual(model['format'], 'text')
        self.assertEqual(model['content'], '')

    def test_upload(self):
        nb = new_notebook()
        nbmodel = {'content': nb, 'type': 'notebook'}
        path = u'å b/Upload tést.ipynb'
        resp = self.api.upload(path, body=json.dumps(nbmodel))
        self._check_created(resp, path)

    def test_mkdir_untitled(self):
        resp = self.api.mkdir_untitled(path=u'å b')
        self._check_created(resp, u'å b/Untitled Folder', type='directory')

        # Second time
        resp = self.api.mkdir_untitled(path=u'å b')
        self._check_created(resp, u'å b/Untitled Folder 1', type='directory')

        # And two directories down
        resp = self.api.mkdir_untitled(path='foo/bar')
        self._check_created(resp, 'foo/bar/Untitled Folder', type='directory')

    def test_mkdir(self):
        path = u'å b/New ∂ir'
        resp = self.api.mkdir(path)
        self._check_created(resp, path, type='directory')

    def test_mkdir_hidden_400(self):
        with assert_http_error(400):
            resp = self.api.mkdir(u'å b/.hidden')

    def test_upload_txt(self):
        body = u'ünicode téxt'
        model = {
            'content' : body,
            'format'  : 'text',
            'type'    : 'file',
        }
        path = u'å b/Upload tést.txt'
        resp = self.api.upload(path, body=json.dumps(model))

        # check roundtrip
        resp = self.api.read(path)
        model = resp.json()
        self.assertEqual(model['type'], 'file')
        self.assertEqual(model['format'], 'text')
        self.assertEqual(model['content'], body)

    def test_upload_b64(self):
        body = b'\xFFblob'
        b64body = base64.encodestring(body).decode('ascii')
        model = {
            'content' : b64body,
            'format'  : 'base64',
            'type'    : 'file',
        }
        path = u'å b/Upload tést.blob'
        resp = self.api.upload(path, body=json.dumps(model))

        # check roundtrip
        resp = self.api.read(path)
        model = resp.json()
        self.assertEqual(model['type'], 'file')
        self.assertEqual(model['path'], path)
        self.assertEqual(model['format'], 'base64')
        decoded = base64.decodestring(model['content'].encode('ascii'))
        self.assertEqual(decoded, body)

    def test_upload_v2(self):
        nb = v2.new_notebook()
        ws = v2.new_worksheet()
        nb.worksheets.append(ws)
        ws.cells.append(v2.new_code_cell(input='print("hi")'))
        nbmodel = {'content': nb, 'type': 'notebook'}
        path = u'å b/Upload tést.ipynb'
        resp = self.api.upload(path, body=json.dumps(nbmodel))
        self._check_created(resp, path)
        resp = self.api.read(path)
        data = resp.json()
        self.assertEqual(data['content']['nbformat'], 4)

    def test_copy(self):
        resp = self.api.copy(u'å b/ç d.ipynb', u'å b')
        self._check_created(resp, u'å b/ç d-Copy1.ipynb')
        
        resp = self.api.copy(u'å b/ç d.ipynb', u'å b')
        self._check_created(resp, u'å b/ç d-Copy2.ipynb')
    
    def test_copy_copy(self):
        resp = self.api.copy(u'å b/ç d.ipynb', u'å b')
        self._check_created(resp, u'å b/ç d-Copy1.ipynb')
        
        resp = self.api.copy(u'å b/ç d-Copy1.ipynb', u'å b')
        self._check_created(resp, u'å b/ç d-Copy2.ipynb')
    
    def test_copy_path(self):
        resp = self.api.copy(u'foo/a.ipynb', u'å b')
        self._check_created(resp, u'å b/a.ipynb')
        
        resp = self.api.copy(u'foo/a.ipynb', u'å b')
        self._check_created(resp, u'å b/a-Copy1.ipynb')

    def test_copy_put_400(self):
        with assert_http_error(400):
            resp = self.api.copy_put(u'å b/ç d.ipynb', u'å b/cøpy.ipynb')

    def test_copy_dir_400(self):
        # can't copy directories
        with assert_http_error(400):
            resp = self.api.copy(u'å b', u'foo')

    def test_delete(self):
        for d, name in self.dirs_nbs:
            print('%r, %r' % (d, name))
            resp = self.api.delete(url_path_join(d, name + '.ipynb'))
            self.assertEqual(resp.status_code, 204)

        for d in self.dirs + ['/']:
            nbs = notebooks_only(self.api.list(d).json())
            print('------')
            print(d)
            print(nbs)
            self.assertEqual(nbs, [])

    def test_delete_dirs(self):
        # depth-first delete everything, so we never try to delete a non-empty directory
        for name in sorted(self.dirs + ['/'], key=len, reverse=True):
            listing = self.api.list(name).json()['content']
            for model in listing:
                self.api.delete(model['path'])
        listing = self.api.list('/').json()['content']
        self.assertEqual(listing, [])

    def test_delete_non_empty_dir(self):
        """delete non-empty dir raises 400"""
        with assert_http_error(400):
            self.api.delete(u'å b')

    def test_rename(self):
        resp = self.api.rename('foo/a.ipynb', 'foo/z.ipynb')
        self.assertEqual(resp.headers['Location'].split('/')[-1], 'z.ipynb')
        self.assertEqual(resp.json()['name'], 'z.ipynb')
        self.assertEqual(resp.json()['path'], 'foo/z.ipynb')
        assert self.isfile('foo/z.ipynb')

        nbs = notebooks_only(self.api.list('foo').json())
        nbnames = set(n['name'] for n in nbs)
        self.assertIn('z.ipynb', nbnames)
        self.assertNotIn('a.ipynb', nbnames)

    def test_rename_existing(self):
        with assert_http_error(409):
            self.api.rename('foo/a.ipynb', 'foo/b.ipynb')

    def test_save(self):
        resp = self.api.read('foo/a.ipynb')
        nbcontent = json.loads(resp.text)['content']
        nb = from_dict(nbcontent)
        nb.cells.append(new_markdown_cell(u'Created by test ³'))

        nbmodel = {'content': nb, 'type': 'notebook'}
        resp = self.api.save('foo/a.ipynb', body=json.dumps(nbmodel))

        nbcontent = self.api.read('foo/a.ipynb').json()['content']
        newnb = from_dict(nbcontent)
        self.assertEqual(newnb.cells[0].source,
                         u'Created by test ³')

    def test_checkpoints(self):
        resp = self.api.read('foo/a.ipynb')
        r = self.api.new_checkpoint('foo/a.ipynb')
        self.assertEqual(r.status_code, 201)
        cp1 = r.json()
        self.assertEqual(set(cp1), {'id', 'last_modified'})
        self.assertEqual(r.headers['Location'].split('/')[-1], cp1['id'])

        # Modify it
        nbcontent = json.loads(resp.text)['content']
        nb = from_dict(nbcontent)
        hcell = new_markdown_cell('Created by test')
        nb.cells.append(hcell)
        # Save
        nbmodel = {'content': nb, 'type': 'notebook'}
        resp = self.api.save('foo/a.ipynb', body=json.dumps(nbmodel))

        # List checkpoints
        cps = self.api.get_checkpoints('foo/a.ipynb').json()
        self.assertEqual(cps, [cp1])

        nbcontent = self.api.read('foo/a.ipynb').json()['content']
        nb = from_dict(nbcontent)
        self.assertEqual(nb.cells[0].source, 'Created by test')

        # Restore cp1
        r = self.api.restore_checkpoint('foo/a.ipynb', cp1['id'])
        self.assertEqual(r.status_code, 204)
        nbcontent = self.api.read('foo/a.ipynb').json()['content']
        nb = from_dict(nbcontent)
        self.assertEqual(nb.cells, [])

        # Delete cp1
        r = self.api.delete_checkpoint('foo/a.ipynb', cp1['id'])
        self.assertEqual(r.status_code, 204)
        cps = self.api.get_checkpoints('foo/a.ipynb').json()
        self.assertEqual(cps, [])
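
Note: notebooks_only and dirs_only, used throughout this example, filter the 'content' list of the directory model returned by /api/contents by model type. A minimal sketch of what such helpers look like (an assumption about their behaviour, not necessarily the project's exact code):

def notebooks_only(dir_model):
    """Return only the notebook entries from a directory listing model."""
    return [entry for entry in dir_model['content'] if entry['type'] == 'notebook']


def dirs_only(dir_model):
    """Return only the directory entries from a directory listing model."""
    return [entry for entry in dir_model['content'] if entry['type'] == 'directory']
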
Example no. 15
0
class APITest(NotebookTestBase):
    """Test the kernels web service API"""
    dirs_nbs = [
        ('', 'inroot'),
        ('Directory with spaces in', 'inspace'),
        (u'unicodé', 'innonascii'),
        ('foo', 'a'),
        ('foo', 'b'),
        ('foo', 'name with spaces'),
        ('foo', u'unicodé'),
        ('foo/bar', 'baz'),
        ('ordering', 'A'),
        ('ordering', 'b'),
        ('ordering', 'C'),
        (u'å b', u'ç d'),
    ]
    hidden_dirs = ['.hidden', '__pycache__']

    dirs = uniq_stable([py3compat.cast_unicode(d) for (d, n) in dirs_nbs])
    del dirs[0]  # remove ''
    top_level_dirs = {normalize('NFC', d.split('/')[0]) for d in dirs}

    @staticmethod
    def _blob_for_name(name):
        return name.encode('utf-8') + b'\xFF'

    @staticmethod
    def _txt_for_name(name):
        return u'%s text file' % name

    def setUp(self):
        nbdir = self.notebook_dir.name
        self.blob = os.urandom(100)
        self.b64_blob = base64.encodestring(self.blob).decode('ascii')

        for d in (self.dirs + self.hidden_dirs):
            d = d.replace('/', os.sep)
            if not os.path.isdir(pjoin(nbdir, d)):
                os.mkdir(pjoin(nbdir, d))

        for d, name in self.dirs_nbs:
            d = d.replace('/', os.sep)
            # create a notebook
            with io.open(pjoin(nbdir, d, '%s.ipynb' % name),
                         'w',
                         encoding='utf-8') as f:
                nb = new_notebook(name=name)
                write(nb, f, format='ipynb')

            # create a text file
            with io.open(pjoin(nbdir, d, '%s.txt' % name),
                         'w',
                         encoding='utf-8') as f:
                f.write(self._txt_for_name(name))

            # create a binary file
            with io.open(pjoin(nbdir, d, '%s.blob' % name), 'wb') as f:
                f.write(self._blob_for_name(name))

        self.api = API(self.base_url())

    def tearDown(self):
        nbdir = self.notebook_dir.name

        for dname in (list(self.top_level_dirs) + self.hidden_dirs):
            shutil.rmtree(pjoin(nbdir, dname), ignore_errors=True)

        if os.path.isfile(pjoin(nbdir, 'inroot.ipynb')):
            os.unlink(pjoin(nbdir, 'inroot.ipynb'))

    def test_list_notebooks(self):
        nbs = notebooks_only(self.api.list().json())
        self.assertEqual(len(nbs), 1)
        self.assertEqual(nbs[0]['name'], 'inroot.ipynb')

        nbs = notebooks_only(
            self.api.list('/Directory with spaces in/').json())
        self.assertEqual(len(nbs), 1)
        self.assertEqual(nbs[0]['name'], 'inspace.ipynb')

        nbs = notebooks_only(self.api.list(u'/unicodé/').json())
        self.assertEqual(len(nbs), 1)
        self.assertEqual(nbs[0]['name'], 'innonascii.ipynb')
        self.assertEqual(nbs[0]['path'], u'unicodé')

        nbs = notebooks_only(self.api.list('/foo/bar/').json())
        self.assertEqual(len(nbs), 1)
        self.assertEqual(nbs[0]['name'], 'baz.ipynb')
        self.assertEqual(nbs[0]['path'], 'foo/bar')

        nbs = notebooks_only(self.api.list('foo').json())
        self.assertEqual(len(nbs), 4)
        nbnames = {normalize('NFC', n['name']) for n in nbs}
        expected = [
            u'a.ipynb', u'b.ipynb', u'name with spaces.ipynb', u'unicodé.ipynb'
        ]
        expected = {normalize('NFC', name) for name in expected}
        self.assertEqual(nbnames, expected)

        nbs = notebooks_only(self.api.list('ordering').json())
        nbnames = [n['name'] for n in nbs]
        expected = ['A.ipynb', 'b.ipynb', 'C.ipynb']
        self.assertEqual(nbnames, expected)

    def test_list_dirs(self):
        dirs = dirs_only(self.api.list().json())
        dir_names = {normalize('NFC', d['name']) for d in dirs}
        self.assertEqual(dir_names,
                         self.top_level_dirs)  # Excluding hidden dirs

    def test_list_nonexistant_dir(self):
        with assert_http_error(404):
            self.api.list('nonexistant')

    def test_get_nb_contents(self):
        for d, name in self.dirs_nbs:
            nb = self.api.read('%s.ipynb' % name, d + '/').json()
            self.assertEqual(nb['name'], u'%s.ipynb' % name)
            self.assertEqual(nb['type'], 'notebook')
            self.assertIn('content', nb)
            self.assertEqual(nb['format'], 'json')
            self.assertIn('content', nb)
            self.assertIn('metadata', nb['content'])
            self.assertIsInstance(nb['content']['metadata'], dict)

    def test_get_contents_no_such_file(self):
        # Name that doesn't exist - should be a 404
        with assert_http_error(404):
            self.api.read('q.ipynb', 'foo')

    def test_get_text_file_contents(self):
        for d, name in self.dirs_nbs:
            model = self.api.read(u'%s.txt' % name, d + '/').json()
            self.assertEqual(model['name'], u'%s.txt' % name)
            self.assertIn('content', model)
            self.assertEqual(model['format'], 'text')
            self.assertEqual(model['type'], 'file')
            self.assertEqual(model['content'], self._txt_for_name(name))

        # Name that doesn't exist - should be a 404
        with assert_http_error(404):
            self.api.read('q.txt', 'foo')

    def test_get_binary_file_contents(self):
        for d, name in self.dirs_nbs:
            model = self.api.read(u'%s.blob' % name, d + '/').json()
            self.assertEqual(model['name'], u'%s.blob' % name)
            self.assertIn('content', model)
            self.assertEqual(model['format'], 'base64')
            self.assertEqual(model['type'], 'file')
            b64_data = base64.encodestring(
                self._blob_for_name(name)).decode('ascii')
            self.assertEqual(model['content'], b64_data)

        # Name that doesn't exist - should be a 404
        with assert_http_error(404):
            self.api.read('q.txt', 'foo')

    def _check_created(self, resp, name, path, type='notebook'):
        self.assertEqual(resp.status_code, 201)
        location_header = py3compat.str_to_unicode(resp.headers['Location'])
        self.assertEqual(
            location_header,
            url_escape(url_path_join(u'/api/contents', path, name)))
        rjson = resp.json()
        self.assertEqual(rjson['name'], name)
        self.assertEqual(rjson['path'], path)
        self.assertEqual(rjson['type'], type)
        isright = os.path.isdir if type == 'directory' else os.path.isfile
        assert isright(
            pjoin(
                self.notebook_dir.name,
                path.replace('/', os.sep),
                name,
            ))

    def test_create_untitled(self):
        resp = self.api.create_untitled(path=u'å b')
        self._check_created(resp, 'Untitled0.ipynb', u'å b')

        # Second time
        resp = self.api.create_untitled(path=u'å b')
        self._check_created(resp, 'Untitled1.ipynb', u'å b')

        # And two directories down
        resp = self.api.create_untitled(path='foo/bar')
        self._check_created(resp, 'Untitled0.ipynb', 'foo/bar')

    def test_create_untitled_txt(self):
        resp = self.api.create_untitled(path='foo/bar', ext='.txt')
        self._check_created(resp, 'untitled0.txt', 'foo/bar', type='file')

        resp = self.api.read(path='foo/bar', name='untitled0.txt')
        model = resp.json()
        self.assertEqual(model['type'], 'file')
        self.assertEqual(model['format'], 'text')
        self.assertEqual(model['content'], '')

    def test_upload_untitled(self):
        nb = new_notebook(name='Upload test')
        nbmodel = {'content': nb, 'type': 'notebook'}
        resp = self.api.upload_untitled(path=u'å b', body=json.dumps(nbmodel))
        self._check_created(resp, 'Untitled0.ipynb', u'å b')

    def test_upload(self):
        nb = new_notebook(name=u'ignored')
        nbmodel = {'content': nb, 'type': 'notebook'}
        resp = self.api.upload(u'Upload tést.ipynb',
                               path=u'å b',
                               body=json.dumps(nbmodel))
        self._check_created(resp, u'Upload tést.ipynb', u'å b')

    def test_mkdir(self):
        resp = self.api.mkdir(u'New ∂ir', path=u'å b')
        self._check_created(resp, u'New ∂ir', u'å b', type='directory')

    def test_mkdir_hidden_400(self):
        with assert_http_error(400):
            resp = self.api.mkdir(u'.hidden', path=u'å b')

    def test_upload_txt(self):
        body = u'ünicode téxt'
        model = {
            'content': body,
            'format': 'text',
            'type': 'file',
        }
        resp = self.api.upload(u'Upload tést.txt',
                               path=u'å b',
                               body=json.dumps(model))

        # check roundtrip
        resp = self.api.read(path=u'å b', name=u'Upload tést.txt')
        model = resp.json()
        self.assertEqual(model['type'], 'file')
        self.assertEqual(model['format'], 'text')
        self.assertEqual(model['content'], body)

    def test_upload_b64(self):
        body = b'\xFFblob'
        b64body = base64.encodestring(body).decode('ascii')
        model = {
            'content': b64body,
            'format': 'base64',
            'type': 'file',
        }
        resp = self.api.upload(u'Upload tést.blob',
                               path=u'å b',
                               body=json.dumps(model))

        # check roundtrip
        resp = self.api.read(path=u'å b', name=u'Upload tést.blob')
        model = resp.json()
        self.assertEqual(model['type'], 'file')
        self.assertEqual(model['format'], 'base64')
        decoded = base64.decodestring(model['content'].encode('ascii'))
        self.assertEqual(decoded, body)

    def test_upload_v2(self):
        nb = v2.new_notebook()
        ws = v2.new_worksheet()
        nb.worksheets.append(ws)
        ws.cells.append(v2.new_code_cell(input='print("hi")'))
        nbmodel = {'content': nb, 'type': 'notebook'}
        resp = self.api.upload(u'Upload tést.ipynb',
                               path=u'å b',
                               body=json.dumps(nbmodel))
        self._check_created(resp, u'Upload tést.ipynb', u'å b')
        resp = self.api.read(u'Upload tést.ipynb', u'å b')
        data = resp.json()
        self.assertEqual(data['content']['nbformat'], current.nbformat)
        self.assertEqual(data['content']['orig_nbformat'], 2)

    def test_copy_untitled(self):
        resp = self.api.copy_untitled(u'ç d.ipynb', path=u'å b')
        self._check_created(resp, u'ç d-Copy0.ipynb', u'å b')

    def test_copy(self):
        resp = self.api.copy(u'ç d.ipynb', u'cøpy.ipynb', path=u'å b')
        self._check_created(resp, u'cøpy.ipynb', u'å b')

    def test_copy_path(self):
        resp = self.api.copy(u'foo/a.ipynb', u'cøpyfoo.ipynb', path=u'å b')
        self._check_created(resp, u'cøpyfoo.ipynb', u'å b')

    def test_copy_dir_400(self):
        # can't copy directories
        with assert_http_error(400):
            resp = self.api.copy(u'å b', u'å c')

    def test_delete(self):
        for d, name in self.dirs_nbs:
            resp = self.api.delete('%s.ipynb' % name, d)
            self.assertEqual(resp.status_code, 204)

        for d in self.dirs + ['/']:
            nbs = notebooks_only(self.api.list(d).json())
            self.assertEqual(len(nbs), 0)

    def test_delete_dirs(self):
        # depth-first delete everything, so we never try to delete a non-empty directory
        for name in sorted(self.dirs + ['/'], key=len, reverse=True):
            listing = self.api.list(name).json()['content']
            for model in listing:
                self.api.delete(model['name'], model['path'])
        listing = self.api.list('/').json()['content']
        self.assertEqual(listing, [])

    def test_delete_non_empty_dir(self):
        """delete non-empty dir raises 400"""
        with assert_http_error(400):
            self.api.delete(u'å b')

    def test_rename(self):
        resp = self.api.rename('a.ipynb', 'foo', 'z.ipynb')
        self.assertEqual(resp.headers['Location'].split('/')[-1], 'z.ipynb')
        self.assertEqual(resp.json()['name'], 'z.ipynb')
        assert os.path.isfile(pjoin(self.notebook_dir.name, 'foo', 'z.ipynb'))

        nbs = notebooks_only(self.api.list('foo').json())
        nbnames = set(n['name'] for n in nbs)
        self.assertIn('z.ipynb', nbnames)
        self.assertNotIn('a.ipynb', nbnames)

    def test_rename_existing(self):
        with assert_http_error(409):
            self.api.rename('a.ipynb', 'foo', 'b.ipynb')

    def test_save(self):
        resp = self.api.read('a.ipynb', 'foo')
        nbcontent = json.loads(resp.text)['content']
        nb = to_notebook_json(nbcontent)
        ws = new_worksheet()
        nb.worksheets = [ws]
        ws.cells.append(new_heading_cell(u'Created by test ³'))

        nbmodel = {
            'name': 'a.ipynb',
            'path': 'foo',
            'content': nb,
            'type': 'notebook'
        }
        resp = self.api.save('a.ipynb', path='foo', body=json.dumps(nbmodel))

        nbfile = pjoin(self.notebook_dir.name, 'foo', 'a.ipynb')
        with io.open(nbfile, 'r', encoding='utf-8') as f:
            newnb = read(f, format='ipynb')
        self.assertEqual(newnb.worksheets[0].cells[0].source,
                         u'Created by test ³')
        nbcontent = self.api.read('a.ipynb', 'foo').json()['content']
        newnb = to_notebook_json(nbcontent)
        self.assertEqual(newnb.worksheets[0].cells[0].source,
                         u'Created by test ³')

        # Save and rename
        nbmodel = {
            'name': 'a2.ipynb',
            'path': 'foo/bar',
            'content': nb,
            'type': 'notebook'
        }
        resp = self.api.save('a.ipynb', path='foo', body=json.dumps(nbmodel))
        saved = resp.json()
        self.assertEqual(saved['name'], 'a2.ipynb')
        self.assertEqual(saved['path'], 'foo/bar')
        assert os.path.isfile(
            pjoin(self.notebook_dir.name, 'foo', 'bar', 'a2.ipynb'))
        assert not os.path.isfile(
            pjoin(self.notebook_dir.name, 'foo', 'a.ipynb'))
        with assert_http_error(404):
            self.api.read('a.ipynb', 'foo')

    def test_checkpoints(self):
        resp = self.api.read('a.ipynb', 'foo')
        r = self.api.new_checkpoint('a.ipynb', 'foo')
        self.assertEqual(r.status_code, 201)
        cp1 = r.json()
        self.assertEqual(set(cp1), {'id', 'last_modified'})
        self.assertEqual(r.headers['Location'].split('/')[-1], cp1['id'])

        # Modify it
        nbcontent = json.loads(resp.text)['content']
        nb = to_notebook_json(nbcontent)
        ws = new_worksheet()
        nb.worksheets = [ws]
        hcell = new_heading_cell('Created by test')
        ws.cells.append(hcell)
        # Save
        nbmodel = {
            'name': 'a.ipynb',
            'path': 'foo',
            'content': nb,
            'type': 'notebook'
        }
        resp = self.api.save('a.ipynb', path='foo', body=json.dumps(nbmodel))

        # List checkpoints
        cps = self.api.get_checkpoints('a.ipynb', 'foo').json()
        self.assertEqual(cps, [cp1])

        nbcontent = self.api.read('a.ipynb', 'foo').json()['content']
        nb = to_notebook_json(nbcontent)
        self.assertEqual(nb.worksheets[0].cells[0].source, 'Created by test')

        # Restore cp1
        r = self.api.restore_checkpoint('a.ipynb', 'foo', cp1['id'])
        self.assertEqual(r.status_code, 204)
        nbcontent = self.api.read('a.ipynb', 'foo').json()['content']
        nb = to_notebook_json(nbcontent)
        self.assertEqual(nb.worksheets, [])

        # Delete cp1
        r = self.api.delete_checkpoint('a.ipynb', 'foo', cp1['id'])
        self.assertEqual(r.status_code, 204)
        cps = self.api.get_checkpoints('a.ipynb', 'foo').json()
        self.assertEqual(cps, [])
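
Note: across these examples, the nb_api / api helper wraps plain HTTP calls against the notebook server and raises on error statuses, which is what assert_http_error catches. Below is a minimal sketch of such a wrapper for the path-based /api/contents flavour, written against the requests library; the method names mirror the tests above, but the details (url_path_join helper, _req method) are illustrative assumptions rather than the project's exact client.

import requests

try:  # Python 3
    from urllib.parse import quote
except ImportError:  # Python 2
    from urllib import quote


def url_path_join(*pieces):
    """Join URL path segments with single slashes (illustrative helper)."""
    return '/'.join(piece.strip('/') for piece in pieces if piece.strip('/'))


class API(object):
    """Illustrative client for the contents REST API exercised in the tests above."""

    def __init__(self, base_url):
        self.base_url = base_url

    def _req(self, verb, path, body=None):
        # percent-encode the API path, keeping '/' separators intact
        url = url_path_join(self.base_url, 'api/contents',
                            quote(path.encode('utf-8'), safe='/'))
        response = requests.request(verb, url, data=body)
        response.raise_for_status()  # 4xx/5xx become requests.HTTPError
        return response

    def list(self, path='/'):
        return self._req('GET', path)

    def read(self, path):
        return self._req('GET', path)

    def save(self, path, body):
        return self._req('PUT', path, body=body)

    def delete(self, path):
        return self._req('DELETE', path)
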