def _as_bytes(s, encoding):
    """Encode text to bytes; pass bytes through unchanged.

    Stdlib-only stand-in for ``py3compat.cast_bytes`` (py2-era shim).
    """
    return s if isinstance(s, bytes) else s.encode(encoding)


def passwd_check(hashed_passphrase, passphrase):
    """Verify that a given passphrase matches its hashed version.

    Parameters
    ----------
    hashed_passphrase : str
        Hashed password, in the format returned by `passwd`.
    passphrase : str
        Passphrase to validate.

    Returns
    -------
    valid : bool
        True if the passphrase matches the hash.

    Examples
    --------
    >>> myhash = passwd('mypassword')
    >>> passwd_check(myhash, 'mypassword')
    True

    >>> passwd_check(myhash, 'otherpassword')
    False

    >>> passwd_check('sha1:0e112c3ddfce:a68df677475c2b47b6e86d0467eec97ac5f4b85a',
    ...              'mypassword')
    True
    """
    if hashed_passphrase.startswith('argon2:'):
        # Lazy import: argon2 is only required when checking argon2 hashes.
        import argon2
        import argon2.exceptions
        ph = argon2.PasswordHasher()
        try:
            return ph.verify(hashed_passphrase[7:], passphrase)
        except (argon2.exceptions.VerificationError,
                argon2.exceptions.InvalidHash):
            # Fix: a malformed stored argon2 hash raised InvalidHash out of
            # this function; treat it as a failed check instead of crashing.
            return False

    try:
        algorithm, salt, pw_digest = hashed_passphrase.split(':', 2)
    except (ValueError, TypeError):
        # Not shaped like 'algorithm:salt:digest' (or not a string at all).
        return False

    try:
        h = hashlib.new(algorithm)
    except ValueError:
        # Unknown hash algorithm name.
        return False

    if len(pw_digest) == 0:
        return False

    h.update(_as_bytes(passphrase, 'utf-8') + _as_bytes(salt, 'ascii'))
    return h.hexdigest() == pw_digest
def respond_zip(handler, name, output, resources):
    """Zip up the output and resource files and respond with the zip file.

    Returns True if it has served a zip file, False if there are no resource
    files, in which case we serve the plain output file.
    """
    # No extracted resources -> let the caller serve the plain output.
    output_files = resources.get("outputs", None)
    if not output_files:
        return False

    stem = os.path.splitext(name)[0]

    # Response headers: downloadable, non-cacheable zip attachment.
    handler.set_attachment_header(stem + ".zip")
    handler.set_header("Content-Type", "application/zip")
    handler.set_header("Cache-Control", "no-store, no-cache, must-revalidate, max-age=0")

    # Build the archive in memory: converted output first, then resources.
    buffer = io.BytesIO()
    with zipfile.ZipFile(buffer, mode="w", compression=zipfile.ZIP_DEFLATED) as zipf:
        zipf.writestr(stem + resources["output_extension"],
                      cast_bytes(output, "utf-8"))
        for filename, data in output_files.items():
            zipf.writestr(os.path.basename(filename), data)

    handler.finish(buffer.getvalue())
    return True
def respond_zip(handler, name, output, resources):
    """Zip up the output and resource files and respond with the zip file.

    Returns True if it has served a zip file, False if there are no resource
    files, in which case we serve the plain output file.
    """
    # Nothing to bundle -> caller serves the plain output file instead.
    output_files = resources.get('outputs', None)
    if not output_files:
        return False

    root = os.path.splitext(name)[0]

    # Mark the response as a downloadable zip archive.
    handler.set_attachment_header(root + '.zip')
    handler.set_header('Content-Type', 'application/zip')

    # Assemble the archive entirely in memory.
    buffer = io.BytesIO()
    with zipfile.ZipFile(buffer, mode='w', compression=zipfile.ZIP_DEFLATED) as zipf:
        zipf.writestr(root + resources['output_extension'],
                      cast_bytes(output, 'utf-8'))
        for filename, data in output_files.items():
            zipf.writestr(os.path.basename(filename), data)

    handler.finish(buffer.getvalue())
    return True
def respond_zip(handler, name, output, resources):
    """Zip up the output and resource files and respond with the zip file.

    Returns True if it has served a zip file, False if there are no resource
    files, in which case we serve the plain output file.
    """
    output_files = resources.get("outputs", None)
    if not output_files:
        # No extracted resources: the caller sends the plain output.
        return False

    stem = os.path.splitext(name)[0]
    zip_filename = stem + ".zip"

    # Advertise the response as a zip attachment (filename URL-escaped).
    handler.set_header("Content-Disposition",
                       'attachment; filename="%s"' % escape.url_escape(zip_filename))
    handler.set_header("Content-Type", "application/zip")

    # Write the converted document plus every resource into an in-memory zip.
    buffer = io.BytesIO()
    with zipfile.ZipFile(buffer, mode="w", compression=zipfile.ZIP_DEFLATED) as zipf:
        zipf.writestr(stem + resources["output_extension"],
                      cast_bytes(output, "utf-8"))
        for filename, data in output_files.items():
            zipf.writestr(os.path.basename(filename), data)

    handler.finish(buffer.getvalue())
    return True
def load_connection_info(self, info):
    """Load connection info from a dict containing connection info.

    Typically this data comes from a connection file
    and is called by load_connection_file.

    Parameters
    ----------
    info: dict
        Dictionary containing connection_info.
        See the connection_file spec for details.
    """
    self.transport = info.get('transport', self.transport)
    self.ip = info.get('ip', self._ip_default())

    self._record_random_port_names()
    # Ports already set non-zero (by config or command line) win over the
    # values from the file; only unset (0) ports are filled in.
    for name in port_names:
        if getattr(self, name) == 0 and name in info:
            # not overridden by config or cl_args
            setattr(self, name, info[name])

    # Session auth settings come straight from the file when present.
    if 'key' in info:
        self.session.key = cast_bytes(info['key'])
    if 'signature_scheme' in info:
        self.session.signature_scheme = info['signature_scheme']
def __init__(self, session, pub_thread, name, pipe=None):
    # type: (object, object, object, object) -> object
    """Build a file-like stream object that publishes over IOPub.

    Parameters
    ----------
    session :
        Stored on the instance; used by other methods (not shown here).
    pub_thread : IOPubThread (or legacy socket, deprecated)
        Publishing thread; a raw socket is wrapped in a new IOPubThread.
    name : str
        Stream name; becomes part of the topic b'stream.<name>'.
    pipe :
        Deprecated and ignored; a non-None value only warns.
    """
    if pipe is not None:
        warnings.warn(
            "pipe argument to OutStream is deprecated and ignored",
            DeprecationWarning)
    # This is necessary for compatibility with Python built-in streams
    self.session = session
    if not isinstance(pub_thread, IOPubThread):
        # Backward-compat: given socket, not thread. Wrap in a thread.
        warnings.warn(
            "OutStream should be created with IOPubThread, not %r" % pub_thread,
            DeprecationWarning, stacklevel=2)
        pub_thread = IOPubThread(pub_thread)
        pub_thread.start()
    self.pub_thread = pub_thread
    self.name = name
    self.topic = b'stream.' + py3compat.cast_bytes(name)
    self.parent_header = {}
    # Creating process id -- presumably used to detect writes from forked
    # children; confirm against the write/flush paths (not shown here).
    self._master_pid = os.getpid()
    self._flush_pending = False
    self._io_loop = pub_thread.io_loop
    self._new_buffer()
def respond_zip(handler, name, output, resources):
    """Zip up the output and resource files and respond with the zip file.

    Returns True if it has served a zip file, False if there are no resource
    files, in which case we serve the plain output file.
    """
    output_files = resources.get('outputs', None)
    if not output_files:
        # Nothing extracted -> plain output will be served by the caller.
        return False

    base = os.path.splitext(name)[0]
    zip_filename = base + '.zip'

    # Headers for a zip download (attachment filename is URL-escaped).
    handler.set_header(
        'Content-Disposition',
        'attachment; filename="%s"' % escape.url_escape(zip_filename))
    handler.set_header('Content-Type', 'application/zip')

    # In-memory archive: the converted document, then each resource file.
    buffer = io.BytesIO()
    with zipfile.ZipFile(buffer, mode='w', compression=zipfile.ZIP_DEFLATED) as zipf:
        zipf.writestr(base + resources['output_extension'],
                      cast_bytes(output, 'utf-8'))
        for filename, data in output_files.items():
            zipf.writestr(os.path.basename(filename), data)

    handler.finish(buffer.getvalue())
    return True
def __init__(self, session, pub_thread, name, pipe=None, echo=None):
    """Build a file-like stream object that publishes over IOPub.

    Parameters
    ----------
    session :
        Stored on the instance; used by other methods (not shown here).
    pub_thread : IOPubThread (or legacy socket, deprecated)
        Publishing thread; a raw socket is wrapped in a new IOPubThread.
    name : str
        Stream name; becomes part of the topic b'stream.<name>'.
    pipe :
        Deprecated and ignored; a non-None value only warns.
    echo : file-like, optional
        If given, must have read/write attributes; stored as self.echo.
    """
    if pipe is not None:
        warnings.warn("pipe argument to OutStream is deprecated and ignored",
                      DeprecationWarning)
    # This is necessary for compatibility with Python built-in streams
    self.session = session
    if not isinstance(pub_thread, IOPubThread):
        # Backward-compat: given socket, not thread. Wrap in a thread.
        warnings.warn("OutStream should be created with IOPubThread, not %r" % pub_thread,
                      DeprecationWarning, stacklevel=2)
        pub_thread = IOPubThread(pub_thread)
        pub_thread.start()
    self.pub_thread = pub_thread
    self.name = name
    self.topic = b'stream.' + py3compat.cast_bytes(name)
    self.parent_header = {}
    # Creating process id -- presumably used to detect writes from forked
    # children; confirm against the write/flush paths (not shown here).
    self._master_pid = os.getpid()
    self._flush_pending = False
    self._io_loop = pub_thread.io_loop
    self._new_buffer()
    self.echo = None
    if echo:
        # Duck-typed check: anything with read+write qualifies.
        if hasattr(echo, 'read') and hasattr(echo, 'write'):
            self.echo = echo
        else:
            raise ValueError("echo argument must be a file like object")
def dispatch_notification(self, msg):
    """Dispatch register/unregister events to the registered handler.

    Malformed or unauthorized messages are logged and dropped; handler
    failures are logged with a traceback and swallowed.
    """
    try:
        idents, msg = self.session.feed_identities(msg)
    except ValueError:
        # Fix: Logger.warn is a deprecated alias of Logger.warning.
        self.log.warning("task::Invalid Message: %r", msg)
        return
    try:
        msg = self.session.deserialize(msg)
    except ValueError:
        # Lazy %-args: formatting only happens if the record is emitted.
        self.log.warning("task::Unauthorized message from: %r", idents)
        return

    msg_type = msg['header']['msg_type']

    handler = self._notification_handlers.get(msg_type, None)
    if handler is None:
        self.log.error("Unhandled message type: %r", msg_type)
    else:
        try:
            handler(cast_bytes(msg['content']['uuid']))
        except Exception:
            self.log.error("task::Invalid notification msg: %r", msg, exc_info=True)
def complete_request(self, text):
    """Ask the kernel to complete the current readline buffer.

    Returns the list of completion strings, or an empty list when the
    reply does not match our request id.
    """
    buf = str_to_unicode(readline.get_line_buffer())
    # readline reports the cursor as a byte offset; convert it to a
    # character offset so multi-byte characters are counted correctly.
    end_bytes = readline.get_endidx()
    char_pos = len(cast_unicode(cast_bytes(buf)[:end_bytes]))

    # Send the request and wait (up to self.timeout) for the kernel reply.
    request_id = self.client.complete(
        code=buf,
        cursor_pos=char_pos,
    )
    reply = self.client.shell_channel.get_msg(timeout=self.timeout)
    if reply['parent_header']['msg_id'] != request_id:
        return []

    content = reply['content']
    prefix = buf[:content['cursor_start']]
    completions = [prefix + match for match in content['matches']]
    if content["cursor_end"] < char_pos:
        # Kernel completed only part of the word under the cursor;
        # re-attach the remainder so readline replaces the full span.
        tail = buf[content["cursor_end"]:char_pos]
        completions = [c + tail for c in completions]
    return [unicode_to_str(c) for c in completions]
def __init__(self, session, pub_thread, name, pipe=None, echo=None):
    """Build a file-like stream object that publishes over IOPub.

    Parameters
    ----------
    session :
        Stored on the instance; used by other methods (not shown here).
    pub_thread : IOPubThread (or legacy socket, deprecated)
        Publishing thread; a raw socket is wrapped in a new IOPubThread.
    name : str
        Stream name; becomes part of the topic b'stream.<name>'.
    pipe :
        Deprecated and ignored; a non-None value only warns.
    echo : file-like, optional
        If given, must have read/write attributes; stored as self.echo.
    """
    if pipe is not None:
        warnings.warn("pipe argument to OutStream is deprecated and ignored",
                      DeprecationWarning)
    # This is necessary for compatibility with Python built-in streams
    self.session = session
    if not isinstance(pub_thread, IOPubThread):
        # Backward-compat: given socket, not thread. Wrap in a thread.
        warnings.warn("OutStream should be created with IOPubThread, not %r" % pub_thread,
                      DeprecationWarning, stacklevel=2)
        pub_thread = IOPubThread(pub_thread)
        pub_thread.start()
    self.pub_thread = pub_thread
    self.name = name
    self.topic = b'stream.' + py3compat.cast_bytes(name)
    self.parent_header = {}
    # Creating process id -- presumably used to detect writes from forked
    # children; confirm against the write/flush paths (not shown here).
    self._master_pid = os.getpid()
    # Separate pending flags for master-process and subprocess flushes.
    self._flush_pending = False
    self._subprocess_flush_pending = False
    self._io_loop = pub_thread.io_loop
    self._new_buffer()
    self.echo = None
    if echo:
        # Duck-typed check: anything with read+write qualifies.
        if hasattr(echo, 'read') and hasattr(echo, 'write'):
            self.echo = echo
        else:
            raise ValueError("echo argument must be a file like object")
def __init__(self, session, pub_thread, name, pipe=None):
    """Build a file-like stream object that publishes over IOPub.

    Parameters
    ----------
    session :
        Stored on the instance; used by other methods (not shown here).
    pub_thread : IOPubThread (or legacy socket, deprecated)
        Publishing thread; a raw socket is wrapped in a new IOPubThread.
    name : str
        Stream name; becomes part of the topic b'stream.<name>'.
    pipe :
        Deprecated and ignored; a non-None value only warns.
    """
    if pipe is not None:
        warnings.warn(
            "pipe argument to OutStream is deprecated and ignored",
            DeprecationWarning)
    self.encoding = 'UTF-8'
    self.session = session
    if not isinstance(pub_thread, IOPubThread):
        # Backward-compat: given socket, not thread. Wrap in a thread.
        warnings.warn(
            "OutStream should be created with IOPubThread, not %r" % pub_thread,
            DeprecationWarning, stacklevel=2)
        pub_thread = IOPubThread(pub_thread)
        pub_thread.start()
    self.pub_thread = pub_thread
    self.name = name
    self.topic = b'stream.' + py3compat.cast_bytes(name)
    self.parent_header = {}
    # Identifier of the currently-executing cell -- presumably set elsewhere
    # per execution; confirm against the flush/publish paths (not shown).
    self.cell_uuid = ''
    # Creating process id -- presumably used to detect forked children.
    self._master_pid = os.getpid()
    # Locks guarding flush scheduling and buffer access respectively.
    self._flush_lock = threading.Lock()
    self._flush_timeout = None
    self._io_loop = pub_thread.io_loop
    self._buffer_lock = threading.Lock()
    self._new_buffer()
def complete_request(self, text):
    """Request completions from the kernel for the current readline buffer.

    Returns a list of completion strings; empty when the reply's id does
    not match the request.
    """
    buf = str_to_unicode(readline.get_line_buffer())
    # get_endidx is a byte offset; translate to a character offset so
    # multi-byte input is measured correctly.
    byte_pos = readline.get_endidx()
    char_pos = len(cast_unicode(cast_bytes(buf)[:byte_pos]))

    # Fire the request, then block (up to self.timeout) for the reply.
    request_id = self.client.complete(
        code=buf,
        cursor_pos=char_pos,
    )
    reply = self.client.shell_channel.get_msg(timeout=self.timeout)
    if reply['parent_header']['msg_id'] != request_id:
        return []

    content = reply['content']
    head = buf[:content['cursor_start']]
    results = [head + m for m in content['matches']]
    if content["cursor_end"] < char_pos:
        # Partial-word completion: restore the trailing span so readline
        # replaces the whole word.
        remainder = buf[content["cursor_end"]:char_pos]
        results = [r + remainder for r in results]
    return [unicode_to_str(r) for r in results]
def _topic(self, topic):
    """prefixed topic for IOPub messages"""
    # A non-negative int_id means this kernel is registered as a parallel
    # engine; otherwise fall back to the kernel's ident string.
    if self.int_id >= 0:
        base = "engine.%i" % self.int_id
    else:
        base = "kernel.%s" % self.ident
    return py3compat.cast_bytes("%s.%s" % (base, topic))
def respond_zip(handler, name, output, resources): """Zip up the output and resource files and respond with the zip file. Returns True if it has served a zip file, False if there are no resource files, in which case we serve the plain output file. """ # Check if we have resource files we need to zip output_files = resources.get('outputs', None) if not output_files: return False # Headers zip_filename = os.path.splitext(name)[0] + '.zip' handler.set_attachment_header(zip_filename) handler.set_header('Content-Type', 'application/zip') handler.set_header('Cache-Control', 'no-store, no-cache, must-revalidate, max-age=0') # iOS: download the file that was created locally and return: if (sys.platform == 'darwin' and os.uname().machine.startswith('iP')): zipf = zipfile.ZipFile(zip_filename, mode='w', compression=zipfile.ZIP_DEFLATED) output_filename = os.path.splitext( name)[0] + resources['output_extension'] zipf.writestr(output_filename, cast_bytes(output, 'utf-8')) for filename, data in output_files.items(): zipf.writestr(os.path.basename(filename), data) zipf.close() handler.finish( zip_filename ) # send back to the application the name of the file we have created return True # Prepare the zip file buffer = io.BytesIO() zipf = zipfile.ZipFile(buffer, mode='w', compression=zipfile.ZIP_DEFLATED) output_filename = os.path.splitext(name)[0] + resources['output_extension'] zipf.writestr(output_filename, cast_bytes(output, 'utf-8')) for filename, data in output_files.items(): zipf.writestr(os.path.basename(filename), data) zipf.close() handler.finish(buffer.getvalue()) return True
def pandoc(source, fmt, to, extra_args=None, encoding='utf-8'):
    """Convert an input string using pandoc.

    Pandoc converts an input string `from` a format
    `to` a target format.

    Parameters
    ----------
    source : string
        Input string, assumed to be valid format `from`.
    fmt : string
        The name of the input format (markdown, etc.)
    to : string
        The name of the output format (html, etc.)

    Returns
    -------
    out : unicode
        Output as returned by pandoc.

    Raises
    ------
    PandocMissing
        If pandoc is not installed.

    Any error messages generated by pandoc are printed to stderr.
    """
    cmd = ['pandoc', '-f', fmt, '-t', to]
    if extra_args:
        cmd.extend(extra_args)

    # iOS: we cannot call pandoc, so we just don't convert markdown cells.
    # This is not perfect (...) but it lets the conversion machine work.
    # iOS: we replaced pandoc with a mistune plugin. It's not as good but it works
    # iOS, TODO: tables in LaTeX, html in LaTeX
    if (sys.platform == 'darwin' and platform.machine().startswith('iP')):
        # NOTE(review): machine() beginning with 'iP' (iPhone/iPad) is used
        # as the iOS marker; confirm this holds for all targeted devices.
        if (fmt.startswith('markdown') and to.startswith('latex')):
            markdown_to_latex = mistune.Markdown(renderer=LatexRenderer())
            return markdown_to_latex(source)
        elif (fmt.startswith('markdown') and to.startswith('rst')):
            return convert(source)  # m2r markdown to rst conversion
        elif (fmt.startswith('markdown') and to.startswith('asciidoc')):
            markdown_to_asciidoc = mistune.Markdown(renderer=AsciidocRenderer())
            return markdown_to_asciidoc(source)
        else:
            # Unsupported conversion on iOS: pass the source through unchanged.
            return source

    # this will raise an exception that will pop us out of here
    check_pandoc_version()

    # we can safely continue
    p = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
    out, _ = p.communicate(cast_bytes(source, encoding))
    # Decode with 'replace' so undecodable bytes cannot raise.
    out = TextIOWrapper(BytesIO(out), encoding, 'replace').read()
    return out.rstrip('\n')
def passwd_check(hashed_passphrase, passphrase):
    """Verify that a given passphrase matches its hashed version.

    Parameters
    ----------
    hashed_passphrase : str
        Hashed password, in the format returned by `passwd`.
    passphrase : str
        Passphrase to validate.

    Returns
    -------
    valid : bool
        True if the passphrase matches the hash.

    Examples
    --------
    >>> from notebook.auth.security import passwd_check
    >>> passwd_check('sha1:0e112c3ddfce:a68df677475c2b47b6e86d0467eec97ac5f4b85a',
    ...              'mypassword')
    True

    >>> passwd_check('sha1:0e112c3ddfce:a68df677475c2b47b6e86d0467eec97ac5f4b85a',
    ...              'anotherpassword')
    False
    """
    try:
        algorithm, salt, pw_digest = hashed_passphrase.split(':', 2)
    except (ValueError, TypeError):
        # Not shaped like 'algorithm:salt:digest' (or not a string).
        return False

    if not pw_digest:
        # An empty stored digest can never be a valid match.
        return False

    try:
        hasher = hashlib.new(algorithm)
    except ValueError:
        # Unrecognized hash algorithm name.
        return False

    hasher.update(cast_bytes(passphrase, 'utf-8') + cast_bytes(salt, 'ascii'))
    return hasher.hexdigest() == pw_digest
def passwd_check(hashed_passphrase, passphrase):
    """Verify that a given passphrase matches its hashed version.

    Parameters
    ----------
    hashed_passphrase : str
        Hashed password, in the format returned by `passwd`.
    passphrase : str
        Passphrase to validate.

    Returns
    -------
    valid : bool
        True if the passphrase matches the hash.

    Examples
    --------
    >>> from jupyter_server.auth.security import passwd_check
    >>> passwd_check('sha1:0e112c3ddfce:a68df677475c2b47b6e86d0467eec97ac5f4b85a',
    ...              'mypassword')
    True

    >>> passwd_check('sha1:0e112c3ddfce:a68df677475c2b47b6e86d0467eec97ac5f4b85a',
    ...              'anotherpassword')
    False
    """
    try:
        algorithm, salt, pw_digest = hashed_passphrase.split(":", 2)
    except (ValueError, TypeError):
        # Malformed stored hash (wrong shape or wrong type).
        return False

    if not pw_digest:
        # Empty stored digest: never a match.
        return False

    try:
        hasher = hashlib.new(algorithm)
    except ValueError:
        # Unknown algorithm name.
        return False

    hasher.update(cast_bytes(passphrase, "utf-8") + cast_bytes(salt, "ascii"))
    return hasher.hexdigest() == pw_digest
def passwd(passphrase=None, algorithm='argon2'):
    """Generate hashed password and salt for use in notebook configuration.

    In the notebook configuration, set `c.NotebookApp.password` to
    the generated string.

    Parameters
    ----------
    passphrase : str
        Password to hash.  If unspecified, the user is asked to input
        and verify a password.
    algorithm : str
        Hashing algorithm to use (e.g, 'sha1' or any argument supported
        by :func:`hashlib.new`, or 'argon2').

    Returns
    -------
    hashed_passphrase : str
        Hashed password, in the format 'hash_algorithm:salt:passphrase_hash'.

    Examples
    --------
    >>> passwd('mypassword')
    'sha1:7cf3:b7d6da294ea9592a9480c8f52e63cd42cfb9dd12'
    """
    if passphrase is None:
        # Fix: this prompt/verify span was corrupted (secret-masked) in the
        # source; reconstructed from the upstream notebook implementation.
        # Give the user three attempts to type the same passphrase twice.
        for i in range(3):
            p0 = getpass.getpass('Enter password: ')
            p1 = getpass.getpass('Verify password: ')
            if p0 == p1:
                passphrase = p0
                break
            print('Passwords do not match.')
        else:
            raise ValueError('No matching passwords found. Giving up.')

    if algorithm == 'argon2':
        # Lazy import: argon2 is only needed for the argon2 algorithm.
        from argon2 import PasswordHasher
        ph = PasswordHasher(
            memory_cost=10240,
            time_cost=10,
            parallelism=8,
        )
        h = ph.hash(passphrase)
        return ':'.join((algorithm, cast_unicode(h, 'ascii')))

    h = hashlib.new(algorithm)
    salt = ('%0' + str(salt_len) + 'x') % random.getrandbits(4 * salt_len)
    h.update(cast_bytes(passphrase, 'utf-8') + str_to_bytes(salt, 'ascii'))
    return ':'.join((algorithm, salt, h.hexdigest()))
def markdown2html_marked(source, encoding='utf-8'):
    """Convert a markdown string to HTML via marked"""
    # Feed the markdown to the bundled `marked` script through Node.js.
    command = [_find_nodejs(), marked]
    try:
        proc = subprocess.Popen(command,
                                stdin=subprocess.PIPE,
                                stdout=subprocess.PIPE)
    except OSError as e:
        raise NodeJSMissing(
            "The command '%s' returned an error: %s.\n" % (" ".join(command), e) +
            "Please check that Node.js is installed."
        )
    stdout, _ = proc.communicate(cast_bytes(source, encoding))
    # Decode with 'replace' so undecodable bytes cannot raise.
    html = TextIOWrapper(BytesIO(stdout), encoding, 'replace').read()
    return html.rstrip('\n')
def jupyter_password(passphrase='', salt='0' * 12, algorithm='sha1'):
    """Hash *passphrase* into Jupyter's 'algorithm:salt:digest' format."""
    import hashlib
    from ipython_genutils.py3compat import cast_bytes, str_to_bytes

    # Normalize the salt: keep a text copy for the result string and a
    # bytes copy for hashing, whichever form was passed in.
    if py.isbytes(salt):
        salt_bytes = salt
        salt = salt_bytes.decode('ascii')
    elif py.istr(salt):
        salt_bytes = str_to_bytes(salt, 'ascii')
    else:
        raise py.ArgumentError(salt)

    digest = hashlib.new(algorithm)
    digest.update(cast_bytes(passphrase, 'utf-8') + salt_bytes)
    return ':'.join((algorithm, salt, digest.hexdigest()))
def __init__(self, session, pub_socket, name, pipe=True):
    """Legacy OutStream: publishes directly on a socket (no IOPubThread).

    Parameters
    ----------
    session :
        Stored on the instance; used by other methods (not shown here).
    pub_socket :
        Socket the stream publishes on.
    name : str
        Stream name; becomes part of the topic b'stream.<name>'.
    pipe : bool
        When True, _setup_pipe_in() is called -- presumably to capture
        subprocess output; confirm against _setup_pipe_in (not shown).
    """
    self.encoding = 'UTF-8'
    self.session = session
    self.pub_socket = pub_socket
    self.name = name
    self.topic = b'stream.' + py3compat.cast_bytes(name)
    self.parent_header = {}
    self._new_buffer()
    self._buffer_lock = threading.Lock()
    # Process/thread ids recorded at creation -- presumably used elsewhere
    # to detect writes from forks or other threads; TODO confirm.
    self._master_pid = os.getpid()
    self._master_thread = threading.current_thread().ident
    self._pipe_pid = os.getpid()
    self._pipe_flag = pipe
    if pipe:
        self._setup_pipe_in()
def markdown2html_marked(source, encoding='utf-8'):
    """Convert a markdown string to HTML via marked"""
    # Pipe the markdown source through Node.js running the marked script.
    command = [_find_nodejs(), marked]
    try:
        node = subprocess.Popen(command,
                                stdin=subprocess.PIPE,
                                stdout=subprocess.PIPE)
    except OSError as e:
        raise NodeJSMissing(
            "The command '%s' returned an error: %s.\n" % (" ".join(command), e) +
            "Please check that Node.js is installed."
        )
    raw, _ = node.communicate(cast_bytes(source, encoding))
    # 'replace' error handling keeps bad bytes from raising during decode.
    rendered = TextIOWrapper(BytesIO(raw), encoding, 'replace').read()
    return rendered.rstrip('\n')
def dispatch_query_reply(self, msg):
    """Handle the reply to our initial connection request.

    Registers every engine the Hub reports as already connected.
    """
    try:
        idents, msg = self.session.feed_identities(msg)
    except ValueError:
        # Fix: Logger.warn is a deprecated alias of Logger.warning.
        self.log.warning("task::Invalid Message: %r", msg)
        return
    try:
        msg = self.session.deserialize(msg)
    except ValueError:
        # Lazy %-args: only formatted if the record is emitted.
        self.log.warning("task::Unauthorized message from: %r", idents)
        return

    content = msg['content']
    for uuid in content.get('engines', {}).values():
        self._register_engine(cast_bytes(uuid))
def dispatch_query_reply(self, msg):
    """Handle the reply to our initial connection request.

    Registers every engine the Hub reports as already connected.
    """
    try:
        idents, msg = self.session.feed_identities(msg)
    except ValueError:
        # Fix: Logger.warn is a deprecated alias of Logger.warning.
        self.log.warning("task::Invalid Message: %r", msg)
        return
    try:
        msg = self.session.deserialize(msg)
    except ValueError:
        # Lazy %-args: only formatted if the record is emitted.
        self.log.warning("task::Unauthorized message from: %r", idents)
        return

    content = msg['content']
    for uuid in content.get('engines', {}).values():
        self._register_engine(cast_bytes(uuid))
def load_connector_file(self):
    """load config from a JSON connector file,
    at a *lower* priority than command-line/config files.
    """
    self.log.info("Loading url_file %r", self.url_file)
    config = self.config

    with open(self.url_file) as f:
        num_tries = 0
        max_tries = 5
        d = ""
        while not d:
            try:
                d = json.loads(f.read())
            except ValueError:
                # NOTE(review): after the first read the file position is at
                # EOF, so retries parse "" -- the loop effectively just delays
                # before re-raising once max_tries is exceeded; presumably the
                # intent was to wait for a partially-written file. Confirm.
                if num_tries > max_tries:
                    raise
                num_tries += 1
                time.sleep(0.5)

    # allow hand-override of location for disambiguation
    # and ssh-server
    if 'EngineFactory.location' not in config:
        config.EngineFactory.location = d['location']
    if 'EngineFactory.sshserver' not in config:
        config.EngineFactory.sshserver = d.get('ssh')

    location = config.EngineFactory.location

    # Rewrite the interface IP relative to our location (e.g. localhost
    # vs. external address).
    proto, ip = d['interface'].split('://')
    ip = disambiguate_ip_address(ip, location)
    d['interface'] = '%s://%s' % (proto, ip)

    # DO NOT allow override of basic URLs, serialization, or key
    # JSON file takes top priority there
    config.Session.key = cast_bytes(d['key'])
    config.Session.signature_scheme = d['signature_scheme']

    config.EngineFactory.url = d['interface'] + ':%i' % d['registration']

    config.Session.packer = d['pack']
    config.Session.unpacker = d['unpack']

    self.log.debug("Config changed:")
    self.log.debug("%r", config)
    self.connection_info = d
def rlcomplete(self, text, state):
    """readline completer entry point; state 0 triggers a kernel request."""
    if state == 0:
        buf = str_to_unicode(readline.get_line_buffer())
        # get_endidx is a byte offset; convert to a character offset so
        # multi-byte input is handled correctly.
        byte_pos = readline.get_endidx()
        char_pos = len(cast_unicode(cast_bytes(buf)[:byte_pos]))
        try:
            content = self.complete_request(buf, char_pos)
            self.matches = _construct_readline_matches(buf, char_pos, content)
        except Empty:
            # Kernel timed out on tab completion; keep whatever matches we
            # already have.
            pass
    try:
        return self.matches[state]
    except IndexError:
        return None
def passwd(passphrase=None, algorithm='sha1'):
    """Generate hashed password and salt for use in notebook configuration.

    In the notebook configuration, set `c.NotebookApp.password` to
    the generated string.

    Parameters
    ----------
    passphrase : str
        Password to hash.  If unspecified, the user is asked to input
        and verify a password.
    algorithm : str
        Hashing algorithm to use (e.g, 'sha1' or any argument supported
        by :func:`hashlib.new`).

    Returns
    -------
    hashed_passphrase : str
        Hashed password, in the format 'hash_algorithm:salt:passphrase_hash'.

    Examples
    --------
    >>> passwd('mypassword')
    'sha1:7cf3:b7d6da294ea9592a9480c8f52e63cd42cfb9dd12'
    """
    if passphrase is None:
        # Fix: this prompt/verify span was corrupted (secret-masked) in the
        # source; reconstructed from the upstream notebook implementation.
        # Give the user three attempts to type the same passphrase twice.
        for i in range(3):
            p0 = getpass.getpass('Enter password: ')
            p1 = getpass.getpass('Verify password: ')
            if p0 == p1:
                passphrase = p0
                break
            print('Passwords do not match.')
        else:
            raise ValueError('No matching passwords found. Giving up.')

    h = hashlib.new(algorithm)
    salt = ('%0' + str(salt_len) + 'x') % random.getrandbits(4 * salt_len)
    h.update(cast_bytes(passphrase, 'utf-8') + str_to_bytes(salt, 'ascii'))
    return ':'.join((algorithm, salt, h.hexdigest()))
def passwd(passphrase=None, algorithm="sha1"):
    """Generate hashed password and salt for use in server configuration.

    In the server configuration, set `c.ServerApp.password` to
    the generated string.

    Parameters
    ----------
    passphrase : str
        Password to hash.  If unspecified, the user is asked to input
        and verify a password.
    algorithm : str
        Hashing algorithm to use (e.g, 'sha1' or any argument supported
        by :func:`hashlib.new`).

    Returns
    -------
    hashed_passphrase : str
        Hashed password, in the format 'hash_algorithm:salt:passphrase_hash'.

    Examples
    --------
    >>> passwd('mypassword')
    'sha1:7cf3:b7d6da294ea9592a9480c8f52e63cd42cfb9dd12'
    """
    if passphrase is None:
        # Fix: this prompt/verify span was corrupted (secret-masked) in the
        # source; reconstructed from the upstream jupyter_server code.
        # Give the user three attempts to type the same passphrase twice.
        for i in range(3):
            p0 = getpass.getpass("Enter password: ")
            p1 = getpass.getpass("Verify password: ")
            if p0 == p1:
                passphrase = p0
                break
            print("Passwords do not match.")
        else:
            raise ValueError("No matching passwords found. Giving up.")

    h = hashlib.new(algorithm)
    salt = ("%0" + str(salt_len) + "x") % random.getrandbits(4 * salt_len)
    h.update(cast_bytes(passphrase, "utf-8") + str_to_bytes(salt, "ascii"))
    return ":".join((algorithm, salt, h.hexdigest()))
def rlcomplete(self, text, state):
    """readline completer; a fresh kernel request is made only at state 0."""
    if state == 0:
        current = str_to_unicode(readline.get_line_buffer())
        # Translate readline's byte-offset cursor into a character offset
        # so multi-byte characters are counted correctly.
        end_bytes = readline.get_endidx()
        pos = len(cast_unicode(cast_bytes(current)[:end_bytes]))
        try:
            content = self.complete_request(current, pos)
            self.matches = _construct_readline_matches(current, pos, content)
        except Empty:
            # Completion request timed out; fall through with old matches.
            pass
    try:
        return self.matches[state]
    except IndexError:
        return None
def yield_everything(obj):
    """Yield every item in a container as bytes

    Allows any JSONable object to be passed to an HMAC digester
    without having to serialize the whole thing.
    """
    if isinstance(obj, dict):
        # Deterministic ordering: walk keys sorted, each key before its value.
        for key in sorted(obj):
            yield cast_bytes(key)
            for chunk in yield_everything(obj[key]):
                yield chunk
    elif isinstance(obj, (list, tuple)):
        for element in obj:
            for chunk in yield_everything(element):
                yield chunk
    elif isinstance(obj, unicode_type):
        yield obj.encode('utf8')
    else:
        # Scalars (numbers, bools, None, ...): stringify, then encode.
        yield unicode_type(obj).encode('utf8')
def __init__(self, session, pub_thread, name, pipe=None):
    # type: (object, object, object, object) -> object
    """Create a file-like IOPub-publishing stream.

    Parameters
    ----------
    session :
        Stored on the instance for use by other methods (not shown here).
    pub_thread : IOPubThread (or legacy socket, deprecated)
        Publishing thread; a bare socket is wrapped in a new IOPubThread.
    name : str
        Stream name; forms the topic b'stream.<name>'.
    pipe :
        Deprecated and ignored; a non-None value only warns.
    """
    if pipe is not None:
        warnings.warn("pipe argument to OutStream is deprecated and ignored",
                      DeprecationWarning)
    # This is necessary for compatibility with Python built-in streams
    self.session = session
    if not isinstance(pub_thread, IOPubThread):
        # Backward-compat: given socket, not thread. Wrap in a thread.
        warnings.warn("OutStream should be created with IOPubThread, not %r" % pub_thread,
                      DeprecationWarning, stacklevel=2)
        pub_thread = IOPubThread(pub_thread)
        pub_thread.start()
    self.pub_thread = pub_thread
    self.name = name
    self.topic = b'stream.' + py3compat.cast_bytes(name)
    self.parent_header = {}
    # Creating process id -- presumably used to detect writes from forked
    # children; confirm against the write/flush paths (not shown here).
    self._master_pid = os.getpid()
    self._flush_pending = False
    self._io_loop = pub_thread.io_loop
    self._new_buffer()
def __init__(self, session, pub_thread, name, pipe=None):
    """Create a file-like IOPub-publishing stream.

    Parameters
    ----------
    session :
        Stored on the instance for use by other methods (not shown here).
    pub_thread : IOPubThread (or legacy socket, deprecated)
        Publishing thread; a bare socket is wrapped in a new IOPubThread.
    name : str
        Stream name; forms the topic b'stream.<name>'.
    pipe :
        Deprecated and ignored; a non-None value only warns.
    """
    if pipe is not None:
        warnings.warn("pipe argument to OutStream is deprecated and ignored",
                      DeprecationWarning)
    self.encoding = 'UTF-8'
    self.session = session
    if not isinstance(pub_thread, IOPubThread):
        # Backward-compat: given socket, not thread. Wrap in a thread.
        warnings.warn("OutStream should be created with IOPubThread, not %r" % pub_thread,
                      DeprecationWarning, stacklevel=2)
        pub_thread = IOPubThread(pub_thread)
        pub_thread.start()
    self.pub_thread = pub_thread
    self.name = name
    self.topic = b'stream.' + py3compat.cast_bytes(name)
    self.parent_header = {}
    # Creating process id -- presumably used to detect forked children;
    # confirm against the write/flush paths (not shown here).
    self._master_pid = os.getpid()
    # Lock and timer handle guarding flush scheduling.
    self._flush_lock = threading.Lock()
    self._flush_timeout = None
    self._io_loop = pub_thread.io_loop
    self._new_buffer()
def pandoc(source, fmt, to, extra_args=None, encoding='utf-8'):
    """Convert an input string using pandoc.

    Pandoc converts an input string `from` a format
    `to` a target format.

    Parameters
    ----------
    source : string
        Input string, assumed to be valid format `from`.
    fmt : string
        The name of the input format (markdown, etc.)
    to : string
        The name of the output format (html, etc.)

    Returns
    -------
    out : unicode
        Output as returned by pandoc.

    Raises
    ------
    PandocMissing
        If pandoc is not installed.

    Any error messages generated by pandoc are printed to stderr.
    """
    pandoc_args = ['pandoc', '-f', fmt, '-t', to]
    if extra_args:
        pandoc_args.extend(extra_args)

    # Raises (PandocMissing / version errors) before we try to run pandoc.
    check_pandoc_version()

    proc = subprocess.Popen(pandoc_args,
                            stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE)
    raw, _ = proc.communicate(cast_bytes(source, encoding))
    # Decode with 'replace' so undecodable bytes cannot raise.
    text = TextIOWrapper(BytesIO(raw), encoding, 'replace').read()
    return text.rstrip('\n')
def dispatch_notification(self, msg):
    """Dispatch register/unregister events to the registered handler.

    Malformed or unauthorized messages are logged and dropped; handler
    failures are logged with a traceback and swallowed.
    """
    try:
        idents, msg = self.session.feed_identities(msg)
    except ValueError:
        # Fix: Logger.warn is a deprecated alias of Logger.warning.
        self.log.warning("task::Invalid Message: %r", msg)
        return
    try:
        msg = self.session.deserialize(msg)
    except ValueError:
        # Lazy %-args: formatting only happens if the record is emitted.
        self.log.warning("task::Unauthorized message from: %r", idents)
        return

    msg_type = msg['header']['msg_type']

    handler = self._notification_handlers.get(msg_type, None)
    if handler is None:
        self.log.error("Unhandled message type: %r", msg_type)
    else:
        try:
            handler(cast_bytes(msg['content']['uuid']))
        except Exception:
            self.log.error("task::Invalid notification msg: %r", msg, exc_info=True)
import fileinput import hashlib import random import re from ipython_genutils.py3compat import cast_bytes, str_to_bytes # Get the password from the environment password_environment_variable = sys.argv[1] # Hash the password, this is taken from https://github.com/jupyter/notebook/blob/master/notebook/auth/security.py salt_len = 12 algorithm = 'sha1' h = hashlib.new(algorithm) salt = ('%0' + str(salt_len) + 'x') % random.getrandbits(4 * salt_len) h.update( cast_bytes(password_environment_variable, 'utf-8') + str_to_bytes(salt, 'ascii')) password = '******'.join((algorithm, salt, h.hexdigest())) # Store the password in the configuration setup_line = "c.ServerApp.password = "******"u'" + password + "'" new_setup_line = new_setup_line.replace("# ", "") setup_file = os.getenv("HOME") + "/.jupyter/jupyter_server_config.py" if not os.path.exists(setup_file): os.system('jupyter server --generate-config') for line in fileinput.input(setup_file, inplace=True): m = re.search(setup_line, line) if m:
def _topic(self, topic):
    """prefixed topic for IOPub messages"""
    # Every IOPub topic from this engine is namespaced "engine.<id>.<topic>".
    base = "engine.%s" % self.engine_id
    return cast_bytes("%s.%s" % (base, topic))
def preprocess_cell(self, cell, resources, cell_index):
    """Extract rich outputs from one cell into separate resource files.

    Parameters
    ----------
    cell : NotebookNode cell
        Notebook cell being processed
    resources : dictionary
        Additional resources used in the conversion process.  Allows
        preprocessors to pass variables into the Jinja engine.
        Extracted data is stored under ``resources['outputs']``.
    cell_index : int
        Index of the cell being processed (see base.py)

    Returns
    -------
    (cell, resources)
        The cell with ``metadata.filenames`` entries added for each
        extracted mime type, and the resources dict with the raw bytes
        stored under each generated filename.
    """
    # Get the unique key from the resource dict if it exists.  If it does
    # not exist, use 'output' as the default.  Also, get files directory
    # if it has been specified.
    unique_key = resources.get('unique_key', 'output')
    output_files_dir = resources.get('output_files_dir', None)

    # Make sure the outputs key exists — use .get so a missing key does
    # not raise KeyError (the original indexed resources['outputs'] directly).
    if not isinstance(resources.get('outputs'), dict):
        resources['outputs'] = {}

    # Loop through all of the outputs in the cell
    for index, out in enumerate(cell.get('outputs', [])):
        if out.output_type not in {'display_data', 'execute_result'}:
            continue
        # Get the output in data formats that the template needs extracted
        for mime_type in self.extract_output_types:
            if mime_type in out.data:
                data = out.data[mime_type]

                # Binary files are base64-encoded, SVG is already XML
                if mime_type in {'image/png', 'image/jpeg', 'application/pdf'}:
                    # data is b64-encoded as text (str, unicode);
                    # decodebytes only accepts bytes.
                    # base64.decodestring was deprecated and removed in
                    # Python 3.9; decodebytes is the supported equivalent.
                    data = py3compat.cast_bytes(data)
                    data = base64.decodebytes(data)
                elif sys.platform == 'win32':
                    # normalize newlines for Windows before encoding
                    data = data.replace('\n', '\r\n').encode("UTF-8")
                else:
                    data = data.encode("UTF-8")

                # Derive a file extension from the mime type when the
                # mimetypes registry has no answer.
                ext = guess_extension(mime_type)
                if ext is None:
                    ext = '.' + mime_type.rsplit('/')[-1]

                filename = self.output_filename_template.format(
                    unique_key=unique_key,
                    cell_index=cell_index,
                    index=index,
                    extension=ext)

                # On the cell, make the figure available via
                #   cell.outputs[i].metadata.filenames['mime/type']
                # where
                #   cell.outputs[i].data['mime/type'] contains the data
                if output_files_dir is not None:
                    filename = os.path.join(output_files_dir, filename)
                out.metadata.setdefault('filenames', {})
                out.metadata['filenames'][mime_type] = filename

                # In the resources, make the figure available via
                #   resources['outputs']['filename'] = data
                resources['outputs'][filename] = data

    return cell, resources
def _topic(self, topic):
    """Return the kernel-prefixed IOPub topic, encoded as bytes."""
    return py3compat.cast_bytes("kernel.%s.%s" % (self.ident, topic))
def _ident_changed(self, change):
    """Trait observer: keep the cached bytes identity in sync with ident."""
    new_ident = change['new']
    # bident is the bytes form used as the zmq socket identity
    self.bident = cast_bytes(new_ident)
def _ident_changed(self, name, old, new):
    """Trait observer (old-style signature): refresh the bytes identity."""
    # store the bytes version used as the zmq socket identity
    self.bident = cast_bytes(new)
def complete_registration(self, msg, connect, maybe_tunnel):
    """Finish registering this engine with the Hub.

    Called with the Hub's reply to our registration request.

    Parameters
    ----------
    msg : list
        Raw multipart registration-reply message from the Hub.
    connect : callable
        ``connect(stream_or_socket, url)`` — connects a zmq stream or
        socket to ``url`` (possibly through a tunnel).
    maybe_tunnel : callable
        ``maybe_tunnel(url) -> url`` — wraps an address in an ssh tunnel
        when one is configured.

    On an ``'ok'`` status this starts the heartbeat, wires up the
    shell/control/iopub channels, constructs the Kernel and starts it;
    otherwise it logs the failure and raises.
    """
    # print msg
    # a reply arrived, so cancel the pending registration-abort timeout
    self.loop.remove_timeout(self._abort_timeout)
    ctx = self.context
    loop = self.loop
    identity = self.bident  # bytes identity shared by all our zmq sockets
    idents,msg = self.session.feed_identities(msg)
    msg = self.session.deserialize(msg)
    content = msg['content']
    info = self.connection_info

    def url(key):
        """get zmq url for given channel"""
        return str(info["interface"] + ":%i" % info[key])

    if content['status'] == 'ok':
        self.id = int(content['id'])

        # launch heartbeat
        # possibly forward hb ports with tunnels
        hb_ping = maybe_tunnel(url('hb_ping'))
        hb_pong = maybe_tunnel(url('hb_pong'))

        hb_monitor = None
        if self.max_heartbeat_misses > 0:
            # Add a monitor socket which will record the last time a ping was seen
            mon = self.context.socket(zmq.SUB)
            mport = mon.bind_to_random_port('tcp://%s' % localhost())
            mon.setsockopt(zmq.SUBSCRIBE, b"")
            self._hb_listener = zmqstream.ZMQStream(mon, self.loop)
            self._hb_listener.on_recv(self._report_ping)

            hb_monitor = "tcp://%s:%i" % (localhost(), mport)

        heart = Heart(hb_ping, hb_pong, hb_monitor , heart_id=identity)
        heart.start()

        # create Shell Connections (MUX, Task, etc.):
        shell_addrs = url('mux'), url('task')

        # Use only one shell stream for mux and tasks
        stream = zmqstream.ZMQStream(ctx.socket(zmq.ROUTER), loop)
        stream.setsockopt(zmq.IDENTITY, identity)
        shell_streams = [stream]
        for addr in shell_addrs:
            connect(stream, addr)

        # control stream:
        control_addr = url('control')
        control_stream = zmqstream.ZMQStream(ctx.socket(zmq.ROUTER), loop)
        control_stream.setsockopt(zmq.IDENTITY, identity)
        connect(control_stream, control_addr)

        # create iopub stream:
        iopub_addr = url('iopub')
        iopub_socket = ctx.socket(zmq.PUB)
        iopub_socket.setsockopt(zmq.IDENTITY, identity)
        connect(iopub_socket, iopub_addr)

        # disable history:
        self.config.HistoryManager.hist_file = ':memory:'

        # Redirect input streams and set a display hook.
        if self.out_stream_factory:
            sys.stdout = self.out_stream_factory(self.session, iopub_socket, u'stdout')
            sys.stdout.topic = cast_bytes('engine.%i.stdout' % self.id)
            sys.stderr = self.out_stream_factory(self.session, iopub_socket, u'stderr')
            sys.stderr.topic = cast_bytes('engine.%i.stderr' % self.id)
        if self.display_hook_factory:
            sys.displayhook = self.display_hook_factory(self.session, iopub_socket)
            sys.displayhook.topic = cast_bytes('engine.%i.execute_result' % self.id)

        self.kernel = Kernel(parent=self, int_id=self.id, ident=self.ident, session=self.session,
                control_stream=control_stream, shell_streams=shell_streams, iopub_socket=iopub_socket,
                loop=loop, user_ns=self.user_ns, log=self.log)

        self.kernel.shell.display_pub.topic = cast_bytes('engine.%i.displaypub' % self.id)

        # periodically check the heartbeat pings of the controller
        # Should be started here and not in "start()" so that the right period can be taken
        # from the hubs HeartBeatMonitor.period
        if self.max_heartbeat_misses > 0:
            # Use a slightly bigger check period than the hub signal period to not warn unnecessary
            self.hb_check_period = int(content['hb_period'])+10
            self.log.info("Starting to monitor the heartbeat signal from the hub every %i ms." , self.hb_check_period)
            self._hb_reporter = ioloop.PeriodicCallback(self._hb_monitor, self.hb_check_period, self.loop)
            self._hb_reporter.start()
        else:
            self.log.info("Monitoring of the heartbeat signal from the hub is not enabled.")

        # FIXME: This is a hack until IPKernelApp and IPEngineApp can be fully merged
        app = IPKernelApp(parent=self, shell=self.kernel.shell, kernel=self.kernel, log=self.log)
        app.init_profile_dir()
        app.init_code()

        self.kernel.start()
    else:
        self.log.fatal("Registration Failed: %s"%msg)
        raise Exception("Registration Failed: %s"%msg)

    self.log.info("Completed registration with id %i"%self.id)
def append_new_msg_id_to_msg(self, new_id, target_id, idents, msg):
    """Re-stamp ``msg`` with ``new_id`` and serialize it for ``target_id``.

    Returns the serialized message list with the message's buffers
    appended, routed via ``target_id`` followed by the original idents.
    """
    routing_idents = [cast_bytes(target_id)]
    routing_idents.extend(idents)
    msg['header']['msg_id'] = new_id
    serialized = self.session.serialize(msg, ident=routing_idents)
    serialized.extend(msg['buffers'])
    return serialized
def complete_registration(self, msg, connect, maybe_tunnel):
    """Handle the Hub's reply to our registration request.

    Parameters
    ----------
    msg : list
        Raw multipart registration-reply message from the Hub.
    connect : callable
        ``connect(stream_or_socket, url)`` — connects a zmq stream or
        socket to ``url`` (possibly through a tunnel).
    maybe_tunnel : callable
        ``maybe_tunnel(url) -> url`` — wraps an address in an ssh tunnel
        when one is configured.

    If the reply status is ``'ok'``: start the heartbeat, connect the
    shell/control/iopub channels, build the Kernel, and start it.
    Otherwise log the failure and raise.
    """
    # print msg
    # a reply arrived, so cancel the pending registration-abort timeout
    self.loop.remove_timeout(self._abort_timeout)
    ctx = self.context
    loop = self.loop
    identity = self.bident  # bytes identity shared by all our zmq sockets
    idents, msg = self.session.feed_identities(msg)
    msg = self.session.deserialize(msg)
    content = msg['content']
    info = self.connection_info

    def url(key):
        """get zmq url for given channel"""
        return str(info["interface"] + ":%i" % info[key])

    if content['status'] == 'ok':
        self.id = int(content['id'])

        # launch heartbeat
        # possibly forward hb ports with tunnels
        hb_ping = maybe_tunnel(url('hb_ping'))
        hb_pong = maybe_tunnel(url('hb_pong'))

        hb_monitor = None
        if self.max_heartbeat_misses > 0:
            # Add a monitor socket which will record the last time a ping was seen
            mon = self.context.socket(zmq.SUB)
            mport = mon.bind_to_random_port('tcp://%s' % localhost())
            mon.setsockopt(zmq.SUBSCRIBE, b"")
            self._hb_listener = zmqstream.ZMQStream(mon, self.loop)
            self._hb_listener.on_recv(self._report_ping)

            hb_monitor = "tcp://%s:%i" % (localhost(), mport)

        heart = Heart(hb_ping, hb_pong, hb_monitor, heart_id=identity)
        heart.start()

        # create Shell Connections (MUX, Task, etc.):
        shell_addrs = url('mux'), url('task')

        # Use only one shell stream for mux and tasks
        stream = zmqstream.ZMQStream(ctx.socket(zmq.ROUTER), loop)
        stream.setsockopt(zmq.IDENTITY, identity)
        shell_streams = [stream]
        for addr in shell_addrs:
            connect(stream, addr)

        # control stream:
        control_addr = url('control')
        control_stream = zmqstream.ZMQStream(ctx.socket(zmq.ROUTER), loop)
        control_stream.setsockopt(zmq.IDENTITY, identity)
        connect(control_stream, control_addr)

        # create iopub stream:
        iopub_addr = url('iopub')
        iopub_socket = ctx.socket(zmq.PUB)
        iopub_socket.setsockopt(zmq.IDENTITY, identity)
        connect(iopub_socket, iopub_addr)

        # disable history:
        self.config.HistoryManager.hist_file = ':memory:'

        # Redirect input streams and set a display hook.
        if self.out_stream_factory:
            sys.stdout = self.out_stream_factory(self.session, iopub_socket,
                                                 u'stdout')
            sys.stdout.topic = cast_bytes('engine.%i.stdout' % self.id)
            sys.stderr = self.out_stream_factory(self.session, iopub_socket,
                                                 u'stderr')
            sys.stderr.topic = cast_bytes('engine.%i.stderr' % self.id)
        if self.display_hook_factory:
            sys.displayhook = self.display_hook_factory(
                self.session, iopub_socket)
            sys.displayhook.topic = cast_bytes(
                'engine.%i.execute_result' % self.id)

        self.kernel = Kernel(parent=self, int_id=self.id, ident=self.ident,
                             session=self.session,
                             control_stream=control_stream,
                             shell_streams=shell_streams,
                             iopub_socket=iopub_socket,
                             loop=loop, user_ns=self.user_ns, log=self.log)

        self.kernel.shell.display_pub.topic = cast_bytes(
            'engine.%i.displaypub' % self.id)

        # periodically check the heartbeat pings of the controller
        # Should be started here and not in "start()" so that the right period can be taken
        # from the hubs HeartBeatMonitor.period
        if self.max_heartbeat_misses > 0:
            # Use a slightly bigger check period than the hub signal period to not warn unnecessary
            self.hb_check_period = int(content['hb_period']) + 10
            self.log.info(
                "Starting to monitor the heartbeat signal from the hub every %i ms.",
                self.hb_check_period)
            self._hb_reporter = ioloop.PeriodicCallback(
                self._hb_monitor, self.hb_check_period, self.loop)
            self._hb_reporter.start()
        else:
            self.log.info(
                "Monitoring of the heartbeat signal from the hub is not enabled."
            )

        # FIXME: This is a hack until IPKernelApp and IPEngineApp can be fully merged
        app = IPKernelApp(parent=self, shell=self.kernel.shell,
                          kernel=self.kernel, log=self.log)
        app.init_profile_dir()
        app.init_code()

        self.kernel.start()
    else:
        self.log.fatal("Registration Failed: %s" % msg)
        raise Exception("Registration Failed: %s" % msg)

    self.log.info("Completed registration with id %i" % self.id)