def capture(msg):
    """Record component versions from a ``display_data`` version-table message.

    Ignores anything that is not a ``display_data`` message; otherwise parses
    the JSON payload and stores each (component, version) row into the
    enclosing ``versions`` mapping as a ``LooseVersion``.
    """
    # Only the version table (sent as display_data) is of interest.
    if msg["msg_type"] != "display_data":
        return
    payload = msg["content"]["data"]["application/json"]
    table = unmap_tuples(json.loads(payload))
    for name, ver in table["rows"]:
        versions[name] = LooseVersion(ver)
def _execute(self, input, return_full_result=False, raise_on_stderr : bool = False, output_hook=None, _quiet_ : bool = False, **kwargs):
    """Send ``input`` to the IQ# kernel and return the parsed result.

    :param input: Source text to execute on the kernel.
    :param return_full_result: If True, return ``(obj, content)`` where
        ``content`` is the raw ``execute_result`` content; otherwise return
        just the deserialized object (or ``None`` if there was no result).
    :param raise_on_stderr: If True, stderr stream messages are collected
        and raised as an :class:`IQSharpError` after execution.
    :param output_hook: Fallback hook for messages not handled here.
    :param _quiet_: If True, suppress forwarding of display data to IPython.
    :raises IQSharpError: If the kernel is not running, or (with
        ``raise_on_stderr``) if any stderr output was produced.
    :raises AlreadyExecutingError: If another execution is already in flight.
    """
    logger.debug(f"sending:\n{input}")
    # Make sure the server is still running. Narrowed from a bare `except:`
    # so that KeyboardInterrupt/SystemExit are not swallowed here.
    try:
        self.check_status()
    except Exception:
        raise IQSharpError(["IQ# is not running."])

    results = []
    errors = []

    def log_error(msg):
        # Collects stderr text so it can be raised after execution completes.
        errors.append(msg)

    handlers = {
        'execute_result': (lambda msg: results.append(msg))
    }
    if not _quiet_:
        handlers['display_data'] = (
            lambda msg: display_raw(msg['content']['data'])
        )
    _output_hook = partial(
        self._handle_message,
        error_callback=log_error if raise_on_stderr else None,
        fallback_hook=output_hook,
        handlers=handlers
    )

    # Trying to execute while already executing can corrupt the ordering of
    # messages internally to ZeroMQ
    # (see https://github.com/Microsoft/QuantumLibraries/issues/69),
    # so we need to throw early rather than letting the problem propagate to
    # a Jupyter protocol error.
    #
    # NB: this check must happen *before* we enter the try/finally that
    # clears the busy flag. Previously the AlreadyExecutingError was raised
    # inside that try, so the finally cleared `self._busy` even though the
    # flag belonged to the concurrent execution still in progress.
    if self._busy:
        raise AlreadyExecutingError("Cannot execute through the IQ# client while another execution is completing.")
    self._busy = True
    try:
        reply = self.kernel_client.execute_interactive(input, output_hook=_output_hook, **kwargs)
    finally:
        self._busy = False

    logger.debug(f"received:\n{reply}")

    if errors:
        raise IQSharpError(errors)

    # There should be either zero or one execute_result messages.
    if results:
        assert len(results) == 1
        content = results[0]['content']
        if 'application/json' in content['data']:
            obj = unmap_tuples(json.loads(content['data']['application/json']))
        else:
            obj = None
        return (obj, content) if return_full_result else obj
    return None
def test_roundtrip_very_deep_tuple(self):
    """Deeply nested tuples inside dicts and lists survive a map/unmap round trip."""
    expected = {
        'a': {
            'b': (
                {'c': ('d', ['e', ('f', 'g', 12, False)])},
                ['h', {'g': ('i', 'j')}],
            )
        }
    }
    round_tripped = unmap_tuples(map_tuples(expected))
    self.assertEqual(round_tripped, expected)
def _execute(self, input, return_full_result=False, raise_on_stderr=False, output_hook=None, **kwargs):
    """Send ``input`` to the IQ# kernel and return the parsed result.

    :param input: Source text to execute on the kernel.
    :param return_full_result: If True, return ``(obj, content)`` where
        ``content`` is the raw ``execute_result`` content; otherwise return
        just the deserialized object (or ``None`` if there was no result).
    :param raise_on_stderr: If True, stderr stream messages are collected
        and raised as an :class:`IQSharpError` after execution.
    :param output_hook: Hook invoked for messages not captured here;
        defaults to the kernel client's default output hook.
    :raises IQSharpError: If the kernel is not running, or (with
        ``raise_on_stderr``) if any stderr output was produced.
    :raises AlreadyExecutingError: If another execution is already in flight.
    """
    logger.debug(f"sending:\n{input}")
    # Make sure the server is still running. Narrowed from a bare `except:`
    # so that KeyboardInterrupt/SystemExit are not swallowed here.
    try:
        self.check_status()
    except Exception:
        raise IQSharpError(["IQ# is not running."])

    results = []
    errors = []
    if output_hook is None:
        output_hook = self.kernel_client._output_hook_default

    def _output_hook(msg):
        # Capture execute_result messages; optionally collect stderr text as
        # errors; forward everything else to the caller-supplied hook.
        if msg['msg_type'] == 'execute_result':
            results.append(msg)
        elif raise_on_stderr and msg['msg_type'] == 'stream' and msg['content']['name'] == 'stderr':
            errors.append(msg['content']['text'])
        else:
            output_hook(msg)

    # Trying to execute while already executing can corrupt the ordering of
    # messages internally to ZeroMQ
    # (see https://github.com/Microsoft/QuantumLibraries/issues/69),
    # so we need to throw early rather than letting the problem propagate to
    # a Jupyter protocol error.
    #
    # NB: this check must happen *before* we enter the try/finally that
    # clears the busy flag. Previously the AlreadyExecutingError was raised
    # inside that try, so the finally cleared `self._busy` even though the
    # flag belonged to the concurrent execution still in progress.
    if self._busy:
        raise AlreadyExecutingError("Cannot execute through the IQ# client while another execution is completing.")
    self._busy = True
    try:
        reply = self.kernel_client.execute_interactive(input, output_hook=_output_hook, **kwargs)
    finally:
        self._busy = False

    logger.debug(f"received:\n{reply}")

    if errors:
        raise IQSharpError(errors)

    # There should be either zero or one execute_result messages.
    if results:
        assert len(results) == 1
        content = results[0]['content']
        if 'application/json' in content['data']:
            obj = unmap_tuples(json.loads(content['data']['application/json']))
        else:
            obj = None
        return (obj, content) if return_full_result else obj
    return None
def execute(self, input, return_full_result=False, raise_on_stderr=False, output_hook=None, **kwargs):
    """Send ``input`` to the IQ# kernel and return the parsed result.

    :param input: Source text to execute on the kernel.
    :param return_full_result: If True, return ``(obj, content)`` where
        ``content`` is the raw ``execute_result`` content; otherwise return
        just the deserialized object (or ``None`` if there was no result).
    :param raise_on_stderr: If True, stderr stream messages are collected
        and raised as an :class:`IQSharpError` after execution.
    :param output_hook: Hook invoked for messages not captured here;
        defaults to the kernel client's default output hook.
    :raises IQSharpError: If the kernel is not running, or (with
        ``raise_on_stderr``) if any stderr output was produced.
    """
    logger.debug(f"sending:\n{input}")
    # Make sure the server is still running. Narrowed from a bare `except:`
    # so that KeyboardInterrupt/SystemExit are not swallowed here.
    try:
        self.check_status()
    except Exception:
        raise IQSharpError(["IQ# is not running."])

    results = []
    errors = []
    if output_hook is None:
        output_hook = self.kernel_client._output_hook_default

    def _output_hook(msg):
        # Capture execute_result messages; optionally collect stderr text as
        # errors; forward everything else to the caller-supplied hook.
        if msg['msg_type'] == 'execute_result':
            results.append(msg)
        elif raise_on_stderr and msg['msg_type'] == 'stream' and msg['content']['name'] == 'stderr':
            errors.append(msg['content']['text'])
        else:
            output_hook(msg)

    reply = self.kernel_client.execute_interactive(
        input, output_hook=_output_hook, **kwargs)
    logger.debug(f"received:\n{reply}")

    if errors:
        raise IQSharpError(errors)

    # There should be either zero or one execute_result messages.
    if results:
        assert len(results) == 1
        content = results[0]['content']
        if 'application/json' in content['data']:
            obj = unmap_tuples(
                json.loads(content['data']['application/json']))
        else:
            obj = None
        return (obj, content) if return_full_result else obj
    return None
def _execute(self, input, return_full_result=False, raise_on_stderr : bool = False, output_hook=None, display_data_handler=None, _timeout_=DEFAULT_TIMEOUT, _quiet_ : bool = False, **kwargs):
    """Send ``input`` to the IQ# kernel and return the parsed result.

    :param input: Source text to execute on the kernel.
    :param return_full_result: If True, return ``(obj, content)`` where
        ``content`` is the raw result content; otherwise return just the
        deserialized object (or ``None`` if there was no result).
    :param raise_on_stderr: If True, stderr stream messages are collected
        and raised as an :class:`IQSharpError` after execution.
    :param output_hook: Fallback hook for messages not handled here.
    :param display_data_handler: Handler for display_data messages; only
        used when ``_quiet_`` is True (see NOTE below).
    :param _timeout_: Timeout passed through to ``execute_interactive``.
    :param _quiet_: If True, suppress forwarding of display data to IPython.
    :raises IQSharpError: If the kernel is not running, or (with
        ``raise_on_stderr``) if any stderr output was produced.
    :raises AlreadyExecutingError: If another execution is already in flight.
    """
    logger.debug(f"sending:\n{input}")
    logger.debug(f"timeout: {_timeout_}")
    # Make sure the server is still running. Narrowed from a bare `except:`
    # so that KeyboardInterrupt/SystemExit are not swallowed here.
    try:
        self.check_status()
    except Exception:
        raise IQSharpError(["IQ# is not running."])

    results = []
    errors = []

    def log_error(msg):
        # Collects stderr text so it can be raised after execution completes.
        errors.append(msg)

    # Set up handlers for various kinds of messages, making sure to
    # fallback through to output_hook as appropriate, so that the IPython
    # package can send display data through to Jupyter clients.
    handlers = {
        'execute_result': (lambda msg: results.append(msg)),
        'render_execution_path': (lambda msg: results.append(msg)),
        'display_data':
            display_data_handler
            if display_data_handler is not None
            else lambda msg: ...
    }
    # Pass display data through to IPython if we're not in quiet mode.
    # NOTE(review): this overwrites any caller-supplied display_data_handler
    # whenever _quiet_ is False — preserved as-is; confirm this is intended.
    if not _quiet_:
        handlers['display_data'] = (
            lambda msg: display_raw(msg['content']['data'])
        )
    # Finish setting up handlers by allowing the display_data_callback
    # to intercept display data first, only sending messages through to
    # other handlers if it returns True.
    if self.display_data_callback is not None:
        inner_handler = handlers['display_data']
        def filter_display_data(msg):
            if self.display_data_callback(msg):
                return inner_handler(msg)
        handlers['display_data'] = filter_display_data

    _output_hook = partial(
        self._handle_message,
        error_callback=log_error if raise_on_stderr else None,
        fallback_hook=output_hook,
        handlers=handlers
    )

    # Trying to execute while already executing can corrupt the ordering of
    # messages internally to ZeroMQ
    # (see https://github.com/Microsoft/QuantumLibraries/issues/69),
    # so we need to throw early rather than letting the problem propagate to
    # a Jupyter protocol error.
    #
    # NB: this check must happen *before* we enter the try/finally that
    # clears the busy flag. Previously the AlreadyExecutingError was raised
    # inside that try, so the finally cleared `self._busy` even though the
    # flag belonged to the concurrent execution still in progress.
    if self._busy:
        raise AlreadyExecutingError("Cannot execute through the IQ# client while another execution is completing.")
    self._busy = True
    try:
        reply = self.kernel_client.execute_interactive(input, timeout=_timeout_, output_hook=_output_hook, **kwargs)
    finally:
        self._busy = False

    logger.debug(f"received:\n{reply}")

    if errors:
        raise IQSharpError(errors)

    # There should be either zero or one execute_result messages.
    if results:
        assert len(results) == 1
        content = results[0]['content']
        if 'executionPath' in content:
            obj = content['executionPath']
        else:
            qsharp_data = self._get_qsharp_data(content)
            if qsharp_data:
                obj = unmap_tuples(json.loads(qsharp_data))
            else:
                obj = None
        return (obj, content) if return_full_result else obj
    return None
def capture(msg):
    """Record component versions from a ``display_data`` version-table message.

    Ignores anything that is not a ``display_data`` message; otherwise parses
    the Q# JSON payload and stores each (component, version) row into the
    enclosing ``versions`` mapping as a ``LooseVersion``.
    """
    # Only the version table (sent as display_data) is of interest.
    if msg["msg_type"] != "display_data":
        return
    payload = self._get_qsharp_data(msg["content"])
    table = unmap_tuples(json.loads(payload))
    for name, ver in table["rows"]:
        versions[name] = LooseVersion(ver)
def test_roundtrip_deep_tuple(self):
    """A tuple containing a nested tuple survives a map/unmap round trip."""
    original = ('a', ('b', 'c'))
    round_tripped = unmap_tuples(map_tuples(original))
    self.assertEqual(round_tripped, original)
def test_roundtrip_dict(self):
    """A dict with a tuple-valued entry survives a map/unmap round trip."""
    original = {'a': 'b', 'c': ('d', 'e')}
    round_tripped = unmap_tuples(map_tuples(original))
    self.assertEqual(round_tripped, original)
def test_roundtrip_shallow_tuple(self):
    """A flat mixed-type tuple survives a map/unmap round trip."""
    original = ('a', 3.14, False)
    round_tripped = unmap_tuples(map_tuples(original))
    self.assertEqual(round_tripped, original)