def handle_hierarchy_info(self, call_id, payload):
    """Handle a class-hierarchy response: list the inheritors of a type.

    Shows a popup when there are none; otherwise presents a quick panel of
    "<declKind> <name>  <file:line>" entries and jumps to the selection.
    """
    self.env.logger.debug("handle_hierarchy_info: in {}".format(Pretty(payload)))
    classinfos = payload["inheritors"]
    if len(classinfos) == 0:
        # No results: surface a transient popup on the UI thread.
        sublime.set_timeout(bind(self.env.window.active_view().show_popup,
                                 "No implementations found.",
                                 sublime.HIDE_ON_MOUSE_MOVE), 0)
        return
    location_list = []
    item_list = []
    for cli in classinfos:
        pos = cli["sourcePosition"]
        file = pos["file"]
        line = pos["line"]
        # Prefer a project-relative path for display; fall back to the raw path.
        path = encode_path(relative_path(self.env.project_root, str(file)))
        path_to_display = path if path is not None else str(file)
        file_line_info = file_and_line_info(path_to_display, line)
        name = cli.get("scalaName", cli["fqn"])
        declAs = cli["declAs"]["typehint"]
        location_list.append((file, line))
        item_list.append(["{} {}".format(declAs, name), file_line_info])

    def open_item(index):
        # index == -1 means the quick panel was dismissed without a choice.
        if index == -1:
            return
        loc = location_list[index]
        self.env.editor.open_and_scroll(loc[0], loc[1])

    sublime.set_timeout(bind(self.env.window.show_quick_panel, item_list,
                             open_item, sublime.MONOSPACE_FONT), 0)
def handle_source_positions(self, call_id, payload):
    """Handle a source-positions response: list the usages of a symbol.

    Shows a popup when there are none; otherwise presents a quick panel of
    (preview, file:line) entries and jumps to the selection.
    """
    self.env.logger.debug("handle_source_positions: in {}".format(Pretty(payload)))
    sourcePositions = payload["positions"]
    if len(sourcePositions) == 0:
        # No results: surface a transient popup on the UI thread.
        sublime.set_timeout(bind(self.env.window.active_view().show_popup,
                                 "No usages found.",
                                 sublime.HIDE_ON_MOUSE_MOVE), 0)
        return
    location_list = []
    item_list = []
    for hint in sourcePositions:
        pos = hint["position"]
        file = pos["file"]
        line = pos["line"]
        # Prefer a project-relative path for display; fall back to the raw path.
        path = encode_path(relative_path(self.env.project_root, str(file)))
        path_to_display = path if path is not None else str(file)
        file_line_info = file_and_line_info(path_to_display, line)
        location_list.append((file, line))
        item_list.append([hint.get("preview", "no preview available"), file_line_info])

    def open_item(index):
        # index == -1 means the quick panel was dismissed without a choice.
        if index == -1:
            return
        loc = location_list[index]
        self.env.editor.open_and_scroll(loc[0], loc[1])

    sublime.set_timeout(bind(self.env.window.show_quick_panel, item_list,
                             open_item, sublime.MONOSPACE_FONT), 0)
def main(args):
    """Run continuous Azure speech recognition, forwarding text to outputs.

    Connects recognizing/recognized events to every configured output and
    polls until stopped (via a session-stop/cancel event or Ctrl+C).
    """
    recognizer = prepare_speech_recognizer(args)
    outs = prepare_outputs(args)

    def stop(outs=outs):
        mark_stopped()
        recognizer.stop_continuous_recognition()
        for out in outs:
            out.close()

    def on_event(e, name="Event"):
        print("Azure Speech %s: %s" % (name, e))

    def on_stopped(e, name="Stopped"):
        # BUG FIX: arguments were swapped (`on_event(name, e)`), which printed
        # the event object as the label and the label as the payload.
        on_event(e, name)
        stop()

    def on_recognizing(e):
        # on_event("Recognizing", e)
        text = e.result.text
        for out in outs:
            try:
                out.recognizing(text)
            except Exception as err:
                # Best-effort fan-out: one failing output must not stop the rest.
                print("Error in %r.recognizing: %s" % (out, err))

    def on_recognized(e):
        # on_event("Recognized", e)
        text = e.result.text
        for out in outs:
            try:
                out.recognized(text)
            except Exception as err:
                print("Error in %r.recognized: %s" % (out, err))

    recognizer.recognizing.connect(on_recognizing)
    recognizer.recognized.connect(on_recognized)
    recognizer.session_started.connect(bind(on_event, name="Started"))
    recognizer.session_stopped.connect(on_stopped)
    recognizer.canceled.connect(bind(on_stopped, name="Canceled"))
    recognizer.start_continuous_recognition()
    try:
        while not is_stopped():
            time.sleep(0.5)
    except KeyboardInterrupt:
        print("Stopping on Ctrl+C")
        # Normal termination is handled by on_stopped; only Ctrl+C needs an
        # explicit stop() here.
        stop()
def handle_symbol_search(self, call_id, payload):
    """Handler for symbol search results: show matches in a quick panel."""
    self.env.logger.debug("handle_symbol_search: in {}".format(Pretty(payload)))
    item_list = []
    location_list = []
    syms = payload["syms"]
    for sym in syms:
        p = sym.get("pos")
        # Symbols without a position cannot be jumped to, so they are skipped.
        if p:
            location_list.append((p["file"], p["line"]))
            path = encode_path(relative_path(self.env.project_root, str(p["file"])))
            path_to_display = path if path is not None else str(p["file"])
            file_line_info = file_and_line_info(path_to_display, p["line"])
            # Scala compiler encodes nesting with '$'; display with dots instead.
            item_list.append(["{}".format(str(sym["name"]).replace("$", ".")),
                              file_line_info])

    def open_item(index):
        # index == -1 means the quick panel was dismissed without a choice.
        if index == -1:
            return
        loc = location_list[index]
        self.env.editor.open_and_scroll(loc[0], loc[1])

    sublime.set_timeout(bind(self.env.window.show_quick_panel, item_list,
                             open_item, sublime.MONOSPACE_FONT), 0)
def handle_completion_info_list(self, call_id, payload):
    """Handler for a completion response.

    Two paths: if the user is still typing the same prefix the response
    arrived asynchronously, so the visible auto-complete popup is hidden
    and re-triggered with the fresh suggestions; otherwise the suggestions
    are just stored for the next popup.
    """
    prefix = payload.get("prefix")
    if (self.env.editor.current_prefix is not None
            and self.env.editor.current_prefix == prefix):
        self.env.logger.debug('handle_completion_info_list: in async')

        def _hack(prefix):
            # Only restart the popup if it is still showing for the same prefix.
            if (sublime.active_window().active_view().is_auto_complete_visible()
                    and self.env.editor.current_prefix == prefix):
                sublime.active_window().run_command("hide_auto_complete")
                completions = [c for c in payload["completions"] if "typeInfo" in c]
                self.env.editor.suggestions = [completion_to_suggest(c)
                                               for c in completions]

                def hack2():
                    sublime.active_window().active_view().run_command("auto_complete")

                # Re-show the popup a tick after hiding it.
                sublime.set_timeout(hack2, 1)

        sublime.set_timeout(bind(_hack, prefix), 0)
    else:
        self.env.editor.current_prefix = payload.get("prefix")
        self.env.logger.debug('handle_completion_info_list: in sync')
        # filter out completions without `typeInfo` field to avoid server bug. See #324
        completions = [c for c in payload["completions"] if "typeInfo" in c]
        self.env.editor.suggestions = [completion_to_suggest(c) for c in completions]
        self.env.logger.debug('handle_completion_info_list: {}'
                              .format(Pretty(self.env.editor.suggestions)))
def apply_refactor(self, call_id, payload):
    """Apply a refactoring diff received from the server.

    Only the listed refactoring kinds are handled; the diff is applied
    relative to the root recorded when the refactoring was requested
    (see self.refactorings), and the affected file is reloaded on success.
    """
    supported_refactorings = ["AddImport", "OrganizeImports", "Rename", "InlineLocal"]
    if payload["refactorType"]["typehint"] in supported_refactorings:
        diff_file = payload["diff"]
        patch_set = fromfile(diff_file)
        if not patch_set:
            self.env.logger.warning("Couldn't parse diff_file: {}"
                                    .format(diff_file))
            return
        self.env.logger.debug("Refactoring get root from: {}"
                              .format(self.refactorings[payload['procedureId']]))
        root = root_as_str_from_abspath(self.refactorings[payload['procedureId']])
        self.env.logger.debug("Refactoring set root: {}".format(root))
        # Apply with strip-level 0 against the recorded root.
        result = patch_set.apply(0, root)
        if result:
            file = self.refactorings[payload['procedureId']]
            sublime.set_timeout(bind(self.env.editor.reload_file, file), 0)
            self.env.logger.info("Refactoring succeeded, patch file: {}"
                                 .format(diff_file))
            self.env.status_message("Refactoring succeeded")
        else:
            self.env.logger.error("Patch refactoring failed, patch file: {}"
                                  .format(diff_file))
            self.env.status_message("Refactor failed: {}".format(diff_file))
def show_type(self, call_id, payload):
    """Show the inferred type in a popup with a "Copy" link.

    The type text is embedded both as the popup body and as the link
    href, so clicking the link hands the (escaped) text to `copy`.
    """
    tpe = type_to_show(payload)
    self.env.logger.info('Found type {}'.format(tpe))
    content = """
        <body id=show-scope>
        <style>
            p { margin-top: 0; margin-bottom: 0; }
            a { font-family: sans-serif; font-size: .7rem; }
        </style>
        <p>%s</p>
        <a href="%s">Copy</a>
        </body>
    """ % (html.escape(tpe, quote=False), html.escape(tpe, quote=True))

    def copy(view, text):
        # `text` arrives HTML-escaped from the href; unescape before copying.
        sublime.set_clipboard(html.unescape(text))
        view.hide_popup()
        sublime.status_message('Type name copied to clipboard')

    sublime.set_timeout(bind(self.env.window.active_view().show_popup,
                             content, max_width=512,
                             on_navigate=lambda x: copy(self.env.window.active_view(), x)), 0)
def show_type(self, call_id, payload):
    """Show the inferred type in a popup with a "Copy" link.

    Duplicate variant of show_type elsewhere in this file; behavior is the
    same: the type text is the popup body and the link href.
    """
    tpe = type_to_show(payload)
    self.env.logger.info('Found type {}'.format(tpe))
    content = """
        <body id=show-scope>
        <style>
            p { margin-top: 0; margin-bottom: 0; }
            a { font-family: sans-serif; font-size: .7rem; }
        </style>
        <p>%s</p>
        <a href="%s">Copy</a>
        </body>
    """ % (html.escape(tpe, quote=False), html.escape(tpe, quote=True))

    def copy(view, text):
        # `text` arrives HTML-escaped from the href; unescape before copying.
        sublime.set_clipboard(html.unescape(text))
        view.hide_popup()
        sublime.status_message('Type name copied to clipboard')

    sublime.set_timeout(
        bind(self.env.window.active_view().show_popup, content, max_width=512,
             on_navigate=lambda x: copy(self.env.window.active_view(), x)), 0)
def do_create(self):
    """Create an Ensime config, choosing a strategy by project folder count.

    Zero folders: prompt the user to pick a root first. One folder: use it
    directly. Multiple folders: let the user pick one.
    """
    if len(self.w.folders()) == 0:
        message = "To use Ensime you need to have an active non-empty Sublime project, "
        message += "therefore Sublime is going to initialize one for you. "
        message += "\n\n"
        message += "You will be shown a dialog that will let you select a root folder for the project. "
        message += "After the root folder is selected, Sublime will create a configuration file in it."
        self.confirm_and_proceed(message, self.create_for_zero_folders)
    elif len(self.w.folders()) == 1:
        folder = self.w.folders()[0]
        message = "Since you have a single folder in the project, namely: " + folder + ", "
        message += "Sublime will use it to host the Ensime configuration file without further ado."
        self.confirm_and_proceed(message, bind(self.create_for_single_folder, folder))
    else:
        message = "Since you have multiple folders in the project, pick an appropriate folder in the dialog that will follow."
        self.confirm_and_proceed(message, bind(self.create_for_multiple_folders, self.w.folders()))
def set_peer_ids(clients, ids=None) -> data_types.BulkOperationResult:
    """Set peer ids for all clients. Assume order of clients passed as ids

    Arguments:
        clients {List[VNS3Client]}

    Keyword Arguments:
        ids {List} -- explicit peer ids; defaults to 1-based list order.

    Returns:
        BulkOperationResult
    """
    def _set_peer_id(_client, i):
        # Skip the PUT when the controller already reports the desired id.
        get_resp = _client.peering.get_peering_status()
        if get_resp.response.id == i:
            _client.add_to_state(VNS3Attr.peer_id, i)
            return get_resp
        resp = _client.peering.put_self_peering_id(id=i)
        _client.add_to_state(VNS3Attr.peer_id, i)
        return resp

    ordered_ids = ids or range(1, len(clients) + 1)
    bound_api_calls = [
        bind(_set_peer_id, client, ordered_ids[index])
        for index, client in enumerate(clients)
    ]
    Logger.debug("Setting peer IDS: %s" % ordered_ids)
    return api_ops.__bulk_call_api(bound_api_calls, parallelize=True)
def set_breakpoints(breakpoints, status):
    """Recursively register breakpoints one at a time, then start/attach.

    Closure: relies on `self`, `launch`, and `on_complete` from the
    enclosing scope. Each successful debug_set_break call re-enters this
    function with the remaining breakpoints.
    """
    # gets the length of the file of interest
    def callback(filelength):
        if filelength:
            self.env.file_lengths[filelength.file_name] = filelength.length
            # BUG FIX: was a Python 2 print statement; the rest of this
            # codebase uses the print() function.
            print("file " + filelength.file_name + " is " + str(filelength.length))
            set_breakpoints(breakpoints, status)

    if status:
        if breakpoints:
            # macro expansions can modify the positions of breakpoints. If we expand a macro - in the GUI and the gutter
            # the breakpoints should and will be also modified. However these modified positions can't be send to the
            # server/debugger as such since the shown code in the buffer is not known to the debugger
            bp = breakpoints[0]
            if bp.file_name in self.env.file_lengths:
                # often to be able to take into account the expanded code we must know the original length of the program
                # if the length is not known we ask the server for it
                bp_line = self._adapt_breakpoint(bp)
                self.debug_set_break(bp.file_name, bp_line,
                                     bind(set_breakpoints, breakpoints[1:]))
            else:
                self.get_file_length(bp.file_name, callback)
        else:
            if launch.main_class:
                self._debug_start(launch.command_line, on_complete)
            elif launch.remote_address:
                self._debug_attach(launch.remote_host, launch.remote_port, on_complete)
            else:
                raise Exception("unsupported launch: " + str(launch))
    elif on_complete:
        on_complete(status)
def update_status(self, v):
    """Refresh git-branch status for view *v*, throttled to one fetch per window.

    Only acts when *v* is the active view of its window; otherwise does
    nothing. If called within the throttle window, reschedules itself.
    """
    if v and v.file_name() and v.window() and v.window().active_view() and v.window().active_view().id() == v.id():
        threshold_ms = 2000
        # last_update holds seconds, so the threshold is converted from ms.
        if time.time() - self.last_update > threshold_ms / 1000:
            self.last_update = time.time()
            MyGitBranchGetter(v.file_name(), v).start()
        else:
            # Too soon: retry after the throttle window elapses.
            sublime.set_timeout(bind(self.update_status, v), threshold_ms)
def fill_in_dot_ensime_with_mock_config(self, project_root, content=None):
    """Open <project_root>/.ensime and, if it is empty, fill it with a mock config."""
    v = self.w.open_file(project_root + os.sep + ".ensime")
    if not v.size():
        # we have to do this in such a perverse way
        # because direct v.begin_edit() won't work
        sublime.set_timeout(
            bind(self.fill_in_dot_ensime_with_mock_config_cont,
                 project_root, v.file_name(), content), 0
        )
def folder_selected(self, selected_index):
    """Quick-panel callback: create a mock .ensime in the chosen folder if empty."""
    # selected_index == -1 means the panel was dismissed.
    if selected_index != -1:
        target_folder = self.w.folders()[selected_index]
        v = self.w.open_file(target_folder + os.sep + ".ensime")
        if not v.size():
            # we have to do this in such a perverse way
            # because direct v.begin_edit() won't work
            sublime.set_timeout(bind(self.fill_in_with_mock_config,
                                     target_folder, v.file_name()), 0)
def handle_debug_break(self, call_id, payload):
    """Handle responses `DebugBreakEvent`."""
    line = payload['line']
    # Show the breakpoint location relative to the project root.
    path = os.path.relpath(payload['file'], self.env.project_root)
    sublime.set_timeout(bind(sublime.message_dialog,
                             feedback['notify_break'].format(line, path)), 0)
    # Remember which thread hit the breakpoint for subsequent step commands.
    self.debug_thread_id = payload["threadId"]
def handle_debug_break(self, call_id, payload):
    """Handle responses `DebugBreakEvent`."""
    line = payload['line']
    # Show the breakpoint location relative to the project root.
    path = os.path.relpath(payload['file'], self.env.project_root)
    sublime.set_timeout(
        bind(sublime.message_dialog,
             feedback['notify_break'].format(line, path)), 0)
    # Remember which thread hit the breakpoint for subsequent step commands.
    self.debug_thread_id = payload["threadId"]
def _add_to_watched_tasks(self, taskid):
    """Register a storage change-handler for *taskid*, at most once per task.

    The watcher dict is mutated under the lock; the event subscription is
    published outside the lock, and only when a new watcher was created.
    """
    callback = None
    with self._taskswatchers_lock:
        # Idiom fix: `not taskid in ...` -> `taskid not in ...`.
        if taskid not in self._taskswatchers:
            callback = self._taskswatchers[taskid] = bind(
                self._task_change_handler, taskid)
    if callback:
        # Watch changes whose key matches exactly this task id ('.' escaped).
        events.publish('Storage.On', 'change',
                       r'^' + taskid.replace('.', r'\.') + '$', callback)
def handle_symbol_info(self, call_id, payload):
    """Jump to the declaration position of a symbol.

    If the target file is not yet open, it is opened and scrolling is
    retried (up to 10 attempts, 100ms apart) until the view finishes
    loading; otherwise the already-open view is focused and scrolled.
    """
    decl_pos = payload.get("declPos")
    if decl_pos is None:
        self.env.error_message("Couldn't find the declaration position for symbol.\n{}"
                               .format(payload.get("name")))
        return
    f = decl_pos.get("file")
    offset = decl_pos.get("offset")
    line = decl_pos.get("line")
    if f is None:
        self.env.error_message("Couldn't find the file where it's defined.")
        return
    self.env.logger.debug("Jumping to file : {}".format(f))
    view = self.env.editor.view_for_file(f)
    if view is None:
        view = self.env.window.open_file(f)

        def _scroll_once_loaded(view, offset, line, attempts=10):
            if not offset and not line:
                self.env.logger.debug("No offset or line number were found.")
                return
            # View still loading: retry with one fewer attempt remaining.
            if view.is_loading() and attempts:
                sublime.set_timeout(bind(_scroll_once_loaded, view, offset, line,
                                         attempts - 1), 100)
                return
            if not view.is_loading():
                if not line:
                    # Derive a 1-based line number from the character offset.
                    line, _ = view.rowcol(offset)
                    line = line + 1
                self.env.editor.scroll(view, line)
            else:
                self.env.logger.debug("Scrolling failed as the view didn't get ready in time.")

        sublime.set_timeout(bind(_scroll_once_loaded, view, offset, line, 10), 0)
    else:
        def _scroll(view, offset, line):
            if not line:
                # Derive a 1-based line number from the character offset.
                line, _ = view.rowcol(offset)
                line = line + 1
            self.env.window.focus_view(view)
            self.env.editor.scroll(view, line)

        sublime.set_timeout(bind(_scroll, view, offset, line), 0)
def fill_in_dot_ensime_with_mock_config(self, project_root, content=None):
    """Open <project_root>/.ensime and, if it is empty, fill it with a mock config."""
    v = self.w.open_file(project_root + os.sep + ".ensime")
    if not v.size():
        # we have to do this in such a perverse way
        # because direct v.begin_edit() won't work
        sublime.set_timeout(
            bind(self.fill_in_dot_ensime_with_mock_config_cont,
                 project_root, v.file_name(), content), 0)
def folder_selected(self, selected_index):
    """Quick-panel callback: create a mock .ensime in the chosen folder if empty."""
    # selected_index == -1 means the panel was dismissed.
    if selected_index != -1:
        target_folder = self.w.folders()[selected_index]
        v = self.w.open_file(target_folder + os.sep + ".ensime")
        if not v.size():
            # we have to do this in such a perverse way
            # because direct v.begin_edit() won't work
            sublime.set_timeout(
                bind(self.fill_in_with_mock_config,
                     target_folder, v.file_name()), 0)
def set_breakpoints(breakpoints, status):
    """Recursively register breakpoints one at a time, then start the debuggee.

    Closure: relies on `self`, `launch`, and `on_complete` from the
    enclosing scope. Each debug_set_break re-enters this function with
    the remaining breakpoints as its completion callback.
    """
    if status:
        if breakpoints:
            self.debug_set_break(
                breakpoints[0].file_name, breakpoints[0].line,
                bind(set_breakpoints, breakpoints[1:]))
        else:
            # All breakpoints registered: launch the program.
            self._debug_start(launch.command_line)
    elif on_complete:
        # Propagate the failure status to the caller's callback.
        on_complete(status)
def do_create(self):
    """Create an Ensime config, choosing a strategy by project folder count.

    Zero folders: prompt the user to pick a root first. One folder: use it
    directly. Multiple folders: let the user pick one.
    """
    if len(self.w.folders()) == 0:
        message = "To use Ensime you need to have an active non-empty Sublime project, "
        message += "therefore Sublime is going to initialize one for you. "
        message += "\n\n"
        message += "You will be shown a dialog that will let you select a root folder for the project. "
        message += "After the root folder is selected, Sublime will create a configuration file in it."
        self.confirm_and_proceed(message, self.create_for_zero_folders)
    elif len(self.w.folders()) == 1:
        folder = self.w.folders()[0]
        message = "Since you have a single folder in the project, namely: " + folder + ", "
        message += "Sublime will use it to host the Ensime configuration file without further ado."
        self.confirm_and_proceed(
            message, bind(self.create_for_single_folder, folder))
    else:
        message = "Since you have multiple folders in the project, pick an appropriate folder in the dialog that will follow."
        self.confirm_and_proceed(
            message, bind(self.create_for_multiple_folders, self.w.folders()))
def create_peer_mesh_local_gw_routes(
    clients, subnets=None, address_type=VNS3Attr.primary_private_ip
):
    """Create explicit routes between VNS3 peers for peering traffic
    by pointing peer ips to the local gateway.

    For example, for clients = [[email protected], [email protected]]
    in private clouds that are peered where 10.0.1.5 in 10.0.1.0/24
    and 10.0.2.10 in 10.0.2.0/24

    This func will create the following routes:
        On [email protected]:
            Route: cidr=10.0.2.10/32 on eth0 to network gateway at 10.0.1.1
        On [email protected]:
            Route: cidr=10.0.1.5/32 on eth0 to network gateway at 10.0.2.1

    Assumptions:
        - IF subnets list not passed, clients need the subnet they are
          contained in set on their state, e.g.
          client.update_state({"subnet": "10.0.1.0/24"});
          client.query_state("subnet").
          This function queries the following keys: subnet
          (NOTE(review): an earlier version of this docstring said
          "network", but the code below queries "subnet".)

    Arguments:
        clients {List[VNS3Client]}

    Keyword Arguments:
        subnets: List[str] - list of subnets for each client.
        address_type {str} -- Type of address to use for cidr route.
            (default: {VNS3Attr.primary_private_ip})
    """
    create_client_routes_funcs = []
    client_ips = [
        state.fetch_client_state_attribute(client, address_type)
        for client in clients
    ]
    if subnets:
        assert len(subnets) == len(
            clients
        ), "If subnets passed, must be same number of clients passed"
    for i, client in enumerate(clients):
        client_subnet_cidr = subnets[i] if subnets else client.query_state("subnet")
        client_ip = state.fetch_client_state_attribute(client, address_type)
        assert (
            client_subnet_cidr
        ), "Each client must have 'subnet' set on client.state if subnets arg not passed"
        # Route every OTHER peer's ip via this client's local gateway.
        create_client_routes_funcs.append(
            bind(
                create_local_gateway_routes_for_peers,
                client,
                client_subnet_cidr,
                list(set(client_ips) - set([client_ip])),
            )
        )
    return api_operations.__bulk_call_api(create_client_routes_funcs, parallelize=True)
def load(window):
    """Intelligently guess the appropriate .ensime file location for the
    given window. Load the .ensime and parse as s-expression.

    Return: (inferred project root directory, config sexp, error-reporter)
    where exactly one of config/error-reporter is non-None.
    """
    for f in locations(window):
        root = encode_path(os.path.dirname(f))
        with open(f) as open_file:
            src = open_file.read()
        try:
            conf = sexp.read_relaxed(src)
            m = sexp.sexp_to_key_map(conf)
            if m.get(":root-dir"):
                root = m[":root-dir"]
            else:
                # No :root-dir in the config: record the inferred one.
                conf = conf + [key(":root-dir"), root]
            return (root, conf, None)
        # BUG FIX: was a bare `except:`, which also swallowed SystemExit
        # and KeyboardInterrupt; narrowed to Exception.
        except Exception:
            return (None, None, bind(error_bad_config, window, f, sys.exc_info()))
    return (None, None, bind(error_no_config, window))
def compl():
    """Wire up a stream pipeline: accumulate random samples until a
    threshold is met or a timeout fires, printing a trace along the way."""
    # init the clock
    clk, tick = clock()
    # construct streams
    sp = fmap(soft(random.random), repeat(0.2, clk))
    term = Stream(clk)
    interrupt = timeout(time_thres, term, term)
    # Negative sentinel from `term` resets the accumulator to 0.
    value = merge([sp, fmap(const(-1), term)])
    acc = scan(lambda acc, v: acc + v if v >= 0 else 0, 0, value)
    met = changed(lambda _, y: y <= value_thres, acc)
    each(term, merge([met, interrupt]))
    # hook to print trace
    acc.hook = print
    met.hook = bind(print, 'met!')
    interrupt.hook = bind(print, 'fail!')
    # start clock
    tick()
def sbt_command(*args):
    """Build the SBT command line, or show an error dialog and return None.

    Returns [binary, *flags, *args] when the configured SBT binary exists;
    otherwise schedules an explanatory error message and returns None.
    """
    if _sbt_binary() and _sbt_binary_exists():
        return [_sbt_binary()] + _sbt_flags() + list(args)
    else:
        message = "Configured path for the SBT binary, namely: " + _sbt_binary() + ", does not exist "
        message += "and cannot be resolved from your Sublime's PATH, namely: " + os.environ["PATH"] + "."
        message += "\n\n"
        message += "Consider updating the \"sbt_binary\" entry in Ensime configuration via Preferences > Package Settings > Ensime "
        message += "or adjusting your PATH. (Note that on Mac OS, Sublime doesn't read .bashrc or .bash_profile on startup, so "
        message += "it might be easier to provide an absolute path to the SBT binary rather than to try adjusting Sublime's PATH)."
        # Error dialogs must be shown from the UI thread.
        sublime.set_timeout(bind(sublime.error_message, message), 0)
        return None
def _add_to_watched_tasks(self, taskid):
    """Register a storage change-handler for *taskid*, at most once per task.

    The watcher dict is mutated under the lock; the event subscription is
    published outside the lock, and only when a new watcher was created.
    """
    callback = None
    with self._taskswatchers_lock:
        # Idiom fix: `not taskid in ...` -> `taskid not in ...`.
        if taskid not in self._taskswatchers:
            callback = self._taskswatchers[taskid] = bind(self._task_change_handler, taskid)
    if callback:
        # Watch changes whose key matches exactly this task id ('.' escaped).
        events.publish(
            'Storage.On',
            'change',
            r'^' + taskid.replace('.', r'\.') + '$',
            callback
        )
def __init__(self, error_func=None):
    """Create a new Parser.

    Keyword Arguments:
        error_func -- callable invoked on parse errors; defaults to
            self.error when not supplied.
    """
    self.error_func = error_func if error_func else self.error
    self.filename = ''
    self.parser = None
    self.lexer = None
    self.last_generated_tree = None
    # Build the lexer with a lexer-flavored error callback, then the parser.
    self.lexer = UCLexer(error_func=bind(self.error, lexer=True)).build()
    self.tokens = self.lexer.tokens
    self.parser = yacc.yacc(module=self)
def set_breakpoints(breakpoints, status):
    """Recursively register breakpoints, then start or attach the debugger.

    Closure: relies on `self`, `launch`, and `on_complete` from the
    enclosing scope. Each debug_set_break re-enters this function with
    the remaining breakpoints as its completion callback.
    """
    if status:
        if breakpoints:
            self.debug_set_break(breakpoints[0].file_name, breakpoints[0].line,
                                 bind(set_breakpoints, breakpoints[1:]))
        else:
            # All breakpoints registered: launch or attach per launch config.
            if launch.main_class:
                self._debug_start(launch.command_line, on_complete)
            elif launch.remote_address:
                self._debug_attach(launch.remote_host, launch.remote_port, on_complete)
            else:
                raise Exception("unsupported launch: " + str(launch))
    elif on_complete:
        # Propagate the failure status to the caller's callback.
        on_complete(status)
def _scroll_once_loaded(view, offset, line, attempts=10):
    """Scroll *view* to *line* (or the line at *offset*) once it finishes loading.

    Retries every 100ms while the view is loading, up to *attempts* times.
    Closure: relies on `self` from the enclosing scope for env/editor access.
    """
    if not offset and not line:
        self.env.logger.debug("No offset or line number were found.")
        return
    if view.is_loading() and attempts:
        # Still loading: retry with one fewer attempt remaining.
        sublime.set_timeout(bind(_scroll_once_loaded, view, offset, line,
                                 attempts - 1), 100)
        return
    if not view.is_loading():
        if not line:
            # Derive a 1-based line number from the character offset.
            line, _ = view.rowcol(offset)
            line = line + 1
        self.env.editor.scroll(view, line)
    else:
        self.env.logger.debug("Scrolling failed as the view didn't get ready in time.")
def init(self):
    """Initialize logo cache, subscribe to task-type changes, and bind the server."""
    self.tasktypelogo = {}
    # Imported lazily, presumably to avoid a circular import at module load —
    # TODO confirm.
    from .configurationserver import server, config_application
    events.subscribe(
        'TaskHiveCoordinator.TaskTypes.Changed',
        bind(
            self._tasktypes_changed_handler,
            os.path.join(config_application.CWD, config_application.STATIC_FOLDER)
        )
    )
    self.server = server
    self.run_requests = 0
def set_breakpoints(breakpoints, status):
    """Recursively register breakpoints, then start or attach the debugger.

    Closure: relies on `self`, `launch`, and `on_complete` from the
    enclosing scope. Each debug_set_break re-enters this function with
    the remaining breakpoints as its completion callback.
    """
    if status:
        if breakpoints:
            self.debug_set_break(
                breakpoints[0].file_name, breakpoints[0].line,
                bind(set_breakpoints, breakpoints[1:]))
        else:
            # All breakpoints registered: launch or attach per launch config.
            if launch.main_class:
                self._debug_start(launch.command_line, on_complete)
            elif launch.remote_address:
                self._debug_attach(launch.remote_host, launch.remote_port, on_complete)
            else:
                raise Exception("unsupported launch: " + str(launch))
    elif on_complete:
        # Propagate the failure status to the caller's callback.
        on_complete(status)
def create_route_advertisements(
    clients, local_subnets
) -> data_types.BulkOperationResult:
    """create_route_advertisements

    Create a route advertisement for controllers network

    Arguments:
        clients {List[VNS3Client]}
        local_subnets {List[str]} - order should correspond with clients list

    Returns:
        data_types.BulkOperationResult
    """
    assert len(clients) == len(
        local_subnets
    ), "clients list length must equal local_subnets list length"
    # Validate that each client's primary private ip lies inside its subnet
    # before issuing any API calls.
    invalid = []
    for index, client in enumerate(clients):
        private_ip = state.get_primary_private_ip(client)
        if not network_math.subnet_contains_ipv4(private_ip, local_subnets[index]):
            invalid.append("%s not in %s" % (private_ip, local_subnets[index]))
    if len(invalid):
        raise AssertionError(
            "Invalid subnets provided for clients: %s." % ",".join(invalid)
        )

    def _create_route(_client, subnet):
        return _client.routing.post_create_route(
            **{
                "cidr": subnet,
                "description": "Local subnet advertisement",
                "advertise": True,
                "gateway": "",
            }
        )

    bound_api_calls = [
        bind(_create_route, client, local_subnets[index])
        for index, client in enumerate(clients)
    ]
    return api_operations.__bulk_call_api(bound_api_calls)
def handle_string_response(self, call_id, payload):
    """Handler for response `StringResponse`.

    This is the response for the following requests:
      1. `DocUriAtPointReq` or `DocUriForSymbolReq`
      2. `DebugToStringReq`

    Relative doc URIs are resolved against the local Ensime HTTP server
    before being opened in a browser.
    """
    # :EnDocBrowse or :EnDocUri
    url = payload['text']
    if not url.startswith('http'):
        port = self.ensime.http_port()
        url = gconfig['localhost'].format(port, url)
    options = self.call_options.get(call_id)
    if options and options.get('browse'):
        sublime.set_timeout(bind(self._browse_doc, self.env, url), 0)
        del self.call_options[call_id]
    # Dead `else: pass` branch removed (idiom cleanup); non-browse calls
    # intentionally do nothing here.
def main():
    """Run the application until an 'Application.Exit' event or Ctrl+C."""
    import threading
    ev = threading.Event()
    ev.clear()
    events.subscribe('Application.Exit', bind(exit_the_main_loop, ev))
    a = Application()
    print(
        "\n# Press 'abort' key combination (usually CTRL+C) to stop the %s server #\n" % APP_NAME)
    try:
        # Wake every 30s so the loop can observe the exit event promptly.
        while not ev.is_set():
            logging.debug("Main Loop...")
            ev.wait(30)
    except KeyboardInterrupt:
        pass
    # BUG FIX: was a Python 2 `print "Good bye"` statement, inconsistent with
    # the print() call above in this very function.
    print("Good bye")
def handle_import_suggestions(self, call_id, payload):
    """Show a quick panel of import suggestions and apply the chosen one."""
    imports = list()
    for suggestions in payload['symLists']:
        for suggestion in suggestions:
            # Scala compiler encodes nesting with '$'; display with dots.
            imports.append(suggestion['name'].replace('$', '.'))
    imports = list(sorted(set(imports)))
    if not imports:
        self.env.error_message('No import suggestions found.')
        return

    def do_refactor(choice):
        # choice == -1 means the panel was dismissed without a selection.
        if choice > -1:
            file_name = self.call_options[call_id].get('file_name')
            # request is async, file is reverted when patch is received and applied
            # NOTE(review): `async` is a reserved word from Python 3.7+, so this
            # keyword argument only parses on older interpreters — confirm the
            # target runtime or rename the parameter.
            AddImportRefactorDesc(file_name, imports[choice]).run_in(self.env, async=True)

    sublime.set_timeout(bind(self.env.window.show_quick_panel, imports,
                             do_refactor, sublime.MONOSPACE_FONT), 0)
def fetch_state_attribute(clients, attribute, bust_cache=False) -> data_types.BulkOperationResult:
    """Fetch state attribute for all clients

    Arguments:
        clients {List[VNS3Client]}
        attribute {str}

    Keyword Arguments:
        bust_cache {bool}

    Raises:
        AssertionError -- if *attribute* is not in StateLibrary.

    Returns:
        [BulkOperationResult] -- [description]
    """
    assert attribute in StateLibrary, "Attribute %s not currently supported" % attribute
    fetch_func = StateLibrary.get(attribute)
    api_calls = [
        bind(fetch_func, client, bust_cache=bust_cache)
        for client in clients
    ]
    return api_op.__bulk_call_api(api_calls, parallelize=True)
def main():
    """Run the application until an 'Application.Exit' event or Ctrl+C."""
    import threading
    ev = threading.Event()
    ev.clear()
    events.subscribe(
        'Application.Exit',
        bind(exit_the_main_loop, ev)
    )
    a = Application()
    print("\n# Press 'abort' key combination (usually CTRL+C) to stop the %s server #\n" % APP_NAME)
    try:
        # Wake every 30s so the loop can observe the exit event promptly.
        while not ev.is_set():
            logging.debug("Main Loop...")
            ev.wait(30)
    except KeyboardInterrupt:
        pass
    # BUG FIX: was a Python 2 `print "Good bye"` statement, inconsistent with
    # the print() call above in this very function.
    print("Good bye")
def handle_import_suggestions(self, call_id, payload):
    """Show a quick panel of import suggestions and apply the chosen one."""
    imports = list()
    for suggestions in payload['symLists']:
        for suggestion in suggestions:
            # Scala compiler encodes nesting with '$'; display with dots.
            imports.append(suggestion['name'].replace('$', '.'))
    imports = list(sorted(set(imports)))
    if not imports:
        self.env.error_message('No import suggestions found.')
        return

    def do_refactor(choice):
        # choice == -1 means the panel was dismissed without a selection.
        if choice > -1:
            file_name = self.call_options[call_id].get('file_name')
            # request is async, file is reverted when patch is received and applied
            # NOTE(review): `async` is a reserved word from Python 3.7+, so this
            # keyword argument only parses on older interpreters — confirm the
            # target runtime or rename the parameter.
            AddImportRefactorDesc(file_name, imports[choice]).run_in(self.env, async=True)

    sublime.set_timeout(
        bind(self.env.window.show_quick_panel, imports, do_refactor), 0)
def apply_refactor(self, call_id, payload):
    """Apply a refactoring diff received from the server.

    Only the listed refactoring kinds are handled; the diff is applied
    from the filesystem root ("/"), and the affected file is reloaded on
    success.
    """
    supported_refactorings = [
        "AddImport", "OrganizeImports", "Rename", "InlineLocal"
    ]
    if payload["refactorType"]["typehint"] in supported_refactorings:
        diff_file = payload["diff"]
        patch_set = fromfile(diff_file)
        if not patch_set:
            self.env.logger.warning(
                "Couldn't parse diff_file: {}".format(diff_file))
            return
        # Apply with strip-level 0 relative to "/" (diff paths are absolute).
        result = patch_set.apply(0, "/")
        if result:
            file = self.refactorings[payload['procedureId']]
            sublime.set_timeout(bind(self.env.editor.reload_file, file), 0)
            self.env.logger.info(
                "Refactoring succeeded, patch file: {}".format(diff_file))
            self.env.status_message("Refactoring succeeded")
        else:
            self.env.logger.error(
                "Patch refactoring failed, patch file: {}".format(diff_file))
            self.env.status_message("Refactor failed: {}".format(diff_file))
def handle_completion_info_list(self, call_id, payload):
    """Handler for a completion response (variant).

    If the user is still typing the same prefix, the visible auto-complete
    popup is hidden and re-triggered with the fresh suggestions; otherwise
    the suggestions are stored for the next popup.
    """
    prefix = payload.get("prefix")
    if (self.env.editor.current_prefix
            and self.env.editor.current_prefix == prefix):

        def _hack(prefix):
            # Only restart the popup if it is still showing for the same prefix.
            if (sublime.active_window().active_view().is_auto_complete_visible()
                    and self.env.editor.current_prefix == prefix):
                sublime.active_window().run_command("hide_auto_complete")
                completions = [
                    c for c in payload["completions"] if "typeInfo" in c
                ]
                self.env.editor.suggestions = [
                    completion_to_suggest(c) for c in completions
                ]

                def hack2():
                    sublime.active_window().active_view().run_command(
                        "auto_complete")

                # Re-show the popup a tick after hiding it.
                sublime.set_timeout(hack2, 1)

        sublime.set_timeout(bind(_hack, prefix), 0)
    else:
        self.env.editor.current_prefix = payload.get("prefix")
        self.env.logger.debug('handle_completion_info_list: in')
        # filter out completions without `typeInfo` field to avoid server bug. See #324
        completions = [
            c for c in payload["completions"] if "typeInfo" in c
        ]
        self.env.editor.suggestions = [
            completion_to_suggest(c) for c in completions
        ]
        self.env.logger.debug('handle_completion_info_list: {}'.format(
            Pretty(self.env.editor.suggestions)))
def __init__(self):
    """Build the ELTool control window, the totals window, and start Tk."""
    self.root = tkinter.Tk()
    self.root.wm_title("ELTool Control")
    # Asset and color pickers (left column).
    self.pick_gif_button = tkinter.Button(self.root, text="Pick GIF", command=self.pick_gif, width=10)
    self.pick_wav_button = tkinter.Button(self.root, text="Pick WAV", command=self.pick_wav, width=10)
    self.back_color_button = tkinter.Button(self.root, text="Back Color", command=self.pick_back_color, width=10)
    self.text_color_button = tkinter.Button(self.root, text="Text Color", command=self.pick_text_color, width=10)
    # Text configuration fields (right columns).
    self.player_id_label = tkinter.Label(self.root, text="ID:")
    self.player_id_entry = tkinter.Entry(self.root, width=20)
    self.font_name_label = tkinter.Label(self.root, text="Font Name:")
    self.font_name_entry = tkinter.Entry(self.root, width=20)
    self.font_size_label = tkinter.Label(self.root, text="Font Size:")
    self.font_size_entry = tkinter.Entry(self.root, width=20)
    # Start/Test begin disabled; presumably enabled once assets are picked —
    # TODO confirm.
    self.start_button = tkinter.Button(self.root, text="Start", command=self.start, state=tkinter.DISABLED, width=20, height=2)
    self.test_button = tkinter.Button(self.root, text="Test", state=tkinter.DISABLED, command=bind(self.play_anim, "Goku donated $9000.01"), width=20, height=2)
    # NOTE(review): self.partID is not assigned in this method — presumably a
    # class attribute or set before __init__ runs; confirm before refactoring.
    self.player_id_entry.insert(0, str(self.partID))
    self.font_name_entry.insert(0, "Arial")
    self.font_size_entry.insert(0, "20")
    # Grid layout.
    self.pick_gif_button.grid(column=0, row=0)
    self.pick_wav_button.grid(column=0, row=1)
    self.back_color_button.grid(column=0, row=2)
    self.text_color_button.grid(column=0, row=3)
    self.player_id_label.grid(column=3, row=0, sticky=tkinter.E)
    self.player_id_entry.grid(column=4, row=0)
    self.font_name_label.grid(column=3, row=1, sticky=tkinter.E)
    self.font_name_entry.grid(column=4, row=1)
    self.font_size_label.grid(column=3, row=2, sticky=tkinter.E)
    self.font_size_entry.grid(column=4, row=2)
    self.start_button.grid(column=5, row=0, rowspan=2)
    self.test_button.grid(column=5, row=2, rowspan=2)
    # Secondary window showing the running total.
    self.total_win = TotalWin(self.root)
    self.total_win.wm_title("ELTool Total")
    self.total_win.grid_columnconfigure(0, weight=1)
    self.total_win.grid_rowconfigure(0, weight=1)
    self.update_total()
    # Blocks until the window is closed.
    self.root.mainloop()
def run(self):
    """Offer to clean every non-empty 'sandbox' folder in the project.

    Empty sandbox folders are 'cleaned' immediately (index 0 = Yes);
    non-empty ones require confirmation via a quick panel.
    """
    for folder in self.window.folders():
        if folder.endswith("sandbox"):
            if os.listdir(folder):
                self.window.show_quick_panel(
                    ["Yes, clean " + folder, "No, don't delete anything"],
                    bind(self.on_selected, folder))
            else:
                # Nothing to confirm: proceed as if "Yes" was chosen.
                self.on_selected(folder, 0)
def status_message(self, msg):
    """Display *msg* in the status bar, scheduled onto the UI thread."""
    show = bind(sublime.status_message, msg)
    sublime.set_timeout(show, 0)
def handle_debug_output(self, call_id, payload):
    """Handle responses `DebugOutputEvent`."""
    # Non-ASCII characters are dropped before display.
    # NOTE(review): .encode(...) yields bytes on Python 3 — confirm
    # message_dialog accepts that on the target runtime.
    sublime.set_timeout(bind(sublime.message_dialog,
                             payload["body"].encode("ascii", "ignore")), 0)
def error_message(self, msg):
    """Display *msg* in an error dialog, scheduled onto the UI thread."""
    show = bind(sublime.error_message, msg)
    sublime.set_timeout(show, 0)
def run(self, flags):
    """Remember *flags* and let the user pick a test type from a quick panel."""
    self.flags = flags
    # NOTE(review): bind() is called with no extra arguments here —
    # presumably a no-op partial; confirm bind's semantics before simplifying
    # to a bare self.on_selected.
    self.window.show_quick_panel(self.test_types(), bind(self.on_selected))
def peer_mesh(
        clients,
        peer_address_map=None,
        address_type=VNS3Attr.primary_private_ip,
        delay_configure=False,
        mtu=None,
):
    """peer_mesh

    Create a peering mesh by adding each client as peer for other clients.
    The order of the list of clients is the assumed peering id, i.e. client
    at clients[0] has peering id of 1, clients[1] has peering id of 2.
    Each TLS connection between peers is then automatically negotiated.

    Arguments:
        clients {List[VNS3Client]}

    Keyword Arguments:
        peer_address_map {Dict} - Optional map for peering addresses
            {
                [from_peer_id: str]: {
                    [to_peer_id_1: str]: [peer_address_1: str],
                    [to_peer_id_2: str]: [peer_address_2: str],
                    ...
                }
            }
        address_type {str} - which address to use. Options:
            primary_private_ip, secondary_private_ip, public_ip or public_dns
        delay_configure {bool} -- delay automatic negotiation of peer
            (default: False)
        mtu {int} -- Override MTU for the peering TLS connection. VNS3
            defaults to 1500. (default: {None})

    Raises:
        CohesiveSDKException

    Returns:
        data_types.BulkOperationResult
    """
    # fetch peer ids and set on clients; abort with a single aggregated
    # exception if any client failed.
    ensure_peer_ids_result = fetch_state_attribute(clients, VNS3Attr.peer_id)
    if api_ops.bulk_operation_failed(ensure_peer_ids_result):
        errors_str = api_ops.stringify_bulk_result_exception(
            ensure_peer_ids_result)
        Logger.error("Failed to fetch peering Ids for all clients",
                     errors=errors_str)
        raise CohesiveSDKException(
            "Failed to fetch peering Ids for all clients: %s" % errors_str)

    # construct peer address mapping: list of (client, {peer_id: address})
    if peer_address_map is not None:
        Logger.debug("Using address map passed for peering mesh.")
        peer_id_to_client = {
            c.query_state(VNS3Attr.peer_id): c for c in clients
        }
        peer_address_mapping_tuples = [
            (peer_id_to_client[from_peer_id], to_peers_map)
            for from_peer_id, to_peers_map in peer_address_map.items()
        ]
    else:
        Logger.debug("Constructing peering mesh")
        peer_address_mapping_tuples = _construct_peer_address_mapping(
            clients, address_type)

    # Extra kwargs applied to every post_create_peer call.
    common_peer_kwargs = {}
    if delay_configure:
        common_peer_kwargs.update(force=False)
    if mtu:
        common_peer_kwargs.update(overlay_mtu=mtu)

    def create_all_peers_for_client(client, post_peer_kwargs):
        # One API call per peering request, merged with the common kwargs.
        return [
            client.peering.post_create_peer(
                **dict(peering_request, **common_peer_kwargs))
            for peering_request in post_peer_kwargs
        ]

    run_peering_funcs = []
    # bind api function calls for peer creations
    for vns3_client, peer_mapping in peer_address_mapping_tuples:
        run_peering_funcs.append(
            bind(
                create_all_peers_for_client,
                vns3_client,
                [{
                    "id": peer_id,
                    "name": peer_address
                } for peer_id, peer_address in peer_mapping.items()],
            ))

    Logger.debug("Creating %d-way peering mesh." % len(clients))
    # NOTE(review): accesses a double-underscore ("private") module attribute.
    # No name mangling happens here because peer_mesh is module-level, but the
    # api_ops module presumably does not intend this as public API — verify.
    return api_ops.__bulk_call_api(run_peering_funcs, parallelize=True)
return training_loss, weights, gradOfLossWrtInput, train_op # Values: input_values = np.array([[0.52, 1.12, 0.77], [0.88, -1.08, 0.15], [0.52, 0.06, -1.30], [0.74, -2.49, 1.39]]) target_values = np.array([[1, 1, 0, 1]]).transpose() # Variables with tf.device("cpu"): inputs = tf.placeholder(tf.float32, [4, 3]) targets = tf.placeholder(tf.float32, [4, 1]) with ipu.scopes.ipu_scope("/device:IPU:0"): regression = bind(model, False) regression_custom = bind(model, True) fetches = ipu.ipu_compiler.compile(regression, [inputs, targets]) fetches_custom = ipu.ipu_compiler.compile(regression_custom, [inputs, targets]) cfg = ipu.utils.create_ipu_config() cfg = ipu.utils.auto_select_ipus(cfg, 1) ipu.utils.configure_ipu_system(cfg) ipu.utils.move_variable_initialization_to_cpu() # Run the optimisation with the built in op and the # custom op and record the relevant results: losses = [] custom_losses = [] grads = []
def on_message(self, data):
    """Dispatch one incoming WebSocket frame.

    Parses *data* as a JSON-RPC message and handles two methods:
    'rpc' (a wrapped JSON-RPC call published via the PubSub events bus)
    and 'hello' (protocol negotiation). Anything unparseable is logged
    and dropped.
    """
    logging.debug(HELLO + "Got message") # '%s'" % data)
    try:
        method, params, message_id = Messager.parse(data)
    except Exception as ex:
        # Unparseable frame: log and drop rather than kill the connection.
        logging.debug(HELLO + "Received the following error from message parser: %s" % ex)
        return
    ## we speak JSON-RPC (http://json-rpc.org/wiki/specification) over WebSocket
    ### RPC method
    if method == 'rpc':
        '''
        'rpc' method is a wrapper for an actual JSON-RPC packet. The reason
        we do it is to allow passing through an authentication token for the
        connection, without polluting the arguments of the actual JSON-RPC
        call. We look like this:

            { method: rpc
            , id: None or number or string
            , params:
                { method: actual_called_method's_name
                , id: exact same as above, but irrelevant since we use the id from above
                , params: params to be passed to the called method
                --------------------
                , authentication_token: something we got from server at hello stage,
                  at inception of this websocket connection.
                --------------------
                }
            }

        The wrapper call (when auth is good) calls one and only interface -
        PubSub. actual_called_method's_name is the channel name. params are
        flattened into Python's *args, **kw boxes. id is special. Presence
        of it means we need a callback added to *kw. This ID, WSConnection
        are packaged into the callback. The called method calls callback
        when done.
        '''
        if not params or type(params) != dict or not params.get('method'):
            logging.debug(HELLO + " RPC call seems to have sub-call parts missing.")
            return
        actual_method = params['method']
        # Flatten the inner 'params' into positional/keyword argument boxes.
        args = []
        kw = {}
        if 'params' in params:
            actual_params = params['params']
            if type(actual_params) == list:
                args.extend(actual_params)
            elif type(actual_params) == dict:
                kw.update(actual_params)
            # all other types of objects are not allowed as values for 'params' in JSON-RPC
        # presence of message ID means, it's not "Notification"
        # (where caller does not expect a return value)
        # but is a "Call" where there is expectation of a return value.
        # since we are async, we can't return, we can only callback.
        if message_id:
            # weakref avoids keeping this connection alive just because a
            # response callback is still pending.
            kw['callback'] = bind(
                process_rpc_response
                , weakref.ref(self)
                , message_id
            )
        events.publish(
            actual_method
            , *args
            , **kw
        )
    ### HELLO
    elif method == 'hello':
        # Negotiate the protocol: reply with the subset of protocols we
        # support, echoing the caller's message id.
        supported_common = list( set(SUPPORTED_PROTOCOLS).intersection( params['protocols'] ) )
        if len(supported_common):
            hello = {
                'method': 'hello'
                , 'params': {
                    'protocols': supported_common
                    , 'serverName': APP_NAME
                }
                , 'id': message_id
            }
            self.send_message(hello)
train_op = opt.minimize(training_loss) return training_loss, weights, gradOfLossWrtInput, train_op # Values: input_values = np.array([[0.52, 1.12, 0.77], [0.88, -1.08, 0.15], [0.52, 0.06, -1.30], [0.74, -2.49, 1.39]]) target_values = np.array([[1, 1, 0, 1]]).transpose() # Variables with tf.device("cpu"): inputs = tf.placeholder(tf.float16, [4, 3]) targets = tf.placeholder(tf.float16, [4, 1]) with ipu.scopes.ipu_scope("/device:IPU:0"): regression = bind(model, opts.optimiser) fetches = ipu.ipu_compiler.compile(regression, [inputs, targets]) cfg = ipu.config.IPUConfig() cfg.auto_select_ipus = 1 cfg.configure_ipu_system() ipu.utils.move_variable_initialization_to_cpu() # Run the optimisation with each optimiser and compare to known results. losses = [] custom_losses = [] lamb_losses = [] grads = [] custom_grads = [] lamb_grads = [] with tf.Session() as sess:
######################### imports ############################### import sys from sys import stdin, stdout, stderr, argv import os import functools import functools as ftools # from functools import * from functools import ( partial, partial as bind, lru_cache, reduce, ) memoize = bind(lru_cache, maxsize=None) import itertools import itertools as itools from itertools import * from operator import * #iterable and such from random import ( random, randint, randrange, shuffle, ) import collections from collections import * from collections import ( defaultdict as ddict,
def set_breakpoints(breakpoints, status):
    """Set *breakpoints* one at a time by chaining itself as the completion
    callback of each debug_set_break call; start the debuggee once the list
    is exhausted.

    Closure note: this is a nested function — `self`, `launch`, and
    `on_complete` come from the enclosing scope (not visible here).
    `status` is the success flag reported by the previous step.
    """
    if status:
        if breakpoints:
            # Set the head breakpoint; recurse (via callback) on the tail.
            self.debug_set_break(breakpoints[0].file_name, breakpoints[0].line, bind(set_breakpoints, breakpoints[1:]))
        else:
            # All breakpoints placed — launch the target.
            self._debug_start(launch.command_line)
    elif on_complete:
        # A step failed: propagate the failing status to the caller.
        on_complete(status)
def main():
    """Keyboard-driven robot controller with a live camera preview.

    Opens the camera, records the session to ./session.avi, exposes motor
    config trackbars, and maps w/s/a/d/f keys to movement commands. Any
    movement auto-stops after `action_timeout` ms without a keypress;
    'q' stops the robot and exits.
    """
    width = 640
    height = 480
    # On a Raspberry Pi the camera device may need the v4l2 driver loaded.
    if not os.path.exists('/dev/video0'):
        path = 'sudo modprobe bcm2835-v4l2 max_video_width=640 max_video_height=480'
        os.system(path)
    # Key -> movement command dispatch table (module-level command functions).
    allowed = {
        ord("w"): forward,
        ord("s"): back,
        ord("a"): turn_left,
        ord("d"): turn_right,
        ord("f"): stop
    }
    print("Allowed only: {}".format(allowed.keys()))
    cv2.namedWindow("camera")
    camera = cv2.VideoCapture(0)
    camera.set(3, width)   # CAP_PROP_FRAME_WIDTH
    camera.set(4, height)  # CAP_PROP_FRAME_HEIGHT
    fourcc = cv2.VideoWriter_fourcc(*'MJPG')
    # FIX: use the width/height locals instead of repeating the literals,
    # so the writer stays consistent with the capture resolution.
    video_output = cv2.VideoWriter('./session.avi', fourcc, 30.0, (width, height))

    def set_motor_config(config_name, value):
        # Trackbars report 0-100; motor configs are stored as 0.0-1.0.
        motor_configs[config_name] = value / 100

    # One trackbar per motor config; `bind` fixes the config name now,
    # avoiding the late-binding-closure pitfall.
    for config in motor_configs:
        update_config = bind(set_motor_config, config)
        # FIX: createTrackbar requires an int initial position; the stored
        # value is a float in [0, 1].
        cur_value = int(motor_configs[config] * 100)
        cv2.createTrackbar(config, "camera", cur_value, 100, update_config)
        cv2.setTrackbarMin(config, "camera", 0)
        cv2.setTrackbarMax(config, "camera", 100)

    action_time = None
    action_timeout = 250  # ms of key silence before auto-stop
    direction = []
    while camera.isOpened():
        _, frame = camera.read()
        video_output.write(frame)
        cv2.imshow("camera", frame)
        key = cv2.waitKey(1) & 0xFF
        if key in allowed.keys():
            action_time = datetime.datetime.now()
            if key not in direction:
                direction.append(key)
            allowed[key]()
            print(allowed[key])
        elif key == ord("q"):
            # Clean shutdown: stop motors, release devices, close windows.
            stop()
            camera.release()
            video_output.release()
            cv2.destroyAllWindows()
            exit(0)
        # BUG FIX: the original used timedelta.microseconds, which is only
        # the sub-second component (wraps to 0 every second), so an elapsed
        # time of e.g. 1.1 s read as 100 ms and the auto-stop never fired.
        # total_seconds() * 1000 gives the true elapsed milliseconds.
        if action_time is not None and (
                datetime.datetime.now() - action_time
        ).total_seconds() * 1000 > action_timeout:
            stop()
            print("stop")
            direction.clear()
            action_time = None