def refresh_completer(alias):
    try:
        refresher = CompletionRefresher()
        refresher.refresh(executors[alias], special=special,
                          callbacks=(lambda c: swap_completer(c, alias)),
                          settings=completerSettings[alias])
        return Response(to_str(json.dumps({'success': True,
                                           'errormessage': None})),
                        mimetype='text/json')
    except Exception:
        return Response(to_str(json.dumps({'success': False,
                                           'errormessage': 'Could not refresh metadata.'})),
                        mimetype='text/json')
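# The refresh callbacks above assume a swap_completer() helper that installs
# the freshly built completer once the background refresh finishes. That
# helper is not shown in this section; the sketch below is an assumption,
# modelled on the completers dict and completer_lock used in check_pgcli
# further down. The third argument passed in update_completer_settings is
# accepted but ignored here for simplicity.
def swap_completer(new_completer, key, forced=False):
    # Atomically replace whatever completer is registered for this alias/url
    # so in-flight completion requests never see a half-built completer.
    with completer_lock:
        completers[key] = new_completer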
def update_completer_settings(alias, new_settings):
    if new_settings != completerSettings[alias]:
        completerSettings[alias].update(new_settings)
        refresher = CompletionRefresher()
        refresher.refresh(executors[alias], special=special,
                          callbacks=(lambda c: swap_completer(c, alias, True)),
                          settings=completerSettings[alias])
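# Hypothetical call, illustrating the intent: only the keys being overridden
# need to be passed, and any change triggers a full completer rebuild. The
# 'reporting' alias is made up for this example; the settings keys are the
# ones connect_server() seeds below.
update_completer_settings('reporting', {'generate_aliases': False,
                                        'generate_casing_file': False})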
def connect_server(alias, authkey=None):
    completer_settings = {
        'generate_aliases': True,
        'casing_file': os.path.expanduser('~/.config/pgcli/casing'),
        'generate_casing_file': True,
        'single_connection': True,
        'call_arg_style': '{arg_name} := ${{{arg_num}:{arg_default}}}',
        'call_arg_display_style': '{arg_name}:={arg_default}',
        'call_arg_oneliner_max': 2,
        'signature_arg_style': '{arg_name} {arg_type}'
    }
    completer_settings.update(completerSettings.get(alias, {}))
    completerSettings[alias] = completer_settings

    server = serverList.get(alias, None)
    if not server:
        return {'alias': alias, 'success': False,
                'errormessage': 'Unknown alias: ' + alias}
    if executors[alias]:
        return {'alias': alias, 'success': False,
                'errormessage': 'Already connected to server.'}

    refresher = CompletionRefresher()
    history = [x['query'] for x in
               search_query_history('', False, 'query', 300, 'DESC')[:-1:299]]

    try:
        with executor_lock[alias]:
            dsn = server.get('dsn')
            executor = new_executor(server['url'], dsn, authkey)
            with executor.conn.cursor() as cur:
                cur.execute('SELECT oid, oid::regtype::text FROM pg_type')
                type_dict[alias] = dict(row for row in cur.fetchall())
            executors[alias] = executor
            refresher.refresh(executor, special=special, history=history,
                              callbacks=(lambda c: swap_completer(c, alias)),
                              settings=completerSettings[alias])
            serverList[alias]['connected'] = True
    except psycopg2.Error as e:
        return {'success': False, 'errormessage': to_str(e)}

    # Wait for the connection to be established.
    sleep = 0
    while True:
        time.sleep(0.01)
        sleep += 0.01
        if sleep >= 5:
            return {'alias': alias, 'success': False,
                    'errormessage': 'Connection timed out.'}
        elif (executors[alias].conn.get_transaction_status() == TRANSACTION_STATUS_IDLE
                and executors[alias].conn.status == STATUS_READY):
            time.sleep(0.5)
            break

    # Create a queue for this alias and start a worker thread.
    executor_queues[alias] = Queue()
    t = Thread(target=executor_queue_worker, args=(alias,),
               name='executor_queue_worker')
    t.setDaemon(True)
    t.start()

    return {'alias': alias, 'success': True,
            'color': config['connections'][alias].get('color'),
            'errormessage': None}
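# A minimal sketch of how a caller might consume the result dict returned by
# connect_server(). The 'reporting' alias and the log messages are
# illustrative only; they are not part of the source.
result = connect_server('reporting', authkey=None)
if result['success']:
    logger.debug('Connected to %s (color: %s)',
                 result['alias'], result.get('color'))
else:
    logger.error('Connection failed: %s', result['errormessage'])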
def run_sql_async(view, sql):
    executor = executors[view.buffer_id()]
    panel = get_output_panel(view)
    logger.debug('Command: PgcliExecute: %r', sql)
    save_mode = get(view, 'pgcli_save_on_run_query_mode')

    # Make sure the output panel is visible
    sublime.active_window().run_command('pgcli_show_output_panel')

    # Put a leading datetime
    datestr = str(datetime.datetime.now()) + '\n\n'
    panel.run_command('append', {'characters': datestr, 'pos': 0})

    results = executor.run(sql, pgspecial=special)

    try:
        for (title, cur, headers, status, _, _) in results:
            fmt = format_output(title, cur, headers, status, 'psql')
            out = ('\n'.join(fmt) + '\n\n'
                   + str(datetime.datetime.now()) + '\n\n')
            panel.run_command('append', {'characters': out})
    except psycopg2.DatabaseError as e:
        success = False
        out = str(e) + '\n\n' + str(datetime.datetime.now()) + '\n\n'
        panel.run_command('append', {'characters': out})
    else:
        success = True

    if (view.file_name()
            and ((save_mode == 'always')
                 or (save_mode == 'success' and success))):
        view.run_command('save')

    # Refresh the table names and column names if necessary.
    if has_meta_cmd(sql):
        logger.debug('Need completions refresh')
        url = get(view, 'pgcli_url')
        refresher = CompletionRefresher()
        refresher.refresh(executor, special=special,
                          callbacks=(lambda c: swap_completer(c, url)))

    # Refresh search_path to set default schema.
    if has_change_path_cmd(sql):
        logger.debug('Refreshing search path')
        url = get(view, 'pgcli_url')
        with completer_lock:
            completers[url].set_search_path(executor.search_path())
            logger.debug('Search path: %r', completers[url].search_path)
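# has_meta_cmd() and has_change_path_cmd() are not defined in this section.
# The sketch below is an assumption about their behaviour (refresh
# completions after DDL, re-read search_path after a SET search_path), not
# this module's actual implementation.
def has_meta_cmd(sql):
    # Treat statements that start with ALTER/CREATE/DROP as schema-changing.
    tokens = sql.split()
    return bool(tokens) and tokens[0].lower() in ('alter', 'create', 'drop')

def has_change_path_cmd(sql):
    # Anything mentioning search_path may change the default schema.
    return 'search_path' in sql.lower()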
def connect_server(alias, authkey=None):
    defaultSettings = {
        'generate_aliases': True,
        'casing_file': os.path.expanduser('~/.config/pgcli/casing'),
        'generate_casing_file': True,
        'single_connection': True
    }
    completerSettings[alias] = completerSettings.get(alias, defaultSettings)

    server = serverList.get(alias, None)
    if not server:
        return {'alias': alias, 'success': False,
                'errormessage': 'Unknown alias.'}
    if executors[alias]:
        return {'alias': alias, 'success': False,
                'errormessage': 'Already connected to server.'}

    refresher = CompletionRefresher()
    try:
        with executor_lock[alias]:
            dsn = server.get('dsn')
            executor = new_executor(server['url'], dsn, authkey)
            with executor.conn.cursor() as cur:
                cur.execute('SELECT oid, oid::regtype::text FROM pg_type')
                type_dict[alias] = dict(row for row in cur.fetchall())
            executors[alias] = executor
            refresher.refresh(executor, special=special,
                              callbacks=(lambda c: swap_completer(c, alias)),
                              settings=completerSettings[alias])
            serverList[alias]['connected'] = True
    except psycopg2.Error as e:
        return {'success': False, 'errormessage': to_str(e)}

    # Wait for the connection to be established.
    sleep = 0
    while True:
        time.sleep(0.01)
        sleep += 0.01
        if sleep >= 5:
            return {'alias': alias, 'success': False,
                    'errormessage': 'Connection timed out.'}
        elif (executors[alias].conn.get_transaction_status() == TRANSACTION_STATUS_IDLE
                and executors[alias].conn.status == STATUS_READY):
            time.sleep(0.5)
            break

    # Create a queue for this alias and start a worker thread.
    executor_queues[alias] = Queue()
    t = Thread(target=executor_queue_worker, args=(alias,),
               name='executor_queue_worker')
    t.setDaemon(True)
    t.start()

    return {'alias': alias, 'success': True, 'errormessage': None}
def run_sql_async(view, sql, panel):
    executor = executors[view.buffer_id()]
    logger.debug('Command: PgcliExecute: %r', sql)
    save_mode = get(view, 'pgcli_save_on_run_query_mode')

    # Make sure the output panel is visible
    sublime.active_window().run_command('pgcli_show_output_panel')

    # Put a leading datetime
    datestr = str(datetime.datetime.now()) + '\n\n'
    panel.run_command('append', {'characters': datestr, 'pos': 0})

    results = executor.run(sql, pgspecial=special)

    try:
        for (title, cur, headers, status, _, _) in results:
            fmt = format_output(title, cur, headers, status, 'psql')
            out = ('\n'.join(fmt) + '\n\n'
                   + str(datetime.datetime.now()) + '\n\n')
            panel.run_command('append', {'characters': out})
    except psycopg2.DatabaseError as e:
        success = False
        out = str(e) + '\n\n' + str(datetime.datetime.now()) + '\n\n'
        panel.run_command('append', {'characters': out})
    else:
        success = True

    if (view.file_name()
            and ((save_mode == 'always')
                 or (save_mode == 'success' and success))):
        view.run_command('save')

    # Refresh the table names and column names if necessary.
    if has_meta_cmd(sql):
        logger.debug('Need completions refresh')
        url = get(view, 'pgcli_url')
        refresher = CompletionRefresher()
        refresher.refresh(executor, special=special,
                          callbacks=(lambda c: swap_completer(c, url)))

    # Refresh search_path to set default schema.
    if has_change_path_cmd(sql):
        logger.debug('Refreshing search path')
        url = get(view, 'pgcli_url')
        with completer_lock:
            completers[url].set_search_path(executor.search_path())
            logger.debug('Search path: %r', completers[url].search_path)
def check_pgcli(view):
    """Check if a pgcli connection for the view exists, or request one"""
    if not is_sql(view):
        view.set_status('pgcli', '')
        return

    with executor_lock:
        buffer_id = view.buffer_id()
        if buffer_id not in executors:
            url = get(view, 'pgcli_url')
            if not url:
                view.set_status('pgcli', '')
                logger.debug('Empty pgcli url %r', url)
            else:
                # Make a new executor connection
                view.set_status('pgcli', 'Connecting: ' + url)
                logger.debug('Connecting to %r', url)
                try:
                    executor = new_executor(url)
                    view.set_status('pgcli', pgcli_id(executor))
                except Exception:
                    logger.error('Error connecting to pgcli')
                    logger.error('traceback: %s', traceback.format_exc())
                    executor = None
                    status = 'ERROR CONNECTING TO {}'.format(url)
                    view.set_status('pgcli', status)

                executors[buffer_id] = executor

                # Make sure we have a completer for the corresponding url
                with completer_lock:
                    need_new_completer = executor and url not in completers
                    if need_new_completer:
                        completers[url] = PGCompleter()  # Empty placeholder

                if need_new_completer:
                    refresher = CompletionRefresher()
                    refresher.refresh(executor, special=special,
                                      callbacks=(lambda c: swap_completer(c, url)))
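# In a Sublime Text plugin this check is normally driven by view events. The
# listener below is hypothetical wiring, not necessarily how this plugin
# registers it; on_activated_async and on_load_async are standard
# sublime_plugin.EventListener hooks.
import sublime_plugin

class PgcliCheckListener(sublime_plugin.EventListener):
    def on_activated_async(self, view):
        check_pgcli(view)

    def on_load_async(self, view):
        check_pgcli(view)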
def check_pgcli(view):
    """Check if a pgcli connection for the view exists, or request one"""
    if not is_sql(view):
        view.set_status('pgcli', '')
        return

    with executor_lock:
        buffer_id = view.buffer_id()
        if buffer_id not in executors:
            url = get(view, 'pgcli_url')
            if not url:
                view.set_status('pgcli', '')
                logger.debug('Empty pgcli url %r', url)
            else:
                # Make a new executor connection
                view.set_status('pgcli', 'Connecting: ' + url)
                logger.debug('Connecting to %r', url)
                try:
                    executor = new_executor(url)
                    view.set_status('pgcli', pgcli_id(executor))
                except Exception:
                    logger.error('Error connecting to pgcli')
                    logger.error('traceback: %s', traceback.format_exc())
                    executor = None
                    status = 'ERROR CONNECTING TO {}'.format(url)
                    view.set_status('pgcli', status)

                executors[buffer_id] = executor

                # Make sure we have a completer for the corresponding url
                with completer_lock:
                    need_new_completer = executor and url not in completers
                    if need_new_completer:
                        completers[url] = PGCompleter()  # Empty placeholder

                if need_new_completer:
                    refresher = CompletionRefresher()
                    refresher.refresh(executor, special=special,
                                      callbacks=(lambda c: swap_completer(c, url)))
def refresher():
    from pgcli.completion_refresher import CompletionRefresher
    return CompletionRefresher()
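# This reads like a pytest fixture (an @pytest.fixture decorator would
# normally sit above it). The test below is an illustrative consumer, not
# taken from the source; it only checks that the fixture yields a usable
# CompletionRefresher whose per-object-type refreshers registry is populated.
from pgcli.completion_refresher import CompletionRefresher

def test_refresher_constructs(refresher):
    assert isinstance(refresher, CompletionRefresher)
    assert len(CompletionRefresher.refreshers) > 0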