def handle_stats(self, user, channel, command, text):
    """Handle a 'stats' command: build a report for the channel and dispatch it.

    NOTE(review): `user`, `command` and `text` are unused here — presumably the
    signature matches a common command-handler shape; confirm against the caller.
    """
    print "command stats"  # debug trace (Python 2 print statement)
    # self.sc.rtm_send_message(channel, "Fetching stats...")
    # Resolve the raw channel id into a (name, type) pair.
    channel_name, channel_type = self.get_channel_info(channel)
    items = channel_report(channel_name, channel_type)
    # Only dispatch when the report actually produced items.
    if items:
        dispatch(items, channel_name, channel_type)
def runcommand(self):
    """Read a list of \0-terminated arguments, execute the command and
    write the return code to the result channel.
    """
    # Wire format: 4-byte big-endian unsigned length, then the argument blob.
    length = struct.unpack('>I', self._read(4))[0]
    if not length:
        args = []
    else:
        args = self._read(length).split('\0')

    # copy the uis so changes (e.g. --config or --verbose) don't
    # persist between requests
    copiedui = self.ui.copy()
    self.repo.baseui = copiedui
    self.repo.ui = self.repo.dirstate._ui = self.repoui.copy()
    # Drop cached repo and dirstate so each request sees fresh on-disk state.
    self.repo.invalidate()
    self.repo.invalidatedirstate()

    req = dispatch.request(args[:], copiedui, self.repo, self.cin,
                           self.cout, self.cerr)
    ret = dispatch.dispatch(req) or 0  # might return None

    # restore old cwd
    if '--cwd' in args:
        os.chdir(self.cwd)

    # Reply with the exit status as a big-endian signed int.
    self.cresult.write(struct.pack('>i', int(ret)))
def runcommand(self):
    """Read a list of \0-terminated arguments, execute the command and
    write the return code to the result channel.

    NOTE(review): unlike the sibling variant, this one does not call
    repo.invalidatedirstate() — confirm whether that is intentional.
    """
    # Wire format: 4-byte big-endian unsigned length, then the argument blob.
    length = struct.unpack('>I', self._read(4))[0]
    if not length:
        args = []
    else:
        args = self._read(length).split('\0')

    # copy the uis so changes (e.g. --config or --verbose) don't
    # persist between requests
    copiedui = self.ui.copy()
    self.repo.baseui = copiedui
    self.repo.ui = self.repo.dirstate._ui = self.repoui.copy()
    # Drop cached repo state so this request sees fresh on-disk data.
    self.repo.invalidate()

    req = dispatch.request(args[:], copiedui, self.repo, self.cin,
                           self.cout, self.cerr)
    ret = dispatch.dispatch(req) or 0  # might return None

    # restore old cwd
    if '--cwd' in args:
        os.chdir(self.cwd)

    # Reply with the exit status as a big-endian signed int.
    self.cresult.write(struct.pack('>i', int(ret)))
def _safe_serve(params, client_ip, client_hostname, cookie_data):
    """Serve one request with logging enabled and protocol errors reported
    to the client as JSON rather than raised.

    NOTE(review): this variant appears truncated — no response assembly or
    return is visible after the except clause; confirm against the full file.
    """
    # Note: Only logging imports here
    from config import WORK_DIR
    from logging import basicConfig as log_basic_config

    # Enable logging
    try:
        from config import LOG_LEVEL
        log_level = _convert_log_level(LOG_LEVEL)
    except ImportError:
        # LOG_LEVEL is optional in config; fall back to WARNING.
        from logging import WARNING as LOG_LEVEL_WARNING
        log_level = LOG_LEVEL_WARNING
    log_basic_config(filename=path_join(WORK_DIR, "server.log"),
                     level=log_level)

    # Do the necessary imports after enabling the logging, order critical
    try:
        from common import ProtocolError, ProtocolArgumentError, NoPrintJSONError
        from dispatch import dispatch
        from jsonwrap import dumps
        from message import Messager
        from session import get_session, init_session, close_session, NoSessionError, SessionStoreError
    except ImportError:
        # Note: Heisenbug trap for #612, remove after resolved
        from logging import critical as log_critical
        from sys import path as sys_path
        log_critical("Heisenbug trap reports: " + str(sys_path))
        raise

    init_session(client_ip, cookie_data=cookie_data)
    response_is_JSON = True
    try:
        # Unpack the arguments into something less obscure than the
        # Python FieldStorage object (part dictonary, part list, part FUBAR)
        http_args = DefaultNoneDict()
        for k in params:
            # Also take the opportunity to convert Strings into Unicode,
            # according to HTTP they should be UTF-8
            try:
                http_args[k] = unicode(params.getvalue(k), encoding="utf-8")
            except TypeError:
                Messager.error(
                    "protocol argument error: expected string argument %s, got %s" % (k, type(params.getvalue(k)))
                )
                raise ProtocolArgumentError

        # Dispatch the request
        json_dic = dispatch(http_args, client_ip, client_hostname)
    except ProtocolError, e:
        # Internal error, only reported to client not to log
        json_dic = {}
        e.json(json_dic)

        # Add a human-readable version of the error
        err_str = str(e)
        if err_str != "":
            Messager.error(err_str, duration=-1)
def _safe_serve(params, client_ip, client_hostname, cookie_data):
    """Serve one request with logging enabled and protocol errors reported
    to the client as JSON rather than raised.

    NOTE(review): this variant appears truncated — no response assembly or
    return is visible after the except clause; confirm against the full file.
    """
    # Note: Only logging imports here
    from config import WORK_DIR
    from logging import basicConfig as log_basic_config

    # Enable logging
    try:
        from config import LOG_LEVEL
        log_level = _convert_log_level(LOG_LEVEL)
    except ImportError:
        # LOG_LEVEL is optional in config; fall back to WARNING.
        from logging import WARNING as LOG_LEVEL_WARNING
        log_level = LOG_LEVEL_WARNING
    log_basic_config(filename=path_join(WORK_DIR, 'server.log'),
                     level=log_level)

    # Do the necessary imports after enabling the logging, order critical
    try:
        from common import ProtocolError, ProtocolArgumentError, NoPrintJSONError
        from dispatch import dispatch
        from jsonwrap import dumps
        from message import Messager
        from session import get_session, init_session, close_session, NoSessionError, SessionStoreError
    except ImportError:
        # Note: Heisenbug trap for #612, remove after resolved
        from logging import critical as log_critical
        from sys import path as sys_path
        log_critical('Heisenbug trap reports: ' + str(sys_path))
        raise

    init_session(client_ip, cookie_data=cookie_data)
    response_is_JSON = True
    try:
        # Unpack the arguments into something less obscure than the
        # Python FieldStorage object (part dictonary, part list, part FUBAR)
        http_args = DefaultNoneDict()
        for k in params:
            # Also take the opportunity to convert Strings into Unicode,
            # according to HTTP they should be UTF-8
            try:
                http_args[k] = unicode(params.getvalue(k), encoding='utf-8')
            except TypeError:
                Messager.error(
                    'protocol argument error: expected string argument %s, got %s' % (k, type(params.getvalue(k))))
                raise ProtocolArgumentError

        # Dispatch the request
        json_dic = dispatch(http_args, client_ip, client_hostname)
    except ProtocolError, e:
        # Internal error, only reported to client not to log
        json_dic = {}
        e.json(json_dic)

        # Add a human-readable version of the error
        err_str = str(e)
        if err_str != '':
            Messager.error(err_str, duration=-1)
def lambda_handler(event, context):
    """AWS Lambda entry point.

    Parses the API-Gateway-style 'querystring' field of *event* into a dict of
    key=value pairs and hands it to dispatch.dispatch().

    Args:
        event: Lambda event dict; only the 'querystring' key is consulted.
        context: Lambda context object (unused).

    Returns:
        The value returned by dispatch.dispatch(), or the literal u"None\n"
        when there is no querystring or any error occurs.
    """
    try:
        # Guard clause: nothing to do without a querystring.
        if 'querystring' not in event:
            return u"None\n"
        # Strip surrounding braces and turn "a=1,b=2" into "a=1 b=2" so the
        # regex below can pick out the individual pairs.
        event_parms = event['querystring'].strip('{}').replace(',', ' ')
        # Values may be double-quoted (possibly containing spaces) or bare.
        event_dict = dict(re.findall(r'(\S+)=(".*?"|\S+)', event_parms))
        return dispatch.dispatch(event_dict)
    except Exception:
        # Was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; narrowed to Exception. Errors are deliberately
        # reduced to the same "nothing" sentinel the caller expects.
        return u"None\n"
def _safe_serve(params, client_ip, client_hostname, cookie_data):
    """Serve one request with logging enabled; protocol errors are converted
    into a JSON error response instead of propagating.

    NOTE(review): no return statement is visible — the tail of this function
    may be truncated in this chunk; confirm against the full file.
    """
    # Note: Only logging imports here
    from config import WORK_DIR
    from logging import basicConfig as log_basic_config

    # Enable logging
    try:
        from config import LOG_LEVEL
        log_level = _convert_log_level(LOG_LEVEL)
    except ImportError:
        # LOG_LEVEL is optional in config; fall back to WARNING.
        from logging import WARNING as LOG_LEVEL_WARNING
        log_level = LOG_LEVEL_WARNING
    log_basic_config(filename=path_join(WORK_DIR, 'server.log'),
                     level=log_level)

    # Do the necessary imports after enabling the logging, order critical
    from common import ProtocolError, NoPrintJSONError
    from dispatch import dispatch
    from jsonwrap import dumps
    from message import Messager
    from session import get_session, init_session, close_session, NoSessionError

    init_session(client_ip, cookie_data=cookie_data)

    try:
        # Unpack the arguments into something less obscure than the
        # Python FieldStorage object (part dictonary, part list, part FUBAR)
        http_args = DefaultNoneDict()
        for k in params:
            # Also take the opportunity to convert Strings into Unicode,
            # according to HTTP they should be UTF-8
            http_args[k] = unicode(params.getvalue(k), encoding='utf-8')

        # Dispatch the request
        json_dic = dispatch(http_args, client_ip, client_hostname)
        response_data = ((JSON_HDR, ), dumps(Messager.output_json(json_dic)))
    except ProtocolError, e:
        # Internal error, only reported to client not to log
        json_dic = {}
        e.json(json_dic)

        # Add a human-readable version of the error
        err_str = str(e)
        if err_str != '':
            Messager.error(err_str)
        response_data = ((JSON_HDR, ), dumps(Messager.output_json(json_dic)))
def handle(self): status = None content = None try: status, content = dispatch.dispatch(self.get_request()) except Exception, e: # Log the exception logger.exception(e) # Return a 500 and the exception status = 500 exc = traceback.format_exc() content = json.dumps({ "messages": { "ERROR": ["%s" % (str(exc))] } })
def runcommand(self):
    """Read a list of \0-terminated arguments, execute the command and
    write the return code to the result channel.
    """
    # Wire format: 4-byte big-endian unsigned length, then the argument blob.
    length = struct.unpack('>I', self._read(4))[0]
    if not length:
        args = []
    else:
        args = self._read(length).split('\0')

    # copy the uis so changes (e.g. --config or --verbose) don't
    # persist between requests
    copiedui = self.ui.copy()
    uis = [copiedui]
    if self.repo:
        self.repo.baseui = copiedui
        # clone ui without using ui.copy because this is protected
        repoui = self.repoui.__class__(self.repoui)
        repoui.copy = copiedui.copy  # redo copy protection
        uis.append(repoui)
        self.repo.ui = self.repo.dirstate._ui = repoui
        # Drop every cached view so this request sees fresh on-disk state.
        self.repo.invalidateall()

    for ui in uis:
        # any kind of interaction must use server channels
        ui.setconfig('ui', 'nontty', 'true', 'commandserver')

    req = dispatch.request(args[:], copiedui, self.repo, self.cin,
                           self.cout, self.cerr)
    # Mask to 0..255 like a process exit status would be.
    ret = (dispatch.dispatch(req) or 0) & 255  # might return None

    # restore old cwd
    if '--cwd' in args:
        os.chdir(self.cwd)

    # Reply with the exit status as a big-endian signed int.
    self.cresult.write(struct.pack('>i', int(ret)))
def _safe_serve(params, client_ip, client_hostname):
    """Serve one request with logging enabled; protocol errors are converted
    into a JSON error response instead of propagating.

    NOTE(review): no return statement is visible — cookie_hdrs/response_data
    are presumably returned further down; confirm against the full file.
    """
    from common import ProtocolError, NoPrintJSONError
    from config import WORK_DIR
    from dispatch import dispatch
    from jsonwrap import dumps
    from logging import basicConfig as log_basic_config
    from message import Messager
    from session import get_session

    # Enable logging
    try:
        from config import LOG_LEVEL
        log_level = _convert_log_level(LOG_LEVEL)
    except ImportError:
        # LOG_LEVEL is optional in config; fall back to WARNING.
        from logging import WARNING as LOG_LEVEL_WARNING
        log_level = LOG_LEVEL_WARNING
    log_basic_config(filename=path_join(WORK_DIR, 'server.log'),
                     level=log_level)

    # Session information is now available
    cookie_hdrs = get_session().get_cookie_hdrs()

    try:
        # Dispatch the request
        json_dic = dispatch(params, client_ip, client_hostname)
        response_data = ((JSON_HDR, ), dumps(Messager.output_json(json_dic)))
    except ProtocolError, e:
        # Internal error, only reported to client not to log
        json_dic = {}
        e.json(json_dic)

        # Add a human-readable version of the error
        err_str = str(e)
        if err_str != '':
            Messager.error(err_str)
        response_data = ((JSON_HDR, ), dumps(Messager.output_json(json_dic)))
def main():
    """Parse the command line and hand the parsed arguments to dispatch()."""
    dispatch(parser.run())
def _safe_serve(params, client_ip, client_hostname, cookie_data):
    """Serve one request: enable logging, open a session, dispatch, and
    return (cookie_hdrs, response_data) with errors rendered as JSON.
    """
    # Note: Only logging imports here
    from config import WORK_DIR
    from logging import basicConfig as log_basic_config

    # Enable logging
    try:
        from config import LOG_LEVEL
        log_level = _convert_log_level(LOG_LEVEL)
    except ImportError:
        # LOG_LEVEL is optional in config; fall back to WARNING.
        from logging import WARNING as LOG_LEVEL_WARNING
        log_level = LOG_LEVEL_WARNING
    log_basic_config(filename=path_join(WORK_DIR, 'server.log'),
                     level=log_level)

    # Do the necessary imports after enabling the logging, order critical
    try:
        from common import ProtocolError, ProtocolArgumentError, NoPrintJSONError
        from dispatch import dispatch
        from jsonwrap import dumps
        from message import Messager
        from session import get_session, init_session, close_session, NoSessionError, SessionStoreError
    except ImportError:
        # Note: Heisenbug trap for #612, remove after resolved
        from logging import critical as log_critical
        from sys import path as sys_path
        log_critical('Heisenbug trap reports: ' + str(sys_path))
        raise

    init_session(client_ip, cookie_data=cookie_data)
    response_is_JSON = True
    try:
        # Unpack the arguments into something less obscure than the
        # Python FieldStorage object (part dictonary, part list, part FUBAR)
        http_args = DefaultNoneDict()
        for k in params:
            # Also take the opportunity to convert Strings into Unicode,
            # according to HTTP they should be UTF-8
            try:
                http_args[k] = params.getvalue(k)
            except TypeError as e:
                # Messager.error(e)
                Messager.error(
                    'protocol argument error: expected string argument %s, got %s' % (k, type(
                        params.getvalue(k))))
                raise ProtocolArgumentError

        # Dispatch the request
        json_dic = dispatch(http_args, client_ip, client_hostname)
    except ProtocolError as e:
        # Internal error, only reported to client not to log
        json_dic = {}
        e.json(json_dic)

        # Add a human-readable version of the error
        err_str = str(e)
        if err_str != '':
            Messager.error(err_str, duration=-1)
    except NoPrintJSONError as e:
        # Terrible hack to serve other things than JSON
        response_data = (e.hdrs, e.data)
        response_is_JSON = False

    # Get the potential cookie headers and close the session (if any)
    try:
        cookie_hdrs = get_session().cookie.hdrs()
        close_session()
    except SessionStoreError:
        Messager.error(
            "Failed to store cookie (missing write permission to brat work directory)?",
            -1)
    except NoSessionError:
        cookie_hdrs = None

    # JSON responses are serialized last so Messager output is included.
    if response_is_JSON:
        response_data = ((JSON_HDR, ), dumps(Messager.output_json(json_dic)))

    return (cookie_hdrs, response_data)
import pickle import os, sys import dispatch SAVE_PATH=os.path.expanduser('~/.tvsync.d/filesprocessed') save = open(SAVE_PATH, 'r') processed=pickle.load(save) save.close() for f in sys.argv[1:]: if f in processed: print 'Already processed ' + f continue path, filename = os.path.split(f) dest = os.path.join('/tmp', filename) print "Copying %s to %s" % (f, dest) #shutil.copyfile(f, dest) dispatch.dispatch(dest) processed.append(f) save = open(SAVE_PATH, 'w') pickle.dump(processed, save) save.close()
# Command-line interface: a required ROOT file path and an optional entry index.
parser = argparse.ArgumentParser(
    description="View events from LArSoft-generated ROOT file.")
parser.add_argument('file', type=str,
                    help="path to LArSoft-generated ROOT file")
parser.add_argument('--entry', type=int,
                    help="entry number in LArSoft-generated ROOT file")
args = parser.parse_args()

file_path = args.file
if not os.path.isfile(file_path):
    print("File {} does not exist".format(file_path))
    sys.exit(1)

# NOTE(review): dispatch() presumably wraps the ROOT file in a reader object
# exposing entries()/get_entry() — confirm against the dispatch module.
data = dispatch(file_path)

entry = 0
number_entries = data.entries()
# Validate a user-supplied entry index; default to the first entry (0).
if args.entry is not None:
    if args.entry < number_entries and args.entry >= 0:
        entry = args.entry
    else:
        print("Entry number outside of range [ 0, {} )".format(number_entries))
        sys.exit(1)

data.get_entry(entry)

# The ADC data is returned as a 1-dimensional array. Reshape the array
from commands.channel_report import channel_report
from config import *
from slacker import Slacker
from dispatch import dispatch

# Slack API client, authenticated with the token from config.
slacker = Slacker(SLACK_TOKEN)

if __name__ == "__main__":
    # Build and dispatch a report for every configured channel.
    for (channel_name, channel_type) in channels_config:
        report_items = channel_report(channel_name, channel_type)
        # Channels whose report came back empty are skipped silently.
        if report_items:
            dispatch(report_items, channel_name, channel_type)
import pickle import os, sys import dispatch SAVE_PATH = os.path.expanduser('~/.tvsync.d/filesprocessed') save = open(SAVE_PATH, 'r') processed = pickle.load(save) save.close() for f in sys.argv[1:]: if f in processed: print 'Already processed ' + f continue path, filename = os.path.split(f) dest = os.path.join('/tmp', filename) print "Copying %s to %s" % (f, dest) #shutil.copyfile(f, dest) dispatch.dispatch(dest) processed.append(f) save = open(SAVE_PATH, 'w') pickle.dump(processed, save) save.close()