Ejemplo n.º 1
0
def main() -> int:
    """Command-line entry point mimicking the sikuli CLI; runs a script."""
    # FIXME: sikuli CLI compat
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument("-d", "--debug", default=False, action="store_true")
    arg_parser.add_argument("-D", "--debugger", default=False, action="store_true")
    arg_parser.add_argument("-s", "--scale", type=float, default=1.0)
    arg_parser.add_argument("script")
    options = arg_parser.parse_args()

    if options.debug:
        # Verbose logging with source location on every record.
        logging.basicConfig(
            format="%(asctime)-15s %(filename)s:%(lineno)d %(message)s",
            level=logging.DEBUG,
        )
        logging.getLogger("sikuli").setLevel(logging.DEBUG)

    if options.debugger:
        # Best-effort: hook pudb's Ctrl-C handler when pudb is installed.
        try:
            import pudb
        except ImportError:
            pass
        else:
            pudb.set_interrupt_handler()

    Settings.Scale = options.scale
    run(options.script)
    return 0
Ejemplo n.º 2
0
def main(override_args=None, settings=None):
    """Use settings object to load config file and detect changes in wordpress."""
    if not settings:
        settings = SettingsNamespaceUser()
    settings.init_settings(override_args)
    settings.download_slave = True
    settings.report_sanitation = True
    settings.exclude_cols = [
        'Address', 'Home Address', 'Phone Numbers', 'Personal E-mail'
    ]
    settings.include_cols = ['ACT Role', 'WP Roles']
    if Registrar.DEBUG_TRACE:
        from pudb import set_interrupt_handler
        set_interrupt_handler()

    settings.init_dirs()
    populate_filter_settings(settings)

    parsers = populate_slave_parsers(ParserNamespace(), settings)
    if settings['download_slave'] or settings['do_filter']:
        export_slave_parser(parsers, settings)

    # Start from the native report columns, then drop excluded ones and
    # append any included ones that are missing.
    report_cols = settings.coldata_class.get_col_data_native('report')
    for excluded in (settings.get('exclude_cols') or []):
        report_cols.pop(excluded, None)
    for included in (settings.get('include_cols') or []):
        report_cols.setdefault(included, included)

    Registrar.register_message("slave parser: \n%s" %
                               SanitationUtils.coerce_unicode(
                                   parsers.slave.tabulate(cols=report_cols)))

    new_roles = process_role_anomalies(parsers, settings)
    print(pformat(new_roles))
    make_sql(new_roles)
Ejemplo n.º 3
0
def main():
    """Parse sikuli-style CLI options and execute the requested script."""
    # FIXME: sikuli CLI compat
    cli = argparse.ArgumentParser()
    cli.add_argument('-d', '--debug', action='store_true', default=False)
    cli.add_argument('-D', '--debugger', action='store_true', default=False)
    cli.add_argument('-s', '--scale', default=1.0, type=float)
    cli.add_argument('script')
    opts = cli.parse_args()

    if opts.debug:
        logging.getLogger("sikuli").setLevel(logging.DEBUG)

    if opts.debugger:
        # pudb is optional; silently continue without it.
        try:
            import pudb
            pudb.set_interrupt_handler()
        except ImportError:
            pass

    Settings.Scale = opts.scale
    run(opts.script)
    return 0
Ejemplo n.º 4
0
def main():
    """Entry point mirroring the sikuli command-line interface."""
    # FIXME: sikuli CLI compat
    parser = argparse.ArgumentParser()
    parser.add_argument('-d', '--debug', default=False, action='store_true')
    parser.add_argument('-D', '--debugger', default=False, action='store_true')
    parser.add_argument('-s', '--scale', type=float, default=1.0)
    parser.add_argument('script')
    ns = parser.parse_args()

    if ns.debug:
        logging.getLogger("sikuli").setLevel(logging.DEBUG)
    if ns.debugger:
        # Attach pudb's interrupt handler when available; skip otherwise.
        try:
            import pudb
        except ImportError:
            pass
        else:
            pudb.set_interrupt_handler()

    Settings.Scale = ns.scale
    run(ns.script)
    return 0
Ejemplo n.º 5
0
    def debug(self):
        """Drop into an interactive debugger at the caller's frame.

        Chooses the debugger backend from ``j.core.myenv.debugger``:
        ``"pudb"`` uses pudb (with an interrupt handler when on the main
        thread); ``"ipdb"`` uses ipdb, falling back to stdlib pdb when
        ipdb is not installed.
        """
        # disable console logging when entering interactive debugger
        j.core.myenv.log_console = False
        import sys

        if j.core.myenv.debugger == "pudb":
            import pudb
            import threading

            # NOTE: relies on pudb's private debugger accessor.
            dbg = pudb._get_debugger()

            # pudb's SIGINT handler can only be installed from the main thread.
            if isinstance(threading.current_thread(), threading._MainThread):
                pudb.set_interrupt_handler()

            # f_back: break in the caller, not inside this helper.
            dbg.set_trace(sys._getframe().f_back, paused=True)
        elif j.core.myenv.debugger == "ipdb":
            try:
                import ipdb as debugger
            except ImportError:
                import pdb

                debugger = pdb.Pdb()
            debugger.set_trace(sys._getframe().f_back)
Ejemplo n.º 6
0
    def here(self):
        """Break into pudb at the caller's frame, exposing the caller's source.

        Resolves the caller's source (a Blender text block, an on-disk file,
        or neither) and stashes it in the frame globals under
        ``_MODULE_SOURCE_CODE`` so the debugger can display it.

        Returns:
            A comment string when called from the interactive Blender
            console (which cannot be breakpointed); otherwise ``None``.
        """
        import inspect
        frame = inspect.currentframe().f_back

        if ('__file__' in frame.f_locals
                and frame.f_locals['__file__']):
            caller_file = frame.f_locals['__file__']
        else:
            caller_file = frame.f_code.co_filename
        script = path.basename(caller_file)
        # NOTE: bpy text-block keys may collide in the future, e.g.
        # bpy.data.texts.keys() => ['__init__.py', '__init__.py.001', ...]
        if script in bpy.data.texts:
            dbg_code = bpy.data.texts[script].as_string()
        elif path.isfile(caller_file):
            # Context manager so the file handle is closed promptly
            # (the original left it to the garbage collector).
            with open(caller_file) as src:
                dbg_code = src.read()
        elif caller_file == '<blender_console>':
            return '# cannot breakpoint interactive console...'
        else:
            dbg_code = '# cannot get code...'
        frame.f_globals['_MODULE_SOURCE_CODE'] = dbg_code

        dbg = _get_debugger()

        # Drop the inspect frame reference to avoid a reference cycle.
        del frame
        from pudb import set_interrupt_handler
        import threading
        # pudb's SIGINT handler may only be installed from the main thread.
        if isinstance(threading.current_thread(), threading._MainThread):
            set_interrupt_handler()
        dbg.set_trace(sys._getframe().f_back)
Ejemplo n.º 7
0
import os
import sys
import saga

import pudb; pudb.set_interrupt_handler()

USER_ID     = "merzky"
REMOTE_HOST = "ssh://gw68.quarry.iu.teragrid.org"
REMOTE_HOST = "fork://localhost"

def main () :
    try:

        for i in range(0, 1000):
            print "**************************** Job: %d *****************************" % i
            ctx = saga.Context("ssh")
            ctx.user_id = USER_ID

            session = saga.Session()
            session.add_context(ctx)

            # Create a job service object that represent a remote pbs cluster.
            # The keyword 'pbs' in the url scheme triggers the PBS adaptors
            # and '+ssh' enables PBS remote access via SSH.
            js = saga.job.Service("%s" % REMOTE_HOST, session=session) 

            # describe our job
            jd = saga.job.Description()

            # Next, we describe the job we want to run. A complete set of job
Ejemplo n.º 8
0
def set_trace():
    """Install pudb's interrupt handler, then break in the caller's caller."""
    set_interrupt_handler()
    debugger = _get_debugger()
    # Two frames up: skip this wrapper and its immediate caller.
    debugger.set_trace(sys._getframe().f_back.f_back)
Ejemplo n.º 9
0
import os
import sys
import radical.saga as saga

import pudb; pudb.set_interrupt_handler()

USER_ID     = "merzky"
REMOTE_HOST = "ssh://gw68.quarry.iu.teragrid.org"
REMOTE_HOST = "fork://localhost"

def main () :
    try:

        for i in range(0, 1000):
            print(("**************************** Job: %d *****************************" % i))
            ctx = saga.Context("ssh")
            ctx.user_id = USER_ID

            session = saga.Session()
            session.add_context(ctx)

            # Create a job service object that represent a remote pbs cluster.
            # The keyword 'pbs' in the url scheme triggers the PBS adaptors
            # and '+ssh' enables PBS remote access via SSH.
            js = saga.job.Service("%s" % REMOTE_HOST, session=session) 

            # describe our job
            jd = saga.job.Description()

            # Next, we describe the job we want to run. A complete set of job
Ejemplo n.º 10
0
 def set_trace(self, depth=1):
     """ wrap pudb.set_trace, dropping any IO capturing. """
     # Release stdout/stderr capture so pudb's full-screen UI can own the
     # terminal while the debugger is active.
     self.disable_io_capture()
     dbg = Debugger()
     # Install pudb's SIGINT handler before handing control to the debugger.
     pudb.set_interrupt_handler()
     # depth=1 breaks in the direct caller; larger values walk further up.
     dbg.set_trace(sys._getframe(depth))
Ejemplo n.º 11
0
import sys
import radical.saga as saga

import pudb
pudb.set_interrupt_handler()


def main():
    """Submit /bin/true jobs to a local fork job service in an endless loop."""
    try:
        count = 0
        service = saga.job.Service("fork://localhost/")

        while True:
            count = count + 1
            job = service.run_job("/bin/true")

            print("%5d : %-30s : %s" % (count, job.id, job.state))

            job.wait()

        # Unreachable: the loop above never breaks; kept for parity.
        return 0

    except saga.SagaException as ex:
        print("An exception occured: (%s) %s " % (ex.type, (str(ex))))
        print(" \n*** Backtrace:\n %s" % ex.traceback)
        return -1


if __name__ == "__main__":
Ejemplo n.º 12
0
""" Example application demonstrating job submission via bigjob 

    DON'T EDIT THIS FILE (UNLESS THERE IS A BUG)

    THIS FILE SHOULD NOT BE COMMITTED TO SVN WITH USER-SPECIFIC PATHS!
"""
import os
import time
import pdb
import sys
import saga

# Best-effort: pudb is an optional debugging aid; never let a missing or
# failing debugger setup abort the script.  Catch Exception rather than a
# bare ``except:`` so KeyboardInterrupt/SystemExit are not swallowed.
try:
    import pudb
    pudb.set_interrupt_handler()
except Exception:
    pass

# configuration
""" This variable defines the coordination system that is used by BigJob
    e.g. 
        advert://localhost (SAGA/Advert SQLITE)
        advert://advert.cct.lsu.edu:8080 (SAGA/Advert POSTGRESQL)
        redis://localhost:6379 (Redis at localhost)
        tcp://localhost (ZMQ)
        tcp://* (ZMQ - listening to all interfaces)
"""

#COORDINATION_URL = "advert://localhost/?dbtype=sqlite3"
#COORDINATION_URL = "tcp://*"
COORDINATION_URL = "redis://10.0.1.18:6379"
Ejemplo n.º 13
0
    def __init__(self, *args, **kwargs):
        """Set up the application: config, logging, debugger hook, database.

        Loads UPPERCASE settings from the optional ``config`` module, fills
        gaps from ``defaults``, wires file logging, optionally installs pudb
        on SIGINFO, and connects the database (from config or a ``--database``
        CLI option).
        """
        if 'root_path' not in kwargs:
            kwargs['root_path'] = str(pathlib.Path('.').absolute()) #TODO: pathlib probably isn't really needed here

        super(Poobrain, self).__init__(*args, **kwargs)

        self.cronjobs = []
        self.cli = flask.cli.FlaskGroup(create_app=lambda x:self)

        # Copy every UPPERCASE name from the user config module, if present.
        if config:
            for name in dir(config):
                if name.isupper():
                    self.config[name] = getattr(config, name)

        # Fill anything the user config didn't set from the defaults module.
        # BUGFIX: was ``name.isupper`` (missing call parentheses), which is
        # always truthy, so non-uppercase names from ``defaults`` leaked in.
        for name in dir(defaults):
            if name.isupper() and name not in self.config:
                self.config[name] = getattr(defaults, name)

        try:
            if self.config['LOGFILE']: # log to file, if configured
                log_handler = logging.handlers.WatchedFileHandler(self.config['LOGFILE'])
                if self.debug:
                    log_handler.setLevel(logging.DEBUG)
                else:
                    log_handler.setLevel(logging.WARNING)

                self.logger.addHandler(log_handler)

        except IOError as e:
            import grp

            user = os.getlogin()
            group = grp.getgrgid(os.getgid()).gr_name
            sys.exit("Somethings' fucky with the log file: %s. Current user/group is %s/%s." % (e,user,group))

        if self.debug:
            # show SQL queries
            peeweelog = logging.getLogger('peewee')
            peeweelog.setLevel(logging.DEBUG)
            peeweelog.addHandler(logging.StreamHandler())

            try:

                import signal
                import pudb
                # SIGINFO (^T) exists on BSD/macOS only; guard accordingly.
                if hasattr(signal, 'SIGINFO'):
                    pudb.set_interrupt_handler(signal.SIGINFO)
                    print("%s: a graphical debugger can be invoked with SIGINFO (^T)" % (self.name.upper()))

                self.debugger = pudb

            except ImportError:
                print("pudb not installed, falling back to pdb!")

                import signal # shouldn't be needed but feels hacky to leave out
                import pdb

        self.boxes = {}
        self.poobrain_path = os.path.dirname(os.path.realpath(__file__))
        self.site_path = os.getcwd()
        self.resource_extension_whitelist = ['css', 'scss', 'png', 'svg', 'ttf', 'otf', 'woff', 'js', 'jpg']

        self.scss_compiler = scss.Compiler(extensions=(SCSSCore,), root=pathlib.Path('/'), search_path=self.theme_paths)

        if 'DATABASE' in self.config:
            self.db = db_url.connect(self.config['DATABASE'], autocommit=True, autorollback=True)

        else:

            import optparse # Pretty f*****g ugly, but at least its in the stdlib. TODO: Can we *somehow* make this work with prompt in cli/__init__.py install command?
            parser = optparse.OptionParser()
            parser.add_option('--database', default="sqlite:///%s.db" % project_name, dest='database') # NOTE: If you change this, you'll also have to change the --database default in cli/__init__.py or else install will f**k up

            (options, _) = parser.parse_args()
            self.logger.warning("No DATABASE in config, using generated default or --database parameter '%s'. This should only happen before the install command is executed." % options.database)
            self.db = db_url.connect(options.database)

        self.add_url_rule('/theme/<path:resource>', 'serve_theme_resources', self.serve_theme_resources)

        # Make sure that each request has a proper database connection
        self.before_request(self.request_setup)
        self.teardown_request(self.request_teardown)


        # set up site and admin blueprints
        self.site = Pooprint('site', 'site')
        self.admin = Pooprint('admin', 'admin')
Ejemplo n.º 14
0
import os
import sys
import saga

from pudb import set_interrupt_handler; set_interrupt_handler()

USER_ID     = "merzky"
REMOTE_HOST = "ssh://gw68.quarry.iu.teragrid.org"
REMOTE_HOST = "fork://localhost"

def main () :
    try:

        for i in range(0, 1000):
            print "**************************** Job: %d *****************************" % i
            ctx = saga.Context("ssh")
            ctx.user_id = USER_ID

            session = saga.Session()
            session.add_context(ctx)

            # Create a job service object that represent a remote pbs cluster.
            # The keyword 'pbs' in the url scheme triggers the PBS adaptors
            # and '+ssh' enables PBS remote access via SSH.
            js = saga.job.Service("%s" % REMOTE_HOST, session=session) 

            # describe our job
            jd = saga.job.Description()

            # Next, we describe the job we want to run. A complete set of job