Example #1
def dump_database(database, path='/var/backups/postgres', filename='', format='plain', port=None):
    """
    Generate a dump of a database to a remote destination path.
    Example::

        import fabtools

        fabtools.postgres.dump_database('myapp', path='/var/backups/postgres', filename='myapp-backup.sql')
        # If no filename is specified, a date-stamped name is used: database-201312010000.sql
        fabtools.postgres.dump_database('myapp', path='/var/backups/postgres')
        # If no path is specified, the dump is saved under '/var/backups/postgres'
        fabtools.postgres.dump_database('myapp')
        # You can specify pg_dump's custom format (restorable with pg_restore)
        fabtools.postgres.dump_database('myapp', format='custom')

    """
    command_options = []
    port_option = _port_option(port)

    if port_option is not None:
        command_options.append(port_option)

    command_options = ' '.join(command_options)

    if fabtools.files.is_dir(path):
        if database_exists(database):
            date = _date.today().strftime("%Y%m%d%H%M")
            if not filename:
                filename = '%(database)s-%(date)s.sql' % locals()
            dest = quote(posixpath.join(path, filename))
            _run_as_pg('pg_dump %(command_options)s %(database)s --format=%(format)s --blobs --file=%(dest)s' % locals())
        else:
            abort('''Database does not exist: %(database)s''' % locals())
    else:
        abort('''Destination path does not exist: %(path)s''' % locals())
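Note: the `% locals()` idiom above fills %(name)s placeholders from the local namespace. A minimal, self-contained sketch of the same technique (the names here are illustrative, not the fabtools API):

import posixpath
from datetime import date as _date

def build_dump_command(database, path='/var/backups/postgres', fmt='plain'):
    # Derive a date-stamped filename, then interpolate locals by name.
    date = _date.today().strftime("%Y%m%d%H%M")
    filename = '%(database)s-%(date)s.sql' % locals()
    dest = posixpath.join(path, filename)
    return 'pg_dump %(database)s --format=%(fmt)s --blobs --file=%(dest)s' % locals()

print(build_dump_command('myapp'))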
Example #2
    def _import_transporter(self, transporter):
        """Imports transporter module and class, returns class.
        Input value can be:
        * a full/absolute module path, like
          "MyTransporterPackage.SomeTransporterClass"
        """
        transporter_class = None
        module = None
        alternatives = []
        default_prefix = 'cloud_sync_app.transporter.transporter_'
        # Try the name as given (a full module path), then with the default prefix.
        alternatives.append(transporter)
        if not transporter.startswith(default_prefix):
            alternatives.append('%s%s' % (default_prefix, transporter))
        for module_name in alternatives:
            try:
                module = __import__(module_name, globals(), locals(), ["TRANSPORTER_CLASS"], -1)
            except ImportError:
                import traceback
                traceback.print_exc()

        if not module:
            msg = "The transporter module '%s' could not be found." % transporter
            if len(alternatives) > 1:
                msg = '%s Tried (%s)' % (msg, ', '.join(alternatives))
            self.logger.error(msg)
        else:
            try:
                classname = module.TRANSPORTER_CLASS
                module = __import__(module_name, globals(), locals(), [classname])
                transporter_class = getattr(module, classname)
            except AttributeError:
                self.logger.error("The Transporter module '%s' was found, but its Transporter class '%s' could not be found." % (module_name, classname))
        return transporter_class
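Note: a modern sketch of the same two-step lookup using importlib instead of __import__ (the module layout and the TRANSPORTER_CLASS attribute are assumptions carried over from the example above):

import importlib

def load_transporter_class(name, default_prefix='cloud_sync_app.transporter.transporter_'):
    # Try the name as given (full module path), then with the default prefix.
    for module_name in (name, default_prefix + name):
        try:
            module = importlib.import_module(module_name)
        except ImportError:
            continue
        # The module advertises its class via a TRANSPORTER_CLASS attribute.
        classname = getattr(module, 'TRANSPORTER_CLASS', None)
        return getattr(module, classname) if classname else None
    return None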
Example #3
def Get_Marshall(FeatureName, Enable = False, Instances = None, RunIntervalInSeconds = 300, Tag = "default", Format = "tsv", FilterType = "filter_changetracking", Configuration = None):
    arg_names = list(locals().keys())
    init_vars(Instances)
    
    CurrentInstances = Instances
    FeatureName = protocol.MI_String(FeatureName)
    Enable = protocol.MI_Boolean(Enable)
    for instance in CurrentInstances:
        instance['InstanceName'] = protocol.MI_String(instance['InstanceName'])
        instance['ClassName'] = protocol.MI_String(instance['ClassName'])
        if instance['Properties'] is not None and len(instance['Properties']):
            instance['Properties'] = protocol.MI_StringA(instance['Properties'])
    Instances = protocol.MI_InstanceA(CurrentInstances)
    RunIntervalInSeconds = protocol.MI_Uint64(RunIntervalInSeconds)
    Tag = protocol.MI_String(Tag)
    Format = protocol.MI_String(Format)
    FilterType = protocol.MI_String(FilterType)

    if Configuration is None:
        Configuration = []
    if len(Configuration):
        Configuration = protocol.MI_StringA(Configuration)

    retd = {}
    ld = locals()
    for k in arg_names:
        retd[k] = ld[k]
    return 0, retd
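Note: both Get_Marshall examples on this page rely on the same idiom: snapshot the parameter names with locals() on entry, rebind the values, then collect them back by name at the end. A minimal sketch with hypothetical names (str/bool/int stand in for the protocol.MI_* wrappers):

def get_marshall(name, enabled=False, interval=300):
    # Snapshot the parameter names before any other locals are created.
    arg_names = list(locals().keys())

    # Rebind the values in place (stand-ins for the protocol.MI_* wrappers).
    name = str(name)
    enabled = bool(enabled)
    interval = int(interval)

    # Collect the (possibly rebound) values back into a dict by name.
    ld = locals()
    return 0, {k: ld[k] for k in arg_names}

print(get_marshall('ChangeTracking'))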
Example #4
def root(module=None):
  """
  Sets the "root module" for helipad.
  
  The root module's directory is used as the definition of where relative paths
  are based off of.
  """
  global _ROOT_MODULE
  
  if module is None:
    return _ROOT_MODULE
  
  if isinstance(module, basestring):
    components = module.split('.')
    module = __import__(module, globals(), locals(), [], -1)
    
    for component in components[1:]:
      module = getattr(module, component)
  
  if inspect.ismodule(module):
    _ROOT_MODULE = module
  else:
    raise ValueError, "Invalid module: %s" % module
  
  # Return a reference to this module (so that we can string together method calls)
  return __import__('helipad', globals(), locals(), [], -1)
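Note: the split/getattr walk is needed because __import__('a.b.c') returns the top-level package a, not the leaf module. importlib.import_module returns the leaf directly; a sketch:

import importlib

def resolve_module(dotted_path):
    # Returns the leaf module, e.g. resolve_module('json.decoder') -> json.decoder
    return importlib.import_module(dotted_path)

print(resolve_module('json.decoder').__name__)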
Example #5
File: views.py Project: Ni-Cd/urtest
def project_detail(request, pk, page=''):
    print pk, page
    try:
        project = Project.objects.get(pk=pk)
    except Project.DoesNotExist:
        raise Http404
    testers = project.testers.all()
    bugs = project.bugs.all()
    if page is None:
        return render_to_response('project_detail.html', locals(),
                                  context_instance=RequestContext(request))
    elif page == '/bugs':
        return render_to_response('project_bugs.html', locals(),
            context_instance=RequestContext(request))
    elif page == '/testers':
        return render_to_response('project_testers.html', locals(),
            context_instance=RequestContext(request))
    elif page == '/enlist':
        user = request.user
        if not user.is_authenticated():
            raise PermissionDenied
        # The currently logged-in user must be a tester
        if not hasattr(user, 'tester'):
            raise PermissionDenied
        tester = user.tester
        project.add_tester(tester)
        return HttpResponseRedirect('/projects/%i/testers' % project.pk)
    else:
        raise Http404
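Note: handing locals() to render_to_response exposes every local (project, testers, bugs, request, ...) to the template at once. A framework-free sketch of why that works:

def render(template, context):
    # str.format(**mapping) looks up each {name} in the mapping,
    # just as a Django template looks up names in its context.
    return template.format(**context)

def project_detail(pk):
    project = 'Project #%s' % pk
    bug_count = 3
    return render('{project} has {bug_count} open bugs', locals())

print(project_detail(7))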
Example #6
def helpModule(module):
    """
    Print the first text chunk for each established method in a module.

    module: module to write output from, format "folder.folder.module"
    """

    # split module.x.y into "from module.x import y" 
    t = module.split(".")
    importName = "from " + ".".join(t[:-1]) + " import " + t[-1]

    # dynamically do the import
    exec(importName)
    moduleName = t[-1]

    # extract all local functions from the imported module, 
    # referenced here by locals()[moduleName]
    functions = [locals()[moduleName].__dict__.get(a) for a in dir(locals()[moduleName]) if isinstance(locals()[moduleName].__dict__.get(a), types.FunctionType)]

    # pull all the doc strings out from said functions and print the top chunk
    for function in functions:
        base = function.func_doc
        base = base.replace("\t", " ")
        doc = "".join(base.split("\n\n")[0].strip().split("\n"))
        # print function.func_name + " : " + doc
        print helpers.formatLong(function.func_name, doc)
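Note: the exec plus locals()[moduleName] indirection is fragile (exec does not reliably update locals() inside a function on Python 3). The same introspection with importlib and inspect, as a sketch:

import importlib
import inspect

def help_module(dotted_name):
    module = importlib.import_module(dotted_name)
    for name, func in inspect.getmembers(module, inspect.isfunction):
        doc = inspect.getdoc(func) or ''
        # Keep only the first paragraph of each docstring.
        print(name, ':', doc.split('\n\n')[0].replace('\n', ' '))

help_module('textwrap')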
Example #7
    def check_server_disallowed(self):
        """
        Check if server domain name or IP is disallowed in settings.py.
        """
        hostname = self.netloc_parts[2].lower()
        if (hasattr(settings, 'DISALLOWED_DOMAIN_LIST') and
            settings.DISALLOWED_DOMAIN_LIST):
            for domain in settings.DISALLOWED_DOMAIN_LIST:
                if hostname == domain or hostname.endswith('.' + domain):
                    raise ValidationError(unicode(
                        _("Domain name %(domain)s is disallowed.") % locals()))
        try:
            ip = socket.gethostbyname(hostname)
        except socket.error:
            raise ValidationError(unicode(
                _("Could not resolve IP address for %(hostname)s.") %
                locals()))
        if (not hasattr(settings, 'DISALLOWED_SERVER_IP_LIST') or
            not settings.DISALLOWED_SERVER_IP_LIST):
            return
        server = long_ip(ip)
        # print 'server', server, dotted_ip(server), ip
        for disallowed in settings.DISALLOWED_SERVER_IP_LIST:
            disallowed = disallowed.strip()
            if disallowed == '' or disallowed.startswith('#'):
                continue
            mask = bit_mask(32)
            if '/' in disallowed:
                disallowed, bits = disallowed.split('/', 1)
                mask = slash_mask(int(bits))
            identifier = long_ip(disallowed) & mask
            masked = server & mask
            if masked == identifier:
                raise ValidationError(unicode(
                    _("Server IP address %(ip)s is disallowed.") % locals()))
Example #8
    def get(self, section, key, **kwargs):
        section = str(section).lower()
        key = str(key).lower()

        d = self.defaults

        # first check environment variables
        option = self._get_env_var_option(section, key)
        if option:
            return option

        # ...then the config file
        if self.has_option(section, key):
            return expand_env_var(
                ConfigParser.get(self, section, key, **kwargs))

        # ...then commands
        option = self._get_cmd_option(section, key)
        if option:
            return option

        # ...then the defaults
        if section in d and key in d[section]:
            return expand_env_var(d[section][key])

        else:
            logging.warn("section/key [{section}/{key}] not found "
                         "in config".format(**locals()))

            raise AirflowConfigException(
                "section/key [{section}/{key}] not found "
                "in config".format(**locals()))
Example #9
    def _reset_database(self, conn_string):
        conn_pieces = urlparse.urlparse(conn_string)
        if conn_string.startswith('sqlite'):
            # We can just delete the SQLite database, which is
            # the easiest and cleanest solution
            db_path = conn_pieces.path.strip('/')
            if db_path and os.path.exists(db_path):
                os.unlink(db_path)
            # No need to recreate the SQLite DB. SQLite will
            # create it for us if it's not there...
        elif conn_string.startswith('mysql'):
            # We can execute the MySQL client to destroy and re-create
            # the MYSQL database, which is easier and less error-prone
            # than using SQLAlchemy to do this via MetaData...trust me.
            database = conn_pieces.path.strip('/')
            loc_pieces = conn_pieces.netloc.split('@')
            host = loc_pieces[1]
            auth_pieces = loc_pieces[0].split(':')
            user = auth_pieces[0]
            password = ""
            if len(auth_pieces) > 1:
                if auth_pieces[1].strip():
                 password = "******" % auth_pieces[1]
         sql = ("drop database if exists %(database)s; "
                "create database %(database)s;") % locals()
         cmd = ("mysql -u%(user)s %(password)s -h%(host)s "
                "-e\"%(sql)s\"") % locals()
         exitcode, out, err = execute(cmd)
         self.assertEqual(0, exitcode)
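Note: a sketch of how the connection URL is taken apart here, written with the Python 3 urllib.parse names:

from urllib.parse import urlparse

pieces = urlparse('mysql://user:secret@dbhost/openstack_citest')
database = pieces.path.strip('/')
auth, host = pieces.netloc.split('@')
user, _, password = auth.partition(':')
print(database, host, user, password)  # openstack_citest dbhost user secret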
Example #10
	def remove_file(self, path):
		path, name = self.sanitize(path)

		metadata = self._get_metadata(path)

		file_path = os.path.join(path, name)
		if not os.path.exists(file_path):
			return
		if not os.path.isfile(file_path):
			raise RuntimeError("{name} in {path} is not a file".format(**locals()))

		try:
			os.remove(file_path)
		except Exception as e:
			raise RuntimeError("Could not delete {name} in {path}".format(**locals()), e)

		if name in metadata:
			if "hash" in metadata[name]:
				hash = metadata[name]["hash"]
				for m in metadata.values():
					if not "links" in m:
						continue
					for link in m["links"]:
						if "rel" in link and "hash" in link and (link["rel"] == "model" or link["rel"] == "machinecode") and link["hash"] == hash:
							m["links"].remove(link)
			del metadata[name]
			self._save_metadata(path, metadata)
Example #11
def Get_Marshall(GetScript, SetScript, TestScript, User, Group):
    arg_names = list(locals().keys())
    if GetScript is None:
        GetScript = ''
    if SetScript is None:
        SetScript = ''
    if TestScript is None:
        TestScript = ''
    if User is None:
        User = ''
    if Group is None:
        Group = ''

    retval = 0
    (retval, GetScript, SetScript, TestScript, User, Group,
     Result) = Get(GetScript, SetScript, TestScript, User, Group)

    GetScript = protocol.MI_String(GetScript)
    SetScript = protocol.MI_String(SetScript)
    TestScript = protocol.MI_String(TestScript)
    User = protocol.MI_String(User)
    Group = protocol.MI_String(Group)
    Result = protocol.MI_String(Result)
    arg_names.append('Result')

    retd = {}
    ld = locals()
    for k in arg_names:
        retd[k] = ld[k]
    return retval, retd
Example #12
    def init_host_floating_ips(self):
        """Configures floating ips owned by host."""

        admin_context = context.get_admin_context()
        try:
            floating_ips = self.db.floating_ip_get_all_by_host(admin_context,
                                                               self.host)
        except exception.NotFound:
            return

        for floating_ip in floating_ips:
            fixed_ip_id = floating_ip.get('fixed_ip_id')
            if fixed_ip_id:
                try:
                    fixed_ip = self.db.fixed_ip_get(admin_context,
                                                    fixed_ip_id,
                                                    get_network=True)
                except exception.FixedIpNotFound:
                    msg = _('Fixed ip %(fixed_ip_id)s not found') % locals()
                    LOG.debug(msg)
                    continue
                interface = CONF.public_interface or floating_ip['interface']
                try:
                    self.l3driver.add_floating_ip(floating_ip['address'],
                                                  fixed_ip['address'],
                                                  interface,
                                                  fixed_ip['network'])
                except exception.ProcessExecutionError:
                    LOG.debug(_('Interface %(interface)s not found'), locals())
                    raise exception.NoFloatingIpInterface(interface=interface)
Example #13
File: base.py Project: AnveshMora/django
    def autoinc_sql(self, table, column):
        # To simulate auto-incrementing primary keys in Oracle, we have to
        # create a sequence and a trigger.
        sq_name = self._get_sequence_name(table)
        tr_name = self._get_trigger_name(table)
        tbl_name = self.quote_name(table)
        col_name = self.quote_name(column)
        sequence_sql = """
DECLARE
    i INTEGER;
BEGIN
    SELECT COUNT(*) INTO i FROM USER_CATALOG
        WHERE TABLE_NAME = '%(sq_name)s' AND TABLE_TYPE = 'SEQUENCE';
    IF i = 0 THEN
        EXECUTE IMMEDIATE 'CREATE SEQUENCE "%(sq_name)s"';
    END IF;
END;
/""" % locals()
        trigger_sql = """
CREATE OR REPLACE TRIGGER "%(tr_name)s"
BEFORE INSERT ON %(tbl_name)s
FOR EACH ROW
WHEN (new.%(col_name)s IS NULL)
    BEGIN
        SELECT "%(sq_name)s".nextval
        INTO :new.%(col_name)s FROM dual;
    END;
/""" % locals()
        return sequence_sql, trigger_sql
Example #14
def buildIndirectMaps(infile, outfile, track):
    '''build a map between query and target, linking
    via intermediate targets.'''

    to_cluster = True

    path = P.asList(PARAMS["%s_path" % track])

    E.info("path=%s" % str(path))

    statement = []

    for stage, part in enumerate(path):
        filename = part + ".over.psl.gz"
        if not os.path.exists(filename):
            raise ValueError(
                "required file %s for %s (stage %i) does not exist." % (filename, outfile, stage))

        if stage == 0:
            statement.append( '''gunzip < %(filename)s''' % locals() )
        else:
            statement.append( '''
               pslMap stdin <(gunzip < %(filename)s) stdout
            ''' % locals() )

    statement.append("gzip")

    statement = " | ".join(statement) + " > %(outfile)s " % locals()

    P.run()
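Note: a stripped-down sketch of the pipeline assembly above, building one shell command from per-stage fragments with % locals():

def build_pipeline(path, outfile):
    statement = []
    for stage, part in enumerate(path):
        filename = part + ".over.psl.gz"
        if stage == 0:
            statement.append("gunzip < %(filename)s" % locals())
        else:
            statement.append("pslMap stdin <(gunzip < %(filename)s) stdout" % locals())
    statement.append("gzip")
    return " | ".join(statement) + (" > %(outfile)s" % locals())

print(build_pipeline(["hg19ToMm10", "mm10ToRn5"], "out.psl.gz"))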
Example #15
    def migrate_instance_start(self, context, instance_uuid,
                               floating_addresses,
                               rxtx_factor=None, project_id=None,
                               source=None, dest=None):
        # We only care if floating_addresses are provided and we're
        # switching hosts
        if not floating_addresses or (source and source == dest):
            return

        LOG.info(_("Starting migration network for instance"
                   " %(instance_uuid)s"), locals())
        for address in floating_addresses:
            floating_ip = self.db.floating_ip_get_by_address(context,
                                                             address)

            if self._is_stale_floating_ip_address(context, floating_ip):
                LOG.warn(_("Floating ip address |%(address)s| no longer "
                           "belongs to instance %(instance_uuid)s. Will not"
                           "migrate it "), locals())
                continue

            interface = CONF.public_interface or floating_ip['interface']
            fixed_ip = self.db.fixed_ip_get(context,
                                            floating_ip['fixed_ip_id'],
                                            get_network=True)
            self.l3driver.remove_floating_ip(floating_ip['address'],
                                             fixed_ip['address'],
                                             interface,
                                             fixed_ip['network'])

            # NOTE(wenjianhn): Make sure this address will not be bound to the
            # public interface when nova-network restarts on the dest compute node
            self.db.floating_ip_update(context,
                                       floating_ip['address'],
                                       {'host': None})
Example #16
def is_installed(pkg_name):
    """
    Check if a Portage package is installed.
    """
    manager = MANAGER

    with settings(hide("running", "stdout", "stderr", "warnings"),
                  warn_only=True):
        res = run("%(manager)s -p %(pkg_name)s" % locals())

    if not res.succeeded:
        return False

    if pkg_name.startswith("="):
        # The =, which is required when installing/checking for absolute
        # versions, will not appear in the results.
        pkg_name = pkg_name[1:]

    match = re.search(
            r"\n\[ebuild +(?P<code>\w+) *\] .*%(pkg_name)s.*" % locals(),
            res.stdout)
    if match and match.groupdict()["code"] in ("U", "R"):
        return True
    else:
        return False
Example #17
    def handle(self, *args, **options):
        # Remove plugins which yield an error upon importing
        for plugin in Plugin.objects.all():
            try:
                plugin.get_class()
            except ImportError:
                while True:
                    ans = raw_input('Error on importing {plugin.class_name}. Remove? [y/N]'.format(**locals()))
                    ans = ans.strip().lower()

                    if ans in ("", "n"):
                        break
                    elif ans == "y":
                        plugin.delete()
                        break

        # Look for plugins in plugin directory
        plugin_files = os.listdir(os.path.dirname(plugins.__file__))
        plugin_paths = (os.path.join(os.path.dirname(plugins.__file__), p) for p in plugin_files)
        detected_plugins = get_plugins(filter(is_module, plugin_paths))
        new_plugins = (p for p in detected_plugins if not Plugin.objects.filter(class_name=get_qualified_name(p)).exists())

        for p in new_plugins:
            log.info("Found new plugin: {p}".format(**locals()))

            plugin = Plugin.objects.create(
                label=p.name(),
                class_name=get_qualified_name(p),
                plugin_type=get_plugin_type(p)
            )

            log.info("Created new plugin: {plugin.class_name}".format(**locals()))
Example #18
File: models.py Project: 343max/NewsBlur
    def send_new_premium_email(self, force=False):
        subs = UserSubscription.objects.filter(user=self.user)
        message = """Woohoo!
        
User: %(user)s
Feeds: %(feeds)s

Sincerely,
NewsBlur""" % {'user': self.user.username, 'feeds': subs.count()}
        mail_admins('New premium account', message, fail_silently=True)
        
        if not self.user.email or not self.send_emails:
            return
        
        sent_email, created = MSentEmail.objects.get_or_create(receiver_user_id=self.user.pk,
                                                               email_type='new_premium')
        
        if not created and not force:
            return
        
        user    = self.user
        text    = render_to_string('mail/email_new_premium.txt', locals())
        html    = render_to_string('mail/email_new_premium.xhtml', locals())
        subject = "Thanks for going premium on NewsBlur!"
        msg     = EmailMultiAlternatives(subject, text, 
                                         from_email='NewsBlur <%s>' % settings.HELLO_EMAIL,
                                         to=['%s <%s>' % (user, user.email)])
        msg.attach_alternative(html, "text/html")
        msg.send(fail_silently=True)
        
        logging.user(self.user, "~BB~FM~SBSending email for new premium: %s" % self.user.email)
Example #19
    def __init__(self, browser, remote, capabilities, user_agent, environment,
                 ignore_ssl, fresh_instance):
        data = {}
        for arg in inspect.getargspec(HolmiumConfig.__init__).args[1:]:
            setattr(self, arg, locals()[arg])
            data[arg] = locals()[arg]
        super(HolmiumConfig, self).__init__(**data)
Example #20
    def getargs(self,moduleName,className,method) :
        '''
          This will return the list of arguments in a method of python module of class.
          It accepts method list as an argument.
        '''
        print "Message : Argument list is being obtained for each method"
        methodArgsDict = {}
        if className == None:
            moduleList = moduleName.split(".")
            for index,name in enumerate(method) :
                Module = __import__(moduleList[len(moduleList) -1], globals(), locals(), [moduleList[len(moduleList) -2]], -1)
                try :
                    names = vars(Module)[name]
                except KeyError:
                    print "Message : method '" + name + "'does not exists,Continued with including it. "
                    return False
                argumentList = inspect.getargspec(names) #inspect.getargvalues(name)
                methodArgsDict[name] = argumentList[0]
        else :
            moduleList = moduleName.split(".")
            for index,name in enumerate(method) :
                Module = __import__(moduleList[len(moduleList) - 1], globals(), locals(), [className], -1)
                Class = getattr(Module, className)
                try :
                    names = vars(Class)[name]
                except KeyError :
                    print "Message : method '" + name + "'does not exists,Continued with include it."
                    return False

                argumentList = inspect.getargspec(names) #inspect.getargvalues(name)
                methodArgsDict[name] = argumentList[0]

        return methodArgsDict
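Note: inspect.getargspec, used here and in Example #19, was removed in Python 3.11; inspect.signature gives the same argument names. A sketch:

import inspect

def get_arg_names(func):
    # Parameter names, in declaration order.
    return list(inspect.signature(func).parameters)

def example(a, b, c=1):
    pass

print(get_arg_names(example))  # ['a', 'b', 'c']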
Example #21
def get_ebook_count(field, key, publish_year=None):
    ebook_count_db = get_ebook_count_db()

    # Handle the case where the ebook count DB parameters are not specified in the config.
    if ebook_count_db is None:
        return 0

    def db_lookup(field, key, publish_year=None):
        sql = "select sum(ebook_count) as num from subjects where field=$field and key=$key"
        if publish_year:
            if isinstance(publish_year, (tuple, list)):
                sql += " and publish_year between $y1 and $y2"
                (y1, y2) = publish_year
            else:
                sql += " and publish_year=$publish_year"
        return list(ebook_count_db.query(sql, vars=locals()))[0].num

    total = db_lookup(field, key, publish_year)
    if total:
        return total
    elif publish_year:
        sql = "select ebook_count as num from subjects where field=$field and key=$key limit 1"
        if len(list(ebook_count_db.query(sql, vars=locals()))) != 0:
            return 0
    years = find_ebook_count(field, key)
    if not years:
        return 0
    for year, count in sorted(years.iteritems()):
        ebook_count_db.query(
            "insert into subjects (field, key, publish_year, ebook_count) values ($field, $key, $year, $count)",
            vars=locals(),
        )

    return db_lookup(field, key, publish_year)
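Note: web.py's vars=locals() binds $field-style placeholders from the local namespace. The same idea with sqlite3's named parameters, as a self-contained sketch (the schema is illustrative):

import sqlite3

def ebook_count(conn, field, key):
    # :name placeholders are resolved by name from the mapping -- here, locals().
    sql = "select sum(ebook_count) as num from subjects where field = :field and key = :key"
    return conn.execute(sql, locals()).fetchone()[0]

conn = sqlite3.connect(':memory:')
conn.execute("create table subjects (field text, key text, ebook_count int)")
conn.execute("insert into subjects values ('subject', 'art', 42)")
print(ebook_count(conn, 'subject', 'art'))  # 42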
Example #22
	def Generate(self):
		#self.ParentNamespace = "Base"
		#self.Namespace = "Base"
		encoding = sys.getfilesystemencoding()
		path = self.path
		if hasattr(path,"decode"): # this is python2. Otherwise this is unicode already
			path = path.decode(encoding)
		exportName = self.export.Name
		if hasattr(exportName,"decode"): # this is python2. Otherwise this is unicode already
			exportName = exportName.decode(encoding)
		dirname = self.dirname
		if hasattr(dirname,"decode"): # this is python2. Otherwise this is unicode already
			dirname = dirname.decode(encoding)
		print("TemplateClassPyExport",path + exportName)
		# Imp.cpp must not exist, neither in path nor in dirname
		if(not os.path.exists(path + exportName + "Imp.cpp")):
			if(not os.path.exists(dirname + exportName + "Imp.cpp")):
				file = open(path + exportName + "Imp.cpp",'wb')
				generateBase.generateTools.replace(self.TemplateImplement,locals(),file)
				file.close()
		file = open(path + exportName + ".cpp",'wb')
		generateBase.generateTools.replace(self.TemplateModule,locals(),file)
		file.close()
		file = open(path + exportName + ".h",'wb')
		generateBase.generateTools.replace(self.TemplateHeader,locals(),file)
		#file.write( generateBase.generateTools.replace(self.Template,locals()))
		file.close()
Example #23
    def __init__(self, host, name, settings=None):
        Component.__init__(self, yadtshell.settings.SERVICE, host, name)

        settings = settings or {}
        self.needs_services = []
        self.needs_artefacts = []
        self.needs.add(host.uri)

        for k in settings:
            setattr(self, k, settings[k])
        extras = settings.get('extra', [])
        for k in extras:
            if hasattr(self, k):
                getattr(self, k).extend(extras[k])
            else:
                setattr(self, k, extras[k])

        for n in self.needs_services:
            if n.startswith(yadtshell.settings.SERVICE):
                self.needs.add(n % locals())
            else:
                self.needs.add(yadtshell.uri.create(
                    yadtshell.settings.SERVICE, host.host, n % locals()))
        for n in self.needs_artefacts:
            self.needs.add(yadtshell.uri.create(yadtshell.settings.ARTEFACT,
                                                host.host,
                                                n % locals() + "/" + yadtshell.settings.CURRENT))

        self.state = yadtshell.settings.STATE_DESCRIPTIONS.get(
            settings.get('state'),
            yadtshell.settings.UNKNOWN)
        self.script = None
Example #24
    def test_reusable_scope(self):

        scope = let(a="tacos", b="soup", c="cake")
        d = "godzilla"

        with scope:
            self.assertEquals(a, "tacos")
            self.assertEquals(b, "soup")
            self.assertEquals(c, "cake")
            self.assertEquals(d, "godzilla")

            a = "fajita"
            b = "stew"
            d = "mothra"

        self.assertFalse("a" in locals())
        self.assertFalse("b" in locals())
        self.assertFalse("c" in locals())
        self.assertTrue("d" in locals())

        self.assertFalse("a" in globals())
        self.assertFalse("b" in globals())
        self.assertFalse("c" in globals())
        self.assertFalse("d" in globals())

        self.assertEquals(d, "mothra")

        with scope:
            self.assertEquals(a, "fajita")
            self.assertEquals(b, "stew")
            self.assertEquals(c, "cake")
            self.assertEquals(d, "mothra")
Example #25
def add_param_writer_object(name, base_state, typ, var_type = "", var_index = None, root_node = False):
    var_type1 = "_" + var_type if var_type != "" else ""
    if isinstance(var_index, Number):
        var_index = "uint32_t(" + str(var_index) +")"
    set_varient_index = "serialize(_out, " + var_index +");\n" if var_index is not None else ""
    ret = Template(reindent(4,"""
        ${base_state}__${name}$var_type1 start_${name}$var_type() && {
            $set_varient_index
            return { _out, std::move(_state) };
        }
    """)).substitute(locals())
    if not is_stub(typ) and is_local_type(typ):
        ret += add_param_writer_basic_type(name, base_state, typ, var_type, var_index, root_node)
    if is_stub(typ):
        set_command = "_state.f.end(_out);" if var_type != "" else ""
        return_command = "{ _out, std::move(_state._parent) }" if var_type != "" and not root_node else "{ _out, std::move(_state) }"
        ret += Template(reindent(4, """
            template<typename Serializer>
            after_${base_state}__${name} ${name}$var_type(Serializer&& f) && {
                $set_varient_index
                f(writer_of_$typ(_out));
                $set_command
                return $return_command;
            }""")).substitute(locals())
    return ret
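Note: string.Template.substitute(locals()) is the $-placeholder cousin of '%' formatting with locals(). A small sketch of how these writer snippets get their names filled in (names illustrative):

from string import Template

def render_start_method(name, base_state):
    signature = "%s__%s" % (base_state, name)
    return Template("""
    after_$signature start_$name() && {
        return { _out, std::move(_state) };
    }""").substitute(locals())

print(render_start_method('header', 'writer'))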
Example #26
def convert_to_kallisto(data):
    files = dd.get_input_sequence_files(data)
    if len(files) == 2:
        fq1, fq2 = files
    else:
        fq1, fq2 = files[0], None
    samplename = dd.get_sample_name(data)
    work_dir = dd.get_work_dir(data)
    kallisto_dir = os.path.join(work_dir, "kallisto", samplename, "fastq")
    out_file = os.path.join(kallisto_dir, "barcodes.batch")
    umis = config_utils.get_program("umis", dd.get_config(data))
    if file_exists(out_file):
        return out_file
    if dd.get_minimum_barcode_depth(data):
        cb_histogram = os.path.join(work_dir, "umis", samplename, "cb-histogram.txt")
        cb_cutoff = dd.get_minimum_barcode_depth(data)
        cb_options = "--cb_histogram {cb_histogram} --cb_cutoff {cb_cutoff}"
        cb_options = cb_options.format(**locals())
    else:
        cb_options = ""
    cmd = ("{umis} kallisto {cb_options} --out_dir {tx_kallisto_dir} {fq1}")
    with file_transaction(data, kallisto_dir) as tx_kallisto_dir:
        safe_makedir(tx_kallisto_dir)
        message = ("Transforming %s to Kallisto singlecell format. "
                   % fq1)
        do.run(cmd.format(**locals()), message)
    return out_file
Example #27
    def create_hatch(self, hatch):
        sidelen = 72
        if self._hatches.has_key(hatch):
            return self._hatches[hatch]
        name = 'H%d' % len(self._hatches)
        self._pswriter.write("""\
  << /PatternType 1
     /PaintType 2
     /TilingType 2
     /BBox[0 0 %(sidelen)d %(sidelen)d]
     /XStep %(sidelen)d
     /YStep %(sidelen)d

     /PaintProc {
        pop
        0 setlinewidth
""" % locals())
        self._pswriter.write(
            self._convert_path(Path.hatch(hatch), Affine2D().scale(72.0)))
        self._pswriter.write("""\
          stroke
     } bind
   >>
   matrix
   makepattern
   /%(name)s exch def
""" % locals())
        self._hatches[hatch] = name
        return name
Example #28
def _run_scalpel_paired(align_bams, items, ref_file, assoc_files,
                          region=None, out_file=None):
    """Detect indels with Scalpel.

    This is used for paired tumor / normal samples.
    """
    config = items[0]["config"]
    if out_file is None:
        out_file = "%s-paired-variants.vcf.gz" % os.path.splitext(align_bams[0])[0]
    if not utils.file_exists(out_file):
        with file_transaction(config, out_file) as tx_out_file:
            paired = get_paired_bams(align_bams, items)
            if not paired.normal_bam:
                ann_file = _run_scalpel_caller(align_bams, items, ref_file,
                                               assoc_files, region, out_file)
                return ann_file
            vcfstreamsort = config_utils.get_program("vcfstreamsort", config)
            perl_exports = utils.get_perl_exports(os.path.dirname(tx_out_file))
            tmp_path = "%s-scalpel-work" % utils.splitext_plus(out_file)[0]
            db_file = os.path.join(tmp_path, "main", "somatic.db")
            if not os.path.exists(db_file + ".dir"):
                if os.path.exists(tmp_path):
                    utils.remove_safe(tmp_path)
                opts = " ".join(_scalpel_options_from_config(items, config, out_file, region, tmp_path))
                opts += " --ref {}".format(ref_file)
                opts += " --dir %s" % tmp_path
                # calling
                cl = ("{perl_exports} && "
                      "scalpel-discovery --somatic {opts} --tumor {paired.tumor_bam} --normal {paired.normal_bam}")
                do.run(cl.format(**locals()), "Genotyping paired variants with Scalpel", {})
            # filtering to adjust input parameters
            bed_opts = " ".join(_scalpel_bed_file_opts(items, config, out_file, region, tmp_path))
            use_defaults = True
            if use_defaults:
                scalpel_tmp_file = os.path.join(tmp_path, "main/somatic.indel.vcf")
            # Uses default filters but can tweak min-alt-count-tumor and min-phred-fisher
            # to swap precision for sensitivity
            else:
                scalpel_tmp_file = os.path.join(tmp_path, "main/somatic-indel-filter.vcf.gz")
                with file_transaction(config, scalpel_tmp_file) as tx_indel_file:
                    cmd = ("{perl_exports} && "
                           "scalpel-export --somatic {bed_opts} --ref {ref_file} --db {db_file} "
                           "--min-alt-count-tumor 5 --min-phred-fisher 10 --min-vaf-tumor 0.1 "
                           "| bgzip -c > {tx_indel_file}")
                    do.run(cmd.format(**locals()), "Scalpel somatic indel filter", {})
            scalpel_tmp_file = bgzip_and_index(scalpel_tmp_file, config)
            scalpel_tmp_file_common = bgzip_and_index(os.path.join(tmp_path, "main/common.indel.vcf"), config)
            compress_cmd = "| bgzip -c" if out_file.endswith("gz") else ""
            bcftools_cmd_chi2 = get_scalpel_bcftools_filter_expression("chi2", config)
            bcftools_cmd_common = get_scalpel_bcftools_filter_expression("reject", config)
            fix_ambig = vcfutils.fix_ambiguous_cl()
            cl2 = ("vcfcat <({bcftools_cmd_chi2} {scalpel_tmp_file}) "
                   "<({bcftools_cmd_common} {scalpel_tmp_file_common}) | "
                   " {fix_ambig} | {vcfstreamsort} {compress_cmd} > {tx_out_file}")
            do.run(cl2.format(**locals()), "Finalising Scalpel variants", {})

    ann_file = annotation.annotate_nongatk_vcf(out_file, align_bams,
                                               assoc_files.get("dbsnp"), ref_file,
                                               config)
    return ann_file
Example #29
    def head(self, **KWS):

        ## CHEETAH: generated from #def head at line 5, col 1.
        trans = KWS.get("trans")
        if (not trans and not self._CHEETAH__isBuffering and not callable(self.transaction)):
            trans = self.transaction # is None unless self.awake() was called
        if not trans:
            trans = DummyTransaction()
            _dummyTrans = True
        else: _dummyTrans = False
        write = trans.response().write
        SL = self._CHEETAH__searchList
        _filter = self._CHEETAH__currentFilter
        
        ########################################
        ## START - generated method body
        
        write(u'''<script type="text/javascript" src="http://maps.google.com/maps/api/js?sensor=false&language=pt-br"></script>
<script type="text/javascript">
  function initialize() {
    var hotel = new google.maps.LatLng(''')
        _v = VFSL([locals()]+SL+[globals(), builtin],"site.latitude",True) # u'$site.latitude' on line 9, col 40
        if _v is not None: write(_filter(_v, rawExpr=u'$site.latitude')) # from line 9, col 40.
        write(u''', ''')
        _v = VFSL([locals()]+SL+[globals(), builtin],"site.longitude",True) # u'$site.longitude' on line 9, col 56
        if _v is not None: write(_filter(_v, rawExpr=u'$site.longitude')) # from line 9, col 56.
        write(u''');
    var myOptions = {
      zoom: 16,
      center: hotel,
      mapTypeId: google.maps.MapTypeId.ROADMAP
    };
    var map = new google.maps.Map(document.getElementById("map_canvas"), myOptions);
    var hotelMarker = new google.maps.Marker({
      position: hotel, 
      map: map, 
      title:"''')
        _v = VFSL([locals()]+SL+[globals(), builtin],"site.name",True) # u'$site.name' on line 19, col 14
        if _v is not None: write(_filter(_v, rawExpr=u'$site.name')) # from line 19, col 14.
        write(u'''"
\t});
\t
\tvar content = "S\xedtio Tur\xedstico: ''')
        _v = VFSL([locals()]+SL+[globals(), builtin],"site.name",True) # u'$site.name' on line 22, col 34
        if _v is not None: write(_filter(_v, rawExpr=u'$site.name')) # from line 22, col 34.
        write(u'''<br>"
\tvar infoWindow = new google.maps.InfoWindow({content: content});
\tinfoWindow.setPosition(hotel);
    infoWindow.open(map);
  }

</script>
''')
        
        ########################################
        ## END - generated method body
        
        return _dummyTrans and trans.response().getvalue() or ""
Example #30
    def getmethods(self,modulePath,Class) :
        '''
         This will get the list of methods in given module or class.
         It accepts the module path and class name. If there is no
         class name then it has be mentioned as None.
        '''
        methodList = []
        moduleList = modulePath.split("/")
        newModule = ".".join([moduleList[len(moduleList) - 2],moduleList[len(moduleList) - 1]])
        print "Message : Method list is being obatined , Please wait ..."
        try :
            if Class :
                Module = __import__(moduleList[len(moduleList) - 1], globals(), locals(), [Class], -1)
                ClassList = [x.__name__ for x in Module.__dict__.values() if inspect.isclass(x)]
                self.ClassList = ClassList
                Class = vars(Module)[Class]
                methodList = [x.__name__ for x in Class.__dict__.values() if inspect.isfunction(x)]
            else :
                Module = __import__(moduleList[len(moduleList) - 1], globals(), locals(),[moduleList[len(moduleList) - 2]], -1)
                methodList = [x.__name__ for x in Module.__dict__.values() if inspect.isfunction(x)]
                ClassList = [x.__name__ for x in Module.__dict__.values() if inspect.isclass(x)]
                self.ClassList = ClassList
        except :
            print "Error : " +str(sys.exc_info()[1])


        self.method = methodList
        return self.method
Example #31
def investimento():
    projeto = db.projeto(request.args(0, cast=int))
    return locals()
Example #32
    def get_server_events_with_http_info(self, server_uuid, **kwargs):  # noqa: E501
        """Server Events  # noqa: E501

        https://api.gridscale.io/objects/servers/{server_uuid}/events To retrieve the event logs of the specified server.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_server_events_with_http_info(server_uuid, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str server_uuid: The events endpoint for a server, returns a list of all the events on that object. You can alternatively find them as Audit Logs in your expert panel. (required)
        :return: EventResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = ['server_uuid']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_server_events" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'server_uuid' is set
        if ('server_uuid' not in params or
                params['server_uuid'] is None):
            raise ValueError("Missing the required parameter `server_uuid` when calling `get_server_events`")  # noqa: E501

        collection_formats = {}

        path_params = {}
        if 'server_uuid' in params:
            path_params['server_uuid'] = params['server_uuid']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['API_Token', 'User_UUID']  # noqa: E501

        return self.api_client.call_api(
            '/objects/servers/{server_uuid}/events', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='EventResponse',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
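Note: Swagger/OpenAPI-generated clients like this one use params = locals() to fold the declared arguments and **kwargs into a single dict before validating it. The pattern in isolation, as a sketch:

def call(server_uuid, **kwargs):
    params = dict(locals())              # {'server_uuid': ..., 'kwargs': {...}}
    all_params = {'server_uuid', 'async_req'}
    for key, val in params.pop('kwargs').items():
        if key not in all_params:
            raise TypeError("Got an unexpected keyword argument '%s'" % key)
        params[key] = val
    return params

print(call('abc-123', async_req=True))   # {'server_uuid': 'abc-123', 'async_req': True}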
Example #33
test.write(
    "wrapper.py", """import os
import sys
open('%s', 'wb').write("wrapper.py\\n")
os.system(" ".join(sys.argv[1:]))
""" % test.workpath('wrapper.out').replace('\\', '\\\\'))

test.write(
    'SConstruct', """
foo = Environment()
link = foo.subst("$LINK")
bar = Environment(LINK = r'%(_python_)s wrapper.py ' + link)
foo.Program(target = 'foo', source = 'foo.c')
bar.Program(target = 'bar', source = 'bar.c')
""" % locals())

test.write(
    'foo.c', r"""
#include <stdio.h>
#include <stdlib.h>
int
main(int argc, char *argv[])
{
        argv[argc++] = "--";
        printf("foo.c\n");
        exit (0);
}
""")

test.write(
Example #34
def listar_projetos():
    a = auth.user
    rows = db(db.projeto.created_by == auth.user).select()
    return locals()
Example #35
def prestacao_cobr():
    projeto = db.projeto(request.args(0, cast=int))
    return locals()
Example #36
def acesso_geral_projeto():
    projeto = db.projeto(request.args(0, cast=int))
    return locals()
Example #37
def alterar_dados_cobranca():
    projeto = db.projeto(request.args(0, cast=int))

    db.projeto.id.readable = False
    db.projeto.id.writable = False


    db.projeto.nome.readable = False
    db.projeto.nome.writable = False

    db.projeto.nome_chefe.readable = False
    db.projeto.nome_chefe.writable = False

    db.projeto.primeira_cidade.readable = False
    db.projeto.primeira_cidade.writable = False

    db.projeto.vale_saida_chefe.readable = False
    db.projeto.vale_saida_chefe.writable = False

    db.projeto.adiantamento_dinh_venda.readable = False
    db.projeto.adiantamento_dinh_venda.writable = False

    db.projeto.comissao_chefe_venda.readable = False
    db.projeto.comissao_chefe_venda.writable = False

    db.projeto.data_saida_venda.readable = False
    db.projeto.data_saida_venda.writable = False
    
    db.projeto.devolucao_dinh_venda.readable = False
    db.projeto.devolucao_dinh_venda.writable = False

    db.projeto.data_chegada_venda.readable = False
    db.projeto.data_chegada_venda.writable = False

    db.projeto.recebido_chegada_venda.readable = False
    db.projeto.recebido_chegada_venda.writable = False

    db.projeto.comissao_chefe_cobranca.readable = False
    db.projeto.comissao_chefe_cobranca.writable = False

    db.projeto.recebido_chegada_cobrac.readable = True
    db.projeto.recebido_chegada_cobrac.writable = True


    form = SQLFORM(db.projeto, request.args(0, cast=int))
    if form.process().accepted:
        session.flash = 'Projeto atualizado'
        redirect(URL('prestacao_cobr', args=projeto.id))
    elif form.errors:
        response.flash = 'Erros no formulário!'
    else:
        if not response.flash:
            response.flash = 'Preencha o formulário!'
    return locals()
Example #38
File: asmooth.py Project: peroju/gammapy
    def run(self, counts, background=None, exposure=None):
        """
        Run image smoothing.

        Parameters
        ----------
        counts : `~gammapy.maps.WcsNDMap`
            Counts map
        background : `~gammapy.maps.WcsNDMap`
            Background map
        exposure : `~gammapy.maps.WcsNDMap`
            Exposure map

        Returns
        -------
        images : dict of `~gammapy.maps.WcsNDMap`
            Smoothed images; keys are:
                * 'counts'
                * 'background'
                * 'flux' (optional)
                * 'scales'
                * 'significance'.
        """
        pixel_scale = counts.geom.pixel_scales.mean()
        kernels = self.kernels(pixel_scale)

        cubes = {}
        cubes["counts"] = scale_cube(counts.data, kernels)

        if background is not None:
            cubes["background"] = scale_cube(background.data, kernels)
        else:
            # TODO: Estimate background with asmooth method
            raise ValueError("Background estimation required.")

        if exposure is not None:
            flux = (counts.data - background.data) / exposure.data
            cubes["flux"] = scale_cube(flux, kernels)

        cubes["significance"] = self._significance_cube(
            cubes, method=self.parameters["method"]
        )

        smoothed = self._reduce_cubes(cubes, kernels)

        result = {}

        for key in ["counts", "background", "scale", "significance"]:
            data = smoothed[key]

            # set remaining pixels with significance < threshold to mean value
            if key in ["counts", "background"]:
                mask = np.isnan(data)
                data[mask] = np.mean(locals()[key].data[mask])
            result[key] = WcsNDMap(counts.geom, data)

        if exposure is not None:
            data = smoothed["flux"]
            mask = np.isnan(data)
            data[mask] = np.mean(flux[mask])
            result["flux"] = WcsNDMap(counts.geom, data)

        return result
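Note: the np.mean(locals()[key].data[mask]) line maps a string key back to the function argument of the same name. That trick in isolation, as a sketch:

import numpy as np

def fill_nans_from(key, counts, background):
    data = np.array([1.0, np.nan, 3.0])
    # locals()[key] picks the argument whose name matches the string key.
    source = locals()[key]
    data[np.isnan(data)] = np.mean(source)
    return data

print(fill_nans_from('background', counts=np.ones(3), background=np.zeros(3)))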
Example #39

if __name__ == '__main__':
    warnings.filterwarnings("ignore", module="google.auth")
    if sys.version_info[0] == 2:
        print("Must be using Python 3")
        exit()

    ctf_path = None
    if sys.version_info[1] < 9:
        ctf_path = os.getcwd() + '/' + os.path.dirname(__file__)
    else:
        ctf_path = os.path.dirname(__file__)

    os.chdir(ctf_path)

    # python3 thunder.py action [args]
    args = sys.argv[1:]
    if len(args) == 0:
        action = 'help'
    else:
        action = args[0]

    try:
        func = locals()[action]
        if not callable(func):
            raise KeyError
    except KeyError:
        func = help
    func(*args[1:])
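Note: locals()[action] works as a dispatch table here only because at module level locals() and globals() are the same dict. A sketch of the same CLI dispatch written explicitly against globals() (the subcommands are illustrative):

import sys

def build(*args):
    print("building", args)

def deploy(*args):
    print("deploying", args)

def help(*args):
    print("usage: thunder.py [build|deploy] ...")

if __name__ == '__main__':
    action = sys.argv[1] if len(sys.argv) > 1 else 'help'
    # globals() maps action names to functions; fall back to help.
    func = globals().get(action)
    if not callable(func):
        func = help
    func(*sys.argv[2:])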
Example #40
def lucros_prej():
    projeto = db.projeto(request.args(0, cast=int))
    return locals()
Example #41
    def _stencil_wrapper(self, result, sigret, return_type, typemap, calltypes,
                         *args):
        # Overall approach:
        # 1) Construct a string containing a function definition for the stencil function
        #    that will execute the stencil kernel.  This function definition includes a
        #    unique stencil function name, the parameters to the stencil kernel, loop
        #    nests across the dimensions of the input array.  Those loop nests use the
        #    computed stencil kernel size so as not to try to compute elements where
        #    elements outside the bounds of the input array would be needed.
        # 2) The but of the loop nest in this new function is a special sentinel
        #    assignment.
        # 3) Get the IR of this new function.
        # 4) Split the block containing the sentinel assignment and remove the sentinel
        #    assignment.  Insert the stencil kernel IR into the stencil function IR
        #    after label and variable renaming of the stencil kernel IR to prevent
        #    conflicts with the stencil function IR.
        # 5) Compile the combined stencil function IR + stencil kernel IR into existence.

        # Copy the kernel so that our changes for this callsite
        # won't affect other callsites.
        (kernel_copy,
         copy_calltypes) = self.copy_ir_with_calltypes(self.kernel_ir,
                                                       calltypes)
        # The stencil kernel body becomes the body of a loop, for which args aren't needed.
        ir_utils.remove_args(kernel_copy.blocks)
        first_arg = kernel_copy.arg_names[0]

        in_cps, out_cps = ir_utils.copy_propagate(kernel_copy.blocks, typemap)
        name_var_table = ir_utils.get_name_var_table(kernel_copy.blocks)
        ir_utils.apply_copy_propagate(kernel_copy.blocks, in_cps,
                                      name_var_table, typemap, copy_calltypes)

        if "out" in name_var_table:
            raise ValueError(
                "Cannot use the reserved word 'out' in stencil kernels.")

        sentinel_name = ir_utils.get_unused_var_name("__sentinel__",
                                                     name_var_table)
        if config.DEBUG_ARRAY_OPT >= 1:
            print("name_var_table", name_var_table, sentinel_name)

        the_array = args[0]

        if config.DEBUG_ARRAY_OPT >= 1:
            print("_stencil_wrapper", return_type, return_type.dtype,
                  type(return_type.dtype), args)
            ir_utils.dump_blocks(kernel_copy.blocks)

        # We generate a Numba function to execute this stencil and here
        # create the unique name of this function.
        stencil_func_name = "__numba_stencil_%s_%s" % (hex(
            id(the_array)).replace("-", "_"), self.id)

        # We will put a loop nest in the generated function for each
        # dimension in the input array.  Here we create the name for
        # the index variable for each dimension.  index0, index1, ...
        index_vars = []
        for i in range(the_array.ndim):
            index_var_name = ir_utils.get_unused_var_name(
                "index" + str(i), name_var_table)
            index_vars += [index_var_name]

        # Create extra signature for out and neighborhood.
        out_name = ir_utils.get_unused_var_name("out", name_var_table)
        neighborhood_name = ir_utils.get_unused_var_name(
            "neighborhood", name_var_table)
        sig_extra = ""
        if result is not None:
            sig_extra += ", {}=None".format(out_name)
        if "neighborhood" in dict(self.kws):
            sig_extra += ", {}=None".format(neighborhood_name)

        # Get a list of the standard indexed array names.
        standard_indexed = self.options.get("standard_indexing", [])

        if first_arg in standard_indexed:
            raise ValueError("The first argument to a stencil kernel must "
                             "use relative indexing, not standard indexing.")

        if len(set(standard_indexed) - set(kernel_copy.arg_names)) != 0:
            raise ValueError("Standard indexing requested for an array name "
                             "not present in the stencil kernel definition.")

        # Add index variables to getitems in the IR to transition the accesses
        # in the kernel from relative to regular Python indexing.  Returns the
        # computed size of the stencil kernel and a list of the relatively indexed
        # arrays.
        kernel_size, relatively_indexed = self.add_indices_to_kernel(
            kernel_copy, index_vars, the_array.ndim, self.neighborhood,
            standard_indexed, typemap, copy_calltypes)
        if self.neighborhood is None:
            self.neighborhood = kernel_size

        if config.DEBUG_ARRAY_OPT >= 1:
            print("After add_indices_to_kernel")
            ir_utils.dump_blocks(kernel_copy.blocks)

        # The return in the stencil kernel becomes a setitem for that
        # particular point in the iteration space.
        ret_blocks = self.replace_return_with_setitem(kernel_copy.blocks,
                                                      index_vars, out_name)

        if config.DEBUG_ARRAY_OPT >= 1:
            print("After replace_return_with_setitem", ret_blocks)
            ir_utils.dump_blocks(kernel_copy.blocks)

        # Start to form the new function to execute the stencil kernel.
        func_text = "def {}({}{}):\n".format(stencil_func_name,
                                             ",".join(kernel_copy.arg_names),
                                             sig_extra)

        # Get loop ranges for each dimension, which could be either int
        # or variable. In the latter case we'll use the extra neighborhood
        # argument to the function.
        ranges = []
        for i in range(the_array.ndim):
            if isinstance(kernel_size[i][0], int):
                lo = kernel_size[i][0]
                hi = kernel_size[i][1]
            else:
                lo = "{}[{}][0]".format(neighborhood_name, i)
                hi = "{}[{}][1]".format(neighborhood_name, i)
            ranges.append((lo, hi))

        # If there are more than one relatively indexed arrays, add a call to
        # a function that will raise an error if any of the relatively indexed
        # arrays are of different size than the first input array.
        if len(relatively_indexed) > 1:
            func_text += "    raise_if_incompatible_array_sizes(" + first_arg
            for other_array in relatively_indexed:
                if other_array != first_arg:
                    func_text += "," + other_array
            func_text += ")\n"

        # Get the shape of the first input array.
        shape_name = ir_utils.get_unused_var_name("full_shape", name_var_table)
        func_text += "    {} = {}.shape\n".format(shape_name, first_arg)

        # If we have to allocate the output array (the out argument was not used),
        # then use numpy.full if the user specified a cval stencil decorator option,
        # or np.zeros if they didn't, to allocate the array.
        if result is None:
            return_type_name = numpy_support.as_dtype(
                return_type.dtype).type.__name__
            if "cval" in self.options:
                cval = self.options["cval"]
                if return_type.dtype != typing.typeof.typeof(cval):
                    raise ValueError(
                        "cval type does not match stencil return type.")
                out_init = "{} = np.full({}, {}, dtype=np.{})\n".format(
                    out_name, shape_name, cval, return_type_name)
            else:
                out_init = "{} = np.zeros({}, dtype=np.{})\n".format(
                    out_name, shape_name, return_type_name)
            func_text += "    " + out_init
        else:  # result is present, if cval is set then use it
            if "cval" in self.options:
                cval = self.options["cval"]
                cval_ty = typing.typeof.typeof(cval)
                if not self._typingctx.can_convert(cval_ty, return_type.dtype):
                    msg = "cval type does not match stencil return type."
                    raise ValueError(msg)
                out_init = "{}[:] = {}\n".format(out_name, cval)
                func_text += "    " + out_init

        offset = 1
        # Add the loop nests to the new function.
        for i in range(the_array.ndim):
            for j in range(offset):
                func_text += "    "
            # ranges[i][0] is the minimum index used in the i'th dimension,
            # but minimums greater than 0 don't preclude any entry in the
            # array.  So, take the minimum of 0 and the minimum index found in
            # the kernel; this will be a negative number (potentially -0).
            # Then apply unary - to get the positive offset in this dimension
            # whose use is precluded.
            # ranges[i][1] is the maximum of 0 and the observed maximum index
            # in this dimension, because negative maximums would not cause us
            # to preclude any entry in the array from being used.
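            # A hypothetical worked example: if dimension i uses relative
            # indices -2..1 on an array of extent n, the generated loop is
            # range(-min(0, -2), n - max(0, 1)) == range(2, n - 1), skipping
            # the border elements the kernel would index out of bounds.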
            func_text += ("for {} in range(-min(0,{}),"
                          "{}[{}]-max(0,{})):\n").format(
                              index_vars[i], ranges[i][0], shape_name, i,
                              ranges[i][1])
            offset += 1

        for j in range(offset):
            func_text += "    "
        # Put a sentinel in the code so we can locate it in the IR.  We will
        # remove this sentinel assignment and replace it with the IR for the
        # stencil kernel body.
        func_text += "{} = 0\n".format(sentinel_name)
        func_text += "    return {}\n".format(out_name)

        if config.DEBUG_ARRAY_OPT >= 1:
            print("new stencil func text")
            print(func_text)

        # Force the new stencil function into existence by executing the
        # generated source in a scratch namespace, then look it up by name.
        exec_scope = dict(globals())
        exec_(func_text, exec_scope)
        stencil_func = exec_scope[stencil_func_name]
        if sigret is not None:
            pysig = utils.pysignature(stencil_func)
            sigret.pysig = pysig
        # Get the IR for the newly created stencil function.
        stencil_ir = compiler.run_frontend(stencil_func)
        ir_utils.remove_dels(stencil_ir.blocks)

        # rename all variables in stencil_ir afresh
        var_table = ir_utils.get_name_var_table(stencil_ir.blocks)
        new_var_dict = {}
        reserved_names = (
            [sentinel_name, out_name, neighborhood_name, shape_name] +
            kernel_copy.arg_names + index_vars)
        for name, var in var_table.items():
            if name not in reserved_names:
                new_var_dict[name] = ir_utils.mk_unique_var(name)
        ir_utils.replace_var_names(stencil_ir.blocks, new_var_dict)

        stencil_stub_last_label = max(stencil_ir.blocks.keys()) + 1

        # Shift labels in the kernel copy so they are guaranteed unique
        # and don't conflict with any labels in the stencil_ir.
        kernel_copy.blocks = ir_utils.add_offset_to_labels(
            kernel_copy.blocks, stencil_stub_last_label)
        new_label = max(kernel_copy.blocks.keys()) + 1
        # Adjust ret_blocks to account for addition of the offset.
        ret_blocks = [x + stencil_stub_last_label for x in ret_blocks]

        if config.DEBUG_ARRAY_OPT >= 1:
            print("ret_blocks w/ offsets", ret_blocks, stencil_stub_last_label)
            print("before replace sentinel stencil_ir")
            ir_utils.dump_blocks(stencil_ir.blocks)
            print("before replace sentinel kernel_copy")
            ir_utils.dump_blocks(kernel_copy.blocks)

        # Search all the blocks in the stencil outline for the sentinel.
        for label, block in stencil_ir.blocks.items():
            for i, inst in enumerate(block.body):
                if (isinstance(inst, ir.Assign)
                        and inst.target.name == sentinel_name):
                    # We found the sentinel assignment.
                    loc = inst.loc
                    scope = block.scope
                    # split block across __sentinel__
                    # A new block is allocated for the statements prior to the
                    # sentinel but the new block maintains the current block
                    # label.
                    prev_block = ir.Block(scope, loc)
                    prev_block.body = block.body[:i]
                    # The current block is used for statements after sentinel.
                    block.body = block.body[i + 1:]
                    # But the current block gets a new label.
                    body_first_label = min(kernel_copy.blocks.keys())

                    # The previous block jumps to the minimum labelled block of
                    # the kernel body.
                    prev_block.append(ir.Jump(body_first_label, loc))
                    # Add all the kernel body blocks to the stencil
                    # function's IR.
                    for (l, b) in kernel_copy.blocks.items():
                        stencil_ir.blocks[l] = b

                    stencil_ir.blocks[new_label] = block
                    stencil_ir.blocks[label] = prev_block
                    # Add a jump from all the blocks that previously contained
                    # a return in the stencil kernel to the block
                    # containing statements after the sentinel.
                    for ret_block in ret_blocks:
                        stencil_ir.blocks[ret_block].append(
                            ir.Jump(new_label, loc))
                    break
            else:
                continue
            break

        stencil_ir.blocks = ir_utils.rename_labels(stencil_ir.blocks)
        ir_utils.remove_dels(stencil_ir.blocks)

        assert isinstance(the_array, types.Type)
        array_types = args

        new_stencil_param_types = list(array_types)

        if config.DEBUG_ARRAY_OPT >= 1:
            print("new_stencil_param_types", new_stencil_param_types)
            ir_utils.dump_blocks(stencil_ir.blocks)

        # Compile the combined stencil function with the replaced loop
        # body in it.
        new_func = compiler.compile_ir(self._typingctx, self._targetctx,
                                       stencil_ir, new_stencil_param_types,
                                       None, compiler.DEFAULT_FLAGS, {})
        return new_func
Example #42
This script manages import of various SCS binary data files into Blender.
"""

import bpy
import os
# from bpy_extras import io_utils
# from . import import_pmg
# from . import import_pit
# from . import import_pic
# from . import import_pip
# from . import import_pis
# from . import import_pia
# from . import io_utils
from .deprecated_utils import Print

if "bpy" in locals():
    import imp
    # if "import_pmg" in locals():
    #     imp.reload(import_pmg)
    # else:
    #     from . import import_pmg
    # if "import_pit" in locals():
    #     imp.reload(import_pit)
    # else:
    #     from . import import_pit
    # #if "import_pic" in locals():
    #     #imp.reload(import_pic)
    # #else:
    #     #from . import import_pic
    # if "import_pip" in locals():
    #     imp.reload(import_pip)
Example #43
    return model


def save_model(model):
    # Increment the number in the file name and save the model (checks the
    # same '.model' extension that model.save() writes below)
    print('[+] Saving regressor object...')
    i = 0
    while os.path.exists(MODEL_NAME + '_' + str(i) + '.model'):
        i += 1

    model.save(MODEL_NAME + '_' + str(i) + '.model')


if __name__ == '__main__':
    # Import data
    #    var_names = ['X_train', 'y_train', 'X_test', 'y_test']
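    # When re-running this file in an interactive session, X_train stays
    # bound, so the locals() check below skips the costly re-import and
    # re-normalisation of the data.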
    if 'X_train' not in locals():
        X_train, X_test, y_train, y_test = import_data(DATA_DIR, DATA_FNAME)
        X_train = tf.keras.utils.normalize(X_train, axis=1)
        X_test = tf.keras.utils.normalize(X_test, axis=1)
#    # Create the model
#    model = build_model()
#    # Train the model
#    model.fit(X_train, y_train, epochs=EPOCHS)
#
#    val_loss, val_acc = model.evaluate(X_test, y_test)
#    print(val_loss, val_acc)
#
#    save_model(model)

    print('[!] Done')
Example #44
    def get_xenonnt_tpc_voltage_map_names_with_http_info(self, **kwargs):  # noqa: E501
        """Retrieves one or more XenonntTpcVoltageMapNames  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_xenonnt_tpc_voltage_map_names_with_http_info(async_req=True)
        >>> result = thread.get()

        :param async_req bool: execute request asynchronously
        :param str where: the filters query parameter (ex.: {\"number\": 10})
        :param str sort: the sort query parameter (ex.: \"city,-lastname\")
        :param int page: the pages query parameter
        :param int max_results: the max results query parameter
        :param _return_http_data_only: response data without head status code
                                       and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: tuple(InlineResponse20071, status_code(int), headers(HTTPHeaderDict))
                 If the method is called asynchronously,
                 returns the request thread.
        """

        local_var_params = locals()
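        # locals() captures the explicit parameters plus the kwargs dict;
        # the kwargs entries are validated and folded into the same dict below.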

        all_params = [
            'where',
            'sort',
            'page',
            'max_results'
        ]
        all_params.extend(
            [
                'async_req',
                '_return_http_data_only',
                '_preload_content',
                '_request_timeout'
            ]
        )

        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_xenonnt_tpc_voltage_map_names" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']

        collection_formats = {}

        path_params = {}

        query_params = []
        if 'where' in local_var_params and local_var_params['where'] is not None:  # noqa: E501
            query_params.append(('where', local_var_params['where']))  # noqa: E501
        if 'sort' in local_var_params and local_var_params['sort'] is not None:  # noqa: E501
            query_params.append(('sort', local_var_params['sort']))  # noqa: E501
        if 'page' in local_var_params and local_var_params['page'] is not None:  # noqa: E501
            query_params.append(('page', local_var_params['page']))  # noqa: E501
        if 'max_results' in local_var_params and local_var_params['max_results'] is not None:  # noqa: E501
            query_params.append(('max_results', local_var_params['max_results']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['BearerAuth']  # noqa: E501

        return self.api_client.call_api(
            '/voltage_maps/names/xenonnt/tpc', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='InlineResponse20071',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
Example #45
def episodios(item):
    patronBlock = r'<h6>Episodio</h6>(?P<block>.*?)(?:<!--|</table>)'
    patron = r'<strong>(?P<title>[^<]+)</strong>[^>]+>[^>]+>[^>]+>[^>]+>[^>]+>[^>]+>[^>]+><a href="http://link\.animetubeita\.com/2361078/(?P<url>[^"]+)"'
    action = 'findvideos'
    return locals()
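# In these Kodi channel modules, return locals() packs everything defined in
# the function body, e.g. {'item': item, 'patronBlock': ..., 'patron': ...,
# 'action': 'findvideos'}, into a dict that the channel framework consumes.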
Example #46
def abc(n):  # n is the number passed in, e.g. 5 -> [0, 1, 2, 3, 4]
    L = []
    for i in range(n):
        L.append(i)
    print(locals())  # print the local variables
    return L
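# For example, abc(5) prints {'n': 5, 'L': [0, 1, 2, 3, 4], 'i': 4}
# (key order may vary) and returns [0, 1, 2, 3, 4].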
Example #47
def mainlist(item):
    anime = [('Generi', ['/generi', 'genres', 'genres']),
             ('Ordine Alfabetico', ['/lista-anime', 'peliculas', 'list']),
             ('In Corso', ['/category/serie-in-corso/', 'peliculas', 'in_progress'])
             ]
    return locals()
Example #48
    def serialize_numpy(self, buff, numpy):
        """
        serialize message with numpy array types into buffer
        :param buff: buffer, ``StringIO``
        :param numpy: numpy python module
        """
        try:
            _x = self
            buff.write(_get_struct_3I().pack(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs))
            _x = self.header.frame_id
            length = len(_x)
            if python3 or type(_x) == unicode:
                _x = _x.encode('utf-8')
                length = len(_x)
            buff.write(struct.pack('<I%ss' % length, length, _x))
            _x = self
            buff.write(_get_struct_fB().pack(_x.time_from_start, _x.Sub_mode))
            buff.write(self.position_ref.tostring())
            buff.write(self.velocity_ref.tostring())
            buff.write(self.acceleration_ref.tostring())
            buff.write(_get_struct_f().pack(self.yaw_ref))
        except struct.error as se:
            self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
        except TypeError as te:
            self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
Example #49
class Connection(object):
    """The RPyC *connection* (AKA *protocol*).

    :param service: the :class:`Service <rpyc.core.service.Service>` to expose
    :param channel: the :class:`Channel <rpyc.core.channel.Channel>` over which messages are passed
    :param config: the connection's configuration dict (overriding parameters
                   from the :data:`default configuration <DEFAULT_CONFIG>`)
    :param _lazy: whether or not to initialize the service with the creation of
                  the connection. Default is True. If set to False, you will
                  need to call :func:`_init_service` manually later
    """

    def __init__(self, service, channel, config = {}, _lazy = False):
        self._closed = True
        self._config = DEFAULT_CONFIG.copy()
        self._config.update(config)
        if self._config["connid"] is None:
            self._config["connid"] = "conn%d" % (next(_connection_id_generator),)

        self._channel = channel
        self._seqcounter = itertools.count()
        self._recvlock = Lock()
        self._sendlock = Lock()
        self._sync_replies = {}
        self._sync_lock = RLock()
        self._sync_event = Event()
        self._async_callbacks = {}
        self._local_objects = RefCountingColl()
        self._last_traceback = None
        self._proxy_cache = WeakValueDict()
        self._netref_classes_cache = {}
        self._remote_root = None
        self._send_queue = []
        self._local_root = service(weakref.proxy(self))
        if not _lazy:
            self._init_service()
        self._closed = False


    def _init_service(self):
        self._local_root.on_connect()

    def __del__(self):
        self.close()
    def __enter__(self):
        return self
    def __exit__(self, t, v, tb):
        self.close()
    def __repr__(self):
        a, b = object.__repr__(self).split(" object ")
        return "%s %r object %s" % (a, self._config["connid"], b)

    #
    # IO
    #
    def _cleanup(self, _anyway = True):
        if self._closed and not _anyway:
            return
        self._closed = True
        self._channel.close()
        self._local_root.on_disconnect()
        self._sync_replies.clear()
        self._async_callbacks.clear()
        self._local_objects.clear()
        self._proxy_cache.clear()
        self._netref_classes_cache.clear()
        self._last_traceback = None
        self._remote_root = None
        self._local_root = None
        #self._seqcounter = None
        #self._config.clear()

    def close(self, _catchall = True):
        """closes the connection, releasing all held resources"""
        if self._closed:
            return
        self._closed = True
        try:
            self._async_request(consts.HANDLE_CLOSE)
        except EOFError:
            pass
        except Exception:
            if not _catchall:
                raise
        finally:
            self._cleanup(_anyway = True)

    @property
    def closed(self):
        """Indicates whether the connection has been closed or not"""
        return self._closed

    def fileno(self):
        """Returns the connectin's underlying file descriptor"""
        return self._channel.fileno()

    def ping(self, data = None, timeout = 3):
        """
        Asserts that the other party is functioning properly, by making sure
        the *data* is echoed back before the *timeout* expires

        :param data: the data to send (leave ``None`` for the default buffer)
        :param timeout: the maximal time to wait for echo

        :raises: :class:`PingError` if the echoed data does not match
        """
        if data is None:
            data = "abcdefghijklmnopqrstuvwxyz" * 20
        res = self.async_request(consts.HANDLE_PING, data, timeout = timeout)
        if res.value != data:
            raise PingError("echo mismatches sent data")
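    # Hypothetical usage sketch, assuming an established connection `conn`:
    #   conn.ping()                       # echoes the default 520-byte buffer
    #   conn.ping("hello", timeout=1.0)   # raises PingError on echo mismatch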

    def _get_seq_id(self):
        return next(self._seqcounter)

    def _send(self, msg, seq, args):
        data = brine.dump((msg, seq, args))
        # GC might run while sending data
        # if so, a BaseNetref.__del__ might be called
        # BaseNetref.__del__ must call asyncreq,
        # which will cause a deadlock
        # Solution:
        # Add the current request to a queue and let the thread that currently
        # holds the sendlock send it when it's done with its current job.
        # NOTE: Atomic list operations should be thread safe,
        # please call me out if they are not on all implementations!
        self._send_queue.append(data)
        # It is crucial to check the queue each time AFTER releasing the lock:
        while self._send_queue:
            if not self._sendlock.acquire(False):
                # Another thread holds the lock. It will send the data after
                # it's done with its current job. We can safely return.
                return
            try:
                # Can happen if another consumer was scheduled in between
                # `while` and `acquire`:
                if not self._send_queue:
                    # Must `continue` to ensure that `send_queue` is checked
                    # after releasing the lock! (in case another producer is
                    # scheduled before `release`)
                    continue
                data = self._send_queue.pop(0)
                self._channel.send(data)
            finally:
                self._sendlock.release()

    def _send_request(self, seq, handler, args):
        self._send(consts.MSG_REQUEST, seq, (handler, self._box(args)))

    def _send_reply(self, seq, obj):
        self._send(consts.MSG_REPLY, seq, self._box(obj))

    def _send_exception(self, seq, exctype, excval, exctb):
        exc = vinegar.dump(exctype, excval, exctb,
            include_local_traceback = self._config["include_local_traceback"])
        self._send(consts.MSG_EXCEPTION, seq, exc)

    #
    # boxing
    #
    def _box(self, obj):
        """store a local object in such a way that it could be recreated on
        the remote party either by-value or by-reference"""
        if brine.dumpable(obj):
            return consts.LABEL_VALUE, obj
        if type(obj) is tuple:
            return consts.LABEL_TUPLE, tuple(self._box(item) for item in obj)
        elif isinstance(obj, netref.BaseNetref) and obj.____conn__() is self:
            return consts.LABEL_LOCAL_REF, obj.____oid__
        else:
            self._local_objects.add(obj)
            try:
                cls = obj.__class__
            except Exception:
                # see issue #16
                cls = type(obj)
            if not isinstance(cls, type):
                cls = type(obj)
            return consts.LABEL_REMOTE_REF, (id(obj), cls.__name__, cls.__module__)
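    # Illustration: a brine-dumpable value such as 5 boxes as
    # (LABEL_VALUE, 5), while a non-dumpable object such as an open file
    # boxes as (LABEL_REMOTE_REF, (id(obj), 'TextIOWrapper', '_io')) and is
    # kept alive in self._local_objects until the peer releases it.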

    def _unbox(self, package):
        """recreate a local object representation of the remote object: if the
        object is passed by value, just return it; if the object is passed by
        reference, create a netref to it"""
        label, value = package
        if label == consts.LABEL_VALUE:
            return value
        if label == consts.LABEL_TUPLE:
            return tuple(self._unbox(item) for item in value)
        if label == consts.LABEL_LOCAL_REF:
            return self._local_objects[value]
        if label == consts.LABEL_REMOTE_REF:
            oid, clsname, modname = value
            if oid in self._proxy_cache:
                proxy = self._proxy_cache[oid]
                proxy.____refcount__ += 1  # other side increased refcount on boxing,
                                           # if I'm returning from cache instead of new object,
                                           # must increase refcount to match
                return proxy
            proxy = self._netref_factory(oid, clsname, modname)
            self._proxy_cache[oid] = proxy
            return proxy
        raise ValueError("invalid label %r" % (label,))

    def _netref_factory(self, oid, clsname, modname):
        typeinfo = (clsname, modname)
        if typeinfo in self._netref_classes_cache:
            cls = self._netref_classes_cache[typeinfo]
        elif typeinfo in netref.builtin_classes_cache:
            cls = netref.builtin_classes_cache[typeinfo]
        else:
            info = self.sync_request(consts.HANDLE_INSPECT, oid)
            cls = netref.class_factory(clsname, modname, info)
            self._netref_classes_cache[typeinfo] = cls
        return cls(weakref.ref(self), oid)

    #
    # dispatching
    #
    def _dispatch_request(self, seq, raw_args):
        try:
            handler, args = raw_args
            args = self._unbox(args)
            res = self._HANDLERS[handler](self, *args)
        except:
            # need to catch old style exceptions too
            t, v, tb = sys.exc_info()
            self._last_traceback = tb
            logger = self._config["logger"]
            if logger and t is not StopIteration:
                logger.debug("Exception caught", exc_info=True)
            if t is SystemExit and self._config["propagate_SystemExit_locally"]:
                raise
            if t is KeyboardInterrupt and self._config["propagate_KeyboardInterrupt_locally"]:
                raise
            self._send_exception(seq, t, v, tb)
        else:
            self._send_reply(seq, res)

    def _dispatch_reply(self, seq, raw):
        obj = self._unbox(raw)
        if seq in self._async_callbacks:
            self._async_callbacks.pop(seq)(False, obj)
        else:
            self._sync_replies[seq] = (False, obj)

    def _dispatch_exception(self, seq, raw):
        obj = vinegar.load(raw,
            import_custom_exceptions = self._config["import_custom_exceptions"],
            instantiate_custom_exceptions = self._config["instantiate_custom_exceptions"],
            instantiate_oldstyle_exceptions = self._config["instantiate_oldstyle_exceptions"])
        if seq in self._async_callbacks:
            self._async_callbacks.pop(seq)(True, obj)
        else:
            self._sync_replies[seq] = (True, obj)

    #
    # serving
    #
    def _recv(self, timeout, wait_for_lock):
        if not self._recvlock.acquire(wait_for_lock):
            return None
        try:
            if self._channel.poll(timeout):
                data = self._channel.recv()
            else:
                data = None
        except EOFError:
            self.close()
            raise
        finally:
            self._recvlock.release()
        return data

    def _dispatch(self, data):
        msg, seq, args = brine.load(data)
        if msg == consts.MSG_REQUEST:
            self._dispatch_request(seq, args)
        elif msg == consts.MSG_REPLY:
            self._dispatch_reply(seq, args)
        elif msg == consts.MSG_EXCEPTION:
            self._dispatch_exception(seq, args)
        else:
            raise ValueError("invalid message type: %r" % (msg,))

    def sync_recv_and_dispatch(self, timeout, wait_for_lock):
        # lock or wait for signal
        if self._sync_lock.acquire(False):
            try:
                self._sync_event.clear()
                data = self._recv(timeout, wait_for_lock = False)
                if not data:
                    return False
                self._dispatch(data)
                return True
            finally:
                self._sync_lock.release()
                self._sync_event.set()
        else:
            self._sync_event.wait()

    def poll(self, timeout = 0):
        """Serves a single transaction, should one arrives in the given
        interval. Note that handling a request/reply may trigger nested
        requests, which are all part of a single transaction.

        :returns: ``True`` if a transaction was served, ``False`` otherwise"""
        return self.sync_recv_and_dispatch(timeout, wait_for_lock=False)

    def serve(self, timeout = 1):
        """Serves a single request or reply that arrives within the given
        time frame (default is 1 sec). Note that the dispatching of a request
        might trigger multiple (nested) requests, thus this function may be
        reentrant.

        :returns: ``True`` if a request or reply were received, ``False``
                  otherwise.
        """
        return self.sync_recv_and_dispatch(timeout, wait_for_lock=True)

    def serve_all(self):
        """Serves all requests and replies for as long as the connection is
        alive."""
        try:
            while True:
                self.serve(None)
        except (socket.error, select_error, IOError):
            if not self.closed:
                raise
        except EOFError:
            pass
        finally:
            self.close()

    def serve_threaded(self, thread_count=10):
        """Serves all requests and replies for as long as the connection is
        alive."""
        def _thread_target():
            try:
                while True:
                    self.serve(None)
            except (socket.error, select_error, IOError):
                if not self.closed:
                    raise
            except EOFError:
                pass

        threads = []

        try:
            for _ in range(thread_count):
                thread = Thread(target=_thread_target)
                thread.daemon = True
                thread.start()
                threads.append(thread)

            for thread in threads:
                thread.join()
        finally:
            self.close()

    def poll_all(self, timeout=0):
        """Serves all requests and replies that arrive within the given interval.

        :returns: ``True`` if at least a single transaction was served, ``False`` otherwise
        """
        at_least_once = False
        t0 = time.time()
        duration = timeout
        try:
            while True:
                if self.poll(duration):
                    at_least_once = True
                if timeout is not None:
                    duration = t0 + timeout - time.time()
                    if duration < 0:
                        break
        except EOFError:
            pass
        return at_least_once

    #
    # requests
    #
    def sync_request(self, handler, *args):
        """Sends a synchronous request (waits for the reply to arrive)

        :raises: any exception that the request may generate
        :returns: the result of the request
        """
        seq = self._get_seq_id()
        self._send_request(seq, handler, args)

        timeout = self._config["sync_request_timeout"]
        while seq not in self._sync_replies:
            self.sync_recv_and_dispatch(timeout, True)

        isexc, obj = self._sync_replies.pop(seq)
        if isexc:
            raise obj
        else:
            return obj

    def _async_request(self, handler, args = (), callback = (lambda a, b: None)):
        seq = self._get_seq_id()
        self._async_callbacks[seq] = callback
        try:
            self._send_request(seq, handler, args)
        except:
            if seq in self._async_callbacks:
                del self._async_callbacks[seq]
            raise

    def async_request(self, handler, *args, **kwargs):
        """Send an asynchronous request (does not wait for it to finish)

        :returns: an :class:`rpyc.core.async.AsyncResult` object, which will
                  eventually hold the result (or exception)
        """
        timeout = kwargs.pop("timeout", None)
        if kwargs:
            raise TypeError("got unexpected keyword argument(s) %s" % (list(kwargs.keys()),))
        res = AsyncResult(weakref.proxy(self))
        self._async_request(handler, args, res)
        if timeout is not None:
            res.set_expiry(timeout)
        return res

    @property
    def root(self):
        """Fetches the root object (service) of the other party"""
        if self._remote_root is None:
            self._remote_root = self.sync_request(consts.HANDLE_GETROOT)
        return self._remote_root

    #
    # attribute access
    #
    def _check_attr(self, obj, name):
        if self._config["allow_exposed_attrs"]:
            if name.startswith(self._config["exposed_prefix"]):
                name2 = name
            else:
                name2 = self._config["exposed_prefix"] + name
            if hasattr(obj, name2):
                return name2
        if self._config["allow_all_attrs"]:
            return name
        if self._config["allow_safe_attrs"] and name in self._config["safe_attrs"]:
            return name
        if self._config["allow_public_attrs"] and not name.startswith("_"):
            return name
        return False

    def _access_attr(self, oid, name, args, overrider, param, default):
        if is_py3k:
            if type(name) is bytes:
                name = str(name, "utf8")
            elif type(name) is not str:
                raise TypeError("name must be a string")
        else:
            if type(name) not in (str, unicode):
                raise TypeError("name must be a string")
            name = str(name) # IronPython issue #10 + py3k issue
        obj = self._local_objects[oid]
        accessor = getattr(type(obj), overrider, None)
        if accessor is None:
            name2 = self._check_attr(obj, name)
            if not self._config[param] or not name2:
                raise AttributeError("cannot access %r" % (name,))
            accessor = default
            name = name2
        return accessor(obj, name, *args)

    #
    # request handlers
    #
    def _handle_ping(self, data):
        return data
    def _handle_close(self):
        self._cleanup()
    def _handle_getroot(self):
        return self._local_root
    def _handle_del(self, oid, count=1):
        self._local_objects.decref(oid)
    def _handle_repr(self, oid):
        return repr(self._local_objects[oid])
    def _handle_str(self, oid):
        return str(self._local_objects[oid])
    def _handle_cmp(self, oid, other):
        # cmp() might enter recursive resonance... yet another workaround
        #return cmp(self._local_objects[oid], other)
        obj = self._local_objects[oid]
        try:
            return type(obj).__cmp__(obj, other)
        except (AttributeError, TypeError):
            return NotImplemented
    def _handle_hash(self, oid):
        return hash(self._local_objects[oid])
    def _handle_call(self, oid, args, kwargs=()):
        return self._local_objects[oid](*args, **dict(kwargs))
    def _handle_dir(self, oid):
        return tuple(dir(self._local_objects[oid]))
    def _handle_inspect(self, oid):
        return tuple(netref.inspect_methods(self._local_objects[oid]))
    def _handle_getattr(self, oid, name):
        return self._access_attr(oid, name, (), "_rpyc_getattr", "allow_getattr", getattr)
    def _handle_delattr(self, oid, name):
        return self._access_attr(oid, name, (), "_rpyc_delattr", "allow_delattr", delattr)
    def _handle_setattr(self, oid, name, value):
        return self._access_attr(oid, name, (value,), "_rpyc_setattr", "allow_setattr", setattr)
    def _handle_callattr(self, oid, name, args, kwargs):
        return self._handle_getattr(oid, name)(*args, **dict(kwargs))
    def _handle_pickle(self, oid, proto):
        if not self._config["allow_pickle"]:
            raise ValueError("pickling is disabled")
        return pickle.dumps(self._local_objects[oid], proto)
    def _handle_buffiter(self, oid, count):
        items = []
        obj = self._local_objects[oid]
        i = 0
        try:
            while i < count:
                items.append(next(obj))
                i += 1
        except StopIteration:
            pass
        return tuple(items)
    def _handle_oldslicing(self, oid, attempt, fallback, start, stop, args):
        try:
            # first try __xxxitem__
            getitem = self._handle_getattr(oid, attempt)
            return getitem(slice(start, stop), *args)
        except Exception:
            # fallback to __xxxslice__. see issue #41
            if stop is None:
                stop = maxint
            getslice = self._handle_getattr(oid, fallback)
            return getslice(start, stop, *args)

    # collect handlers
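    # dict(locals()) inside a class body snapshots the names bound so far,
    # i.e. all of the _handle_* methods above, so the dispatch table can be
    # built without listing each handler twice.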
    _HANDLERS = {}
    for name, obj in dict(locals()).items():
        if name.startswith("_handle_"):
            name2 = "HANDLE_" + name[8:].upper()
            if hasattr(consts, name2):
                _HANDLERS[getattr(consts, name2)] = obj
            else:
                raise NameError("no constant defined for %r" % (name,))
    del name, name2, obj
Example #50
def genres(item):
    blacklist = ['Ultimi Episodi', 'Serie in Corso']
    patronMenu = r'<li[^>]+><a href="(?P<url>[^"]+)" >(?P<title>[^<]+)</a>'
    action = 'peliculas'
    return locals()
Example #51
    def create_vm(cls,
                  session,
                  instance,
                  kernel,
                  ramdisk,
                  use_pv_kernel=False):
        """Create a VM record.  Returns a Deferred that gives the new
        VM reference.
        the use_pv_kernel flag indicates whether the guest is HVM or PV

        There are 3 scenarios:

            1. Using paravirtualization,  kernel passed in

            2. Using paravirtualization, kernel within the image

            3. Using hardware virtualization
        """

        inst_type_id = instance.instance_type_id
        instance_type = instance_types.get_instance_type(inst_type_id)
        mem = str(long(instance_type['memory_mb']) * 1024 * 1024)
        vcpus = str(instance_type['vcpus'])
        rec = {
            'actions_after_crash': 'destroy',
            'actions_after_reboot': 'restart',
            'actions_after_shutdown': 'destroy',
            'affinity': '',
            'blocked_operations': {},
            'ha_always_run': False,
            'ha_restart_priority': '',
            'HVM_boot_params': {},
            'HVM_boot_policy': '',
            'is_a_template': False,
            'memory_dynamic_min': mem,
            'memory_dynamic_max': mem,
            'memory_static_min': '0',
            'memory_static_max': mem,
            'memory_target': mem,
            'name_description': '',
            'name_label': instance.name,
            'other_config': {
                'allowvssprovider': False
            },
            'PCI_bus': '',
            'platform': {
                'acpi': 'true',
                'apic': 'true',
                'pae': 'true',
                'viridian': 'true',
                'timeoffset': '0'
            },
            'PV_args': '',
            'PV_bootloader': '',
            'PV_bootloader_args': '',
            'PV_kernel': '',
            'PV_legacy_args': '',
            'PV_ramdisk': '',
            'recommendations': '',
            'tags': [],
            'user_version': '0',
            'VCPUs_at_startup': vcpus,
            'VCPUs_max': vcpus,
            'VCPUs_params': {},
            'xenstore_data': {}
        }
        # Complete VM configuration record according to the image type
        # non-raw/raw with PV kernel/raw in HVM mode
        if use_pv_kernel:
            rec['platform']['nx'] = 'false'
            if instance.kernel_id:
                # 1. Kernel explicitly passed in, use that
                rec['PV_args'] = 'root=/dev/xvda1'
                rec['PV_kernel'] = kernel
                rec['PV_ramdisk'] = ramdisk
            else:
                # 2. Use kernel within the image
                rec['PV_bootloader'] = 'pygrub'
        else:
            # 3. Using hardware virtualization
            rec['platform']['nx'] = 'true'
            rec['HVM_boot_params'] = {'order': 'dc'}
            rec['HVM_boot_policy'] = 'BIOS order'

        LOG.debug(_('Created VM %s...'), instance.name)
        vm_ref = session.call_xenapi('VM.create', rec)
        instance_name = instance.name
        LOG.debug(_('Created VM %(instance_name)s as %(vm_ref)s.') % locals())
        return vm_ref
Example #52
def main(md=None,
         filename=None,
         cols=None,
         theme=None,
         c_theme=None,
         bg=None,
         c_no_guess=None,
         display_links=None,
         link_style=None,
         from_txt=None,
         do_html=None,
         code_hilite=None,
         c_def_lexer=None,
         theme_info=None,
         no_colors=None,
         tab_length=4,
         no_change_defenc=False,
         **kw):
    """ md is markdown string. alternatively we use filename and read """

    if sys.version_info[0] == 2 and not no_change_defenc:
        # if I don't do this here, then I'll get probs when being
        # used as a lib:
        # https://github.com/axiros/terminal_markdown_viewer/issues/39
        # If you hate it then switch it off but don't blame me on unicode errs.
        fix_py2_default_encoding()

    tab_length = tab_length or 4
    global def_lexer
    if c_def_lexer:
        def_lexer = c_def_lexer
    py_config_file = os.path.expanduser("~/.mdv.py")
    if os.path.exists(py_config_file):
        exec_globals = {}
        exec(io.open(py_config_file, encoding='utf-8').read(), exec_globals)
        globals().update(exec_globals)

    args = locals()
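    # locals() snapshots every parameter (md, filename, cols, ...) into a
    # dict so the theme-preview loop below can recursively re-invoke
    # main(**args) with tweaked arguments.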
    if not md:
        if not filename:
            print('Using sample markdown:')
            make_sample()
            md = args['md'] = md_sample
            print(md)
            print()
            print('Styling Result')
        else:
            if filename == '-':
                md = sys.stdin.read()
            else:
                with open(filename) as f:
                    md = f.read()

    # style rollers requested (cycle through all themes)?
    global term_columns
    if cols:
        term_columns = int(cols)

    if c_theme == 'all' or theme == 'all':
        if c_theme == 'all':
            os.environ['AXC_CODE_THEME'] = os.environ['MDV_CODE_THEME'] = ''
        if theme == 'all':
            os.environ['AXC_THEME'] = os.environ['MDV_THEME'] = ''
        args.pop('kw')
        themes = read_themes()
        for k, v in list(themes.items()):
            if not filename:
                yl = 'You like *%s*, *%s*?' % (k, v['name'])
                args['md'] = md_sample.replace(you_like, yl)
            print(col('%s%s%s' % ('\n\n', '=' * term_columns, '\n'), L))
            # should really create an iterator here:
            if theme == 'all':
                args['theme'] = k
            else:
                args['c_theme'] = k
            print(main(**args))
        return ''

    global show_links
    if display_links:
        show_links = 'i'
    if link_style:  # rules
        show_links = link_style

    if bg and bg == 'light':
        # not in use right now:
        global background, color
        background = BGL
        color = T

    set_theme(theme, theme_info=theme_info)

    global guess_lexer
    guess_lexer = not c_no_guess

    if not c_theme:
        c_theme = theme or 'default'

    if c_theme == 'None':
        c_theme = None

    if c_theme:
        set_theme(c_theme, for_code=1, theme_info=theme_info)

    if c_theme:
        # info:
        if not have_pygments:
            print(col('No pygments, can not analyze code for hilite', R))

    # Create an instance of the Markdown class with the new extension
    MD = markdown.Markdown(tab_length=int(tab_length),
                           extensions=[
                               AnsiPrintExtension(),
                               TableExtension(),
                               fenced_code.FencedCodeExtension()
                           ])
    if code_hilite:
        md = do_code_hilite(md, code_hilite)
    the_html = MD.convert(md)
    #print the_html
    # html?
    if do_html:
        return the_html

    # html was handled above; now produce the ANSI result:
    try:
        ansi = MD.ansi
    except:
        if the_html:
            # can this happen? At least show:
            print("we have markdown result but no ansi.")
            print(the_html)
        else:
            ansi = 'n.a. (no parsing result)'

    # Raw HTML within the source (incl. fenced code blocks) was stashed as
    # numbered placeholders during conversion; replace them back now:
    PH = markdown.util.HTML_PLACEHOLDER
    stash = MD.htmlStash
    nr = -1
    tags = Tags()
    for ph in stash.rawHtmlBlocks:
        nr += 1
        raw = html_parser.unescape(ph[0])
        if raw[:3].lower() == '<br':
            raw = '\n'
        pre = '<pre><code'
        if raw.startswith(pre):
            _, raw = raw.split(pre, 1)
            if 'class="' in raw:
                # language:
                lang = raw.split('class="', 1)[1].split('"')[0]
            else:
                lang = ''
            raw = raw.split('>', 1)[1].rsplit('</code>', 1)[0]
            raw = tags.code(raw.strip(), from_fenced_block=1, lang=lang)
        ansi = ansi.replace(PH % nr, raw)

    # don't want these: gone through the extension now:
    # ansi = ansi.replace('```', '')

    # sub part display (the -f feature)
    if from_txt:
        if not from_txt.split(':', 1)[0] in ansi:
            # display from top then:
            from_txt = ansi.strip()[1]
        from_txt, mon_lines = (from_txt + ':%s' %
                               (term_rows - 6)).split(':')[:2]
        mon_lines = int(mon_lines)
        pre, post = ansi.split(from_txt, 1)
        post = '\n'.join(post.split('\n')[:mon_lines])
        ansi = '\n(...)%s%s%s' % ('\n'.join(pre.rsplit(
            '\n', 2)[-2:]), from_txt, post)

    ansi = set_hr_widths(ansi) + '\n'
    if no_colors:
        return clean_ansi(ansi)
    return ansi + '\n'
Example #53
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]


# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/

LANGUAGE_CODE = 'en-us'

TIME_ZONE = 'UTC'

USE_I18N = True

USE_L10N = True

USE_TZ = True


# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/

STATIC_URL = '/static/'

STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'

django_heroku.settings(locals())
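# django_heroku.settings() mutates the passed locals() (at module level this
# is the module's namespace dict), injecting Heroku-appropriate DATABASES,
# static file and logging settings.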
Example #54
    def _fetch_image_glance_disk(cls, context, session, instance, image,
                                 image_type):
        """Fetch the image from Glance

        NOTE:
        Unlike _fetch_image_glance_vhd, this method does not use the Glance
        plugin; instead, it streams the disks through domU to the VDI
        directly.

        Returns: A single filename if image_type is KERNEL_RAMDISK
                 A list of dictionaries that describe VDIs, otherwise
        """
        instance_id = instance.id
        # FIXME(sirp): Since the Glance plugin seems to be required for the
        # VHD disk, it may be worth using the plugin for both VHD and RAW and
        # DISK restores
        LOG.debug(_("Fetching image %(image)s") % locals())
        LOG.debug(_("Image Type: %s"), ImageType.to_string(image_type))

        if image_type == ImageType.DISK_ISO:
            sr_ref = safe_find_iso_sr(session)
            LOG.debug(_("ISO: Found sr possibly containing the ISO image"))
        else:
            sr_ref = safe_find_sr(session)

        glance_client, image_id = glance.get_glance_client(context, image)
        glance_client.set_auth_token(getattr(context, 'auth_token', None))
        meta, image_file = glance_client.get_image(image_id)
        virtual_size = int(meta['size'])
        vdi_size = virtual_size
        LOG.debug(
            _("Size for image %(image)s: %(virtual_size)d") % locals())
        if image_type == ImageType.DISK:
            # Make room for MBR.
            vdi_size += MBR_SIZE_BYTES
        elif image_type in (ImageType.KERNEL, ImageType.RAMDISK) and \
             vdi_size > FLAGS.max_kernel_ramdisk_size:
            max_size = FLAGS.max_kernel_ramdisk_size
            raise exception.Error(
                _("Kernel/Ramdisk image is too large: %(vdi_size)d bytes, "
                  "max %(max_size)d bytes") % locals())

        name_label = get_name_label_for_image(image)
        vdi_ref = cls.create_vdi(session, sr_ref, name_label, vdi_size, False)
        # From this point we have a VDI on Xen host;
        # If anything goes wrong, we need to remember its uuid.
        try:
            filename = None
            vdi_uuid = session.get_xenapi().VDI.get_uuid(vdi_ref)
            with_vdi_attached_here(
                session, vdi_ref, False, lambda dev: _stream_disk(
                    dev, image_type, virtual_size, image_file))
            if image_type in (ImageType.KERNEL, ImageType.RAMDISK):
                # We need to invoke a plugin for copying the
                # content of the VDI into the proper path.
                LOG.debug(_("Copying VDI %s to /boot/guest on dom0"), vdi_ref)
                fn = "copy_kernel_vdi"
                args = {}
                args['vdi-ref'] = vdi_ref
                # Let the plugin copy the correct number of bytes.
                args['image-size'] = str(vdi_size)
                task = session.async_call_plugin('glance', fn, args)
                filename = session.wait_for_task(task, instance_id)
                # Remove the VDI as it is not needed anymore.
                session.get_xenapi().VDI.destroy(vdi_ref)
                LOG.debug(_("Kernel/Ramdisk VDI %s destroyed"), vdi_ref)
                return [
                    dict(vdi_type=ImageType.to_string(image_type),
                         vdi_uuid=None,
                         file=filename)
                ]
            else:
                return [
                    dict(vdi_type=ImageType.to_string(image_type),
                         vdi_uuid=vdi_uuid,
                         file=None)
                ]
        except (cls.XenAPI.Failure, IOError, OSError) as e:
            # We look for XenAPI and OS failures.
            LOG.exception(_("instance %s: Failed to fetch glance image"),
                          instance_id,
                          exc_info=sys.exc_info())
            e.args = e.args + ([
                dict(vdi_type=ImageType.to_string(image_type),
                     vdi_uuid=vdi_uuid,
                     file=filename)
            ], )
            raise e
Example #55
    print(
        'Dash was not successfully imported. '
        'Make sure you don\'t have a file '
        'named \n"dash.py" in your current directory.',
        file=_sys.stderr)
    _sys.exit(1)

_basepath = _os.path.dirname(__file__)
_filepath = _os.path.abspath(_os.path.join(_basepath, 'package.json'))
with open(_filepath) as f:
    package = json.load(f)

package_name = package['name'].replace(' ', '_').replace('-', '_')
__version__ = package['version']

_current_path = _os.path.dirname(_os.path.abspath(__file__))

_this_module = _sys.modules[__name__]

_js_dist = [{
    'relative_package_path': 'dash_leaflet.min.js',
    'dev_package_path': 'dash_leaflet.dev.js',
    'namespace': package_name
}]

_css_dist = []

for _component in __all__:
    setattr(locals()[_component], '_js_dist', _js_dist)
    setattr(locals()[_component], '_css_dist', _css_dist)
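# locals()[_component] looks each exported component class up by name in the
# module namespace so the JS/CSS distribution info can be attached to it.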
Example #56
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}


SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')

ALLOWED_HOSTS = ['*']

STATIC_ROOT = 'staticfiles'
STATIC_URL = '/static/'

STATICFILES_DIRS = (
    os.path.join(BASE_DIR, 'core/static/'),
)


try:
    from PrivateChat.local_settings import *
except ImportError:
    pass

# Configure the Django app for Heroku.
# Also configured for Travis CI.
django_heroku.settings(locals(), test_runner=False)
Example #57
    def events_get_with_http_info(self, **kwargs):  # noqa: E501
        """List all instances of Event  # noqa: E501

        Gets a list of all instances of Event (more information in http://dbpedia.org/ontology/Event)  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.events_get_with_http_info(async_req=True)
        >>> result = thread.get()

        :param async_req bool: execute request asynchronously
        :param str label: Filter by label
        :param int page: Page number
        :param int per_page: Items per page
        :param _return_http_data_only: response data without head status code
                                       and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: tuple(list[Event], status_code(int), headers(HTTPHeaderDict))
                 If the method is called asynchronously,
                 returns the request thread.
        """

        local_var_params = locals()

        all_params = [
            'label',
            'page',
            'per_page'
        ]
        all_params.extend(
            [
                'async_req',
                '_return_http_data_only',
                '_preload_content',
                '_request_timeout'
            ]
        )

        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method events_get" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']

        if self.api_client.client_side_validation and 'per_page' in local_var_params and local_var_params['per_page'] > 200:  # noqa: E501
            raise ApiValueError("Invalid value for parameter `per_page` when calling `events_get`, must be a value less than or equal to `200`")  # noqa: E501
        if self.api_client.client_side_validation and 'per_page' in local_var_params and local_var_params['per_page'] < 1:  # noqa: E501
            raise ApiValueError("Invalid value for parameter `per_page` when calling `events_get`, must be a value greater than or equal to `1`")  # noqa: E501
        collection_formats = {}

        path_params = {}

        query_params = []
        if 'label' in local_var_params and local_var_params['label'] is not None:  # noqa: E501
            query_params.append(('label', local_var_params['label']))  # noqa: E501
        if 'page' in local_var_params and local_var_params['page'] is not None:  # noqa: E501
            query_params.append(('page', local_var_params['page']))  # noqa: E501
        if 'per_page' in local_var_params and local_var_params['per_page'] is not None:  # noqa: E501
            query_params.append(('per_page', local_var_params['per_page']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = []  # noqa: E501

        return self.api_client.call_api(
            '/events', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='list[Event]',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
Example #58
    def check_sp_settings(self, settings):
        """
        Checks the SP settings info.

        :param settings: Dict with settings data
        :type settings: dict

        :returns: Errors found on the SP settings data
        :rtype: list
        """
        assert isinstance(settings, dict)

        errors = []
        if not isinstance(settings, dict) or not settings:
            errors.append('invalid_syntax')
        else:
            if not settings.get('sp'):
                errors.append('sp_not_found')
            else:
                allow_single_domain_urls = self._get_allow_single_label_domain(settings)
                # check_sp_certs uses self.__sp so I add it
                old_sp = self.__sp
                self.__sp = settings['sp']

                sp = settings['sp']
                security = settings.get('security', {})

                if not sp.get('entityId'):
                    errors.append('sp_entityId_not_found')

                if not sp.get('assertionConsumerService', {}).get('url'):
                    errors.append('sp_acs_not_found')
                elif not validate_url(sp['assertionConsumerService']['url'], allow_single_domain_urls):
                    errors.append('sp_acs_url_invalid')

                if sp.get('attributeConsumingService'):
                    attributeConsumingService = sp['attributeConsumingService']
                    if 'serviceName' not in attributeConsumingService:
                        errors.append('sp_attributeConsumingService_serviceName_not_found')
                    elif not isinstance(attributeConsumingService['serviceName'], basestring):
                        errors.append('sp_attributeConsumingService_serviceName_type_invalid')

                    if 'requestedAttributes' not in attributeConsumingService:
                        errors.append('sp_attributeConsumingService_requestedAttributes_not_found')
                    elif not isinstance(attributeConsumingService['requestedAttributes'], list):
                        errors.append('sp_attributeConsumingService_serviceName_type_invalid')
                    else:
                        for req_attrib in attributeConsumingService['requestedAttributes']:
                            if 'name' not in req_attrib:
                                errors.append('sp_attributeConsumingService_requestedAttributes_name_not_found')
                            if 'name' in req_attrib and not req_attrib['name'].strip():
                                errors.append('sp_attributeConsumingService_requestedAttributes_name_invalid')
                            if 'attributeValue' in req_attrib and not isinstance(req_attrib['attributeValue'], list):
                                errors.append('sp_attributeConsumingService_requestedAttributes_attributeValue_type_invalid')
                            if 'isRequired' in req_attrib and not isinstance(req_attrib['isRequired'], bool):
                                errors.append('sp_attributeConsumingService_requestedAttributes_isRequired_type_invalid')

                    if "serviceDescription" in attributeConsumingService and not isinstance(attributeConsumingService['serviceDescription'], basestring):
                        errors.append('sp_attributeConsumingService_serviceDescription_type_invalid')

                slo_url = sp.get('singleLogoutService', {}).get('url')
                if slo_url and not validate_url(slo_url, allow_single_domain_urls):
                    errors.append('sp_sls_url_invalid')

                if 'signMetadata' in security and isinstance(security['signMetadata'], dict):
                    if 'keyFileName' not in security['signMetadata'] or \
                            'certFileName' not in security['signMetadata']:
                        errors.append('sp_signMetadata_invalid')

                authn_sign = bool(security.get('authnRequestsSigned'))
                logout_req_sign = bool(security.get('logoutRequestSigned'))
                logout_res_sign = bool(security.get('logoutResponseSigned'))
                want_assert_enc = bool(security.get('wantAssertionsEncrypted'))
                want_nameid_enc = bool(security.get('wantNameIdEncrypted'))

                if not self.check_sp_certs():
                    if authn_sign or logout_req_sign or logout_res_sign or \
                       want_assert_enc or want_nameid_enc:
                        errors.append('sp_cert_not_found_and_required')

            if 'contactPerson' in settings:
                types = settings['contactPerson'].keys()
                valid_types = ['technical', 'support', 'administrative', 'billing', 'other']
                for c_type in types:
                    if c_type not in valid_types:
                        errors.append('contact_type_invalid')
                        break

                for c_type in settings['contactPerson']:
                    contact = settings['contactPerson'][c_type]
                    if ('givenName' not in contact or len(contact['givenName']) == 0) or \
                            ('emailAddress' not in contact or len(contact['emailAddress']) == 0):
                        errors.append('contact_not_enough_data')
                        break

            if 'organization' in settings:
                for org in settings['organization']:
                    organization = settings['organization'][org]
                    if ('name' not in organization or len(organization['name']) == 0) or \
                        ('displayname' not in organization or len(organization['displayname']) == 0) or \
                            ('url' not in organization or len(organization['url']) == 0):
                        errors.append('organization_not_enough_data')
                        break
            # Restore the original value of self.__sp (only set when the 'sp' branch ran)
            if 'old_sp' in locals():
                self.__sp = old_sp

        return errors
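A short sketch of how check_sp_settings reports problems, assuming saml_settings is an already-constructed OneLogin_Saml2_Settings instance (the class this method belongs to in python-saml/python3-saml):

# Deliberately incomplete settings: valid entityId, invalid ACS URL.
settings = {
    'sp': {
        'entityId': 'https://sp.example.com/metadata',
        'assertionConsumerService': {'url': 'not-a-valid-url'},
    },
}
errors = saml_settings.check_sp_settings(settings)
print(errors)  # expected: ['sp_acs_url_invalid']; a cert error would be
               # added if signing/encryption flags were set without SP certs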
Example #59
    def events_id_get_with_http_info(self, id, **kwargs):  # noqa: E501
        """Get a single Event by its id  # noqa: E501

        Gets the details of a given Event (more information at http://dbpedia.org/ontology/Event)  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.events_id_get_with_http_info(id, async_req=True)
        >>> result = thread.get()

        :param async_req bool: execute request asynchronously
        :param str id: The ID of the Event to be retrieved (required)
        :param _return_http_data_only: if True, return the response data
                                       without the status code and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: tuple(Event, status_code(int), headers(HTTPHeaderDict))
                 If the method is called asynchronously,
                 returns the request thread.
        """

        local_var_params = locals()

        all_params = [
            'id'
        ]
        all_params.extend(
            [
                'async_req',
                '_return_http_data_only',
                '_preload_content',
                '_request_timeout'
            ]
        )

        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method events_id_get" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'id' is set
        if self.api_client.client_side_validation and ('id' not in local_var_params or  # noqa: E501
                                                        local_var_params['id'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `id` when calling `events_id_get`")  # noqa: E501

        collection_formats = {}

        path_params = {}
        if 'id' in local_var_params:
            path_params['id'] = local_var_params['id']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = []  # noqa: E501

        return self.api_client.call_api(
            '/events/{id}', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='Event',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
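A hedged sketch of the request-option parameters documented above; api is assumed to be the generated API instance that owns this method, 'Q362' is a made-up event id, and the exact raw-response type varies by generator version:

# Plain call: returns (Event, status_code, headers).
event, status, headers = api.events_id_get_with_http_info('Q362')

# _preload_content=False skips deserialization and hands back the raw
# response object (urllib3-based) for custom decoding; a (connect, read)
# timeout pair is passed as documented above.
raw, status, headers = api.events_id_get_with_http_info(
    'Q362', _preload_content=False, _request_timeout=(3.05, 27))
body = raw.data  # undecoded bytes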
Example #60
from django.shortcuts import render_to_response
from django.template import RequestContext

def get_users_list_index(request):
    # locals() passes every local variable (here just `request`) as the template context
    return render_to_response("account/display_users.html", locals(), context_instance=RequestContext(request))
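For reference, the context_instance argument was removed in Django 1.10 and render_to_response itself in Django 3.0; a sketch of the modern equivalent, with an explicit context dict in place of locals() (the template path is kept from the original; the users queryset is a hypothetical example of what the view might prepare):

from django.contrib.auth.models import User
from django.shortcuts import render

def get_users_list_index(request):
    # Explicit context is easier to read and test than locals().
    context = {'users': User.objects.all()}  # hypothetical data for the template
    return render(request, "account/display_users.html", context)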