def __init__(self, **kwargs):
    """Build the per-request "current" context object.

    Session and the input/output dicts come from the ``request`` popped
    out of ``kwargs``; auth/user/role are wrapped in lazy proxies so the
    auth backend is only instantiated on first access.
    """
    self.task_data = {'cmd': None}
    self.request = kwargs.pop('request', {})
    self.response = kwargs.pop('response', {})
    try:
        self.session = self.request.env['session']
        self.input = self.request.context['data']
        self.output = self.request.context['result']
        self.user_id = self.session.get('user_id')
        self.role_id = self.session.get('role_id')
    except AttributeError:
        # when we want to use engine functions independently,
        # we need to create a fake current object
        self.session = {}
        self.input = {}
        self.output = {}
        self.user_id = None
        self.role_id = None
    self.lang_code = self.input.get('lang_code', settings.DEFAULT_LANG)
    self.log = log
    self.pool = {}
    # Resolve the auth backend class from its dotted path; defer the
    # expensive constructions until the attributes are actually touched.
    AuthBackend = get_object_from_path(settings.AUTH_BACKEND)
    self.auth = lazy_object_proxy.Proxy(lambda: AuthBackend(self))
    self.user = lazy_object_proxy.Proxy(lambda: self.auth.get_user())
    self.role = lazy_object_proxy.Proxy(lambda: self.auth.get_role())
    self.msg_cache = Notify(self.user_id)
    log.debug("\n\nINPUT DATA: %s" % self.input)
    self.permissions = []
def __init__(self, **kwargs):
    """Build the per-request "current" context object.

    ``session``/``input`` may be passed directly in ``kwargs`` (standalone
    use); otherwise they are extracted from the request when it carries an
    ``env``. Auth/user/role are lazy proxies built on first access.
    """
    self.task_data = {'cmd': None}
    self.session = Session()
    self.headers = {}
    self.input = {}  # when we want to use engine functions independently,
    self.output = {}  # we need to create a fake current object
    try:
        self.session = kwargs['session']
        self.input = kwargs['input']
    except KeyError:
        self.request = kwargs.pop('request', {})
        self.response = kwargs.pop('response', {})
        if 'env' in self.request:
            self.session = self.request.env['session']
            self.input = self.request.context['data']
            self.output = self.request.context['result']
    self.remote_addr = None
    self.user_id = self.session.get('user_id')
    self.role_id = self.session.get('role_id')
    self.log = log
    self.pool = {}
    # Resolve the auth backend class from its dotted path; the proxies
    # defer construction until the attributes are actually used.
    AuthBackend = get_object_from_path(settings.AUTH_BACKEND)
    self.auth = lazy_object_proxy.Proxy(lambda: AuthBackend(self))
    self.user = lazy_object_proxy.Proxy(lambda: self.auth.get_user())
    self.role = lazy_object_proxy.Proxy(lambda: self.auth.get_role())
    log.debug("\n\nINPUT DATA: %s" % self.input)
    self.permissions = []
def __getattr__(self, key):
    """Resolve *key*, returning lazy proxies for attributes not yet loaded.

    When ``_delayed`` is active, known delayed names are returned directly;
    otherwise the call stack is inspected to detect importlib's
    ``_handle_fromlist`` machinery and pick a proxy variant that works
    around it.
    """
    #print("__getattr__", _delayed is not None, key)
    spr = super()
    if _delayed is not None:
        if key in _delayed:
            return _delayed[key]
        else:
            # Look at the caller's frame to see who triggered the lookup.
            s = traceback.extract_stack()[-2]
            #print(s.name, s.name=="_handle_fromlist")
            #print(s.filename, s.filename=="<frozen importlib._bootstrap>")
            if s.name == "_handle_fromlist":
                _bootstrapShitTriggers = (
                    s.filename == "<frozen importlib._bootstrap>"
                )  # _bootstrap triggers importing of packages even if not requested
                try:
                    # assertions are removed via optimization, this check is
                    # needed for an interpreter with a patched frozen table
                    # (useful for debugging).
                    # NOTE(review): these two asserts require the filename to
                    # end with BOTH the Windows and the POSIX path — they can
                    # never both pass, so the except branch always fires when
                    # asserts are enabled. Confirm this is intended.
                    assert s.filename.endswith(
                        "importlib\\_bootstrap_external.py")  # Windows
                    assert s.filename.endswith(
                        "importlib/_bootstrap_external.py")  # Linux
                except:
                    _bootstrapShitTriggers = True
                if _bootstrapShitTriggers:
                    return ProxyWithWorkaround_(
                        lambda: spr.__getattr__(key))
                else:
                    return lazy_object_proxy.Proxy(
                        lambda: spr.__getattr__(key))
            #print("key", key)
            #return spr.__getattr__(key)
            # NOTE(review): when the caller is not _handle_fromlist this
            # falls through and implicitly returns None — verify callers
            # tolerate that.
    else:
        return lazy_object_proxy.Proxy(lambda: spr.__getattr__(key))
def _add_boxers_to_fight_lazy(self, fight):
    """Method for initializing boxers as proxy objects that only load data
    when called.

    :param fight: (Fight) instance of Fight for which to load the boxers
    :return: The same instance of Fight with the boxers added as Proxies
    """
    # Factories read the DAO and ids at resolution time, so nothing is
    # fetched until the boxer attributes are actually used.
    left_factory = lambda: self.boxer_dao.find_by_id(fight.boxer_left_id)
    right_factory = lambda: self.boxer_dao.find_by_id(fight.boxer_right_id)
    fight.boxer_left = lazy_object_proxy.Proxy(left_factory)
    fight.boxer_right = lazy_object_proxy.Proxy(right_factory)
    return fight
def new_template_vars(self):
    """Return the extra variables made available to template rendering."""
    # Proxy so the update check runs once per template render,
    # not each time the variable is accessed.
    update_check = lazy_object_proxy.Proxy(self.available_update)
    return {
        'cea_update_available': update_check,
        'airflow_base_template': self.airflow_base_template,
    }
def setUp(self):
    """Prepare the socket-list URL and a default POST payload."""
    super().setUp()
    self.url = reverse('v2:socket-list', args=(self.instance.name,))
    # The zip file is materialised lazily, only if the payload is sent.
    zip_proxy = lazy_object_proxy.Proxy(lambda: self.get_file())
    self.data = {'name': 'abc', 'zip_file': zip_proxy}
def lazy_import(module_name): return lazy_object_proxy.Proxy( lambda: importlib.import_module("." + module_name, "astroid") ) @object.__new__
def optimized_resolve(
    info: graphql.ResolveInfo,
    queryset: djm.QuerySet,
    **kwargs,
) -> dict:
    """Resolve django queryset based on query selection.

    Also primes the dataloader cache for every returned node.

    Args:
        info (graphql.ResolveInfo): Resolve info.
        queryset (djm.QuerySet): Queryset to resolve.

    Returns:
        dict: Connection resolve result.
    """
    optimized = qs_.optimize(queryset.all(), info)
    result = resolve(optimized, **kwargs)
    nodes = result['nodes']

    def _prime_nodes(items):
        # Prime dataloader cache for each node before handing them out.
        for item in items:
            Resolver(info=info).resolve_gid(item)
        return items

    # Priming is deferred until the nodes are actually iterated.
    result['nodes'] = lazy.Proxy(lambda: _prime_nodes(nodes))
    return result
def test1():
    """Demo: the proxied function only runs when the proxy is used."""
    def expensive_func(t):
        print("kakkakakakakaklalallaalla")
        return t

    # NOTE(review): Proxy calls its factory with no arguments, but
    # expensive_func requires ``t`` — using the proxy raises TypeError.
    # Preserved as-is; confirm intent.
    obj = lazy_zhu.Proxy(expensive_func)
    print("go....")
    print(obj)
def value(self):
    """Returns the value that is to be used. May be a lazy proxy object."""
    if not self.has_value:
        raise ValueError("This field has currently no value.")
    if not self.lazy:
        return self._value
    # Imported here so the dependency is only needed for lazy fields.
    import lazy_object_proxy
    return lazy_object_proxy.Proxy(self._lazy_get)
def setUp(self):
    """Create one socket environment, then compute its detail URL."""
    super().setUp()
    list_url = reverse('v2:socket-environment-list',
                       args=(self.instance.name, ))
    # Zip content is produced lazily, only when the payload is serialised.
    self.data = {
        'name': 'abc',
        'zip_file': lazy_object_proxy.Proxy(lambda: self.get_file()),
    }
    self.client.post(list_url, data=self.data, format='multipart')
    self.url = reverse('v2:socket-environment-detail',
                       args=(self.instance.name, self.data['name']))
def start_spark(app_name="Jupyter"):
    """Install lazy global Spark handles (``spark``/``sc``/``hc``/``sqlContext``).

    No Spark session is actually created here: ``getOrCreate`` is passed
    *uncalled* as the proxy factory, so the session starts on first use.
    """
    def sc_lazy(spark):
        # Factory: SparkContext of the (possibly still unstarted) session.
        return spark.sparkContext

    def hc_lazy(spark):
        # Factory: HiveContext over the session's SparkContext.
        return HiveContext(spark.sparkContext)

    global sc
    global hc
    global sqlContext
    global spark
    import findspark
    findspark.init()
    from pyspark.sql import SparkSession
    from pyspark.sql import HiveContext
    spark = lazy_object_proxy.Proxy(
        SparkSession.builder.appName(app_name).enableHiveSupport().config(
            "spark.dynamicAllocation.enabled",
            "true").config("spark.dynamicAllocation.minExecutors", "0").config(
                "spark.dynamicAllocation.maxExecutors", "11").config(
                    "spark.dynamicAllocation.cachedExecutorIdleTimeout",
                    "90s").config("spark.executor.cores",
                                  "1").config("spark.executor.memory",
                                              "10512m").
        config("spark.memory.storageFraction", "0.2").config(
            "spark.serializer",
            "org.apache.spark.serializer.KryoSerializer").config(
                "spark.kryoserializer.buffer.max",
                "512m").config("spark.driver.memory", "10g").config(
                    "spark.executor.memoryOverhead",
                    1024).config("spark.driver.memoryOverhead", 512).config(
                        "spark.driver.maxResultSize",
                        "10000m").config("spark.port.maxRetries",
                                         96).getOrCreate)
    sc = lazy_object_proxy.Proxy(partial(sc_lazy, spark))
    # NOTE(review): sqlContext and hc both use hc_lazy — verify sqlContext
    # was not meant to be a SQLContext instead of a HiveContext.
    sqlContext = lazy_object_proxy.Proxy(partial(hc_lazy, spark))
    hc = lazy_object_proxy.Proxy(partial(hc_lazy, spark))
def convertParsed256ColorMapIntoMatplotlib(parsed, index):
    """Converts Kaitai Struct parsed colormap into matplotlib colormap.

    Args:
        parsed: parsed object exposing a ``color_tables`` sequence.
        index: a table index, an iterable of indices, or ``None`` for all.

    Returns:
        dict: ``{"index": {i: <lazily converted table>}}``.
    """
    if index is None:
        index = range(len(parsed.color_tables))
    if isinstance(index, (int, str)):
        index = [index]
    res = {"index": {}}
    for i in index:
        table = parsed.color_tables[i].color_table
        # Conversion is deferred until the table is actually accessed.
        converted = lazy_object_proxy.Proxy(
            functools.partial(convertColorTable, table))
        # Fix: the original keyed on an undefined name ``el`` (NameError);
        # the key must be the current loop index ``i``.
        res["index"][i] = converted
    return res
def load_single_entry(self, value, partial):
    """Loads a single nested entry from its schema.

    Args:
        value: mapping with an ``@type`` key selecting the schema.
        partial: passed through to ``schema.load``.

    Raises:
        ValueError: if no schema is registered for the entry's ``@type``.
    """
    type_ = normalize_type(value["@type"])
    schema = self.schema["from"][str(type_)]
    if not schema:
        # Fix: the ValueError was constructed but never raised, so an
        # unknown type silently fell through to ``schema.load`` below.
        raise ValueError("Type {} not found in {}.{}".format(
            value["@type"], type(self.parent), self.data_key))
    if schema.lazy:
        # Defer the (possibly expensive) load until first access.
        return lazy_object_proxy.Proxy(lambda: schema.load(
            value, unknown=self.unknown, partial=partial))
    return schema.load(value, unknown=self.unknown, partial=partial)
def get_pool_context(self):  # TODO: Add in-process caching
    """Build the workflow-pool context mapping lane names to their owners.

    Returns:
        Context dict; pool roles are lazy proxies resolved on first use.
    """
    context = {
        self.current.lane_name: self.current.user,
        'self': self.current.user
    }
    if self.current.lane_owners:
        model_name = self.current.lane_owners.split('.')[0]
        context[model_name] = model_registry.get_model(model_name).objects
    for lane_name, role_id in self.current.pool.items():
        if role_id:
            # Fix: bind role_id as a default argument — the original lambda
            # closed over the loop variable, so every proxy resolved to the
            # *last* role_id in the pool (late-binding closure bug).
            context[lane_name] = lazy_object_proxy.Proxy(
                lambda role_id=role_id: self.role_model(
                    super_context).objects.get(role_id))
    return context
def get_pool_context(self):  # TODO: Add in-process caching
    """
    Builds context for the WF pool.

    Returns:
        Context dict; pool roles are lazy proxies resolved on first use.
    """
    context = {
        self.current.lane_id: self.current.role,
        'self': self.current.role
    }
    for lane_id, role_id in self.current.pool.items():
        if role_id:
            # Fix: bind role_id as a default argument — the original lambda
            # closed over the loop variable, so every proxy resolved to the
            # *last* role_id in the pool (late-binding closure bug).
            context[lane_id] = lazy_object_proxy.Proxy(
                lambda role_id=role_id: self.role_model(
                    super_context).objects.get(role_id))
    return context
def test_lazy_object_proxy():
    """The proxy defers the factory call until first use, then caches it."""
    import lazy_object_proxy

    def expensive_func():
        from time import sleep
        print("starting calculation")
        # just as example for a slow computation
        sleep(0.1)
        print("finished calculation")
        # return the result of the calculation
        return 10

    proxy = lazy_object_proxy.Proxy(expensive_func)
    # First use triggers expensive_func...
    assert proxy == 10
    # ...second use reuses the cached result without calling it again.
    assert proxy == 10
def __call__(self, *args, **kwargs):
    """Decorator-then-dispatcher.

    The first call with a callable registers it as the wrapped function
    (merging docstrings) and returns ``self``. Later calls submit the
    function to the executor and return a lazy proxy that blocks on the
    future's result only when the value is actually used.

    Raises:
        NotImplementedError: first call made with a non-callable argument.
    """
    if self.fonction is NotImplemented:
        if callable(args[0]):
            self.fonction = args[0]
            if args[0].__doc__ is not None and self.__doc__ is not None:
                doc = self.__doc__
                doc += "\naide de la fonction originale\n"
                doc += args[0].__doc__
                # Fix: was ``self.__doc_ = doc`` (typo) — the merged
                # docstring went to a dead attribute and __doc__ was
                # never updated.
                self.__doc__ = doc
            self.__wrapped__ = args[0]
            return self
        else:
            raise NotImplementedError
    futur = self.executor.submit(self.fonction, *args, **kwargs)
    self.fut.append(futur)
    # return future_proxy(futur)
    factory_proxy = partial(futures.Future.result, futur)
    return lazy_object_proxy.Proxy(factory_proxy)
def setUp(self):
    """Create a socket from a mocked zip spec, then build the test URLs."""
    super().setUp()
    # The spec download is mocked so socket creation parses this YAML
    # instead of fetching anything.
    with mock.patch('apps.sockets.download_utils.ZipDownloadFileHandler.get_socket_spec') as download_mock:
        download_mock.return_value = """
endpoints:
  end1/test:
    POST: |
      print 1
    DELETE: |
      print 1
"""
        self.socket = G(Socket, name='abc1')
    self.socket_endpoint = SocketEndpoint.objects.first()
    self.detail_url = reverse('v2:socket-endpoint-endpoint',
                              args=(self.instance.name, self.socket_endpoint.name))
    self.url = reverse('v2:socket-list', args=(self.instance.name,))
    self.data = {
        'name': 'abc',
        # Zip content is produced lazily, only when the payload is sent.
        'zip_file': lazy_object_proxy.Proxy(lambda: self.get_file()),
    }
def decode(content, parse_json=True, use_proxy=True):
    """Decode a stored field value, resolving jumbo-field references.

    Jumbo fields (recognised by prefix) are pulled from their external
    location — lazily via a proxy when ``use_proxy`` is set. Other values
    are JSON-parsed when ``parse_json`` is set.
    """
    if content is None:
        return content

    if not content.startswith(constants.JUMBO_FIELDS_PREFIX):
        return json_loads_or_raw(content) if parse_json else content

    def unwrap():
        location, _size = content.split()
        value = _pull_jumbo_field(location)
        return json_loads_or_raw(value) if parse_json else value

    return lazy_object_proxy.Proxy(unwrap) if use_proxy else unwrap()
def resolve(
    iterable,
    **kwargs,
) -> dict:
    """Resolve iterable to connection

    Args:
        iterable (typing.Iterable): value

    Returns:
        dict: Connection data.
    """
    if isinstance(iterable, djm.Manager):
        iterable = iterable.all()
    # QuerySets get a lazily-evaluated count so no COUNT query runs unless
    # the length is actually inspected; plain iterables are measured now.
    if isinstance(iterable, djm.QuerySet):
        length = lazy.Proxy(iterable.count)
    else:
        length = len(iterable)
    return _resolve(iterable, length, **kwargs)
def convertParsedPalColorMapIntoMatplotlib(parsed, index):
    """Converts Kaitai Struct parsed colormap into matplotlib colormap.

    Args:
        parsed: parsed object whose ``meta`` entries carry a title and a
            ``color_table``.
        index: table index / title, an iterable of them, or ``None`` for all.

    Returns:
        dict: ``{"index": {...}, "name": {...}}`` of lazily converted tables.
    """
    if index is None:
        index = range(len(parsed.meta))
    if isinstance(index, (int, str)):
        index = [index]
    res = {"name": {}, "index": {}}
    nameIndex = {}
    for i, el in enumerate(index):
        if isinstance(el, int):
            pass
        elif isinstance(el, str):
            if not nameIndex:
                # Fix: the original comprehension iterated ``enumerate``
                # *tuples* (AttributeError on ``table.title``) and mapped
                # every title to the outer loop index ``i``. Build a
                # title -> position map from meta instead.
                nameIndex = {
                    table.title: j
                    for j, table in enumerate(parsed.meta)
                }
            el = nameIndex[el]
        table = parsed.meta[el].color_table
        # Conversion is deferred until the table is actually accessed.
        converted = lazy_object_proxy.Proxy(
            functools.partial(convertColorTable, table))
        res["index"][el] = converted
        res["name"][res["index"][el].name] = converted
    return res
def __init__(self):
    # Bookkeeping for performed updates; the Wind DB connection is
    # opened lazily, on first use of ``self.sql``.
    self.records = UpdateRecoder()
    self.sql = lazy_object_proxy.Proxy(self.get_wind_connection)
def get_logger(cls, name):
    """Return a proxy that creates the named logger on first use."""
    import lazy_object_proxy as lazy
    from functools import partial
    factory = partial(Log._get_logger, name)
    return lazy.Proxy(factory)
class ModelDB(object):
    """Attribute container for lazily downloaded metabolic models."""
    pass


bigg = ModelDB()
try:
    model_ids = index_models_bigg().bigg_id
except requests.ConnectionError:
    bigg.no_models_available = "Cameo couldn't reach http://bigg.ucsd.edu at initialization time. Are you connected to the internet?"
except Exception as e:
    bigg.no_models_available = "Cameo could reach http://bigg.ucsd.edu at initialization time but something went wrong while decoding the server response."
    logger.debug(e)
else:
    # Every model becomes a lazy attribute; the download happens only on
    # first access (partial binds each id eagerly — no late-binding bug).
    for id in model_ids:
        setattr(bigg, str_to_valid_variable_name(id),
                lazy_object_proxy.Proxy(partial(get_model_from_bigg, id)))

minho = ModelDB()
try:
    minho_models = index_models_minho()
except requests.ConnectionError as e:
    minho.no_models_available = "Cameo couldn't reach http://darwin.di.uminho.pt/models at initialization time. Are you connected to the internet?"
    logger.debug(e)
except Exception as e:
    minho.no_models_available = "Cameo could reach http://darwin.di.uminho.pt/models at initialization time but something went wrong while decoding the server response."
    logger.debug(e)
else:
    model_indices = minho_models.id
    model_ids = minho_models.name
    # Attribute name comes from the model name; fetch is keyed by index.
    for index, id in zip(model_indices, model_ids):
        setattr(minho, str_to_valid_variable_name(id),
                lazy_object_proxy.Proxy(partial(get_model_from_uminho, index)))
# NOTE(review): these two statements format ``prefix`` via locals() and dump
# the *builtin* ``object`` — they look like a stranded fragment of a helper
# (cf. the commented-out ``print_path`` call below); confirm they are not
# dead code at module level.
object_json = json.dumps(object)
print("{prefix} = {object_json}".format(**locals()))

# Load project configuration and derive image naming defaults.
# NOTE(review): yaml.load without an explicit Loader is deprecated/unsafe on
# untrusted input — confirm config.yaml is trusted.
project_config = yaml.load(open("config.yaml"))
project_config["user"] = project_config.get("user", os.environ["USER"])
project_config["registry_user"] = project_config.get("registry_user",
                                                     project_config["user"])
project_config["image_name"] = (project_config["name"].replace(" ", "_").replace(
    "-", "_"))
project_config["full_image_name"] = "{}/{}/{}:latest".format(
    project_config["registry"], project_config["registry_user"],
    project_config["name"])
# print_path(project_config, "project_config")
kubernetes.config.load_kube_config()


def connect_batch():
    # Factory for the batch API client (kube config loaded above).
    return kubernetes.client.BatchV1Api()


def connect_core():
    # Reloads kube config before creating the core API client.
    kubernetes.config.load_kube_config()
    return kubernetes.client.CoreV1Api()


# Clients are created lazily, on first attribute access.
batch = lazy_object_proxy.Proxy(connect_batch)
api_instance = lazy_object_proxy.Proxy(connect_core)
# coding=UTF8 import lazy_object_proxy import rapidjson as json from jinja2.sandbox import SandboxedEnvironment def jinja_finalizer(value): if isinstance(value, (list, dict)): return json.dumps(value) return value jinja2_env = lazy_object_proxy.Proxy(lambda: SandboxedEnvironment( trim_blocks=True, lstrip_blocks=True, finalize=jinja_finalizer))
class LDAPAccess:
    """Thin async wrapper around ldap3 for authentication and lookups."""

    # DN of this host, read lazily from the environment on first access.
    host_dn: str = lazy_object_proxy.Proxy(lambda: os.environ["ldap_hostdn"])
    # (mtime, password) cache for the machine secret file.
    _machine_pw = MachinePWCache(0, "")

    def __init__(self, host: str = None, port: int = None, ldap_base: str = None):
        self.ldap_base = ldap_base or os.environ["ldap_base"]
        self.logger = ConsoleAndFileLogging.get_logger(__name__, LOG_FILE_PATH_HTTP)
        self.server = Server(
            host=host or os.environ["ldap_server_name"],
            port=port or int(os.environ["ldap_server_port"]),
            get_info="ALL",
        )

    @classmethod
    async def machine_password(cls) -> str:
        """Return the machine account password, re-reading when the file changes."""
        mtime = os.stat(MACHINE_PASSWORD_FILE).st_mtime
        if cls._machine_pw.mtime == mtime:
            return cls._machine_pw.password
        else:
            async with aiofiles.open(MACHINE_PASSWORD_FILE, "r") as fp:
                pw = await fp.read()
            pw = pw.strip()
            cls._machine_pw = MachinePWCache(mtime, pw)
            return pw

    def check_auth_dn(self, bind_dn: str, bind_pw: str) -> bool:
        """Attempt a simple TLS bind with the given credentials.

        Returns True on success, False on bad credentials or LDAP errors.
        """
        try:
            with Connection(
                self.server,
                user=bind_dn,
                password=bind_pw,
                auto_bind=AUTO_BIND_TLS_BEFORE_BIND,
                authentication=SIMPLE,
                read_only=True,
            ):
                self.logger.info("Successful LDAP: %r.", bind_dn)
                return True
        except LDAPBindError:
            self.logger.info("Invalid credentials for %r.", bind_dn)
            return False
        except LDAPExceptionError as exc:
            self.logger.exception(
                "When connecting to %r with bind_dn %r: %s",
                self.server.host,
                bind_dn,
                exc,
            )
            return False

    async def check_auth_and_get_user(self, username: str, password: str) -> Optional[User]:
        """Authenticate *username* and return the User if an admin-group member."""
        user_dn = await self.get_dn_of_user(username)
        if user_dn:
            admin_group_members = await self.admin_group_members()
            if user_dn in admin_group_members:
                # school_only=False: admins need not have a school object class.
                return await self.get_user(username, user_dn, password, school_only=False)
            else:
                self.logger.debug("User %r not member of group %r.", username, ADMIN_GROUP_NAME)
                return None
        else:
            self.logger.debug("No such user in LDAP: %r.", username)
            return None

    async def search(
        self,
        filter_s: str,
        attributes: List[str] = None,
        base: str = None,
        bind_dn: str = None,
        bind_pw: str = None,
        raise_on_bind_error: bool = True,
    ) -> List[Entry]:
        """Run an LDAP search, binding as the machine account by default.

        Returns the matching entries; bind failures return ``[]`` when
        ``raise_on_bind_error`` is False, otherwise exceptions propagate.
        """
        base = base or self.ldap_base
        bind_dn = bind_dn or str(self.host_dn)
        bind_pw = bind_pw or await self.machine_password()
        try:
            with Connection(
                self.server,
                user=bind_dn,
                password=bind_pw,
                auto_bind=AUTO_BIND_TLS_BEFORE_BIND,
                authentication=SIMPLE,
                read_only=True,
            ) as conn:
                conn.search(base, filter_s, attributes=attributes)
        except LDAPExceptionError as exc:
            if isinstance(exc, LDAPBindError) and not raise_on_bind_error:
                return []
            self.logger.exception(
                "When connecting to %r with bind_dn %r: %s",
                self.server.host,
                bind_dn,
                exc,
            )
            raise
        return conn.entries

    async def get_dn_of_user(self, username: str) -> str:
        """Return the DN for *username*, '' if absent; raise on duplicates."""
        filter_s = f"(uid={escape_filter_chars(username)})"
        results = await self.search(filter_s, attributes=None)
        if len(results) == 1:
            return results[0].entry_dn
        elif len(results) > 1:
            raise RuntimeError(
                f"More than 1 result when searching LDAP with filter {filter_s!r}: {results!r}."
            )
        else:
            return ""

    async def get_passwords(
        self,
        username: str,
        base: str = None,
        bind_dn: str = None,
        bind_pw: str = None,
    ) -> Optional[UserPasswords]:
        """Fetch password-related attributes for *username* (None if absent)."""
        filter_s = f"(uid={escape_filter_chars(username)})"
        attributes = [
            "krb5Key",
            "krb5KeyVersionNumber",
            "sambaPwdLastSet",
            "sambaNTPassword",
            "userPassword",
        ]
        results = await self.search(filter_s, attributes, base=base, bind_dn=bind_dn, bind_pw=bind_pw)
        if len(results) == 1:
            result = results[0]
            return UserPasswords(
                userPassword=result["userPassword"].values,
                sambaNTPassword=result["sambaNTPassword"].value,
                krb5Key=result["krb5Key"].values,
                krb5KeyVersionNumber=result["krb5KeyVersionNumber"].value,
                sambaPwdLastSet=result["sambaPwdLastSet"].value,
            )
        elif len(results) > 1:
            raise RuntimeError(
                f"More than 1 result when searching LDAP with filter {filter_s!r}: {results!r}."
            )
        else:
            return None

    @staticmethod
    def user_is_disabled(ldap_result):
        # Disabled if samba flags carry "D", the Kerberos KDC flags equal
        # 254, or shadowExpire (days since epoch) lies in the past.
        return ("D" in ldap_result["sambaAcctFlags"].value
                or ldap_result["krb5KDCFlags"].value == 254
                or ("shadowExpire" in ldap_result
                    and ldap_result["shadowExpire"].value
                    and ldap_result["shadowExpire"].value <
                    datetime.now().timestamp() / 3600 / 24))

    async def get_user(
        self,
        username: str,
        bind_dn: str = None,
        bind_pw: str = None,
        attributes: List[str] = None,
        school_only=True,
    ) -> Optional[User]:
        """Load *username* as a User object (None if absent).

        Always fetches the attributes needed to fill the User fields;
        ``school_only`` restricts the search to school object classes.
        """
        if attributes:
            attributes = attributes + [
                "displayName",
                "krb5KDCFlags",
                "sambaAcctFlags",
                "shadowExpire",
                "uid",
            ]
        else:
            attributes = [
                "displayName",
                "krb5KDCFlags",
                "sambaAcctFlags",
                "shadowExpire",
                "uid",
            ]
        filter_s = f"(uid={escape_filter_chars(username)})"
        if school_only:
            filter_s = (f"(&{filter_s}(|"
                        f"(objectClass=ucsschoolStaff)"
                        f"(objectClass=ucsschoolStudent)"
                        f"(objectClass=ucsschoolTeacher)"
                        f"))")
        results = await self.search(
            filter_s,
            attributes,
            bind_dn=bind_dn,
            bind_pw=bind_pw,
            raise_on_bind_error=False,
        )
        if len(results) == 1:
            result = results[0]
            return User(
                username=result["uid"].value,
                full_name=result["displayName"].value,
                disabled=self.user_is_disabled(result),
                dn=result.entry_dn,
                attributes=result.entry_attributes_as_dict,
            )
        elif len(results) > 1:
            raise RuntimeError(
                f"More than 1 result when searching LDAP with filter {filter_s!r}: {results!r}."
            )
        else:
            return None

    async def admin_group_members(self) -> List[str]:
        """Return the DNs in the admin group ([] and an error log otherwise)."""
        filter_s = f"(cn={escape_filter_chars(ADMIN_GROUP_NAME)})"
        base = f"cn=groups,{self.ldap_base}"
        results = await self.search(filter_s, ["uniqueMember"], base=base)
        if len(results) == 1:
            return results[0]["uniqueMember"].values
        else:
            self.logger.error("Reading %r from LDAP: results=%r", ADMIN_GROUP_NAME, results)
            return []

    async def extended_attribute_ldap_mapping(
            self, udm_property_name: str) -> Optional[str]:
        """Return the LDAP attribute a UDM extended property maps to."""
        filter_s = f"(&(objectClass=univentionUDMProperty)(cn={escape_filter_chars(udm_property_name)}))"
        base = f"cn=custom attributes,cn=univention,{self.ldap_base}"
        results = await self.search(filter_s, ["univentionUDMPropertyLdapMapping"], base=base)
        if len(results) == 1:
            return results[0]["univentionUDMPropertyLdapMapping"].value
        else:
            self.logger.error("Reading %r from LDAP: results=%r", udm_property_name, results)
            return None
from ..db import accounts as db_accounts
from ..tinygrail.player import Player

__all__ = []


class LoginPlayer(NamedTuple):
    # Friendly name plus the paired bangumi login and tinygrail player.
    name: str
    bangumi: Login
    tinygrail: Player


def translate(acc: db_accounts.Account) -> LoginPlayer:
    """Build a LoginPlayer from a stored account record."""
    user = user_info(acc.id)
    bangumi = Login(chii_auth=acc.chii_auth, ua=acc.ua, user=user)

    def update_identity(new_identity):
        # Persist refreshed tinygrail identities back to the account DB.
        db_accounts.update(acc.friendly_name, tinygrail_identity=new_identity)

    tinygrail = Player(acc.tinygrail_identity, on_identity_refresh=update_identity)
    return LoginPlayer(acc.friendly_name, bangumi, tinygrail)


all_accounts: Dict[str, LoginPlayer] = {}
for friendly_name in db_accounts.list_all():
    account = db_accounts.retrieve(friendly_name)
    # Each account is exposed as a lazily-translated module attribute.
    # The immediately-invoked outer lambda binds ``account`` per iteration,
    # avoiding the late-binding-closure pitfall.
    globals()[friendly_name] = all_accounts[friendly_name] = lazy_object_proxy.Proxy(
        (lambda acc: lambda: translate(acc))(account))
    __all__.append(friendly_name)
def lazy_import(module_name):
    """Return a proxy that imports ``astroid.<module_name>`` on first access."""
    def _do_import():
        return importlib.import_module('.' + module_name, 'astroid')

    return lazy_object_proxy.Proxy(_do_import)