Example 1
class ConfigSyncSchedules(object):
    __name__ = "ConfigSyncSchedules"
    sub_ips = EC.sub_ips
    range_nb_ips = EC.range_nb_ips
    skip_ips = EC.skip_ips

    def __init__(self,
                 api_uri: str,
                 matchbox_path: str,
                 ignition_dict: dict,
                 extra_selector_dict=None):
        """
        :param api_uri: http://1.1.1.1:5000
        :param matchbox_path: /var/lib/matchbox
        :param ignition_dict: ignition.yaml
        """
        self.api_uri = api_uri
        os.environ["API_URI"] = self.api_uri
        self.matchbox_path = matchbox_path
        self.ignition_dict = ignition_dict
        self._reporting_ignitions()
        self.extra_selector = extra_selector_dict if extra_selector_dict else {}
        # inMemory cache for http queries
        if EC.sync_cache_ttl > 0:
            self._cache_query = SimpleCache(default_timeout=EC.sync_cache_ttl)
        else:
            self._cache_query = NullCache()

    def _reporting_ignitions(self):
        for k, v in self.ignition_dict.items():
            f = "%s/ignition/%s.yaml" % (self.matchbox_path, v)
            if not os.path.isfile(f):
                logger.error("%s:%s -> %s is not here" % (k, v, f))
                raise IOError(f)
            with open(f, 'rb') as ignition_file:
                blob = ignition_file.read()
            data = {v: blob.decode()}
            url = "%s/ignition/version/%s" % (self.api_uri, v)
            try:
                req = requests.post(url, data=json.dumps(data))
                response = json.loads(req.content.decode())
                req.close()
                logger.info("%s:%s -> %s is here, content reported: %s" %
                            (k, v, f, response))
            except requests.exceptions.ConnectionError as e:
                logger.error("%s:%s -> %s is here, content NOT reported: %s" %
                             (k, v, f, e))

    @staticmethod
    def get_dns_attr(fqdn: str):
        """
        TODO: Use LLDP to avoid vendor specific usage
        :param fqdn: e.g: r13-srv3.dc-1.foo.bar.cr
        :return: dict with keys shortname, dc, domain, rack and pos
        """
        d = {
            "shortname": "",
            "dc": "",
            "domain": "",
            "rack": "",
            "pos": "",
        }
        s = fqdn.split(".")
        d["shortname"] = s[0]
        try:
            d["dc"] = s[1]
        except IndexError:
            logger.error("IndexError %s[1] after split(.)" % fqdn)
            return d
        d["domain"] = ".".join(s[1:])
        try:
            rack, pos = s[0].split("-")
            d["rack"] = re.sub("[^0-9]+", "", rack)
            d["pos"] = re.sub("[^0-9]+", "", pos)
        except ValueError:
            logger.error("error during the split rack/pos %s" % s[0])
        return d

    @staticmethod
    def _cni_ipam(host_cidrv4: str, host_gateway: str):
        """
        see: https://github.com/containernetworking/cni/blob/master/SPEC.md#ip-allocation
        see: https://github.com/containernetworking/plugins/tree/master/plugins/ipam/host-local
        Use the class variables to generate a static host-local IPAM configuration
        :param host_cidrv4: an host IP with its CIDR prefixlen, eg: '10.0.0.42/8'
        :param host_gateway: an host IP for the gateway, eg: '10.0.0.1'
        :return: dict
        """
        interface = IPv4Interface(host_cidrv4)
        subnet = interface.network

        try:
            assert 0 <= ConfigSyncSchedules.sub_ips <= 256
            assert ConfigSyncSchedules.sub_ips & (
                ConfigSyncSchedules.sub_ips - 1) == 0
        except AssertionError:
            raise ValueError(
                'sub_ips must be a power of two, in [0, 256] interval')

        if ConfigSyncSchedules.sub_ips > 0:
            ip_last_decimal_field = int(str(interface.ip).split('.')[-1])
            interface = IPv4Interface(interface.network.network_address +
                                      ip_last_decimal_field *
                                      ConfigSyncSchedules.sub_ips)

        range_start = interface.ip + ConfigSyncSchedules.skip_ips
        range_end = range_start + ConfigSyncSchedules.range_nb_ips
        ipam = {
            "type": "host-local",
            "subnet": str(subnet),
            "rangeStart": str(range_start),
            "rangeEnd": str(range_end),
            "gateway": host_gateway,
            "routes": [
                {
                    "dst": "%s/32" % EC.perennial_local_host_ip,
                    "gw": str(IPv4Interface(host_cidrv4).ip)
                },
                {"dst": "0.0.0.0/0"},
            ],
            "dataDir": "/var/lib/cni/networks"
        }
        return ipam

    @staticmethod
    def get_extra_selectors(extra_selectors: dict):
        """
        Extra selectors are passed to Matchbox
        :param extra_selectors: dict
        :return:
        """
        if extra_selectors:
            if type(extra_selectors) is dict:
                logger.debug("extra selectors: %s" % extra_selectors)
                return extra_selectors

            logger.error("invalid extra selectors: %s" % extra_selectors)
            raise TypeError("%s %s is not type dict" %
                            (extra_selectors, type(extra_selectors)))

        logger.debug("no extra selectors")
        return {}

    @property
    def etcd_member_ip_list(self):
        return self._query_ip_list(schedulerv2.ScheduleRoles.etcd_member)

    @property
    def kubernetes_control_plane_ip_list(self):
        return self._query_ip_list(
            schedulerv2.ScheduleRoles.kubernetes_control_plane)

    @property
    def kubernetes_nodes_ip_list(self):
        return self._query_ip_list(schedulerv2.ScheduleRoles.kubernetes_node)

    @staticmethod
    def order_http_uri(ips: list, ec_value: int, secure=False):
        ips.sort()
        e = [
            "http%s://%s:%d" % ("s" if secure else "", k, ec_value)
            for k in ips
        ]
        return e

    @staticmethod
    def order_etcd_named(ips: list, ec_value: int, secure=False):
        ips.sort()
        e = [
            "%s=http%s://%s:%d" % (k, "s" if secure else "", k, ec_value)
            for k in ips
        ]
        return ",".join(e)

    @property
    def kubernetes_etcd_initial_cluster(self):
        return self.order_etcd_named(self.etcd_member_ip_list,
                                     EC.kubernetes_etcd_peer_port,
                                     secure=True)

    @property
    def vault_etcd_initial_cluster(self):
        return self.order_etcd_named(self.etcd_member_ip_list,
                                     EC.vault_etcd_peer_port,
                                     secure=True)

    @property
    def fleet_etcd_initial_cluster(self):
        return self.order_etcd_named(self.etcd_member_ip_list,
                                     EC.fleet_etcd_peer_port,
                                     secure=True)

    @property
    def kubernetes_etcd_member_client_uri_list(self):
        return self.order_http_uri(self.etcd_member_ip_list,
                                   EC.kubernetes_etcd_client_port,
                                   secure=True)

    @property
    def vault_etcd_member_client_uri_list(self):
        return self.order_http_uri(self.etcd_member_ip_list,
                                   EC.vault_etcd_client_port,
                                   secure=True)

    @property
    def fleet_etcd_member_client_uri_list(self):
        return self.order_http_uri(self.etcd_member_ip_list,
                                   EC.fleet_etcd_client_port,
                                   secure=True)

    @property
    def kubernetes_etcd_member_peer_uri_list(self):
        return self.order_http_uri(self.etcd_member_ip_list,
                                   EC.kubernetes_etcd_peer_port,
                                   secure=True)

    @property
    def vault_etcd_member_peer_uri_list(self):
        return self.order_http_uri(self.etcd_member_ip_list,
                                   EC.vault_etcd_peer_port,
                                   secure=True)

    @property
    def fleet_etcd_member_peer_uri_list(self):
        return self.order_http_uri(self.etcd_member_ip_list,
                                   EC.fleet_etcd_peer_port,
                                   secure=True)

    @property
    def kubernetes_control_plane(self):
        return self.order_http_uri(self.kubernetes_control_plane_ip_list,
                                   EC.kubernetes_apiserver_insecure_port)

    @staticmethod
    def compute_disks_size(disks: list):
        total_size_gb = 0
        if not disks:
            return "inMemory"

        for d in disks:
            total_size_gb += d["size-bytes"] >> 30
        ladder = list(EC.disks_ladder_gb.items())
        ladder.sort(key=lambda x: x[1])
        for k, v in ladder:
            if total_size_gb < v:
                return k

        return ladder[-1][0]

    def produce_matchbox_data(self,
                              marker: str,
                              i: int,
                              m: dict,
                              automatic_name: str,
                              update_extra_metadata=None):
        fqdn = automatic_name
        try:
            if m["fqdn"]:
                fqdn = m["fqdn"]
        except KeyError as e:
            logger.warning("%s for %s" % (e, m["mac"]))

        etc_hosts = [k for k in EC.etc_hosts]
        dns_attr = self.get_dns_attr(fqdn)
        etc_hosts.append("127.0.1.1 %s %s" % (fqdn, dns_attr["shortname"]))
        cni_attr = self._cni_ipam(m["cidrv4"], m["gateway"])
        extra_metadata = {
            "etc_hosts": etc_hosts,
            # Etcd
            "etcd_name": m["ipv4"],
            "kubernetes_etcd_initial_cluster": self.kubernetes_etcd_initial_cluster,
            "vault_etcd_initial_cluster": self.vault_etcd_initial_cluster,
            "fleet_etcd_initial_cluster": self.fleet_etcd_initial_cluster,
            "kubernetes_etcd_initial_advertise_peer_urls":
                "https://%s:%d" % (m["ipv4"], EC.kubernetes_etcd_peer_port),
            "vault_etcd_initial_advertise_peer_urls":
                "https://%s:%d" % (m["ipv4"], EC.vault_etcd_peer_port),
            "fleet_etcd_initial_advertise_peer_urls":
                "https://%s:%d" % (m["ipv4"], EC.fleet_etcd_peer_port),
            "kubernetes_etcd_member_client_uri_list":
                ",".join(self.kubernetes_etcd_member_client_uri_list),
            "vault_etcd_member_client_uri_list":
                ",".join(self.vault_etcd_member_client_uri_list),
            "fleet_etcd_member_client_uri_list":
                ",".join(self.fleet_etcd_member_client_uri_list),
            "kubernetes_etcd_data_dir": EC.kubernetes_etcd_data_dir,
            "vault_etcd_data_dir": EC.vault_etcd_data_dir,
            "fleet_etcd_data_dir": EC.fleet_etcd_data_dir,
            "kubernetes_etcd_client_port": EC.kubernetes_etcd_client_port,
            "vault_etcd_client_port": EC.vault_etcd_client_port,
            "fleet_etcd_client_port": EC.fleet_etcd_client_port,
            "kubernetes_etcd_advertise_client_urls":
                "https://%s:%d" % (m["ipv4"], EC.kubernetes_etcd_client_port),
            "vault_etcd_advertise_client_urls":
                "https://%s:%d" % (m["ipv4"], EC.vault_etcd_client_port),
            "fleet_etcd_advertise_client_urls":
                "https://%s:%d" % (m["ipv4"], EC.fleet_etcd_client_port),

            # Kubernetes
            "kubernetes_apiserver_insecure_port":
                EC.kubernetes_apiserver_insecure_port,
            "kubernetes_node_ip": "%s" % m["ipv4"],
            "kubernetes_node_name": "%s" % m["ipv4"] if fqdn == automatic_name else fqdn,
            "kubernetes_service_cluster_ip_range":
                EC.kubernetes_service_cluster_ip_range,

            # Vault are located with the etcd members
            "vault_ip_list": ",".join(self.etcd_member_ip_list),
            "vault_port": EC.vault_port,
            "kubelet_healthz_port": EC.kubelet_healthz_port,
            "etcd_member_kubernetes_control_plane_ip_list":
                ",".join(self.etcd_member_ip_list),
            "etcd_member_kubernetes_control_plane_ip": self.etcd_member_ip_list,
            "hyperkube_image_url": EC.hyperkube_image_url,
            "cephtools_image_url": EC.cephtools_image_url,
            # IPAM
            "cni": json.dumps(cni_attr, sort_keys=True),
            "network": {
                "cidrv4": m["cidrv4"],
                "gateway": m["gateway"],
                "ip": m["ipv4"],
                "subnet": cni_attr["subnet"],
                "perennial_host_ip": EC.perennial_local_host_ip,
                "ip_or_fqdn": fqdn if EC.sync_replace_ip_by_fqdn else m["ipv4"],
            },
            # host
            "hostname": dns_attr["shortname"],
            "dns_attr": dns_attr,
            "nameservers": " ".join(EC.nameservers),
            "ntp": " ".join(EC.ntp),
            "fallbackntp": " ".join(EC.fallbackntp),
            "vault_polling_sec": EC.vault_polling_sec,
            "lifecycle_update_polling_sec": EC.lifecycle_update_polling_sec,
            "disk_profile": self.compute_disks_size(m["disks"]),
        }
        selector = {"mac": m["mac"]}
        selector.update(self.get_extra_selectors(self.extra_selector))
        if update_extra_metadata:
            extra_metadata.update(update_extra_metadata)
        gen = generator.Generator(
            api_uri=self.api_uri,
            group_id="%s-%d" % (marker, i),  # one per machine
            profile_id=marker,  # link to ignition
            name=marker,
            ignition_id="%s.yaml" % self.ignition_dict[marker],
            matchbox_path=self.matchbox_path,
            selector=selector,
            extra_metadata=extra_metadata,
        )
        gen.dumps()

    def etcd_member_kubernetes_control_plane(self):
        marker = self.etcd_member_kubernetes_control_plane.__name__
        roles = schedulerv2.EtcdMemberKubernetesControlPlane.roles

        machine_roles = self._query_roles(*roles)
        for i, m in enumerate(machine_roles):
            update_md = {
                # Roles
                "roles": ",".join(roles),
                # Etcd Members
                "kubernetes_etcd_member_peer_uri_list":
                    ",".join(self.kubernetes_etcd_member_peer_uri_list),
                "vault_etcd_member_peer_uri_list":
                    ",".join(self.vault_etcd_member_peer_uri_list),
                "fleet_etcd_member_peer_uri_list":
                    ",".join(self.fleet_etcd_member_peer_uri_list),
                "kubernetes_etcd_peer_port": EC.kubernetes_etcd_peer_port,
                "vault_etcd_peer_port": EC.vault_etcd_peer_port,
                "fleet_etcd_peer_port": EC.fleet_etcd_peer_port,

                # K8s Control Plane
                "kubernetes_apiserver_count": len(machine_roles),
                "kubernetes_apiserver_insecure_bind_address":
                    EC.kubernetes_apiserver_insecure_bind_address,
            }
            self.produce_matchbox_data(
                marker=marker,
                i=i,
                m=m,
                automatic_name="cp-%d-%s" % (i, m["ipv4"].replace(".", "-")),
                update_extra_metadata=update_md,
            )
        logger.info("synced %d" % len(machine_roles))
        return len(machine_roles)

    def kubernetes_nodes(self):
        marker = self.kubernetes_nodes.__name__
        roles = schedulerv2.KubernetesNode.roles

        machine_roles = self._query_roles(*roles)
        for i, m in enumerate(machine_roles):
            update_md = {
                # Roles
                "roles": ",".join(roles),
            }
            self.produce_matchbox_data(
                marker=marker,
                i=i,
                m=m,
                automatic_name="no-%d-%s" % (i, m["ipv4"].replace(".", "-")),
                update_extra_metadata=update_md,
            )
        logger.info("synced %d" % len(machine_roles))
        return len(machine_roles)

    def notify(self):
        """
        TODO if we need to notify the API for any reason
        :return:
        """
        req = requests.post("%s/sync-notify" % self.api_uri)
        req.close()
        logger.debug("notified API")

    def apply(self, nb_try=2, seconds_sleep=0):
        logger.info("start syncing...")
        for i in range(nb_try):
            try:
                nb = self.etcd_member_kubernetes_control_plane()
                nb += self.kubernetes_nodes()
                self.notify()
                return nb
            except Exception as e:
                logger.error("fail to apply the sync %s %s" % (type(e), e))
                if i + 1 == nb_try:
                    raise

            logger.warning("retry %d/%d in %d s" %
                           (i + 1, nb_try, seconds_sleep))
            time.sleep(seconds_sleep)
        raise RuntimeError("fail to apply after %d try" % nb_try)

    def _query_roles(self, *roles):
        roles = "&".join(roles)
        url = "/scheduler/%s" % roles
        logger.debug("roles='%s'" % roles)
        data = self._cache_query.get(url)
        if data is None:
            # not in cache or evicted
            logger.debug("cache is empty for %s" % url)
            req = requests.get("%s%s" % (self.api_uri, url))
            data = json.loads(req.content.decode())
            req.close()
            data.sort(key=lambda k: k["mac"])
            self._cache_query.set(url, data)
        return data

    def _query_ip_list(self, role):
        logger.debug("role='%s'" % role)
        url = "/scheduler/ip-list/%s" % role
        data = self._cache_query.get(url)
        if data is None:
            # not in cache or evicted
            logger.debug("cache is empty for %s" % url)
            req = requests.get("%s%s" % (self.api_uri, url))
            data = json.loads(req.content.decode())
            req.close()
            data.sort()
            self._cache_query.set(url, data)
        return data
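
A minimal usage sketch (added for illustration, not part of the original example). It assumes the module already provides EC, logger, schedulerv2 and generator, that the matchbox tree contains the listed ignition templates, and that the scheduler API answers on the given URI; every literal below is a hypothetical placeholder.

if __name__ == "__main__":
    sync = ConfigSyncSchedules(
        api_uri="http://127.0.0.1:5000",
        matchbox_path="/var/lib/matchbox",
        ignition_dict={
            # keys must match the marker names used by the sync methods above
            "etcd_member_kubernetes_control_plane": "k8s-control-plane",
            "kubernetes_nodes": "k8s-node",
        },
    )
    # Generate matchbox groups/profiles for both roles, retrying once on failure.
    nb_machines = sync.apply(nb_try=2, seconds_sleep=5)
    logger.info("generated matchbox data for %d machines" % nb_machines)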
Example 2
class Cache(object):
    """
    This class is used to control the cache objects.

    If TESTING is True it will use NullCache.
    """

    def __init__(self, app=None):
        self.cache = None

        if app is not None:
            self.init_app(app)
        else:
            self.app = None
            
        self._memoized = []

    def init_app(self, app):
        "This is used to initialize cache with your app object"

        app.config.setdefault('CACHE_DEFAULT_TIMEOUT', 300)
        app.config.setdefault('CACHE_THRESHOLD', 500)
        app.config.setdefault('CACHE_KEY_PREFIX', None)
        app.config.setdefault('CACHE_MEMCACHED_SERVERS', None)
        app.config.setdefault('CACHE_DIR', None)
        app.config.setdefault('CACHE_OPTIONS', None)
        app.config.setdefault('CACHE_ARGS', [])
        app.config.setdefault('CACHE_TYPE', 'null')

        self.app = app

        self._set_cache()

    def _set_cache(self):
        if self.app.config['TESTING']:
            self.cache = NullCache()
        else:
            import_me = self.app.config['CACHE_TYPE']
            if '.' not in import_me:
                import_me = 'flaskext.cache.backends.' + \
                            import_me
            
            cache_obj = import_string(import_me)
            cache_args = self.app.config['CACHE_ARGS'][:]
            cache_options = dict(default_timeout= \
                                 self.app.config['CACHE_DEFAULT_TIMEOUT'])
            
            if self.app.config['CACHE_OPTIONS']:
                cache_options.update(self.app.config['CACHE_OPTIONS'])
            
            self.cache = cache_obj(self.app, cache_args, cache_options)
            
            if not isinstance(self.cache, BaseCache):
                raise TypeError("Cache object must subclass "
                                "werkzeug.contrib.cache.BaseCache")

    def get(self, *args, **kwargs):
        "Proxy function for internal cache object."
        return self.cache.get(*args, **kwargs)

    def set(self, *args, **kwargs):
        "Proxy function for internal cache object."
        self.cache.set(*args, **kwargs)

    def add(self, *args, **kwargs):
        "Proxy function for internal cache object."
        self.cache.add(*args, **kwargs)

    def delete(self, *args, **kwargs):
        "Proxy function for internal cache object."
        self.cache.delete(*args, **kwargs)

    def cached(self, timeout=None, key_prefix='view/%s', unless=None):
        """
        Decorator. Use this to cache a function. By default the cache key
        is `view/request.path`. You are able to use this decorator with any
        function by changing the `key_prefix`. If the token `%s` is located
        within the `key_prefix` then it will replace that with `request.path`

        Example::

            # An example view function
            @cache.cached(timeout=50)
            def big_foo():
                return big_bar_calc()

            # An example misc function to cache.
            @cache.cached(key_prefix='MyCachedList')
            def get_list():
                return [random.randrange(0, 1) for i in range(50000)]

        .. code-block:: pycon

            >>> my_list = get_list()
            
        .. note::
        
            You MUST have a request context to actually call any functions
            that are cached.

        :param timeout: Default None. If set to an integer, will cache for that
                        amount of time. Unit of time is in seconds.
        :param key_prefix: Default 'view/%(request.path)s'. Beginning key to
                           use for the cache key.
        :param unless: Default None. Cache will *always* execute the caching
                       facilities unless this callable is true.
                       This will bypass the caching entirely.
        """

        def decorator(f):
            @wraps(f)
            def decorated_function(*args, **kwargs):
                #: Bypass the cache entirely.
                if callable(unless) and unless() is True:
                    return f(*args, **kwargs)

                if '%s' in key_prefix:
                    cache_key = key_prefix % request.path
                else:
                    cache_key = key_prefix
                cache_key = cache_key.encode('utf-8')
                
                rv = self.cache.get(cache_key)
                if rv is None:
                    rv = f(*args, **kwargs)
                    self.cache.set(cache_key, rv, timeout=timeout)
                return rv
            return decorated_function
        return decorator
        
    def get_memoize_names(self):
        """
        Returns all function names used for memoized functions.
        
        This *will* include multiple function names when the memoized function
        has been called with differing arguments.
        
        :return: set of function names
        """
        return set([item[0] for item in self._memoized])
        
    def get_memoize_keys(self):
        """
        Returns all cache_keys used for memoized functions.
        
        :return: list of cache_keys
        """    
        return [item[1] for item in self._memoized]

    def memoize(self, timeout=None):
        """
        Use this to cache the result of a function, taking its arguments into
        account in the cache key.

        Information on
        `Memoization <http://en.wikipedia.org/wiki/Memoization>`_.

        Example::

            @cache.memoize(timeout=50)
            def big_foo(a, b):
                return a + b + random.randrange(0, 1000)

        .. code-block:: pycon

            >>> big_foo(5, 2)
            753
            >>> big_foo(5, 3)
            234
            >>> big_foo(5, 2)
            753

        :param timeout: Default None. If set to an integer, will cache for that
                        amount of time. Unit of time is in seconds.
        """

        def memoize(f):
            @wraps(f)
            def decorated_function(*args, **kwargs):
                cache_key = hashlib.md5()
                
                try:
                    updated = "{0}{1}{2}".format(f.__name__, args, kwargs)
                except AttributeError:
                    updated = "%s%s%s" % (f.__name__, args, kwargs)
                    
                cache_key.update(updated)
                cache_key = cache_key.digest().encode('base64')[:22]

                rv = self.cache.get(cache_key)
                if rv is None:
                    rv = f(*args, **kwargs)
                    self.cache.set(cache_key, rv, timeout=timeout)
                    self._memoized.append((f.__name__, cache_key))
                return rv
            return decorated_function
        return memoize
    
    def delete_memoized(self, fname, *args, **kwargs):
        """
        Deletes the specified function's caches, based on the given parameters.
        If parameters are given, only the entries that were memoized with them
        will be erased. Otherwise all cached versions of the function will be deleted.
        
        Example::
        
            @cache.memoize(50)
            def random_func():
                return random.randrange(1, 50)

            @cache.memoize()
            def param_func(a, b):
                return a+b+random.randrange(1, 50)
            
        .. code-block:: pycon
        
            >>> random_func()
            43
            >>> random_func()
            43
            >>> cache.delete_memoized('random_func')
            >>> random_func()
            16
            >>> param_func(1, 2)
            32
            >>> param_func(1, 2)
            32
            >>> param_func(2, 2)
            47
            >>> cache.delete_memoized('param_func', 1, 2)
            >>> param_func(1, 2)
            13
            >>> param_func(2, 2)
            47

            
        :param fname: Name of the memoized function.
        :param \*args: A list of positional parameters used with memoized function.
        :param \**kwargs: A dict of named parameters used with memoized function.
        """
        def deletes(item):

            # If no parameters given, delete all memoized versions of the function
            if not args and not kwargs:
                if item[0] == fname:
                    self.cache.delete(item[1])
                    return True
                return False

            # Construct the cache key as in memoized function
            cache_key = hashlib.md5()
            try:
                updated = "{0}{1}{2}".format(fname, args, kwargs)
            except AttributeError:
                updated = "%s%s%s" % (fname, args, kwargs)
            cache_key.update(updated)
            cache_key = cache_key.digest().encode('base64')[:22]

            if item[1] == cache_key:
                self.cache.delete(item[1])
                return True
            return False
        
        self._memoized[:] = [x for x in self._memoized if not deletes(x)]
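
A short, hypothetical wiring sketch (not part of the original example) showing how this version of the extension is typically used with a Flask app. The 'simple' CACHE_TYPE is assumed to resolve to a bundled flaskext.cache backend; the route, timeouts and lookup function are illustrative only.

import random
from flask import Flask

app = Flask(__name__)
app.config['CACHE_TYPE'] = 'simple'  # assumed bundled backend; 'null' is the default
cache = Cache(app)

@app.route('/report')
@cache.cached(timeout=60)            # cached under 'view/' + request.path
def report_view():
    return str(random.randrange(0, 1000))

@cache.memoize(timeout=300)          # keyed on function name + args + kwargs
def lookup(user_id, region='eu'):
    return (user_id, region, random.randrange(0, 1000))

# Invalidate a single memoized entry, or every tracked entry for 'lookup':
# cache.delete_memoized('lookup', 42, region='eu')
# cache.delete_memoized('lookup')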
Example 3
class Cache(object):
    """
    This class is used to control the cache objects.

    If TESTING is True it will use NullCache.
    """
    def __init__(self, app=None):
        self.cache = None

        if app is not None:
            self.init_app(app)
        else:
            self.app = None

        self._memoized = []

    def init_app(self, app):
        "This is used to initialize cache with your app object"

        app.config.setdefault('CACHE_DEFAULT_TIMEOUT', 300)
        app.config.setdefault('CACHE_THRESHOLD', 500)
        app.config.setdefault('CACHE_KEY_PREFIX', None)
        app.config.setdefault('CACHE_MEMCACHED_SERVERS', None)
        app.config.setdefault('CACHE_DIR', None)
        app.config.setdefault('CACHE_OPTIONS', None)
        app.config.setdefault('CACHE_ARGS', [])
        app.config.setdefault('CACHE_TYPE', 'null')

        self.app = app

        self._set_cache()

    def _set_cache(self):
        if self.app.config['TESTING']:
            self.cache = NullCache()
        else:
            import_me = self.app.config['CACHE_TYPE']
            if '.' not in import_me:
                import_me = 'flaskext.cache.backends.' + \
                            import_me

            cache_obj = import_string(import_me)
            cache_args = self.app.config['CACHE_ARGS'][:]
            cache_options = dict(default_timeout= \
                                 self.app.config['CACHE_DEFAULT_TIMEOUT'])

            if self.app.config['CACHE_OPTIONS']:
                cache_options.update(self.app.config['CACHE_OPTIONS'])

            self.cache = cache_obj(self.app, cache_args, cache_options)

            if not isinstance(self.cache, BaseCache):
                raise TypeError("Cache object must subclass "
                                "werkzeug.contrib.cache.BaseCache")

    def get(self, *args, **kwargs):
        "Proxy function for internal cache object."
        return self.cache.get(*args, **kwargs)

    def set(self, *args, **kwargs):
        "Proxy function for internal cache object."
        self.cache.set(*args, **kwargs)

    def add(self, *args, **kwargs):
        "Proxy function for internal cache object."
        self.cache.add(*args, **kwargs)

    def delete(self, *args, **kwargs):
        "Proxy function for internal cache object."
        self.cache.delete(*args, **kwargs)

    def cached(self, timeout=None, key_prefix='view/%s', unless=None):
        """
        Decorator. Use this to cache a function. By default the cache key
        is `view/request.path`. You are able to use this decorator with any
        function by changing the `key_prefix`. If the token `%s` is located
        within the `key_prefix` then it will replace that with `request.path`

        Example::

            # An example view function
            @cache.cached(timeout=50)
            def big_foo():
                return big_bar_calc()

            # An example misc function to cache.
            @cache.cached(key_prefix='MyCachedList')
            def get_list():
                return [random.randrange(0, 1) for i in range(50000)]

            my_list = get_list()
            
        .. note::
        
            You MUST have a request context to actually call any functions
            that are cached.
            
        .. versionadded:: 0.4
            The returned decorated function now has three function attributes
            assigned to it. These attributes are readable/writable.
            
                **uncached**
                    The original undecorated function
                
                **cache_timeout**
                    The cache timeout value for this function. For a custom value
                    to take effect, this must be set before the function is called.
                    
                **make_cache_key**
                    A function used in generating the cache_key used.

        :param timeout: Default None. If set to an integer, will cache for that
                        amount of time. Unit of time is in seconds.
        :param key_prefix: Default 'view/%(request.path)s'. Beginning key to
                           use for the cache key.
                           
                           .. versionadded:: 0.3.4                           
                               Can optionally be a callable which takes no arguments
                               but returns a string that will be used as the cache_key.
                               
        :param unless: Default None. Cache will *always* execute the caching
                       facilities unless this callable is true.
                       This will bypass the caching entirely.
        """
        def decorator(f):
            @wraps(f)
            def decorated_function(*args, **kwargs):
                #: Bypass the cache entirely.
                if callable(unless) and unless() is True:
                    return f(*args, **kwargs)

                cache_key = decorated_function.make_cache_key(*args, **kwargs)

                rv = self.cache.get(cache_key)
                if rv is None:
                    rv = f(*args, **kwargs)
                    self.cache.set(cache_key,
                                   rv,
                                   timeout=decorated_function.cache_timeout)
                return rv

            def make_cache_key(*args, **kwargs):
                if '%s' in key_prefix:
                    cache_key = key_prefix % request.path
                elif callable(key_prefix):
                    cache_key = key_prefix()
                else:
                    cache_key = key_prefix

                cache_key = cache_key.encode('utf-8')

                return cache_key

            decorated_function.uncached = f
            decorated_function.cache_timeout = timeout
            decorated_function.make_cache_key = make_cache_key

            return decorated_function

        return decorator

    def get_memoize_names(self):
        """
        Returns all function names used for memoized functions.
        
        This *will* include multiple function names when the memoized function
        has been called with differing arguments.
        
        :return: set of function names
        """
        return set([item[0] for item in self._memoized])

    def get_memoize_keys(self):
        """
        Returns all cache_keys used for memoized functions.
        
        :return: list of cache_keys
        """
        return [item[1] for item in self._memoized]

    def memoize(self, timeout=None):
        """
        Use this to cache the result of a function, taking its arguments into
        account in the cache key.

        Information on
        `Memoization <http://en.wikipedia.org/wiki/Memoization>`_.

        Example::

            @cache.memoize(timeout=50)
            def big_foo(a, b):
                return a + b + random.randrange(0, 1000)

        .. code-block:: pycon

            >>> big_foo(5, 2)
            753
            >>> big_foo(5, 3)
            234
            >>> big_foo(5, 2)
            753
            
        .. versionadded:: 0.4
            The returned decorated function now has three function attributes
            assigned to it. These attributes are readable/writable.
            
                **uncached**
                    The original undecorated function
                
                **cache_timeout**
                    The cache timeout value for this function. For a custom value
                    to take effect, this must be set before the function is called.
                    
                **make_cache_key**
                    A function used in generating the cache_key used.

        :param timeout: Default None. If set to an integer, will cache for that
                        amount of time. Unit of time is in seconds.
        """
        def memoize(f):
            @wraps(f)
            def decorated_function(*args, **kwargs):
                cache_key = decorated_function.make_cache_key(*args, **kwargs)

                rv = self.cache.get(cache_key)
                if rv is None:
                    rv = f(*args, **kwargs)
                    self.cache.set(cache_key,
                                   rv,
                                   timeout=decorated_function.cache_timeout)
                    self._memoized.append((f.__name__, cache_key))
                return rv

            def make_cache_key(*args, **kwargs):
                cache_key = hashlib.md5()

                try:
                    updated = "{0}{1}{2}".format(f.__name__, args, kwargs)
                except AttributeError:
                    updated = "%s%s%s" % (f.__name__, args, kwargs)

                cache_key.update(updated)
                cache_key = cache_key.digest().encode('base64')[:22]

                return cache_key

            decorated_function.uncached = f
            decorated_function.cache_timeout = timeout
            decorated_function.make_cache_key = make_cache_key

            return decorated_function

        return memoize

    def delete_memoized(self, fname, *args, **kwargs):
        """
        Deletes the specified function's caches, based on the given parameters.
        If parameters are given, only the entries that were memoized with them
        will be erased. Otherwise all cached versions of the function will be deleted.
        
        Example::
        
            @cache.memoize(50)
            def random_func():
                return random.randrange(1, 50)

            @cache.memoize()
            def param_func(a, b):
                return a+b+random.randrange(1, 50)
            
        .. code-block:: pycon
        
            >>> random_func()
            43
            >>> random_func()
            43
            >>> cache.delete_memoized('random_func')
            >>> random_func()
            16
            >>> param_func(1, 2)
            32
            >>> param_func(1, 2)
            32
            >>> param_func(2, 2)
            47
            >>> cache.delete_memoized('param_func', 1, 2)
            >>> param_func(1, 2)
            13
            >>> param_func(2, 2)
            47

            
        :param fname: Name of the memoized function.
        :param \*args: A list of positional parameters used with memoized function.
        :param \**kwargs: A dict of named parameters used with memoized function.
        """
        def deletes(item):

            # If no parameters given, delete all memoized versions of the function
            if not args and not kwargs:
                if item[0] == fname:
                    self.cache.delete(item[1])
                    return True
                return False

            # Construct the cache key as in memoized function
            cache_key = hashlib.md5()
            try:
                updated = "{0}{1}{2}".format(fname, args, kwargs)
            except AttributeError:
                updated = "%s%s%s" % (fname, args, kwargs)
            cache_key.update(updated)
            cache_key = cache_key.digest().encode('base64')[:22]

            if item[1] == cache_key:
                self.cache.delete(item[1])
                return True
            return False

        self._memoized[:] = [x for x in self._memoized if not deletes(x)]
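
A brief sketch (not part of the original example) of the function attributes this version attaches to decorated functions; `cache` is assumed to be an already configured instance of the class above, and the cached function is illustrative.

import random

@cache.cached(timeout=50, key_prefix='MyCachedList')
def get_list():
    return [random.randrange(0, 100) for i in range(50000)]

get_list.cache_timeout = 300      # only takes effect if set before the first call
fresh = get_list.uncached()       # call the original function, bypassing the cache
key = get_list.make_cache_key()   # here: 'MyCachedList' encoded as UTF-8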
Example 4
class Cache(object):
    """
    This class is used to control the cache objects.

    If TESTING is True it will use NullCache.
    """
    def __init__(self, app=None, with_jinja2_ext=True):
        self.with_jinja2_ext = with_jinja2_ext

        self.cache = None

        if app is not None:
            self.init_app(app)
        else:
            self.app = None

        self._memoized = []

    def init_app(self, app):
        "This is used to initialize cache with your app object"

        app.config.setdefault('CACHE_DEFAULT_TIMEOUT', 300)
        app.config.setdefault('CACHE_THRESHOLD', 500)
        app.config.setdefault('CACHE_KEY_PREFIX', None)
        app.config.setdefault('CACHE_MEMCACHED_SERVERS', None)
        app.config.setdefault('CACHE_DIR', None)
        app.config.setdefault('CACHE_OPTIONS', None)
        app.config.setdefault('CACHE_ARGS', [])
        app.config.setdefault('CACHE_TYPE', 'null')

        if self.with_jinja2_ext:
            setattr(app.jinja_env, JINJA_CACHE_ATTR_NAME, self)

            app.jinja_env.add_extension(CacheExtension)

        self.app = app

        self._set_cache()

    def _set_cache(self):
        if self.app.config['TESTING']:
            self.cache = NullCache()
        else:
            import_me = self.app.config['CACHE_TYPE']
            if '.' not in import_me:
                import_me = 'flaskext.cache.backends.' + \
                            import_me

            cache_obj = import_string(import_me)
            cache_args = self.app.config['CACHE_ARGS'][:]
            cache_options = dict(default_timeout= \
                                 self.app.config['CACHE_DEFAULT_TIMEOUT'])

            if self.app.config['CACHE_OPTIONS']:
                cache_options.update(self.app.config['CACHE_OPTIONS'])

            self.cache = cache_obj(self.app, cache_args, cache_options)

            if not isinstance(self.cache, BaseCache):
                raise TypeError("Cache object must subclass "
                                "werkzeug.contrib.cache.BaseCache")

    def get(self, *args, **kwargs):
        "Proxy function for internal cache object."
        return self.cache.get(*args, **kwargs)

    def set(self, *args, **kwargs):
        "Proxy function for internal cache object."
        self.cache.set(*args, **kwargs)

    def add(self, *args, **kwargs):
        "Proxy function for internal cache object."
        self.cache.add(*args, **kwargs)

    def delete(self, *args, **kwargs):
        "Proxy function for internal cache object."
        self.cache.delete(*args, **kwargs)

    def delete_many(self, *args, **kwargs):
        "Proxy function for internal cache object."
        self.cache.delete_many(*args, **kwargs)

    def cached(self, timeout=None, key_prefix='view/%s', unless=None):
        """
        Decorator. Use this to cache a function. By default the cache key
        is `view/request.path`. You are able to use this decorator with any
        function by changing the `key_prefix`. If the token `%s` is located
        within the `key_prefix` then it will replace that with `request.path`

        Example::

            # An example view function
            @cache.cached(timeout=50)
            def big_foo():
                return big_bar_calc()

            # An example misc function to cache.
            @cache.cached(key_prefix='MyCachedList')
            def get_list():
                return [random.randrange(0, 1) for i in range(50000)]

            my_list = get_list()

        .. note::

            You MUST have a request context to actually call any functions
            that are cached.

        .. versionadded:: 0.4
            The returned decorated function now has three function attributes
            assigned to it. These attributes are readable/writable.

                **uncached**
                    The original undecorated function

                **cache_timeout**
                    The cache timeout value for this function. For a custom value
                    to take effect, this must be set before the function is called.

                **make_cache_key**
                    A function used in generating the cache_key used.

        :param timeout: Default None. If set to an integer, will cache for that
                        amount of time. Unit of time is in seconds.
        :param key_prefix: Default 'view/%(request.path)s'. Beginning key to
                           use for the cache key.

                           .. versionadded:: 0.3.4
                               Can optionally be a callable which takes no arguments
                               but returns a string that will be used as the cache_key.

        :param unless: Default None. Cache will *always* execute the caching
                       facilities unless this callable is true.
                       This will bypass the caching entirely.
        """
        def decorator(f):
            @wraps(f)
            def decorated_function(*args, **kwargs):
                #: Bypass the cache entirely.
                if callable(unless) and unless() is True:
                    return f(*args, **kwargs)

                cache_key = decorated_function.make_cache_key(*args, **kwargs)

                rv = self.cache.get(cache_key)
                if rv is None:
                    rv = f(*args, **kwargs)
                    self.cache.set(cache_key,
                                   rv,
                                   timeout=decorated_function.cache_timeout)
                return rv

            def make_cache_key(*args, **kwargs):
                if '%s' in key_prefix:
                    cache_key = key_prefix % request.path
                elif callable(key_prefix):
                    cache_key = key_prefix()
                else:
                    cache_key = key_prefix

                cache_key = cache_key.encode('utf-8')

                return cache_key

            decorated_function.uncached = f
            decorated_function.cache_timeout = timeout
            decorated_function.make_cache_key = make_cache_key

            return decorated_function

        return decorator

    def _memvname(self, funcname):
        return funcname + '_memver'

    def memoize_make_version_hash(self):
        return uuid.uuid4().bytes.encode('base64')[:6]

    def memoize_make_cache_key(self, fname, make_name=None):
        """
        Function used to create the cache_key for memoized functions.
        """
        def make_cache_key(f, *args, **kwargs):
            version_key = self._memvname(fname)
            version_data = self.cache.get(version_key)

            if version_data is None:
                version_data = self.memoize_make_version_hash()
                self.cache.set(version_key, version_data)

            cache_key = hashlib.md5()

            #: this should have to be after version_data, so that it
            #: does not break the delete_memoized functionality.
            if callable(make_name):
                altfname = make_name(fname)
            else:
                altfname = fname

            if callable(f):
                args, kwargs = self.memoize_kwargs_to_args(f, *args, **kwargs)

            try:
                updated = "{0}{1}{2}".format(altfname, args, kwargs)
            except AttributeError:
                updated = "%s%s%s" % (altfname, args, kwargs)

            cache_key.update(updated)
            cache_key = cache_key.digest().encode('base64')[:16]
            cache_key += version_data

            return cache_key

        return make_cache_key

    def memoize_kwargs_to_args(self, f, *args, **kwargs):
        #: Inspect the arguments to the function
        #: This allows the memoization to be the same
        #: whether the function was called with
        #: 1, b=2 or with a=1, b=2 (the two are equivalent), etc.
        new_args = []
        arg_num = 0
        m_args = inspect.getargspec(f)[0]

        for i in range(len(m_args)):
            if m_args[i] in kwargs:
                new_args.append(kwargs[m_args[i]])
            elif arg_num < len(args):
                new_args.append(args[arg_num])
                arg_num += 1

        return tuple(new_args), {}

    def memoize(self, timeout=None, make_name=None, unless=None):
        """
        Use this to cache the result of a function, taking its arguments into
        account in the cache key.

        Information on
        `Memoization <http://en.wikipedia.org/wiki/Memoization>`_.

        Example::

            @cache.memoize(timeout=50)
            def big_foo(a, b):
                return a + b + random.randrange(0, 1000)

        .. code-block:: pycon

            >>> big_foo(5, 2)
            753
            >>> big_foo(5, 3)
            234
            >>> big_foo(5, 2)
            753

        .. versionadded:: 0.4
            The returned decorated function now has three function attributes
            assigned to it.

                **uncached**
                    The original undecorated function. readable only

                **cache_timeout**
                    The cache timeout value for this function. For a custom value
                    to take effect, this must be set before the function is called.

                    readable and writable

                **make_cache_key**
                    A function used in generating the cache_key used.

                    readable and writable


        :param timeout: Default None. If set to an integer, will cache for that
                        amount of time. Unit of time is in seconds.
        :param make_name: Default None. If set this is a function that accepts
                          a single argument, the function name, and returns a
                          new string to be used as the function name. If not set
                          then the function name is used.
        :param unless: Default None. Cache will *always* execute the caching
                       facilities unless this callable is true.
                       This will bypass the caching entirely.

        .. versionadded:: 0.5
            params ``make_name``, ``unless``
        """
        def memoize(f):
            @wraps(f)
            def decorated_function(*args, **kwargs):
                #: bypass cache
                if callable(unless) and unless() is True:
                    return f(*args, **kwargs)

                cache_key = decorated_function.make_cache_key(
                    f, *args, **kwargs)

                rv = self.cache.get(cache_key)
                if rv is None:
                    rv = f(*args, **kwargs)
                    self.cache.set(cache_key,
                                   rv,
                                   timeout=decorated_function.cache_timeout)
                return rv

            decorated_function.uncached = f
            decorated_function.cache_timeout = timeout
            decorated_function.make_cache_key = self.memoize_make_cache_key(
                f.__name__, make_name)

            return decorated_function

        return memoize

    def delete_memoized(self, fname, *args, **kwargs):
        """
        Deletes the specified function's caches, based on the given parameters.
        If parameters are given, only the entries that were memoized with them
        will be erased. Otherwise all cached versions of the function will be deleted.

        Example::

            @cache.memoize(50)
            def random_func():
                return random.randrange(1, 50)

            @cache.memoize()
            def param_func(a, b):
                return a+b+random.randrange(1, 50)

        .. code-block:: pycon

            >>> random_func()
            43
            >>> random_func()
            43
            >>> cache.delete_memoized('random_func')
            >>> random_func()
            16
            >>> param_func(1, 2)
            32
            >>> param_func(1, 2)
            32
            >>> param_func(2, 2)
            47
            >>> cache.delete_memoized('param_func', 1, 2)
            >>> param_func(1, 2)
            13
            >>> param_func(2, 2)
            47


        :param fname: Name of the memoized function, or a reference to the function.
        :param \*args: A list of positional parameters used with memoized function.
        :param \**kwargs: A dict of named parameters used with memoized function.

        .. note::

            Flask-Cache uses inspect to order kwargs into positional args when
            the function is memoized. If you pass a function reference into ``fname``
            instead of the function name, Flask-Cache will be able to place
            the args/kwargs in the proper order, and delete the positional cache.

            However, if ``delete_memoized`` is just called with the name of the
            function, be sure to pass in potential arguments in the same order
            as defined in your function as args only, otherwise Flask-Cache
            will not be able to compute the same cache key.

        .. note::

            Flask-Cache maintains an internal random version hash for the function.
            Using delete_memoized will only swap out the version hash, causing
            the memoize function to recompute results and put them into another key.

            This leaves any computed caches for this memoized function within the
            caching backend.

            It is recommended to use a very high timeout with memoize if using
            this function, so that when the version hash is swapped, the old cached
            results will eventually be reclaimed by the caching backend.
        """
        if callable(fname):
            assert hasattr(fname, 'uncached')
            f = fname.uncached
            _fname = f.__name__
        else:
            f = None
            _fname = fname

        if not args and not kwargs:
            version_key = self._memvname(_fname)
            version_data = self.memoize_make_version_hash()
            self.cache.set(version_key, version_data)
        else:
            cache_key = self.memoize_make_cache_key(_fname)(f, *args, **kwargs)
            self.cache.delete(cache_key)
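
A short sketch (not part of the original example) of the version-hash behaviour introduced in this version; `cache` is assumed to be an already configured instance of the class above.

import random

@cache.memoize(timeout=3600)
def costly(a, b=0):
    return a + b + random.randrange(0, 1000)

costly(1, b=2)                       # kwargs are normalised, so this shares a key with costly(1, 2)
cache.delete_memoized(costly, 1, 2)  # removes that single entry
cache.delete_memoized(costly)        # swaps the version hash; old entries are simply orphaned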
Example 5
class Cache(object):
    """
    This class is used to control the cache objects.

    If TESTING is True it will use NullCache.
    """

    def __init__(self, app=None):
        self.cache = None

        if app is not None:
            self.init_app(app)
        else:
            self.app = None
            
        self._memoized = []

    def init_app(self, app):
        "This is used to initialize cache with your app object"

        app.config.setdefault('CACHE_DEFAULT_TIMEOUT', 300)
        app.config.setdefault('CACHE_THRESHOLD', 500)
        app.config.setdefault('CACHE_KEY_PREFIX', None)
        app.config.setdefault('CACHE_MEMCACHED_SERVERS', None)
        app.config.setdefault('CACHE_DIR', None)
        app.config.setdefault('CACHE_OPTIONS', None)
        app.config.setdefault('CACHE_ARGS', [])
        app.config.setdefault('CACHE_TYPE', 'null')

        self.app = app

        self._set_cache()

    def _set_cache(self):
        if self.app.config['TESTING']:
            self.cache = NullCache()
        else:
            import_me = self.app.config['CACHE_TYPE']
            if '.' not in import_me:
                import_me = 'flaskext.cache.backends.' + \
                            import_me
            
            cache_obj = import_string(import_me)
            cache_args = self.app.config['CACHE_ARGS'][:]
            cache_options = dict(default_timeout= \
                                 self.app.config['CACHE_DEFAULT_TIMEOUT'])
            
            if self.app.config['CACHE_OPTIONS']:
                cache_options.update(self.app.config['CACHE_OPTIONS'])
            
            self.cache = cache_obj(self.app, cache_args, cache_options)
            
            if not isinstance(self.cache, BaseCache):
                raise TypeError("Cache object must subclass "
                                "werkzeug.contrib.cache.BaseCache")

    def get(self, *args, **kwargs):
        "Proxy function for internal cache object."
        return self.cache.get(*args, **kwargs)

    def set(self, *args, **kwargs):
        "Proxy function for internal cache object."
        self.cache.set(*args, **kwargs)

    def add(self, *args, **kwargs):
        "Proxy function for internal cache object."
        self.cache.add(*args, **kwargs)

    def delete(self, *args, **kwargs):
        "Proxy function for internal cache object."
        self.cache.delete(*args, **kwargs)

    def cached(self, timeout=None, key_prefix='view/%s', unless=None):
        """
        Decorator. Use this to cache a function. By default the cache key
        is `view/request.path`. You are able to use this decorator with any
        function by changing the `key_prefix`. If the token `%s` is located
        within the `key_prefix` then it will replace that with `request.path`

        Example::

            # An example view function
            @cache.cached(timeout=50)
            def big_foo():
                return big_bar_calc()

            # An example misc function to cache.
            @cache.cached(key_prefix='MyCachedList')
            def get_list():
                return [random.randrange(0, 1) for i in range(50000)]

        .. code-block:: pycon

            >>> my_list = get_list()
            
        .. note::
        
            You MUST have a request context to actually call any functions
            that are cached.

        :param timeout: Default None. If set to an integer, will cache for that
                        amount of time. Unit of time is in seconds.
        :param key_prefix: Default 'view/%(request.path)s'. Beginning key to
                           use for the cache key.
        :param unless: Default None. Cache will *always* execute the caching
                       facilities unless this callable returns True, in which
                       case the caching is bypassed entirely.
        """

        def decorator(f):
            @wraps(f)
            def decorated_function(*args, **kwargs):
                #: Bypass the cache entirely.
                if callable(unless) and unless() is True:
                    return f(*args, **kwargs)

                if '%s' in key_prefix:
                    cache_key = key_prefix % request.path
                else:
                    cache_key = key_prefix
                
                rv = self.cache.get(cache_key)
                if rv is None:
                    rv = f(*args, **kwargs)
                    self.cache.set(cache_key, rv, timeout=timeout)
                return rv
            return decorated_function
        return decorator

    def memoize(self, timeout=None):
        """
        Use this to cache the result of a function, taking its arguments into
        account in the cache key.

        Information on
        `Memoization <http://en.wikipedia.org/wiki/Memoization>`_.

        Example::

            @cache.memoize(timeout=50)
            def big_foo(a, b):
                return a + b + random.randrange(0, 1000)

        .. code-block:: pycon

            >>> big_foo(5, 2)
            753
            >>> big_foo(5, 3)
            234
            >>> big_foo(5, 2)
            753

        :param timeout: Default None. If set to an integer, will cache for that
                        amount of time. Unit of time is in seconds.
        """

        def memoize(f):
            @wraps(f)
            def decorated_function(*args, **kwargs):
                cache_key = ('memoize', f.__name__, id(f), args, str(kwargs))
                                
                rv = self.cache.get(cache_key)
                if rv is None:
                    rv = f(*args, **kwargs)
                    self.cache.set(cache_key, rv, timeout=timeout)
                    if cache_key not in self._memoized:
                        self._memoized.append(cache_key)
                return rv
            return decorated_function
        return memoize
    
    def delete_memoized(self, *keys):
        """
        Deletes the cached results of the memoized functions whose names are given.
        
        Example::
        
            @cache.memoize(50)
            def random_func():
                return random.randrange(1, 50)
            
        .. code-block:: pycon
        
            >>> random_func()
            43
            >>> random_func()
            43
            >>> cache.delete_memoized('random_func')
            >>> random_func()
            16
            
        :param \*keys: One or more function names to clear from the cache.
        """
        def deletes(item):
            if item[0] == 'memoize' and item[1] in keys:
                self.cache.delete(item)
                return True
            return False
        
        self._memoized[:] = [x for x in self._memoized if not deletes(x)]
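
A minimal usage sketch for the class above, assuming a standard Flask app and that this `Cache` class is importable from `flaskext.cache`; the view name, the helper it calls, and the `'simple'` backend string are illustrative assumptions rather than part of the example:

# Hypothetical wiring sketch -- not part of the example above.
from flask import Flask
from flaskext.cache import Cache      # assumes the module exposes the class shown here

app = Flask(__name__)
app.config['CACHE_TYPE'] = 'simple'   # resolved against flaskext.cache.backends (assumed backend name)
cache = Cache(app)                    # or: cache = Cache(); cache.init_app(app)

@app.route('/report')
@cache.cached(timeout=60)             # cache key defaults to 'view/' + request.path
def report_view():
    return build_slow_report()        # hypothetical expensive helper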
Example No. 6
class Cache(object):
    """
    This class is used to control the cache objects.

    If TESTING is True it will use NullCache.
    """

    def __init__(self, app=None, with_jinja2_ext=True):
        self.with_jinja2_ext = with_jinja2_ext

        self.cache = None

        if app is not None:
            self.init_app(app)
        else:
            self.app = None

        self._memoized = []

    def init_app(self, app):
        "This is used to initialize cache with your app object"

        app.config.setdefault('CACHE_DEFAULT_TIMEOUT', 300)
        app.config.setdefault('CACHE_THRESHOLD', 500)
        app.config.setdefault('CACHE_KEY_PREFIX', None)
        app.config.setdefault('CACHE_MEMCACHED_SERVERS', None)
        app.config.setdefault('CACHE_DIR', None)
        app.config.setdefault('CACHE_OPTIONS', None)
        app.config.setdefault('CACHE_ARGS', [])
        app.config.setdefault('CACHE_TYPE', 'null')

        if self.with_jinja2_ext:
            setattr(app.jinja_env, JINJA_CACHE_ATTR_NAME, self)

            app.jinja_env.add_extension(CacheExtension)

        self.app = app

        self._set_cache()

    def _set_cache(self):
        if self.app.config['TESTING']:
            self.cache = NullCache()
        else:
            import_me = self.app.config['CACHE_TYPE']
            if '.' not in import_me:
                import_me = 'flaskext.cache.backends.' + import_me

            cache_obj = import_string(import_me)
            cache_args = self.app.config['CACHE_ARGS'][:]
            cache_options = dict(
                default_timeout=self.app.config['CACHE_DEFAULT_TIMEOUT'])

            if self.app.config['CACHE_OPTIONS']:
                cache_options.update(self.app.config['CACHE_OPTIONS'])

            self.cache = cache_obj(self.app, cache_args, cache_options)

            if not isinstance(self.cache, BaseCache):
                raise TypeError("Cache object must subclass "
                                "werkzeug.contrib.cache.BaseCache")

    def get(self, *args, **kwargs):
        "Proxy function for internal cache object."
        return self.cache.get(*args, **kwargs)

    def set(self, *args, **kwargs):
        "Proxy function for internal cache object."
        self.cache.set(*args, **kwargs)

    def add(self, *args, **kwargs):
        "Proxy function for internal cache object."
        self.cache.add(*args, **kwargs)

    def delete(self, *args, **kwargs):
        "Proxy function for internal cache object."
        self.cache.delete(*args, **kwargs)

    def delete_many(self, *args, **kwargs):
        "Proxy function for internal cache object."
        self.cache.delete_many(*args, **kwargs)

    def cached(self, timeout=None, key_prefix='view/%s', unless=None):
        """
        Decorator. Use this to cache a function. By default the cache key
        is `view/request.path`. You are able to use this decorator with any
        function by changing the `key_prefix`. If the token `%s` is located
        within the `key_prefix`, it will be replaced with `request.path`.

        Example::

            # An example view function
            @cache.cached(timeout=50)
            def big_foo():
                return big_bar_calc()

            # An example misc function to cache.
            @cache.cached(key_prefix='MyCachedList')
            def get_list():
                return [random.randrange(0, 1) for i in range(50000)]

            my_list = get_list()

        .. note::

            You MUST have a request context to actually call any function
            that is cached.

        .. versionadded:: 0.4
            The returned decorated function now has three function attributes
            assigned to it. These attributes are readable/writable.

                **uncached**
                    The original undecorated function

                **cache_timeout**
                    The cache timeout value for this function. For a custom value
                    to take effect, this must be set before the function is called.

                **make_cache_key**
                    A function used in generating the cache_key used.

        :param timeout: Default None. If set to an integer, will cache for that
                        amount of time. Unit of time is in seconds.
        :param key_prefix: Default 'view/%(request.path)s'. Beginning key to
                           use for the cache key.

                           .. versionadded:: 0.3.4
                               Can optionally be a callable which takes no arguments
                               but returns a string that will be used as the cache_key.

        :param unless: Default None. Cache will *always* execute the caching
                       facilities unless this callable returns True, in which
                       case the caching is bypassed entirely.
        """

        def decorator(f):
            @wraps(f)
            def decorated_function(*args, **kwargs):
                #: Bypass the cache entirely.
                if callable(unless) and unless() is True:
                    return f(*args, **kwargs)

                cache_key = decorated_function.make_cache_key(*args, **kwargs)

                rv = self.cache.get(cache_key)
                if rv is None:
                    rv = f(*args, **kwargs)
                    self.cache.set(cache_key, rv,
                                   timeout=decorated_function.cache_timeout)
                return rv

            def make_cache_key(*args, **kwargs):
                if callable(key_prefix):
                    cache_key = key_prefix()
                elif '%s' in key_prefix:
                    cache_key = key_prefix % request.path
                else:
                    cache_key = key_prefix

                cache_key = cache_key.encode('utf-8')

                return cache_key

            decorated_function.uncached = f
            decorated_function.cache_timeout = timeout
            decorated_function.make_cache_key = make_cache_key

            return decorated_function
        return decorator

    def _memvname(self, funcname):
        return funcname + '_memver'

    def memoize_make_version_hash(self):
        return uuid.uuid4().bytes.encode('base64')[:6]

    def memoize_make_cache_key(self, fname, make_name=None):
        """
        Function used to create the cache_key for memoized functions.
        """
        def make_cache_key(f, *args, **kwargs):
            version_key = self._memvname(fname)
            version_data = self.cache.get(version_key)

            if version_data is None:
                version_data = self.memoize_make_version_hash()
                self.cache.set(version_key, version_data)

            cache_key = hashlib.md5()

            #: this has to happen after version_data is fetched, so that it
            #: does not break the delete_memoized functionality.
            if callable(make_name):
                altfname = make_name(fname)
            else:
                altfname = fname

            if callable(f):
                args, kwargs = self.memoize_kwargs_to_args(f, *args, **kwargs)

            try:
                updated = "{0}{1}{2}".format(altfname, args, kwargs)
            except AttributeError:
                updated = "%s%s%s" % (altfname, args, kwargs)

            cache_key.update(updated)
            cache_key = cache_key.digest().encode('base64')[:16]
            cache_key += version_data

            return cache_key
        return make_cache_key

    def memoize_kwargs_to_args(self, f, *args, **kwargs):
        #: Inspect the arguments to the function.
        #: This makes the memoization key independent of how the function
        #: was called: f(1, b=2) is equivalent to f(a=1, b=2), etc.
        new_args = []
        arg_num = 0
        m_args = inspect.getargspec(f)[0]
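        #: m_args now holds f's positional parameter names in declaration order;
        #: the loop below folds keyword arguments back into that positional order.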

        for i in range(len(m_args)):
            if m_args[i] in kwargs:
                new_args.append(kwargs[m_args[i]])
            elif arg_num < len(args):
                new_args.append(args[arg_num])
                arg_num += 1

        return tuple(new_args), {}

    def memoize(self, timeout=None, make_name=None, unless=None):
        """
        Use this to cache the result of a function, taking its arguments into
        account in the cache key.

        Information on
        `Memoization <http://en.wikipedia.org/wiki/Memoization>`_.

        Example::

            @cache.memoize(timeout=50)
            def big_foo(a, b):
                return a + b + random.randrange(0, 1000)

        .. code-block:: pycon

            >>> big_foo(5, 2)
            753
            >>> big_foo(5, 3)
            234
            >>> big_foo(5, 2)
            753

        .. versionadded:: 0.4
            The returned decorated function now has three function attributes
            assigned to it.

                **uncached**
                    The original undecorated function. Readable only.

                **cache_timeout**
                    The cache timeout value for this function. For a custom value
                    to take effect, this must be set before the function is called.

                    readable and writable

                **make_cache_key**
                    A function used in generating the cache_key used.

                    readable and writable


        :param timeout: Default None. If set to an integer, will cache for that
                        amount of time. Unit of time is in seconds.
        :param make_name: Default None. If set this is a function that accepts
                          a single argument, the function name, and returns a
                          new string to be used as the function name. If not set
                          then the function name is used.
        :param unless: Default None. Cache will *always* execute the caching
                       facilities unless this callable returns True, in which
                       case the caching is bypassed entirely.

        .. versionadded:: 0.5
            params ``make_name``, ``unless``
        """

        def memoize(f):
            @wraps(f)
            def decorated_function(*args, **kwargs):
                #: bypass cache
                if callable(unless) and unless() is True:
                    return f(*args, **kwargs)

                cache_key = decorated_function.make_cache_key(f, *args, **kwargs)

                rv = self.cache.get(cache_key)
                if rv is None:
                    rv = f(*args, **kwargs)
                    self.cache.set(cache_key, rv,
                                   timeout=decorated_function.cache_timeout)
                return rv

            decorated_function.uncached = f
            decorated_function.cache_timeout = timeout
            decorated_function.make_cache_key = self.memoize_make_cache_key(f.__name__,
                                                                            make_name)

            return decorated_function
        return memoize

    def delete_memoized(self, fname, *args, **kwargs):
        """
        Deletes the specified function's caches, based on the given parameters.
        If parameters are given, only the entries that were memoized with them
        will be erased. Otherwise, all cached versions of the function will be deleted.

        Example::

            @cache.memoize(50)
            def random_func():
                return random.randrange(1, 50)

            @cache.memoize()
            def param_func(a, b):
                return a+b+random.randrange(1, 50)

        .. code-block:: pycon

            >>> random_func()
            43
            >>> random_func()
            43
            >>> cache.delete_memoized('random_func')
            >>> random_func()
            16
            >>> param_func(1, 2)
            32
            >>> param_func(1, 2)
            32
            >>> param_func(2, 2)
            47
            >>> cache.delete_memoized('param_func', 1, 2)
            >>> param_func(1, 2)
            13
            >>> param_func(2, 2)
            47


        :param fname: Name of the memoized function, or a reference to the function.
        :param \*args: A list of positional parameters used with memoized function.
        :param \**kwargs: A dict of named parameters used with memoized function.

        .. note::

            Flask-Cache uses inspect to order kwargs into positional args when
            the function is memoized. If you pass a function reference into ``fname``
            instead of the function name, Flask-Cache will be able to place
            the args/kwargs in the proper order, and delete the positional cache.

            However, if ``delete_memoized`` is just called with the name of the
            function, be sure to pass in potential arguments in the same order
            as defined in your function as args only, otherwise Flask-Cache
            will not be able to compute the same cache key.

        .. note::

            Flask-Cache maintains an internal random version hash for the function.
            Using delete_memoized will only swap out the version hash, causing
            the memoize function to recompute results and put them into another key.

            This leaves any computed caches for this memoized function within the
            caching backend.

            It is recommended to use a very high timeout with memoize if using
            this function, so that when the version hash is swapped, the old
            cached results will eventually be reclaimed by the caching backend.
        """
        if callable(fname):
            assert hasattr(fname, 'uncached')
            f = fname.uncached
            _fname = f.__name__
        else:
            f = None
            _fname = fname

        if not args and not kwargs:
            version_key = self._memvname(_fname)
            version_data = self.memoize_make_version_hash()
            self.cache.set(version_key, version_data)
        else:
            cache_key = self.memoize_make_cache_key(_fname)(f, *args, **kwargs)
            self.cache.delete(cache_key)
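
A short sketch of how the memoize / delete_memoized pair above could be exercised, assuming `cache` is an initialized instance of this class; `lookup` and `expensive_query` are illustrative names, not part of the example:

# Hypothetical usage sketch for the version-hash based memoization above.
@cache.memoize(timeout=3600)
def lookup(a, b=0):
    return expensive_query(a, b)      # hypothetical backend call

lookup(1, b=2)                        # kwargs are folded into positional order,
lookup(1, 2)                          # so both calls share one cache key

# Passing the decorated function lets the args/kwargs be ordered correctly:
cache.delete_memoized(lookup, 1, 2)   # drops only the (1, 2) entry

# Passing only the name rotates the internal version hash, which invalidates
# every memoized entry for that function (old entries expire via their timeout):
cache.delete_memoized('lookup')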
Example No. 7
class Provider(object):
    def __init__(self, config):
        self._config = config
        self.cache = NullCache()
        self.statsd = NullStats()

    attribution = Attribution(
        licence_name="Open Government Licence",
        licence_url="http://www.nationalarchives.gov.uk/doc/open-government-licence/",
        attribution_text="Contains public sector information provided by the Met Office",
    )

    def latest_observations(self):
        response = self.cache.get(self._CACHE_KEY.format(self._config["location_id"]))

        if not response:
            self.statsd.incr(__name__ + ".cache_miss")
            with self.statsd.timer(__name__ + ".request_time"):
                response = self._make_request()

            max_age = parse_cache_control_header(response.info().getparam("Cache-Control")).max_age

            response = json.load(response)
            self.cache.set(self._CACHE_KEY.format(self._config["location_id"]), response, max_age)
        else:
            self.statsd.incr(__name__ + ".cache_hit")

        source_period = response["SiteRep"]["DV"]["Location"]["Period"]
        if isinstance(source_period, list):
            source_period = source_period[-1]
        source_observation = source_period["Rep"][-1]
        minutes_since_midnight = timedelta(minutes=int(source_observation["$"]))
        obs_time = datetime(*time.strptime(source_period["value"], "%Y-%m-%dZ")[:6], tzinfo=utc)
        obs_time += minutes_since_midnight

        weather_type, weather_type_id = self.WEATHER_TYPES.get(source_observation["W"])

        return {
            "type": weather_type,
            "type_id": weather_type_id,
            "temperature": u"{} °C".format(source_observation["T"]),
            "wind_speed": "{} mph".format(source_observation["S"]),
            "gust_speed": "{} mph".format(source_observation["G"]) if "G" in source_observation else "N/A",
            "wind_direction": source_observation["D"],
            "pressure": "{} mb".format(source_observation["P"]),
            "obs_location": capwords(response["SiteRep"]["DV"]["Location"]["name"]),
            "obs_time": obs_time.isoformat(),
        }

    def _make_request(self):
        return urlopen(
            "http://datapoint.metoffice.gov.uk/public/data/val/wxobs/all"
            + "/json/{location_id}?res=hourly&key={api_key}".format(**self._config)
        )

    _CACHE_KEY = "weather/metoffice/{}"

    WEATHER_TYPES = {
        "NA": (_("Not available"), ""),
        "0": (_("Clear night"), "clear_night"),
        "1": (_("Sunny day"), "sun"),
        "2": (_("Partly cloudy"), "cloud"),
        "3": (_("Partly cloudy"), "cloud"),
        "5": (_("Mist"), "fog"),
        "6": (_("Fog"), "fog"),
        "7": (_("Cloudy"), "cloud"),
        "8": (_("Overcast"), "cloud"),
        "9": (_("Light rain shower"), "rain"),
        "10": (_("Light rain shower"), "rain"),
        "11": (_("Drizzle"), "rain"),
        "12": (_("Light rain"), "rain"),
        "13": (_("Heavy rain shower"), "rain"),
        "14": (_("Heavy rain shower"), "rain"),
        "15": (_("Heavy rain"), "rain"),
        "16": (_("Sleet shower"), "rain"),
        "17": (_("Sleet shower"), "rain"),
        "18": (_("Sleet"), "rain"),
        "19": (_("Hail shower"), "rain"),
        "20": (_("Hail shower"), "rain"),
        "21": (_("Hail"), "rain"),
        "22": (_("Light snow shower"), "snow"),
        "23": (_("Light snow shower"), "snow"),
        "24": (_("Light snow"), "snow"),
        "25": (_("Heavy snow shower"), "snow"),
        "26": (_("Heavy snow shower"), "snow"),
        "27": (_("Heavy snow"), "snow"),
        "28": (_("Thunder shower"), "thunder"),
        "29": (_("Thunder shower"), "thunder"),
        "30": (_("Thunder"), "thunder"),
    }
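
The provider above keys the parsed DataPoint response by location and honours the upstream Cache-Control max-age. A small sketch of how that caching seam could be exercised, assuming werkzeug's SimpleCache is substituted for the NullCache default; the config values are placeholders, and a real call needs network access plus a valid DataPoint API key:

# Hypothetical exercise of the caching seam above; config values are placeholders.
from werkzeug.contrib.cache import SimpleCache

provider = Provider({"location_id": "LOCATION_ID", "api_key": "API_KEY"})
provider.cache = SimpleCache()            # NullCache never stores, so swap in a real backend

first = provider.latest_observations()    # hits the Met Office API, caches for max-age seconds
second = provider.latest_observations()   # served from the cache until max-age expires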