def __init__(self):
    self.cache = dc.Cache('umlsCache')
Example #2
@pytest.fixture
def cache():
    with dc.Cache() as cache:
        yield cache
    shutil.rmtree(cache.directory, ignore_errors=True)
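A sketch of a test that consumes this fixture (the test name and assertions are illustrative, not from the original source):

def test_set_get(cache):
    cache['key'] = 'value'
    assert cache['key'] == 'value'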
Example #3
def test_disk_valueerror():
    with pytest.raises(ValueError):
        with dc.Cache(disk=dc.Disk('test')):
            pass
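For contrast, a minimal sketch of the accepted form (diskcache expects the Disk class itself, not an instance):

with dc.Cache(disk=dc.Disk) as cache:
    cache['key'] = 'value'
shutil.rmtree(cache.directory, ignore_errors=True)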
Example #4
# Obtain configuration dictionary.
config = get_config()

# Physical constants.
# TODO: Move physical constants to model definition.
water_density = 998.31  # [kg/m^3]
water_kinematic_viscosity = 1.3504e-6  # [m^2/s]
gravitational_acceleration = 9.81  # [m/s^2]

# Instantiate multiprocessing / parallel computing pool.
# - Pool is instantiated as None and only created on first use in `fledge.utils.starmap`.
parallel_pool = None

# Instantiate / reload cache.
if config['caching']['enable']:
    cache = diskcache.Cache(os.path.join(base_path, 'cache'))
    if config['caching']['reset_cache']:
        cache.clear()

# Modify matplotlib default settings.
plt.style.use(config['plots']['matplotlib_style'])
matplotlib.rc('axes',
              axisbelow=True)  # Ensure that axis grid is behind plot elements.
matplotlib.rc('figure', figsize=config['plots']['matplotlib_figure_size'])
matplotlib.rc('font', family=config['plots']['matplotlib_font_family'])
matplotlib.rc('image', cmap=config['plots']['matplotlib_colormap'])
matplotlib.rc(
    'pdf',
    fonttype=42)  # Avoid "Type 3 fonts" in PDFs for better compatibility.
matplotlib.rc('ps', fonttype=42)  # See: http://phyletica.org/matplotlib-fonts/
matplotlib.rc('savefig', format=config['plots']['file_format'])
Example #5
def memoized(func, *, size_limit=10**8, eviction_policy='least-recently-used', cache_dir=CACHE_DIR,
             typed=False, round_digits=15, ignore_args=None):
    """Persistent memoization function decorator with argument normalization and ignore list.

    :func: a callable object that is not a method
    :size_limit: (int, in bytes) approximate size limit of cache - default 100 MB
    :eviction_policy: rule to evict cache if size_limit is reached, any of
        diskcache.EVICTION_POLICY
    :cache_dir: location (directory path) of persistent cache files
    :typed: whether to consider lists of identically valued arguments of different types as
        different argument lists
    :round_digits: number of digits to round to, pass False to disable rounding
    :ignore_args: name or list of names of parameters to ignore
    :returns: a memoized version of function 'func'
    """
    func_hash = hashlib.md5(func.__code__.co_code).hexdigest()
    func_id = "{}.{:0>4s}".format(func.__qualname__, func_hash[-4:])
    cache_dir = os.path.join(cache_dir, func_id)
    func.cache = diskcache.Cache(cache_dir, size_limit=size_limit, eviction_policy=eviction_policy)
    func.async_results = {}

    atexit.register(func.cache.close)

    @atexit.register
    def consolidate_async():
        for key, result in func.async_results.items():
            try:
                if result.successful():
                    func.cache[dict(sorted(key))] = result.get()
            # Exception class changed in Python 3.7:
            # https://docs.python.org/3/library/multiprocessing.html#multiprocessing.pool.AsyncResult.successful
            except (AssertionError, ValueError):
                pass

    arg_names = inspect.getfullargspec(func).args
    if ignore_args is not None:
        ignore_args = frozenset([ignore_args] if isinstance(ignore_args, str) else ignore_args)
        assert all(arg in arg_names for arg in ignore_args), "Unknown argument name passed to 'ignore_args' option."

    @wraps(func)
    def wrapper(*args, **kwargs):
        key = kwargs.copy()
        key.update(zip(arg_names, args))
        if ignore_args is not None:
            key = {k: v for k, v in key.items() if k not in ignore_args}
        if not typed:
            key = {k: _normalize_type(v, round_digits) for k, v in key.items()}
        key = dict(sorted(key.items()))

        try:
            return func.cache[key]
        except KeyError:
            try:
                return func.async_results[tuple(key.items())]
            except KeyError:
                logging.debug("%s: cache miss on key %s", wrapper.__qualname__, repr(key))
                value = func(*args, **kwargs)
                if isinstance(value, pool.AsyncResult):
                    func.async_results[tuple(key.items())] = value
                else:
                    func.cache[key] = value
                return value

    return wrapper
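A minimal usage sketch (the compute function and its arguments are illustrative assumptions; CACHE_DIR and _normalize_type are assumed to be defined in the surrounding module):

def compute(x, y, verbose=False):
    return x ** y

compute = memoized(compute, round_digits=6, ignore_args='verbose')
compute(2, 10)                 # cache miss: runs the function and stores 1024
compute(2, 10, verbose=True)   # cache hit: 'verbose' is excluded from the key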
Example #6
def _set_creds(self, creds):
    with diskcache.Cache(directory=self._cachedir) as cache:
        cache[self._CREDS_STORAGE_KEY] = creds
    return creds
Example #7
from typing import Dict

import bs4
import diskcache
import requests

_CACHE = diskcache.Cache('.cache/html')


def fetch_static_soup(url: str,
                      params: Dict[str, str] = None) -> bs4.BeautifulSoup:
    html = _fetch_static(url, params)
    return bs4.BeautifulSoup(html, 'html.parser')


@_CACHE.memoize()
def _fetch_static(url: str, params: Dict[str, str] = None) -> str:
    return _fetch(url, params)


def _fetch(url: str, params: Dict[str, str] = None) -> str:
    response = requests.get(url, params=params)
    return response.text  # decoded text, matching the declared `-> str` annotation
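A brief usage sketch (the URL is a placeholder assumption):

soup = fetch_static_soup('https://example.com')
print(soup.title)  # repeated calls with the same URL are served from '.cache/html'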
Example #8
import glob
import os

import cv2
import diskcache as dc
from tqdm import tqdm

cache = dc.Cache('stage_1_db')      # tracks which files are already processed
crops = dc.Cache('stage_1_images')  # stores the extracted crops

size = 32
step = 10
storage = "./images/"
os.chdir(storage)

print(len(crops))
cached = set(cache)  # snapshot of already-processed file names
for file in tqdm(glob.glob("*.*")):
    try:
        if file not in cached:
            img = cv2.imread(file)
            if img is None:  # skip unreadable or non-image files
                continue
            h, w = img.shape[:2]
            for x in tqdm(range(0, w - size, step)):
                for y in range(0, h - size, step):
                    crop_img = img[y:y + size, x:x + size]
                    target = f"crop-{x}-{y}-{file}"
                    crops[target] = crop_img
            cache[file] = "OK"
    except KeyboardInterrupt:
        cache.close()
        break
Example #9
import diskcache
import pickle
import hashlib

r = diskcache.Cache('tmp')


class Cache:
    CACHED_SECONDS = 768

    @classmethod
    def get(cls, path):
        if cls.has(path):
            return pickle.loads(r.get(cls._get_key(path)))
        return False

    @classmethod
    def has(cls, path):
        return r.get(cls._get_key(path)) is not None

    @classmethod
    def set(cls, path, entity, expire=CACHED_SECONDS):
        return r.set(cls._get_key(path), pickle.dumps(entity), expire)

    @classmethod
    def rem(cls, path):
        return r.delete(cls._get_key(path))

    @staticmethod
    def _get_key(path):
        return 'onelist:' + hashlib.md5(path.encode()).hexdigest()
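A short usage sketch (the path and entity are illustrative assumptions):

Cache.set('/lists/42', {'title': 'groceries'})
entity = Cache.get('/lists/42')  # -> {'title': 'groceries'}, or False on a miss
Cache.rem('/lists/42')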
Example #10
import socket
import sys

import diskcache
import feets
import joblib

PATH = "/home/jbcabral/carpyncho3/correctors/"
sys.path.insert(0, PATH)

from parfeets import extract

# =============================================================================
# SOME VARS
# =============================================================================

host_name = socket.gethostname()

cache = diskcache.Cache("production_data/_cache" + host_name)

CPUS = joblib.cpu_count()

COLUMNS_TO_REMOVE = [
    'scls_h', 'scls_j', 'scls_k', "AndersonDarling", "AmplitudeJ",
    "AmplitudeH", "AmplitudeJH", "AmplitudeJK", 'Freq1_harmonics_rel_phase_0',
    'Freq2_harmonics_rel_phase_0', 'Freq3_harmonics_rel_phase_0', "CAR_mean",
    "CAR_tau", "CAR_sigma"
]

COLUMNS_NO_FEATURES = [
    'id', 'tile', 'cnt', 'ra_k', 'dec_k', 'vs_type', 'vs_catalog', 'cls'
]

TILES_BY_HOST = {
Example #11
def __init__(self):
    self.cache = diskcache.Cache(Defaults.cache_dir)
Example #12
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
"""This is just a Test."""

import diskcache as dc

from enum import Enum

from .exceptions import DataverseError, MissingRequiredArgumentError, UnknownAPIEndpointError

__version__ = "0.2.0-dev"
_USER_AGENT = f"b4dataverse/{__version__}"

_cache = dc.Cache("tmp")


class APIEndpoint(Enum):
    EVE_ONLINE = "https://esi.evetech.net/latest"
    ZKILLBOARD = "https://zkillboard.com/api"


REQUEST_HEADERS = {"User-Agent": _USER_AGENT}
Example #13
import numpy as np
import matplotlib as mpl
mpl.use('agg')
import matplotlib.pylab as plt
import matplotlib.cm as cm
import png
import io
import base64
import traceback

import diskcache as dc
cache = dc.Cache('tmp-cache')


def d_(x, n=10):
    d_filter = np.zeros(n)
    d_filter[0] = 1
    d_filter[-1] = -1
    R = np.convolve(x, d_filter, mode='same')

    R[:1] = 0
    R[-1:] = 0

    return R


def speed_estim_for_grade(grade, lut_speed_grade, peak=(0.1, 0.9), plot=False):

    i_grade = np.abs(grade - lut_speed_grade[2][:-1]).argmin()

    dp = np.copy(lut_speed_grade[0][:, i_grade])
Example #14
def __init__(self):
    self.cache = dc.Cache('snomedctCache')
Example #15
def __init__(self, conn):
    self.cache = dc.Cache('tmp')
    self.conn = conn
    self.hits = 0
    self.total = 0
Example #16
import dash
import dash_labs as dl
import dash_bootstrap_components as dbc

from dash import html
from dash.long_callback import DiskcacheLongCallbackManager
from dash.dependencies import Input, Output

## Diskcache
import diskcache

cache = diskcache.Cache("./cache")
long_callback_manager = DiskcacheLongCallbackManager(cache)
import time

app = dash.Dash(__name__,
                plugins=[dl.plugins.pages],
                external_stylesheets=[dbc.themes.BOOTSTRAP])

navbar = dbc.NavbarSimple(
    dbc.DropdownMenu(
        [
            dbc.DropdownMenuItem(page["name"], href=page["path"])
            for page in dash.page_registry.values()
        ],
        nav=True,
        label="More Pages",
    ),
    brand="Multi Page App Plugin Demo",
    color="primary",
    dark=True,
Example #17
def _get_creds(self):
    with diskcache.Cache(directory=self._cachedir) as cache:
        return cache.get(self._CREDS_STORAGE_KEY)
Example #18
def main():
    url = 'https://cods.colorado.edu/api/gpslidar/'
    msg_dict = {
        NavTimeUTC.id: NavTimeUTC,
        NavHPPOSLLH.id: NavHPPOSLLH,
        AckAck.id: AckAck,
        AckNak.id: AckNak,
        CfgValgetRec.id: CfgValgetRec,
        RxmRawx.id: RxmRawx,
        InfDebug.id: InfDebug,
        InfError.id: InfError,
        InfNotice.id: InfNotice,
        InfTest.id: InfTest,
        InfWarning.id: InfWarning
    }  # Dictionary of implemented packet formats

    def_loc = socket.gethostname()[0:4]

    # Parse arguments
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '-c',
        '--comm',
        type=str,
        default="USB",
        help='Communication type ("USB" or "UART"). Default is "USB"')
    parser.add_argument(
        '-f',
        '--configfile',
        type=str,
        default='/home/ccaruser/ublox/default.ini',
        help='Location of configuration file to use. Default is "default.ini"')
    parser.add_argument(
        '-l',
        '--location',
        type=str,
        default=def_loc,
        help='GPS location. Default is first four letters of hostname (' +
        def_loc + ')')
    parser.add_argument('--led',
                        type=int,
                        default=21,
                        help='LED pin. Default is 21.')
    args = parser.parse_args()

    if args.comm == "USB":
        port = '/dev/serial/by-id/usb-u-blox_AG_-_www.u-blox.com_u-blox_GNSS_receiver-if00'  # Serial port  TODO: UART
    elif args.comm == "UART":
        port = '/dev/ttyS0'
    else:
        logging.critical("Bad communication type: " + args.comm)
        sys.exit(1)

    dev = serial.Serial(port,
                        timeout=5,
                        baudrate=38400,
                        parity=serial.PARITY_NONE,
                        stopbits=serial.STOPBITS_ONE,
                        bytesize=serial.EIGHTBITS)  # Open serial port

    # Configure
    config = ConfigParser(inline_comment_prefixes=('#', ';'))
    config.read(args.configfile)  # Read configuration packets to be sent

    packet = CfgValsetSend(config[args.comm])  # Create configuration packets

    # Write config packet
    wrtr = UBXWriter(dev, msg_dict)  # ublox writer
    wrtr.write_packet(packet.payload(), packet.id)  # Write ublox packets

    try:
        dev.baudrate = config[args.comm][
            'CFG-UART1-BAUDRATE']  # Set baud rate to desired rate in configuration file
    except KeyError:  # If there is no baud rate in configuration file
        pass

    # Read packets
    loc = args.location
    key = read_key('/home/ccaruser/.keys/' + loc +
                   '.key')  # Private key for sending
    led = LED(args.led)  # LED class initialization
    led.set_high()  # Turn on LED

    next_raw, next_pos = [], []

    leapS = None
    week = None

    cache_raw = dc.Cache('/var/tmp/unsent_gpsraw')
    cache_pos = dc.Cache('/var/tmp/unsent_gpspos')

    # Send old data
    t2 = Thread(target=send_old, args=(cache_raw, url + 'rawgps/' + loc, key))
    t2.start()
    t3 = Thread(target=send_old, args=(cache_pos, url + 'posgps/' + loc, key))
    t3.start()

    logging.info('Starting ' + loc + ' GPS at: ' + str(dt.datetime.utcnow()))

    try:
        while True:
            led_timer = dt.datetime.utcnow()
            raw, hp_pos = next_raw, next_pos  # Initialization of vectors
            next_raw, next_pos = [], []
            prev_raw, prev_pos = 0, 0
            while True:
                rdr = UBXReader(dev, msg_dict)  # Initialize reader
                packet = rdr.read_packet()  # Read packet
                if isinstance(packet, RxmRawx):  # If raw gps position packet
                    mod_raw = (packet.rcvTow - packet.leapS) % 60
                    if mod_raw >= prev_raw:
                        raw.append(packet)
                        week = packet.week
                        leapS = packet.leapS
                        prev_raw = mod_raw
                    else:
                        next_raw.append(packet)
                        break
                elif isinstance(
                        packet, NavHPPOSLLH
                ) and leapS:  # If high precision gps position packet
                    mod_pos = ((packet.iTOW / 1000) - leapS) % 60
                    if mod_pos >= prev_pos:
                        hp_pos.append(packet)
                        prev_pos = mod_pos
                    else:
                        next_pos.append(packet)
                        break
                elif isinstance(packet, NavTimeUTC):  # If time packet
                    #if packet.nano < 0:
                    #    time = dt.datetime(packet.year, packet.month, packet.day, packet.hour, packet.min,
                    #                       packet.sec, -packet.nano // 10**3)
                    #else:
                    #    time = dt.datetime(packet.year, packet.month, packet.day, packet.hour, packet.min,
                    #                       packet.sec, packet.nano // 10**3)
                    #cmd = 'sudo date -s "' + time.strftime('%Y-%m-%d %H:%M:%S') + 'UTC"'
                    #logging.info(cmd)
                    #os.system(cmd)
                    pass
                else:
                    pass
                if (dt.datetime.utcnow() - led_timer
                    ).total_seconds() >= 1:  # Switch led every second
                    led.switch()
                    led_timer = dt.datetime.utcnow()

            # Get packets to send and start threads to send packets through api
            if raw:
                p_raw = raw_packet(raw)
                if not t2.is_alive():
                    t2 = Thread(
                        target=call_send,
                        args=(url + 'rawgps/' + loc, key, p_raw,
                              (dt.datetime.utcnow() -
                               dt.datetime(1970, 1, 1)).total_seconds(),
                              cache_raw))
                    t2.start()
                else:
                    save_to_dc(cache_raw,
                               (dt.datetime.utcnow() -
                                dt.datetime(1970, 1, 1)).total_seconds(),
                               p_raw)

            if hp_pos and week and leapS:
                p_pos = pos_packet(hp_pos, week, leapS)
                if not t3.is_alive():
                    t3 = Thread(
                        target=call_send,
                        args=(url + 'posgps/' + loc, key, p_pos,
                              (dt.datetime.utcnow() -
                               dt.datetime(1970, 1, 1)).total_seconds(),
                              cache_pos))
                    t3.start()
                else:
                    save_to_dc(cache_pos,
                               (dt.datetime.utcnow() -
                                dt.datetime(1970, 1, 1)).total_seconds(),
                               p_pos)

    finally:
        # At the end turn LED off
        led.set_low()
Example #19
def clear_cached_credentials(self):
    with diskcache.Cache(directory=self._cachedir) as cache:
        cache.delete(self._CREDS_STORAGE_KEY)
Example #20
from collections import Counter

import diskcache
import paramiko
import requests
from requests import ConnectionError
from requests.exceptions import Timeout
from six.moves.urllib.parse import quote

import tripleoci.config as config
from tripleoci.config import log

try:
    from urlparse import urljoin
except ImportError:
    from urllib.parse import urljoin


requests.packages.urllib3.disable_warnings()

cache = diskcache.Cache(config.CACHE_DIR)
cache.expire()


class SSH(object):
    """SSH

        SSH class, just for any connection
    """

    def __init__(self,
                 host, port, user, timeout=None, key=None, key_path=None):
        self.ssh_cl = paramiko.SSHClient()
        self.ssh_cl.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        log.debug("Executing ssh {user}@{host}:{port}".format(
            user=user, host=host, port=port))
Example #21
def __init__(self) -> None:
    self._cache = diskcache.Cache(CACHE_ROOT_DIR)
Example #22
def get_cache(settings: Settings = settings) -> diskcache.Cache:
    return diskcache.Cache(
        directory=str(settings.cache.dir),
        size_limit=int(settings.cache.size_limit_gb * 1e9),
    )
Example #23
def load_with_cache(
    path_or_array: typing.Union[np.ndarray, io.IOBase, str, pathlib.Path],
    grayscale=False,
    use_cache=True,
    max_size=None,
    use_temp_dir=None,
) -> np.ndarray:
    """画像の読み込み。

    Args:
        path_or_array: 画像ファイルへのパス or npy/npzファイルへのパス or ndarray
        grascale: Trueならグレースケールで読み込み、FalseならRGB
        use_cache: 読み込み結果をdiskcacheライブラリでキャッシュするならTrue
        max_size: このサイズを超えるなら縮小する。int or tuple。tupleは(height, width)
        use_temp_dir: キャッシュを保存する場所。Noneだったらtempfile.gettempdir()の場所を使う
    Returns:
        読み込み結果のndarray。

    """
    max_size = tk.utils.normalize_tuple(max_size,
                                        2) if max_size is not None else None

    def _load():
        img = load(path_or_array, grayscale=grayscale)
        if max_size is not None and (img.shape[0] > max_size[0]
                                     or img.shape[1] > max_size[1]):
            r0 = max_size[0] / img.shape[0]
            r1 = max_size[1] / img.shape[1]
            r = min(r0, r1)
            img = resize(img, int(round(img.shape[1] * r)),
                         int(round(img.shape[0] * r)))
        return img

    if use_cache and isinstance(path_or_array, (str, pathlib.Path)):
        global _load_cache
        global _diskcache_load_failed
        if _load_cache is None and not _diskcache_load_failed:
            # Could this end up being called multiple times?
            # That would be worrying with multiprocessing, etc.
            temp_dir = tempfile.mkdtemp(
                suffix="pytoolkit",
                dir=use_temp_dir) if use_temp_dir is None else use_temp_dir
            try:
                import diskcache

                _load_cache = diskcache.Cache(temp_dir)
                atexit.register(_clear_cache, _load_cache)
            except BaseException:
                pathlib.Path(temp_dir).rmdir()
                _diskcache_load_failed = True
                tk.log.get(__name__).warning("diskcache load failed.",
                                             exc_info=True)
        if _load_cache is not None:
            key = f"{path_or_array}::{max_size}"
            img = _load_cache.get(key)
            if img is None:
                img = _load()
                _load_cache.set(key, img)
            return img

    return _load()
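A brief usage sketch (the file name is a placeholder assumption):

img = load_with_cache('sample.jpg', max_size=512)  # first call loads and caches
img = load_with_cache('sample.jpg', max_size=512)  # second call hits the cache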
Example #24
def storage_disk(request):
    client = diskcache.Cache('/tmp/ring-test/diskcache')
    return client, ring.disk
Example #25
def test_rsync():
    try:
        run(['rsync', '--version'])
    except OSError:
        return  # No rsync installed. Skip test.

    rsync_args = ['rsync', '-a', '--checksum', '--delete', '--stats']
    cache_dir1 = tempfile.mkdtemp() + os.sep
    cache_dir2 = tempfile.mkdtemp() + os.sep

    # Store some items in cache_dir1.

    with dc.Cache(cache_dir1) as cache1:
        for count in range(100):
            cache1[count] = str(count)

        for count in range(100, 200):
            cache1[count] = str(count) * int(1e5)

    # Rsync cache_dir1 to cache_dir2.

    run(rsync_args + [cache_dir1, cache_dir2])

    # Validate items in cache_dir2.

    with dc.Cache(cache_dir2) as cache2:
        for count in range(100):
            assert cache2[count] == str(count)

        for count in range(100, 200):
            assert cache2[count] == str(count) * int(1e5)

    # Store more items in cache_dir2.

    with dc.Cache(cache_dir2) as cache2:
        for count in range(200, 300):
            cache2[count] = str(count)

        for count in range(300, 400):
            cache2[count] = str(count) * int(1e5)

    # Rsync cache_dir2 to cache_dir1.

    run(rsync_args + [cache_dir2, cache_dir1])

    # Validate items in cache_dir1.

    with dc.Cache(cache_dir1) as cache1:
        for count in range(100):
            assert cache1[count] == str(count)

        for count in range(100, 200):
            assert cache1[count] == str(count) * int(1e5)

        for count in range(200, 300):
            assert cache1[count] == str(count)

        for count in range(300, 400):
            assert cache1[count] == str(count) * int(1e5)

    shutil.rmtree(cache_dir1, ignore_errors=True)
    shutil.rmtree(cache_dir2, ignore_errors=True)
Example #26
def get_cache(self):
    if self.cache is None:
        self.cache_path.parent.mkdir(parents=True, exist_ok=True)
        self.cache = dc.Cache(self.cache_path)
    return self.cache
Example #27
def test_tag_index():
    with dc.Cache(tag_index=True) as cache:
        assert cache.tag_index == 1
    shutil.rmtree(cache.directory, ignore_errors=True)
Example #28
import json
import os

import bson.json_util
import diskcache

# Populate cache defaults here instead of config.py because they are
# used in function decorators before the config.py is loaded into the
# Flask api environment
CACHE_DIRECTORY = os.getenv('CACHE_DIRECTORY', '/var/cache/app')
CACHE_SETTINGS = json.loads(os.getenv('CACHE_SETTINGS', '{}'))
CACHE_GET_DEFAULT = os.getenv("CACHE_GET_DEFAULT", '{"expire": 600}')
CACHE_GET_APPCONFIGS = json.loads(os.getenv("CACHE_GET_APPCONFIGS", CACHE_GET_DEFAULT))
CACHE_GET_APPCONFIG = json.loads(os.getenv("CACHE_GET_APPCONFIG", CACHE_GET_DEFAULT))

CACHE_SETTINGS.setdefault('size_limit', int(3.5 * 1024 * 1024 * 1024))  # 3.5 GB cache
cache = diskcache.Cache(
    directory=CACHE_DIRECTORY,
    **CACHE_SETTINGS,
)


def memoize(*args, **kwargs):
    """
    Basic memoize function that uses our cache and wraps memoize_stampede.
    It takes all the same arguments as memoize_stampede, except do not
    specify the cache argument.
    """

    def decorator(func):
        return diskcache.memoize_stampede(cache, *args, **kwargs)(func)

    return decorator
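A minimal usage sketch (the decorated function and expiry are illustrative assumptions):

@memoize(expire=600)
def get_appconfig(name):
    return lookup_expensive_config(name)  # hypothetical slow lookup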
Example #29

import sys

import diskcache
import redis


def ping_diskcache(cachedb):
    ping = cachedb.get("ping")
    if not ping:
        res = cachedb.set("ping", "pong")
        if not res:
            return False
        ping = cachedb.get("ping")
    if ping != "pong":
        return False
    return True


print("[Migrate] Initializing connection...")
cachedb = diskcache.Cache(settings["diskcache_path"])
redisdb = redis.Redis(**settings["redis"])

print("[Migrate] Checking connection to both database...")
if not ping_diskcache(cachedb):
    print("[Migrate:diskcache] Failed to ping diskcache server, exiting...")
    sys.exit(1)
try:
    test = redisdb.get("ping")
except Exception:
    print("[Migrate:redis] Failed to ping redis server, exiting...")
    sys.exit(1)

print("[Migrate:diskcache] Fetching all keys and old data...")
all_old_keys = list(cachedb.iterkeys())
print(all_old_keys)
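The listing stops after fetching the keys; a hedged sketch of the copy step, assuming the cached values are strings or bytes that redis-py can store directly:

print("[Migrate] Copying entries into redis...")
for key in all_old_keys:
    redisdb.set(key, cachedb[key])
print(f"[Migrate] Done, migrated {len(all_old_keys)} keys.")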
Example #30
def _init_cache(self):
    self.cachedir = tempfile.mkdtemp(prefix="wtdm")
    self.cache = CacheWrapper(diskcache.Cache(self.cachedir))