Example #1
def load_result(key):
    # Download a cached result from the Cloud Storage bucket; a missing blob
    # is reported by returning a CacheMiss instance rather than raising it.
    bucket = storage_client.bucket(local_config.CLOUD_STORAGE_BUCKET_NAME)
    blob = bucket.blob(key)
    try:
        return blob.download_as_string().decode()
    except exceptions.NotFound:
        return cache_buddy.CacheMiss(key)
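Unlike the implementations in the later examples, which raise cache_buddy.CacheMiss, this one returns a CacheMiss instance, so the caller has to check the return value. A minimal sketch of that calling pattern, assuming compute_result and store_result are hypothetical helpers:

def get_result(key):
    # Hypothetical caller: a returned CacheMiss means "not cached yet".
    result = load_result(key)
    if isinstance(result, cache_buddy.CacheMiss):
        result = compute_result(key)   # hypothetical: build the value
        store_result(key, result)      # hypothetical: write it back
    return result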
Example #2
def load_document(key: str):
    # Read a cached document record from the document cache directory.
    fname = os.path.join(DOC_CACHE_SUBDIR, key)
    try:
        with open(fname) as f:
            return json.load(f)
    except FileNotFoundError:
        raise cache_buddy.CacheMiss(key)
    except json.decoder.JSONDecodeError:
        raise ValueError("Error decoding document cache JSON data: " + key)
Example #3
def load_author(key: str):
    # Read a cached author record; if the key is listed in the local cache
    # index, refresh that listing before reporting the miss.
    fname = os.path.join(AUTHOR_CACHE_SUBDIR, key)
    try:
        with open(fname) as f:
            return json.load(f)
    except FileNotFoundError:
        if key in _author_cache_contents:
            refresh()
        raise cache_buddy.CacheMiss(key)
    except json.decoder.JSONDecodeError:
        raise ValueError("Error decoding author cache JSON data: " + key)
Example #4
def load_document(key: str):
    # Fetch a cached document from Firestore.
    doc_ref = db.collection(DOC_CACHE_COLLECTION).document(key)
    data = doc_ref.get()
    if data.exists:
        data = data.to_dict()
        # Drop the 'shard' field, if present, before returning the record.
        data.pop('shard', None)
        return data
    raise cache_buddy.CacheMiss(key)
Example #5
def load_author(key: str):
    # Serve from the local prefetch cache first, consuming the entry.
    try:
        data = _author_data_cache.pop(key)
        return _decompress_record(data.to_dict())
    except KeyError:
        pass
    # Fall back to a direct Firestore lookup.
    doc_ref = db.collection(AUTHOR_CACHE_COLLECTION).document(key)
    data = doc_ref.get()
    if data.exists:
        return _decompress_record(data.to_dict())
    else:
        raise cache_buddy.CacheMiss(key)
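Examples #5 and #6 consume _author_data_cache, which appears to hold Firestore document snapshots fetched ahead of time. A minimal sketch of how such a prefetch buffer might be filled, reusing the db.get_all batch call from Example #6; prefetch_authors is a hypothetical name:

def prefetch_authors(keys):
    # Hypothetical prefetch step: batch-fetch author documents and stash the
    # snapshots so load_author/load_authors can serve them without new reads.
    doc_refs = [db.collection(AUTHOR_CACHE_COLLECTION).document(key)
                for key in keys]
    for snapshot in db.get_all(doc_refs):
        if snapshot.exists:
            _author_data_cache[snapshot.id] = snapshot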
Example #6
def load_authors(keys: [str]):
    """Does _not_ return author records in the order of the input names"""
    not_in_local_cache = []
    result = []
    # Serve what we can from the local prefetch cache, consuming entries.
    for key in keys:
        try:
            result.append(_author_data_cache[key].to_dict())
            del _author_data_cache[key]
        except KeyError:
            not_in_local_cache.append(key)

    # Batch-fetch the remaining keys from Firestore.
    if not_in_local_cache:
        doc_refs = [db.collection(AUTHOR_CACHE_COLLECTION).document(key)
                    for key in not_in_local_cache]
        data = db.get_all(doc_refs)
        for datum in data:
            if not datum.exists:
                raise cache_buddy.CacheMiss(datum.id)
            result.append(datum.to_dict())
    return [_decompress_record(r) for r in result]
Example #7
def load_result(key):
    # Read a cached result file from the result cache directory.
    fname = os.path.join(RESULT_CACHE_SUBDIR, key)
    try:
        with open(fname) as f:
            return f.read()
    except FileNotFoundError:
        raise cache_buddy.CacheMiss(key)
Example #8
def load_progress_data(key: str):
    # Read cached progress data from the progress cache directory.
    fname = os.path.join(PROGRESS_CACHE_SUBDIR, key)
    try:
        with open(fname) as f:
            return json.load(f)
    except FileNotFoundError:
        raise cache_buddy.CacheMiss(key)
Example #9
def load_progress_data(key: str):
    # No-op backend: every lookup is reported as a cache miss.
    raise cache_buddy.CacheMiss(key)
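All of these backends signal a missing entry through cache_buddy.CacheMiss, which most of them raise (Example #1 returns it instead). A minimal sketch of what that exception and the usual calling pattern might look like, assuming a plain Exception subclass; fetch_document and cache_document are hypothetical helpers:

class CacheMiss(Exception):
    """Raised (or returned) when a key is not present in the cache."""
    def __init__(self, key):
        self.key = key
        super().__init__(key)


def get_document(key):
    # Hypothetical caller: fall back to the original source on a cache miss.
    try:
        return load_document(key)
    except cache_buddy.CacheMiss:
        doc = fetch_document(key)      # hypothetical: build/fetch the record
        cache_document(key, doc)       # hypothetical: write it back
        return doc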