Example 1
def shrinkChunk(layout,
                typesize,
                chunk_max=CHUNK_MAX,
                layout_class='H5D_CHUNKED'):
    """ Shrink the chunk shape until it is less than the MAX target.
    """
    layout = list(layout)
    chunk_size = getChunkSize(layout, typesize)
    if chunk_size <= chunk_max:
        return tuple(layout)  # good already
    rank = len(layout)

    while chunk_size > chunk_max:
        # halve one dimension at a time until the chunk size fits under the target
        old_chunk_size = chunk_size
        for dim in range(rank):
            if layout[dim] > 1:
                layout[dim] //= 2
                chunk_size = getChunkSize(layout, typesize)
                if chunk_size <= chunk_max:
                    break
            else:
                pass  # can't shrink chunk along this dimension
        if chunk_size >= old_chunk_size:
            # reality check to see if we'll ever break out of the while loop
            log.warning("Unexpected error in shrink_chunk")
            break
        elif chunk_size <= chunk_max:
            break  # we're good
        else:
            pass  # do another round
    return tuple(layout)
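
A minimal usage sketch for shrinkChunk. getChunkSize and CHUNK_MAX below are stand-ins for module-level names the function expects to find (in the real module they are defined before shrinkChunk); the 1 MiB target is an assumption, not the module's actual setting.

CHUNK_MAX = 1024 * 1024  # assumed 1 MiB target, not the module's actual value

def getChunkSize(layout, typesize):
    # assumed helper: bytes per chunk = product of extents times element size
    size = typesize
    for extent in layout:
        size *= extent
    return size

# a (1024, 1024) chunk of 4-byte elements is 4 MiB, so shrinkChunk should
# halve dimensions until the chunk fits under the target
print(shrinkChunk((1024, 1024), 4))  # -> (512, 512)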
Example 2
    def __delitem__(self, key):
        node = self._delNode(key)  # remove from LRU
        del self._hash[key]  # remove from hash

        self._mem_size -= node._mem_size
        if key in self._dirty_set:
            log.warning(f"LRU {self._name} removing dirty node: {key}")
            self._dirty_set.remove(key)
            self._dirty_size -= node._mem_size
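
For context, a sketch of the node bookkeeping __delitem__ relies on, assuming the LRU order is kept in a doubly linked list. The _Node fields and the _delNode body below are illustrative assumptions, not the cache's actual implementation.

class _Node:
    # illustrative LRU node: key, payload, size estimate, and link pointers
    def __init__(self, key, data, mem_size):
        self._key = key
        self._data = data
        self._mem_size = mem_size
        self._prev = None
        self._next = None


class _LRUSketch:
    # just the pieces __delitem__ depends on; attribute names are assumptions
    def __init__(self, name="cache"):
        self._name = name
        self._hash = {}          # key -> _Node
        self._lru_head = None    # most recently used
        self._lru_tail = None    # least recently used
        self._mem_size = 0
        self._dirty_set = set()
        self._dirty_size = 0

    def _delNode(self, key):
        # unlink the node for key from the doubly linked list and return it
        node = self._hash[key]
        if node._prev is None:
            self._lru_head = node._next
        else:
            node._prev._next = node._next
        if node._next is None:
            self._lru_tail = node._prev
        else:
            node._next._prev = node._prev
        node._prev = node._next = None
        return node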
Example 3
def getChunkIds(dset_id, selection, layout, dim=0, prefix=None, chunk_ids=None):
    """ Get the all the chunk ids for chunks that lie in the selection of the
    given dataset.
    """
    num_chunks = getNumChunks(selection, layout)
    if num_chunks == 0:
        return []  # empty list
    if prefix is None:
        # construct a prefix using "c-" with the uuid of the dset_id
        if not dset_id.startswith("d-"):
            msg = "Bad Request: invalid dset id: {}".format(dset_id)
            log.warning(msg)
            raise ValueError(msg)
        prefix = "c-" + dset_id[2:] + '_'
    rank = len(selection)
    if chunk_ids is None:
        chunk_ids = []
    s = selection[dim]
    c = layout[dim]

    if s.step > c:
        # chunks may not be contiguous; skip along the selection and add
        # whatever chunks we land in
        for i in range(s.start, s.stop, s.step):
            chunk_index = i // c
            chunk_id = prefix + str(chunk_index)
            if dim + 1 == rank:
                # we've gone through all the dimensions, add this id to the list
                chunk_ids.append(chunk_id)
            else:
                chunk_id += '_'  # separator between dimensions
                # recursive call
                getChunkIds(dset_id, selection, layout, dim+1, chunk_id, chunk_ids)
    else:
        # get a contiguous set of chunks along the selection
        if s.step > 1:
            num_points = frac((s.stop-s.start), s.step)
            w = num_points * s.step - (s.step - 1)
        else:
            w = s.stop - s.start # selection width (>0)

        chunk_index_start = s.start // c
        chunk_index_end = frac((s.start + w), c)

        for i in range(chunk_index_start, chunk_index_end):
            chunk_id = prefix + str(i)
            if dim + 1 == rank:
                # we've gone through all the dimensions, add this id to the list
                chunk_ids.append(chunk_id)
            else:
                chunk_id += '_'  # separator between dimensions
                # recursive call
                getChunkIds(dset_id, selection, layout, dim+1, chunk_id, chunk_ids)
    # got the complete list, return it!
    return chunk_ids
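
A small driver for getChunkIds, with stand-ins for the module helpers it relies on: frac is assumed to be ceiling division and getNumChunks a simple count of chunks touched by a step-1 selection (neither definition is taken from the example above). Chunk ids come out as "c-<dset uuid>_<i>_<j>", one index per dimension.

def frac(x, d):
    # assumed helper: ceiling division
    return -(-x // d)

def getNumChunks(selection, layout):
    # assumed helper, step-1 selections only: count chunks per dimension
    count = 1
    for s, c in zip(selection, layout):
        if s.stop <= s.start:
            return 0
        count *= ((s.stop - 1) // c) - (s.start // c) + 1
    return count

# rows 10..59 and columns 0..99 of a dataset with 20 x 50 chunks
# (the dataset id below is a made-up placeholder)
sel = (slice(10, 60, 1), slice(0, 100, 1))
print(getChunkIds("d-12345678-abcd", sel, (20, 50)))
# -> ['c-12345678-abcd_0_0', 'c-12345678-abcd_0_1', ..., 'c-12345678-abcd_2_1']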
Example 4
from inspect import iscoroutinefunction
from asyncio import CancelledError
import datetime
import time
import hsds_logger as log
try:
    from azure.storage.blob.aio import BlobServiceClient
    from azure.core.exceptions import AzureError
except ImportError:
    log.warning("unable to import Azure blob packages")
from aiohttp.web_exceptions import HTTPNotFound, HTTPForbidden, HTTPInternalServerError
import config


class AzureBlobClient:
    """
    Utility class for reading and storing data to Azure Storage Blobs
    """
    def __init__(self, app):

        self._app = app

        if "azureBlobClient" in app:
            if "token_expiration" in app:
                # TBD - does this apply for Azure?
                # check that our token is not about to expire
                expiration = app["token_expiration"]
                now = datetime.datetime.now()
                delta = expiration - now
                if delta.total_seconds() > 10:
                    self._client = app["azureBlobClient"]