Example #1
def subpool_imap(pool_size, func, iterable, flatten=False, unordered=False, buffer_size=None):
    """ Generator version of subpool_map. Should be used with unordered=True for optimal performance """

    if not pool_size:
        for args in iterable:
            yield func(*args)

    counter = itertools_count()

    current_job = get_current_job()

    def inner_func(*args):
        """ As each call to 'func' will be done in a random greenlet of the subpool, we need to
        register their IDs with set_current_job() to make get_current_job() calls work properly
        inside 'func'.
    """
        next(counter)
        if current_job:
            set_current_job(current_job)

        try:
            ret = func(*args)
        except Exception as exc:
            trace = traceback.format_exc()
            log.error("Error in subpool: %s \n%s" % (exc, trace))
            raise

        if current_job:
            set_current_job(None)
        return ret
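
A minimal sketch of the imports these snippets appear to rely on (they are not shown on this page). The itertools_count alias and the gevent/stdlib modules follow directly from the code; the mrq.context path is an assumption based on the relative ".context" import that appears in a later example.

import time
import traceback
from itertools import count as itertools_count  # alias used throughout these snippets

import gevent.pool

# Assumed location of the job-context helpers (a later example imports them
# as "from .context import get_current_job, set_current_job, log").
from mrq.context import get_current_job, set_current_job, log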
Example #2
def subpool_map(pool_size, func, iterable):
    """ Starts a Gevent pool and run a map. Takes care of setting current_job and cleaning up. """

    if not pool_size:
        return [func(*args) for args in iterable]

    counter = itertools_count()

    current_job = get_current_job()

    def inner_func(*args):
        """ As each call to 'func' will be done in a random greenlet of the subpool, we need to
            register their IDs with set_current_job() to make get_current_job() calls work properly
            inside 'func'.
        """
        next(counter)
        if current_job:
            set_current_job(current_job)

        try:
            ret = func(*args)
        except Exception as exc:
            trace = traceback.format_exc()
            log.error("Error in subpool: %s \n%s" % (exc, trace))
            raise

        if current_job:
            set_current_job(None)
        return ret
Example #3
def subpool_imap(pool_size,
                 func,
                 iterable,
                 flatten=False,
                 unordered=False,
                 buffer_size=None):
    """ Generator version of subpool_map. Should be used with unordered=True for optimal performance """

    if not pool_size:
        for args in iterable:
            yield func(*args)

    counter = itertools_count()

    current_job = get_current_job()

    def inner_func(*args):
        """ As each call to 'func' will be done in a random greenlet of the subpool, we need to
        register their IDs with set_current_job() to make get_current_job() calls work properly
        inside 'func'.
    """
        next(counter)
        if current_job:
            set_current_job(current_job)

        try:
            ret = func(*args)
        except Exception as exc:
            trace = traceback.format_exc()
            log.error("Error in subpool: %s \n%s" % (exc, trace))
            raise

        if current_job:
            set_current_job(None)
        return ret
Example #4
def subpool_map(pool_size, func, iterable):
    """ Starts a Gevent pool and run a map. Takes care of setting current_job and cleaning up. """

    if not pool_size:
        return [func(*args) for args in iterable]

    counter = itertools_count()

    current_job = get_current_job()

    def inner_func(*args):
        next(counter)
        if current_job:
            set_current_job(current_job)
        ret = func(*args)
        if current_job:
            set_current_job(None)
        return ret

    start_time = time.time()
    pool = gevent.pool.Pool(size=pool_size)
    ret = pool.map(inner_func, iterable)
    pool.join(raise_error=True)
    total_time = time.time() - start_time

    log.debug("SubPool ran %s greenlets in %0.6fs" % (counter, total_time))

    return ret
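
A minimal usage sketch, assuming the subpool_map above and its gevent/job-context dependencies are importable; _square and the input list are illustrative only.

def _square(x):
    return x * x

# With a non-zero pool size, gevent's pool.map hands each item of the
# iterable to inner_func as a single positional argument.
print(subpool_map(4, _square, [1, 2, 3]))   # expected: [1, 4, 9]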
Example #5
def subpool_map(pool_size, func, iterable):
    """ Starts a Gevent pool and run a map. Takes care of setting current_job and cleaning up. """

    from .context import get_current_job, set_current_job, log

    if not pool_size:
        return [func(*args) for args in iterable]

    counter = itertools_count()

    current_job = get_current_job()

    def inner_func(*args):
        """ As each call to 'func' will be done in a random greenlet of the subpool, we need to
            register their IDs with set_current_job() to make get_current_job() calls work properly
            inside 'func'.
        """
        next(counter)
        if current_job:
            set_current_job(current_job)

        try:
            ret = func(*args)
        except Exception as exc:
            trace = traceback.format_exc()
            exc.subpool_traceback = trace
            raise

        if current_job:
            set_current_job(None)
        return ret

    def inner_iterable():
        """ This will be called inside the pool's main greenlet, which ID also needs to be registered """
        if current_job:
            set_current_job(current_job)

        for x in iterable:
            yield x

        if current_job:
            set_current_job(None)

    start_time = time.time()
    pool = gevent.pool.Pool(size=pool_size)
    ret = pool.map(inner_func, inner_iterable())
    pool.join(raise_error=True)
    total_time = time.time() - start_time

    log.debug("SubPool ran %s greenlets in %0.6fs" % (counter, total_time))

    return ret
Example #6
def subpool_map(pool_size, func, iterable):
    """ Starts a Gevent pool and run a map. Takes care of setting current_job and cleaning up. """

    if not pool_size:
        return [func(*args) for args in iterable]

    counter = itertools_count()

    current_job = get_current_job()

    def inner_func(*args):
        """ As each call to 'func' will be done in a random greenlet of the subpool, we need to
            register their IDs with set_current_job() to make get_current_job() calls work properly
            inside 'func'.
        """
        next(counter)
        if current_job:
            set_current_job(current_job)

        try:
            ret = func(*args)
        except Exception as exc:
            trace = traceback.format_exc()
            log.error("Error in subpool: %s \n%s" % (exc, trace))
            raise

        if current_job:
            set_current_job(None)
        return ret

    def inner_iterable():
        """ This will be called inside the pool's main greenlet, which ID also needs to be registered """
        if current_job:
            set_current_job(current_job)

        for x in iterable:
            yield x

        if current_job:
            set_current_job(None)

    start_time = time.time()
    pool = gevent.pool.Pool(size=pool_size)
    ret = pool.map(inner_func, inner_iterable())
    pool.join(raise_error=True)
    total_time = time.time() - start_time

    log.debug("SubPool ran %s greenlets in %0.6fs" % (counter, total_time))

    return ret
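
A hedged sketch of the failure path: inner_func logs the traceback and re-raises, and gevent's map should surface the exception to the caller, so one failing item aborts the whole map. _maybe_fail is illustrative only.

def _maybe_fail(x):
    if x == 2:
        raise ValueError("boom")
    return x

try:
    subpool_map(4, _maybe_fail, [1, 2, 3])
except ValueError:
    # the error was already logged by inner_func before being re-raised
    pass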
Example #7
    def __init__(self, reader: Callable[[], Awaitable[bytes]], writer: Callable[[bytes], Awaitable[int]]):
        self._read: Callable[[], Awaitable[bytes]] = reader
        self._write: Callable[[bytes], Awaitable[int]] = writer

        self._receive_message_responses_task: Optional[Task] = None
        self._handle_incoming_bytes_task: Optional[Task] = None
        self._handle_outgoing_bytes_task: Optional[Task] = None

        # Data structures for handling incoming and outgoing messages.
        self._incoming_messages_queue = AsyncioQueue()
        self._outgoing_messages_queue = AsyncioQueue()

        self.call_id_iterator: Iterator[int] = itertools_count(start=1)
        self._outstanding_message_call_id_to_future: Dict[int, Future] = {}
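
The constructor above only wires up the plumbing. Below is a generic sketch of how an itertools count plus a futures dict is typically used to match responses to requests; the _RpcSketch class, the _send_request name and the payload shape are hypothetical, not part of the original class.

import asyncio
from itertools import count as itertools_count

class _RpcSketch:
    # hypothetical illustration of the call-id/future pattern, not the original API
    def __init__(self):
        self.call_id_iterator = itertools_count(start=1)
        self._outstanding = {}   # call id -> Future awaiting the matching response

    async def _send_request(self, payload: dict) -> dict:
        call_id = next(self.call_id_iterator)             # unique, monotonically increasing id
        future = asyncio.get_running_loop().create_future()
        self._outstanding[call_id] = future               # resolved later by the response handler
        # ...serialize {"id": call_id, **payload} and hand it to the writer task...
        return await future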
Example #8
class User:
    __table = [
        {
            "id": "1",
            'username': '******',
            "first_name": "omid",
            "last_name": "esmaeili",
            "password": "******",
            'role': EnumRole.ADMIN
        },
    ]
    __id = itertools_count(2)

    @classmethod
    def get_all_users(cls) -> list:
        return cls.__table

    @classmethod
    def get_user(cls, username: str = None) -> dict:
        for user in User.get_all_users():
            if user.get("username") == username:
                return user
        return {}

    @classmethod
    def save(cls,
             username: str = None,
             first_name: str = None,
             last_name: str = None,
             password: str = None,
             role: str = None) -> bool:

        cls.__table.append(
            dict(id=str(next(cls.__id)),
                 username=username,
                 first_name=first_name,
                 last_name=last_name,
                 password=password,
                 role=role))
        return True

    @classmethod
    def exists_with_username(cls, username: str = None) -> bool:
        for user in User.get_all_users():
            if user.get("username") == username:
                return True
        return False
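
Illustrative usage of the class above: the seeded admin row already uses id "1", so the counter starts at 2 and the first saved user receives id "2"; the field values are made up.

User.save(username="jane", first_name="Jane", last_name="Doe",
          password="secret", role="USER")
print(User.get_user("jane")["id"])           # -> "2"
print(User.exists_with_username("nobody"))   # -> False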
Example #9
def opener(filename, mode, **kwargs):
    """Generic file or file family opener.

    Parameters
    ----------
    filename : str
       Name of file to open.  See notes below for family conventions.
    mode : str
       One of 'r' (default, read-only), 'r+' (read-write, must exist),
       'a' (read-write, create if does not exist), 'w' (create, clobber if
       exists), 'w-' (create, fail if exists).
    **kwargs
       Other keywords.  This opener consumes one item from kwargs:
    nextaddr_mode : int
       Affects setting of nextaddr for families opened with 'a' or 'r+'
       mode.  0 (default) sets nextaddr to the end of the final existing file,
       1 sets nextaddr to 0 (beginning of first file), and 2 sets nextaddr
       to the beginning of the next file after all existing files.

    Returns
    -------
    handle
       A file handle implementing the generic interface, consisting of::

          handle.callbacks(flusher, initializer)
          addr = handle.next_address()  # next unused address
          f = handle.seek(addr)  # return ordinary file handle at addr
          f = handle.open(n)  # open nth file, calling initializer(f)
          handle.flush()  # make file readable, calling flusher(f)
          # flush() restores next_address to value on entry
          handle.close()  # flush if necessary, then close

    nexisting : int
       Number of existing paths matching `filename`.

    Notes
    -----
    The `filename` may be an iterable, one string per file in order.  The
    sequence may extend beyond the files which actually exist for 'r+', 'a',
    'w', or 'w-' modes.

    Alternatively `filename` specifies a family if it contains shell globbing
    wildcard characters.  Existing matching files are sorted first by length,
    then alphabetically (ensuring that 'file100' comes after 'file99', for
    example).  If there is only a single wildcard group, it also serves to
    define a sequence of future family names beyond those currently existing
    for 'r+', 'a', 'w', or 'w-' modes.  A '?' pattern is treated the same as
    a '[0-9]' pattern if all its matches are digits or if the pattern
    matches no existing files.  Similarly, a '*' acts like the minimum number
    of all-digit matches, or three digits if there are no matches.

    A single filename may also contain a ``%d`` or ``%0nd`` print format
    directive, which will be converted to the corresponding number of
    ``[0-9]`` glob patterns.

    """
    isstr = isinstance(filename, str)
    if isstr:
        filename = abspath(expanduser(expandvars(filename)))
        match = _digits_format.search(filename)
        if match:  # %0nd --> [0-9][0-9]... n times
            n = int(match.group(1) or '1')  # %d --> [0-9] once
            filename = filename.replace(match.group(0), ''.join(['[0-9]'] * n))
        match = _glob_group.search(filename)
        if match:
            existing = glob.glob(filename)
        else:
            existing = [f for f in [filename] if exists(f)]
    else:
        match = None
        filename = [expanduser(expandvars(f)) for f in filename]
        existing = [f for f in filename if exists(f)]
        if len(existing) < len(filename):
            for g, f in zip(existing, filename):
                if f == g:
                    continue
                raise IOError("intermediate file {} missing".format(f))
        if not filename:
            raise IOError("sequence of filenames is empty")
    mode = mode.lower()
    if existing:
        existing.sort(key=lambda path: (len(path), path))
        if mode.startswith('w-'):
            raise IOError("protecting existing file {}".format(existing[0]))
    elif mode.startswith('r'):
        raise IOError("no file matches {}".format(filename))
    if mode.startswith('r'):
        mode = 'r+b' if mode.startswith('r+') else 'rb'
    elif mode.startswith('a'):
        mode = 'r+b' if existing else 'w+b'
    elif mode.startswith('w'):
        existing = []  # ignore any existing files
        mode = 'w+b'
    else:
        raise IOError("open mode {} not understood".format(mode))
    # Compute:
    # pattern = pattern containing {...} if either existing or future
    # existing = list of existing {...} items (int or str) for pattern
    # future = iterable yielding future {...} items (int or str) for pattern
    future = None
    if match:
        if '+' in mode:
            prefix, suffix = filename[:match.start()], filename
            predictable = 2
            while match:
                predictable >>= 1  # drops to 0 if there is more than one wildcard group
                suffix = suffix[match.end():]
                match = _glob_group.search(suffix)
            p, s = len(prefix), -len(suffix) if suffix else None
            existing = [f[p:s] for f in existing]
            if predictable:
                # With a single wildcard group, we may be able to predict
                # future names in the family.
                # We handle two cases:
                # 1. a sequence of *, ?, [0-9] we guess means decimal numbers
                # 2. a single range like [a-z] we take as sequence of chars
                fmt = '{}'
                pat = filename[p:s]
                chunks = _glob_digit.findall(pat)
                if ''.join(chunks) == pat:
                    nast = chunks.count('*')
                    ndig = len(chunks) - nast
                    fmt = '{' + ':0{}d'.format(3 * nast + ndig) + '}'
                    future = itertools_count(0)
                else:
                    future = iter(_expand_ranges(pat))
                if existing:
                    if all(f.isdigit() for f in existing):
                        # existing matches are all decimal numbers
                        nums = list(map(int, existing))
                        fmt = '{' + ':0{}d'.format(len(existing[0])) + '}'
                        if all(f == fmt.format(n)
                               for f, n in zip(existing, nums)):
                            existing = nums
                            future = itertools_count(existing[-1] + 1)
                        else:
                            fmt, future = '{}', None
                    elif fmt != '{}':
                        # pattern looked numerical, but matched non-digits
                        fmt, future = '{}', None
                    elif all(len(f) == 1 for f in existing):
                        # existing matches all non-digit single characters
                        final = existing[-1]
                        for f in future:
                            if f == final:
                                break
            pattern = prefix + fmt + suffix
        else:
            filename = existing
            isstr = False
    elif isstr:
        pattern = '{}'
        if not existing:
            future = iter([filename])
    if not isstr:
        prefix = commonprefix(filename)
        if len(filename) > 1:
            suffix = commonprefix([f[::-1] for f in filename])[::-1]
            if suffix == prefix:
                suffix = ''  # all filenames identical (not an error??)
        else:
            suffix = ''
        n = len(prefix)
        m = -len(suffix) if suffix else None
        existing = [f[n:m] for f in existing]
        future = iter([f[n:m] for f in filename[len(existing):]])
        pattern = prefix + '{}' + suffix
    return MultiFile(pattern, existing, future, mode, **kwargs), len(existing)
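
A usage sketch based only on the generic interface listed in the docstring; the family pattern, mode and keyword are illustrative, and nothing beyond the documented handle methods is assumed.

handle, nexisting = opener("run_%03d.dat", "a", nextaddr_mode=0)
print(nexisting)               # number of existing family members matching run_000.dat, run_001.dat, ...
addr = handle.next_address()   # next unused address, per the interface described above
handle.close()                 # flushes if necessary, then closes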
Example #10
class Category:
    __table = [{'id': '1', 'name': 'shoes'}]
    __id = itertools_count(2)  # the seeded row already uses id "1"

    @classmethod
    def get_all_categories(cls) -> list:
        return cls.__table

    @classmethod
    def get_category_with_id(cls, _id: str = None) -> dict:
        if not _id:
            return {}

        for category in cls.__table:
            if _id == category.get("id"):
                return category

        return {}

    @classmethod
    def get_one_category_with_name(cls, name: str = None) -> dict:
        if not name:
            return {}

        for category in cls.__table:
            if name == category.get("name"):
                return category

        return {}

    @classmethod
    def save(cls, name: str = None) -> (bool, str):

        if not name:
            return False, "name is empty"

        if cls.exists_with_name(name):
            return False, "Category exists with name"

        cls.__table.append({"id": str(next(cls.__id)), "name": name})
        return True, ""

    @classmethod
    def update(cls, _id: str = None, name: str = None) -> bool:

        category = cls.get_category_with_id(_id)

        if category:
            if name:
                category["name"] = name

            return True

        return False

    @classmethod
    def delete(cls, _id: str = None) -> (bool, str):
        if not _id:
            return False, "id not found in params"

        # TODO check products does not use this category

        for category in cls.__table:
            if category.get("id") == _id:
                return (True, "") if cls.__table.remove(category) else (
                    False, f" category with id: {_id} not found")
        return False, "category not found for update"

    @classmethod
    def exists_with_id(cls, _id: str = None) -> bool:
        if not _id:
            return False

        for category in cls.__table:
            if category.get("id") == _id:
                return True
        return False

    @classmethod
    def exists_with_name(cls, name: str = None) -> bool:
        if not name:
            return False

        for category in cls.__table:
            if category.get("name") == name:
                return True
        return False
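
Illustrative usage of the in-memory table above; "bags" is an arbitrary example name.

ok, err = Category.save("bags")
print(ok, err)                                        # -> True plus an empty error string
print(Category.get_one_category_with_name("bags"))   # the newly appended row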
Example #11
class Product:
    __table = [{
        'id': '1',
        'name': 'shoes_sport',
        'price': 200000,
        'category_id': '1',
        'off': 2.0
    }, {
        'id': '2',
        'name': 'shoes_sport',
        'price': 200000,
        'category_id': '1',
        'off': 2.0
    }]
    __id = itertools_count(3)  # the seeded rows already use ids "1" and "2"

    @classmethod
    def get_all_products(cls) -> list:
        return cls.__table

    @classmethod
    def get_one_product(cls, _id: str = None) -> dict:

        if not _id:
            return {}

        for product in cls.__table:
            if _id == product.get("id"):
                return product

        return {}

    @classmethod
    def save(cls,
             name: str = None,
             price: int = None,
             category_name: str = None,
             off: float = None) -> (bool, str):

        if not name or not price or not category_name or not off:
            return False, f"name: {name} or price: {price} or category: {category_name} or off: {off} not found"

        for category in Category.get_all_categories():
            if category.get("name") == category_name:
                cls.__table.append(
                    dict(id=str(next(Product.__id)),
                         name=name,
                         price=price,
                         category_id=category.get("id"),
                         off=off))
                return True, ""

        return False, "category not found"

    @classmethod
    def update(cls,
               _id: str = None,
               name: str = None,
               price: int = None,
               category_name: str = None,
               off: float = None) -> (bool, str):

        ctg = {}
        if category_name:
            ctg = Category.get_one_category_with_name(category_name)
            if not ctg:
                return False, f"category_name: {category_name} not found"

        product = cls.get_one_product(_id)

        if product:

            if category_name:
                product["category_id"] = ctg.get("id")

            if name:
                product["name"] = name

            if price:
                product["price"] = int(price)

            if off:
                product["off"] = float(off)

            return True, ""

        return False, f"product not found with id: {_id}"

    @classmethod
    def delete(cls, _id: str = None) -> (bool, str):
        if not _id:
            return False, "_id not found"

        for product in cls.__table:
            if _id == product.get("id"):
                cls.__table.remove(product)
                return True, f"Successfully remove product with id: {_id}"

        return False, f"product not found with id: {_id}"
Example #12
def subpool_imap(pool_size, func, iterable, flatten=False, unordered=False, buffer_size=None):
    """ Generator version of subpool_map. Should be used with unordered=True for optimal performance """

    if not pool_size:
        for args in iterable:
            yield func(*args)

    counter = itertools_count()

    current_job = get_current_job()

    def inner_func(*args):
        """ As each call to 'func' will be done in a random greenlet of the subpool, we need to
        register their IDs with set_current_job() to make get_current_job() calls work properly
        inside 'func'.
    """
        next(counter)
        if current_job:
            set_current_job(current_job)

        try:
            ret = func(*args)
        except Exception as exc:
            trace = traceback.format_exc()
            log.error("Error in subpool: %s \n%s" % (exc, trace))
            raise

        if current_job:
            set_current_job(None)
        return ret

    def inner_iterable():
        """ This will be called inside the pool's main greenlet, which ID also needs to be registered """
        if current_job:
            set_current_job(current_job)

        for x in iterable:
            yield x

        if current_job:
            set_current_job(None)

    start_time = time.time()
    pool = gevent.pool.Pool(size=pool_size)

    if unordered:
        iterator = pool.imap_unordered(inner_func, inner_iterable(), maxsize=buffer_size or pool_size)
    else:
        iterator = pool.imap(inner_func, inner_iterable())

    for x in iterator:
        if flatten:
            for y in x:
                yield y
        else:
            yield x

    pool.join(raise_error=True)
    total_time = time.time() - start_time

    log.debug("SubPool ran %s greenlets in %0.6fs" % (counter, total_time))
Example #13
def subpool_imap(pool_size,
                 func,
                 iterable,
                 flatten=False,
                 unordered=False,
                 buffer_size=None):
    """ Generator version of subpool_map. Should be used with unordered=True for optimal performance """

    if not pool_size:
        for args in iterable:
            yield func(*args)

    counter = itertools_count()

    current_job = get_current_job()

    def inner_func(*args):
        """ As each call to 'func' will be done in a random greenlet of the subpool, we need to
        register their IDs with set_current_job() to make get_current_job() calls work properly
        inside 'func'.
    """
        next(counter)
        if current_job:
            set_current_job(current_job)

        try:
            ret = func(*args)
        except Exception as exc:
            trace = traceback.format_exc()
            log.error("Error in subpool: %s \n%s" % (exc, trace))
            raise

        if current_job:
            set_current_job(None)
        return ret

    def inner_iterable():
        """ This will be called inside the pool's main greenlet, which ID also needs to be registered """
        if current_job:
            set_current_job(current_job)

        for x in iterable:
            yield x

        if current_job:
            set_current_job(None)

    start_time = time.time()
    pool = gevent.pool.Pool(size=pool_size)

    if unordered:
        iterator = pool.imap_unordered(inner_func,
                                       inner_iterable(),
                                       maxsize=buffer_size or pool_size)
    else:
        iterator = pool.imap(inner_func, inner_iterable())

    for x in iterator:
        if flatten:
            for y in x:
                yield y
        else:
            yield x

    pool.join(raise_error=True)
    total_time = time.time() - start_time

    log.debug("SubPool ran %s greenlets in %0.6fs" % (counter, total_time))