Code example #1
def stack(*imgs, **kwargs):
    """Combine images together, overlaying later images onto earlier ones.

    Parameters
    ----------
    imgs : iterable of Image
        The images to combine.
    how : str, optional
        The compositing operator to combine pixels. Default is `'over'`.
    """
    if not imgs:
        raise ValueError("No images passed in")
    shapes = []
    for i in imgs:
        if not isinstance(i, Image):
            raise TypeError("Expected `Image`, got: `{0}`".format(type(i)))
        elif not shapes:
            shapes.append(i.shape)
        elif shapes and i.shape not in shapes:
            raise ValueError("The stacked images must have the same shape.")

    name = kwargs.get('name', None)
    op = composite_op_lookup[kwargs.get('how', 'over')]
    if len(imgs) == 1:
        return imgs[0]
    imgs = xr.align(*imgs, copy=False, join='outer')
    with np.errstate(divide='ignore', invalid='ignore'):
        out = tz.reduce(tz.flip(op), [i.data for i in imgs])
    return Image(out, coords=imgs[0].coords, dims=imgs[0].dims, name=name)
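
A minimal standalone sketch of why `tz.flip` is needed in the `reduce` above: `reduce` passes the accumulator as the first argument, while a compositing operator expects the later image first. The `over` function below is a hypothetical stand-in, not datashader's real operator.

from functools import reduce

from toolz import flip

def over(src, dst):  # hypothetical stand-in for a compositing operator
    return "over({}, {})".format(src, dst)

print(reduce(flip(over), ["img1", "img2", "img3"]))
# over(img3, over(img2, img1)) -- later images land on top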
Code example #2
File: fixtures.py Project: Giruvegan/zipline
def alias(attr_name):
    """Make a fixture attribute an alias of another fixture's attribute by
    default.

    Parameters
    ----------
    attr_name : str
        The name of the attribute to alias.

    Returns
    -------
    p : classproperty
        A class property that does the property aliasing.

    Examples
    --------
    >>> class C(object):
    ...     attr = 1
    ...
    >>> class D(C):
    ...     attr_alias = alias('attr')
    ...
    >>> D.attr
    1
    >>> D.attr_alias
    1
    >>> class E(D):
    ...     attr_alias = 2
    ...
    >>> E.attr
    1
    >>> E.attr_alias
    2
    """
    return classproperty(flip(getattr, attr_name))
Code example #3
File: strings.py Project: wkusnierczyk/ibis
def execute_string_group_by_find_in_set(op, needle, haystack, **kwargs):
    # `list` could contain series, series groupbys, or scalars
    # mixing series and series groupbys is not allowed
    series_in_haystack = [
        type(piece)
        for piece in haystack
        if isinstance(piece, (pd.Series, SeriesGroupBy))
    ]

    if not series_in_haystack:
        return ibis.util.safe_index(haystack, needle)

    try:
        collection_type, = frozenset(map(type, series_in_haystack))
    except ValueError:
        raise ValueError('Mixing Series and SeriesGroupBy is not allowed')

    pieces = haystack_to_series_of_lists(
        [getattr(piece, 'obj', piece) for piece in haystack]
    )

    result = pieces.map(toolz.flip(ibis.util.safe_index)(needle))
    if issubclass(collection_type, pd.Series):
        return result

    assert issubclass(collection_type, SeriesGroupBy)

    return result.groupby(
        toolz.first(
            piece.grouper.groupings
            for piece in haystack
            if hasattr(piece, 'grouper')
        )
    )
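
The flipped call in isolation: `toolz.flip(f)(needle)` fixes `f`'s second argument, turning a two-argument lookup into a one-argument mapper. A sketch with a hypothetical stand-in for `ibis.util.safe_index` (assumed here to return -1 on a miss):

import toolz

def safe_index(haystack, needle):
    # hypothetical stand-in for ibis.util.safe_index
    try:
        return haystack.index(needle)
    except ValueError:
        return -1

find_b = toolz.flip(safe_index)("b")
print([find_b(row) for row in [["a", "b"], ["c"]]])  # [1, -1]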
Code example #4
File: strings.py Project: cloudera/ibis
def execute_string_group_by_find_in_set(op, needle, haystack, **kwargs):
    # `list` could contain series, series groupbys, or scalars
    # mixing series and series groupbys is not allowed
    series_in_haystack = [
        type(piece)
        for piece in haystack
        if isinstance(piece, (pd.Series, SeriesGroupBy))
    ]

    if not series_in_haystack:
        return ibis.util.safe_index(haystack, needle)

    try:
        collection_type, = frozenset(map(type, series_in_haystack))
    except ValueError:
        raise ValueError('Mixing Series and SeriesGroupBy is not allowed')

    pieces = haystack_to_series_of_lists(
        [getattr(piece, 'obj', piece) for piece in haystack]
    )

    result = pieces.map(toolz.flip(ibis.util.safe_index)(needle))
    if issubclass(collection_type, pd.Series):
        return result

    assert issubclass(collection_type, SeriesGroupBy)

    return result.groupby(
        toolz.first(
            piece.grouper.groupings
            for piece in haystack
            if hasattr(piece, 'grouper')
        )
    )
Code example #5
File: resources.py Project: jcrudy/drlnd_p1
def match_filename(directory, pattern):
    contents = list(
        filter(curry(flip(fnmatchcase))(pattern), os.listdir(directory)))
    if len(contents) > 1:
        raise ValueError('File pattern is ambiguous.')
    if not contents:
        return None
    return contents[0]
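
A quick standalone check of the predicate built above: flipping `fnmatchcase` lets the pattern be bound first, yielding a one-argument filter.

from fnmatch import fnmatchcase

from toolz import curry, flip

matches = curry(flip(fnmatchcase))("*.txt")
print(list(filter(matches, ["a.txt", "b.py", "notes.txt"])))
# ['a.txt', 'notes.txt']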
Code example #6
File: functional.py Project: huangzhengyong/zipline
def foldr(f, seq, default=_no_default):
    """Fold a function over a sequence with right associativity.

    Parameters
    ----------
    f : callable[any, any]
        The function to reduce the sequence with.
        The first argument will be the element of the sequence; the second
        argument will be the accumulator.
    seq : iterable[any]
        The sequence to reduce.
    default : any, optional
        The starting value to reduce with. If not provided, the sequence
        cannot be empty, and the last value of the sequence will be used.

    Returns
    -------
    folded : any
        The folded value.

    Notes
    -----
    This function works by reducing the sequence in a right-associative way.

    For example, imagine we are folding with ``operator.add`` or ``+``:

    .. code-block:: python

       foldr(add, seq) -> seq[0] + (seq[1] + (seq[2] + (... + (seq[-1] + default))))

    In the more general case with an arbitrary function, ``foldr`` will expand
    like so:

    .. code-block:: python

       foldr(f, seq) -> f(seq[0], f(seq[1], f(seq[2], ...f(seq[-1], default))))

    For a more in-depth discussion of left and right folds, see
    `Fold (higher-order function)
    <https://en.wikipedia.org/wiki/Fold_(higher-order_function)>`_.
    The images on that page are very good for showing the differences between
    ``foldr`` and ``foldl`` (``reduce``).

    .. note::

       For performance reasons it is best to pass a strict (non-lazy) sequence,
       for example, a list.

    See Also
    --------
    :func:`functools.reduce`
    :func:`sum`
    """
    return reduce(
        flip(f),
        reversed(seq),
        *(default,) if default is not _no_default else ()
    )
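
The flip-plus-`reversed` trick from the return statement, verified in isolation with subtraction, where grouping matters:

import operator
from functools import reduce

from toolz import flip

seq = [1, 2, 3]
print(reduce(flip(operator.sub), reversed(seq), 0))  # 2 == 1 - (2 - (3 - 0))
print(reduce(operator.sub, seq, 0))                  # -6 == ((0 - 1) - 2) - 3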
Code example #7
def nearest_working_datetime_range(dt_range, availability={}):
    """
    Nearest working datetime_range by datetime_range.
    """
    a = defaulitize_availability(availability)
    start_date = dt_range[0].date()

    if not is_date_available(start_date, a):
        return None

    tomorrow_available = is_date_available(tomorrow(start_date), a)
    working_dt_ranges = working_datetime_ranges_of_date(
        start_date,
        a['special_working_hours'], a['week_working_hours'],
        merge_tomorrow=tomorrow_available)

    is_near = partial(flip(end_after_or_eq), dt_range)
    return first_match(is_near, working_dt_ranges)
Code example #8
def resolve(query: Querylike[T], api: Api[T_auth], loaders: load.Registry,
            auth: T_auth, sender: http.Sender) -> T:
    """resolve a querylike object.

    Parameters
    ----------
    query
        The querylike object to evaluate
    api
        The API to handle the request
    loaders
        The registry of object loaders
    auth
        The authentication object
    sender
        The request sender
    """
    return thread_last(query, attrgetter('__req__'), api.prepare,
                       (flip(api.add_auth), auth), sender, api.parse,
                       loaders(query.__rtype__))
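
`thread_last` feeds the threaded value in as the last argument of each step, so `flip(api.add_auth)` restores the request to the first position. A sketch of just that step, with a hypothetical `add_auth`:

from toolz import flip, thread_last

def add_auth(request, auth):  # hypothetical stand-in for api.add_auth
    return "{} with {}".format(request, auth)

print(thread_last("GET /user", (flip(add_auth), "token")))
# GET /user with token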
Code example #9
def stack(*imgs, **kwargs):
    """Combine images together, overlaying later images onto earlier ones.

    Parameters
    ----------
    imgs : iterable of Image
        The images to combine.
    how : str, optional
        The compositing operator to combine pixels. Default is `'over'`.
    """
    if not imgs:
        raise ValueError("No images passed in")
    for i in imgs:
        if not isinstance(i, Image):
            raise TypeError("Expected `Image`, got: `{0}`".format(type(i)))
    op = composite_op_lookup[kwargs.get('how', 'over')]
    if len(imgs) == 1:
        return imgs[0]
    imgs = xr.align(*imgs, copy=False, join='outer')
    out = tz.reduce(tz.flip(op), [i.data for i in imgs])
    return Image(out, coords=imgs[0].coords, dims=imgs[0].dims)
Code example #10
File: pos.py Project: hafeesk/optic_store
def get_pos_data():
    from erpnext.accounts.doctype.sales_invoice.pos import get_pos_data

    data = get_pos_data()
    allowed_items = get("bin_data", data, {}).keys()
    prices = _get_item_prices(allowed_items)

    def set_prices(item):
        get_price = compose(partial(get, seq=prices, default={}),
                            partial(get, "item_code"))
        return merge(item, get_price(item))

    trans_items = compose(
        partial(map, set_prices),
        partial(filter, lambda x: x.get("name") in allowed_items),
        partial(get, "items", default=[]),
    )
    add_branch = compose(
        flip(merge, {"os_branch": get_user_branch()}),
        lambda x: x.as_dict(),
        partial(get, "doc", default={}),
    )

    return merge(data, {"items": trans_items(data), "doc": add_branch(data)})
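
The `flip(merge, ...)` step in isolation: it pre-binds the right-hand dict, so the branch keys overwrite whatever dict is passed in later. The branch value below is made up:

from toolz import flip, merge

add_branch = flip(merge, {"os_branch": "Main Branch"})
print(add_branch({"customer": "X"}))
# {'customer': 'X', 'os_branch': 'Main Branch'}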
Code example #11
File: test_yahoo.py Project: fswzb/zipline-1
    def _expected_data(self):
        sids = 0, 1, 2
        modifier = {
            'low': 0,
            'open': 1,
            'close': 2,
            'high': 3,
            'volume': 0,
        }
        pricing = [
            np.hstack((np.arange(252, dtype='float64')[:, np.newaxis] + 1 +
                       sid * 10000 + modifier[column] * 1000
                       for sid in sorted(sids))) for column in self.columns
        ]

        # There are two dividends and 1 split for each company.

        def dividend_adjustment(sid, which):
            """The dividends occur at indices 252 // 4 and 3 * 252 / 4
            with a cash amount of sid + 1 / 10 and sid + 2 / 10
            """
            if which == 'first':
                idx = 252 // 4
            else:
                idx = 3 * 252 // 4

            return {
                idx: [
                    Float64Multiply(
                        first_row=0,
                        last_row=idx,
                        first_col=sid,
                        last_col=sid,
                        value=float(1 - ((sid + 1 +
                                          (which == 'second')) / 10) /
                                    (idx - 1 + sid * 10000 + 2000)),
                    )
                ],
            }

        def split_adjustment(sid, volume):
            """The splits occur at index 252 // 2 with a ratio of (sid + 1):1
            """
            idx = 252 // 2
            return {
                idx: [
                    Float64Multiply(
                        first_row=0,
                        last_row=idx,
                        first_col=sid,
                        last_col=sid,
                        value=(identity if volume else op.truediv(1))(sid + 2),
                    )
                ],
            }

        merge_adjustments = merge_with(flip(sum, []))

        adjustments = [
            # ohlc
            merge_adjustments(
                *tuple(dividend_adjustment(sid, 'first') for sid in sids) +
                tuple(dividend_adjustment(sid, 'second') for sid in sids) +
                tuple(split_adjustment(sid, volume=False) for sid in sids))
        ] * (len(self.columns) - 1) + [
            # volume
            merge_adjustments(
                split_adjustment(sid, volume=True) for sid in sids),
        ]

        return pricing, adjustments
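
`flip(sum, [])` turns `sum` into a list concatenator (`sum(lists, [])`), so `merge_with` joins the per-index adjustment lists rather than adding them. The partially applied `merge_with(...)` above suggests the curried variant from `toolz.curried`; a direct call shows the effect:

from toolz import flip, merge_with

print(merge_with(flip(sum, []), {63: ["a"]}, {63: ["b"], 126: ["c"]}))
# {63: ['a', 'b'], 126: ['c']}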
Code example #12
def working_hours_to_datetime_ranges(d, working_hours):
    """
    Convert working_hours to datetime_ranges on specific date.
    """
    partial_by_time_range = partial(flip(by_time_range), d)
    return map(partial_by_time_range, working_hours)
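
The `partial(flip(f), x)` idiom used here (and in code example #7) pins `f`'s second argument; a self-contained check with `operator.sub`:

from functools import partial
from operator import sub

from toolz import flip

subtract_ten = partial(flip(sub), 10)  # fixes sub's *second* argument
print(list(map(subtract_ten, [15, 30])))  # [5, 20]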
Code example #13
File: poser.py Project: deathbeds/poser
 def isinstance(λ, object):
     return λ[toolz.partial(toolz.flip(isinstance), object)]
Code example #14
File: numpy_utils.py Project: rsr2425/zipline
from toolz import flip

uint8_dtype = dtype("uint8")
bool_dtype = dtype("bool")

int64_dtype = dtype("int64")

float32_dtype = dtype("float32")
float64_dtype = dtype("float64")

complex128_dtype = dtype("complex128")

datetime64D_dtype = dtype("datetime64[D]")
datetime64ns_dtype = dtype("datetime64[ns]")

make_datetime64ns = flip(datetime64, "ns")
make_datetime64D = flip(datetime64, "D")

NaTmap = {
    dtype("datetime64[%s]" % unit): datetime64("NaT", unit)
    for unit in ("ns", "us", "ms", "s", "m", "D")
}
NaT_for_dtype = NaTmap.__getitem__
NaTns = NaT_for_dtype(datetime64ns_dtype)
NaTD = NaT_for_dtype(datetime64D_dtype)


_FILLVALUE_DEFAULTS = {
    bool_dtype: False,
    float32_dtype: nan,
    float64_dtype: nan,
    datetime64ns_dtype: NaTns,
}


class NoDefaultMissingValue(Exception):
    pass
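
A quick check of the `flip(datetime64, unit)` constructors above, which pre-bind the unit that `datetime64` takes as its second argument:

from numpy import datetime64

from toolz import flip

make_datetime64ns = flip(datetime64, "ns")
print(make_datetime64ns(0))  # 1970-01-01T00:00:00.000000000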

Code example #15
File: poser.py Project: deathbeds/poser
 def issubclass(λ, object):
     return λ[toolz.partial(toolz.flip(istype), object)]
Code example #16
uint64_dtype = dtype("uint64")
int64_dtype = dtype("int64")

float32_dtype = dtype("float32")
float64_dtype = dtype("float64")

complex128_dtype = dtype("complex128")

datetime64D_dtype = dtype("datetime64[D]")
datetime64ns_dtype = dtype("datetime64[ns]")

object_dtype = dtype("O")
# We use object arrays for strings.
categorical_dtype = object_dtype

make_datetime64ns = flip(datetime64, "ns")
make_datetime64D = flip(datetime64, "D")

# Array compare that works across versions of numpy
try:
    assert_array_compare = np.testing.utils.assert_array_compare
except AttributeError:
    assert_array_compare = np.testing.assert_array_compare

NaTmap = {
    dtype("datetime64[%s]" % unit): datetime64("NaT", unit)
    for unit in ("ns", "us", "ms", "s", "m", "D")
}


def NaT_for_dtype(dtype):
    # The listing is truncated here; body inferred from the NaTmap above
    # (sibling numpy_utils examples bind NaT_for_dtype = NaTmap.__getitem__).
    return NaTmap[dtype]
Code example #17
File: poser.py Project: deathbeds/poser
 def flip(x, object=None):
     return λ[toolz.flip(x)] if object == None else x[toolz.flip(object)]
Code example #18
int64_dtype = dtype('int64')

float32_dtype = dtype('float32')
float64_dtype = dtype('float64')

complex128_dtype = dtype('complex128')

datetime64D_dtype = dtype('datetime64[D]')
datetime64ns_dtype = dtype('datetime64[ns]')

object_dtype = dtype('O')
# We use object arrays for strings.
categorical_dtype = object_dtype

make_datetime64ns = flip(datetime64, 'ns')
make_datetime64D = flip(datetime64, 'D')

NaTmap = {
    dtype('datetime64[%s]' % unit): datetime64('NaT', unit)
    for unit in ('ns', 'us', 'ms', 's', 'm', 'D')
}
NaT_for_dtype = NaTmap.__getitem__
NaTns = NaT_for_dtype(datetime64ns_dtype)
NaTD = NaT_for_dtype(datetime64D_dtype)

_FILLVALUE_DEFAULTS = {
    bool_dtype: False,
    float32_dtype: nan,
    float64_dtype: nan,
    datetime64ns_dtype: NaTns,
}
Code example #19
File: load.py Project: theendsofinvention/snug
"""deserialization tools"""
import typing as t
from datetime import datetime
from functools import partial

from toolz import flip
from valuable import load

from . import types

registry = load.PrimitiveRegistry({
    datetime:
    partial(flip(datetime.strptime), '%Y-%m-%dT%H:%M:%SZ'),
    **{c: c
       for c in [int, float, bool, str, types.Issue.State]}
}) | load.GenericRegistry({
    t.List: load.list_loader
}) | load.get_optional_loader | load.AutoDataclassRegistry()
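
The datetime loader in isolation: `flip(datetime.strptime)` puts the format string first so `partial` can pin it, leaving a one-argument parser:

from datetime import datetime
from functools import partial

from toolz import flip

parse_iso = partial(flip(datetime.strptime), "%Y-%m-%dT%H:%M:%SZ")
print(parse_iso("2018-01-02T03:04:05Z"))  # 2018-01-02 03:04:05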
Code example #20
 def __rxor__(self, other: Any) -> Deferred:
     return self._defer(toolz.flip(operator.xor), other)
Code example #21
def isValid(n: int) -> bool:
    curryEquals = curry(operator.eq)
    curryMod = curry(operator.mod)
    return compose(curryEquals(0), flip(curryMod, 10), checkSum)(n)
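
The same pipeline minus the undefined `checkSum`, as a standalone check that `flip(curryMod, 10)` computes `n % 10`:

from operator import eq, mod

from toolz import compose, curry, flip

last_digit_is_zero = compose(curry(eq)(0), flip(curry(mod), 10))
print(last_digit_is_zero(40), last_digit_is_zero(41))  # True False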
Code example #22
File: test_yahoo.py Project: JasonGiedymin/zipline
    def _expected_data(self):
        sids = 0, 1, 2
        modifier = {
            'low': 0,
            'open': 1,
            'close': 2,
            'high': 3,
            'volume': 0,
        }
        pricing = [
            np.hstack((
                np.arange(252, dtype='float64')[:, np.newaxis] +
                1 +
                sid * 10000 +
                modifier[column] * 1000
                for sid in sorted(sids)
            ))
            for column in self.columns
        ]

        # There are two dividends and 1 split for each company.

        def dividend_adjustment(sid, which):
            """The dividends occur at indices 252 // 4 and 3 * 252 / 4
            with a cash amount of sid + 1 / 10 and sid + 2 / 10
            """
            if which == 'first':
                idx = 252 // 4
            else:
                idx = 3 * 252 // 4

            return {
                idx: [Float64Multiply(
                    first_row=0,
                    last_row=idx,
                    first_col=sid,
                    last_col=sid,
                    value=float(
                        1 -
                        ((sid + 1 + (which == 'second')) / 10) /
                        (idx - 1 + sid * 10000 + 2000)
                    ),
                )],
            }

        def split_adjustment(sid, volume):
            """The splits occur at index 252 // 2 with a ratio of (sid + 1):1
            """
            idx = 252 // 2
            return {
                idx: [Float64Multiply(
                    first_row=0,
                    last_row=idx,
                    first_col=sid,
                    last_col=sid,
                    value=(identity if volume else op.truediv(1))(sid + 2),
                )],
            }

        merge_adjustments = merge_with(flip(sum, []))

        adjustments = [
            # ohlc
            merge_adjustments(
                *tuple(dividend_adjustment(sid, 'first') for sid in sids) +
                tuple(dividend_adjustment(sid, 'second') for sid in sids) +
                tuple(split_adjustment(sid, volume=False) for sid in sids)
            )
        ] * (len(self.columns) - 1) + [
            # volume
            merge_adjustments(
                split_adjustment(sid, volume=True) for sid in sids
            ),
        ]

        return pricing, adjustments
Code example #23
 def contains_dt_range_in_wh_of_date(d, merge_tomorrow=True):
     working_dt_ranges = working_datetime_ranges_of_date(
         d, a['special_working_hours'], a['week_working_hours'],
         merge_tomorrow=merge_tomorrow)
     return any_match(partial(flip(contains), dt_range), working_dt_ranges)
Code example #24
File: numpy_utils.py Project: AlexanderAA/zipline
from toolz import flip

uint8_dtype = dtype('uint8')
bool_dtype = dtype('bool')

int64_dtype = dtype('int64')

float32_dtype = dtype('float32')
float64_dtype = dtype('float64')

complex128_dtype = dtype('complex128')

datetime64D_dtype = dtype('datetime64[D]')
datetime64ns_dtype = dtype('datetime64[ns]')

make_datetime64ns = flip(datetime64, 'ns')
make_datetime64D = flip(datetime64, 'D')

NaTmap = {
    dtype('datetime64[%s]' % unit): datetime64('NaT', unit)
    for unit in ('ns', 'us', 'ms', 's', 'm', 'D')
}
NaT_for_dtype = NaTmap.__getitem__
NaTns = NaT_for_dtype(datetime64ns_dtype)
NaTD = NaT_for_dtype(datetime64D_dtype)


_FILLVALUE_DEFAULTS = {
    bool_dtype: False,
    float32_dtype: nan,
    float64_dtype: nan,
    datetime64ns_dtype: NaTns,
}
Code example #25
File: functional.py Project: thedrow/codetransformer
def is_a(type_):
    """More curryable version of isinstance."""
    return flip(isinstance, type_)
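
Usage sketch: the returned predicate drops straight into `filter` and friends.

from toolz import flip

is_int = flip(isinstance, int)  # what is_a(int) returns
print(list(filter(is_int, [1, "a", 2.0, 3])))  # [1, 3]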
Code example #26
 def __rsub__(self, other: Any) -> Deferred:
     return self._defer(toolz.flip(operator.sub), other)
Code example #27
from numpy import arange, newaxis, cumsum, vectorize, array, bincount, argmax, apply_along_axis, asarray, argsort, transpose, equal, where, isnan, maximum, minimum, exp, logical_not, logical_and, logical_or, select, less_equal, greater_equal, less, greater, nan, inf, log
from scipy.special import expit
from pandas import DataFrame
from toolz import compose
from functools import partial

def weighted_median(data, weights):
    data = data.T
    sorted_idx = argsort(data, axis=1)
    weight_cdf = cumsum(weights[sorted_idx], axis=1)
    median_or_above = weight_cdf >= 0.5 * weight_cdf[:, -1][:, newaxis]
    median_idx = median_or_above.argmax(axis=1)
    medians = sorted_idx[arange(data.shape[0]), median_idx]
    return data[arange(data.shape[0]), medians]

<%!
from toolz import flip
%>
%for function in functions:
def ${namer(function)}(dataframe):
    dataframe = dataframe.copy(deep=False)
%for assignments, (called_function, arguments) in function.calls:
    dataframe[[${', '.join(map(lambda x: '\'%s\'' % str(x), assignments))}]] = ${namer(called_function)}(dataframe[[${', '.join(map(lambda x: '\'%s\'' % str(x), arguments))}]].rename(columns=${repr(dict(zip(map(flip(getattr)('name'), arguments), map(flip(getattr)('name'), called_function.inputs))))}, copy=False))
%endfor
    result = DataFrame(index=dataframe.index)
% for i, output in enumerate(function.outputs):
    result[${i}] = ${printer(output)}
%endfor
    return result
%endfor
Code example #28
def build(constraints, jobs, environment):
    with open('.metadata', mode='rb') as f:
        metadata = pickle.load(f)

    if not environment.keys() <= metadata.keys():
        missing_packages = environment.keys() - metadata.keys()
        raise click.ClickException(
            'Environment variables defined for missing packages {}'.format(
                set(missing_packages)))

    if not constraints:
        constraints = 'python >=2.7,<3|>=3.4', 'numpy >=1.10', 'r-base >=3.3.2'

    constraint_specifications = {
        key.name: value
        for key, value in zip(*itertools.tee(map(MatchSpec, constraints)))
    }

    raw_index = conda.api.get_index(
        ('defaults', 'conda-forge'),
        platform='linux-64',
    )

    index = {
        dist: record
        for dist, record in raw_index.items()
        if dist.name not in constraint_specifications or (
            dist.name in constraint_specifications
            and constraint_specifications[dist.name].match(dist))
    }

    get_version_matrix = toolz.flip(special_case_version_matrix)(index)

    with open('.recipedir', mode='rt') as f:
        recipes_directory = f.read().strip()

    recipes = glob.glob(os.path.join(recipes_directory, '*'))

    if not recipes:
        raise click.ClickException(
            'No recipes found in {}'.format(recipes_directory))

    with open('.artifactdir', mode='rt') as f:
        artifact_directory = f.read().strip()

    build_artifacts = os.path.join(artifact_directory, 'build_artifacts')

    with animated('Constraining special versions (e.g., numpy and python)'):
        with process_pool(metadata) as executor:
            results = executor.map(get_version_matrix, metadata.values())

    matrices = dict(zip(metadata.keys(), results))

    scripts = {(
        package,
        constraints.get('python', '').replace('.', ''),
        constraints.get('numpy', '').replace('.', ''),
    ): SCRIPT.format(
        package=package,
        python=constraints.get('python', '').replace('.', ''),
        numpy=constraints.get('numpy', '').replace('.', ''),
    )
               for package, matrix in matrices.items()
               for constraints in map(dict, matrix)}

    os.makedirs('logs', exist_ok=True)
    for text_file in glob.glob(os.path.join('logs', '*.txt')):
        os.remove(text_file)

    args = (
        '-a',
        'stdin',
        '-a',
        'stdout',
        '-a',
        'stderr',
        '-v',
        '{}:/build_artifacts'.format(os.path.abspath(build_artifacts)),
        '-v',
        '{}:/recipes'.format(os.path.abspath(recipes_directory)),
        '--dns',
        '8.8.8.8',
        '--dns',
        '8.8.4.4',
        '-e',
        'HOST_USER_ID={:d}'.format(os.getuid()),
    )

    tasks = {package: [] for package in matrices.keys()}

    for (package, python, numpy), script in scripts.items():
        task_args = functools.reduce(
            lambda args, var: args + ('-e', var),
            environment.get(package, ()),
            args,
        )
        log_path = os.path.join(
            'logs', '{package}-py{python}-np{numpy}.txt'.format(
                package=package,
                python=python or '_none',
                numpy=numpy or '_none',
            ))
        task = sh.docker.run.bake(
            *task_args,
            interactive=True,
            rm=True,
            _in=script,
            _out=log_path,
        )
        tasks[package].append(task)

    ntasks = sum(map(len, tasks.values()))
    built = itertools.count()
    first = next(built)
    format_string = 'Built {{:{padding}d}}/{:{padding}d} packages'
    formatter = format_string.format(ntasks, padding=len(str(ntasks))).format
    animation = animated(formatter(first))
    update_when_done = functools.partial(
        update_animation,
        animation,
        formatter,
        built,
    )

    with open('.ordering', mode='rb') as f:
        ordering = pickle.load(f)

    for package in ordering:  # TODO: parallelize on special versions
        futures = []

        with thread_pool(tasks, max_workers=jobs) as executor:
            for task in tasks[package]:
                future = executor.submit(task, 'condaforge/linux-anvil',
                                         'bash')
                future.add_done_callback(update_when_done)
                futures.append(future)

            with animation:
                for future in concurrent.futures.as_completed(futures):
                    try:
                        future.result()
                    except sh.ErrorReturnCode as e:
                        click.get_binary_stream('stderr').write(e.stderr)
                        raise SystemExit(e.exit_code)

    built_packages = [
        tarball for tarball in glob.glob(
            os.path.join(build_artifacts, 'linux-64', '*.tar.bz2'))
        if not os.path.basename(tarball).startswith('repodata')
    ]

    if not built_packages:
        raise click.ClickException(
            'No packages found in {}'.format(build_artifacts))

    for package in tqdm.tqdm(built_packages, desc='Copying packages'):
        shutil.copyfile(
            package,
            os.path.join(artifact_directory, os.path.basename(package)),
        )
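
`toolz.flip(special_case_version_matrix)(index)` pins the index as the second argument, so the executor can map the resulting one-argument function over each package's metadata. A sketch with a hypothetical stand-in:

import toolz

def version_matrix(metadata, index):  # hypothetical stand-in signature
    return (metadata, len(index))

get_version_matrix = toolz.flip(version_matrix)({"pkg-a": 1})
print(list(map(get_version_matrix, ["meta-1", "meta-2"])))
# [('meta-1', 1), ('meta-2', 1)]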
Code example #29
 def __rfloordiv__(self, other: Any) -> Deferred:
     return self._defer(toolz.flip(operator.floordiv), other)
Code example #30
 def __rpow__(self, other: Any) -> Deferred:
     return self._defer(toolz.flip(operator.pow), other)
Code example #31
 def __rmod__(self, other: Any) -> Deferred:
     return self._defer(toolz.flip(operator.mod), other)
Code example #32
from zipline.utils.numpy_utils import repeat_last_axis


AD_FIELD_NAME = 'asof_date'
TS_FIELD_NAME = 'timestamp'
SID_FIELD_NAME = 'sid'
valid_deltas_node_types = (
    bz.expr.Field,
    bz.expr.ReLabel,
    bz.expr.Symbol,
)
traversable_nodes = (
    bz.expr.Field,
    bz.expr.Label,
)
is_invalid_deltas_node = complement(flip(isinstance, valid_deltas_node_types))
getname = op.attrgetter('__name__')


class _ExprRepr(object):
    """Box for repring expressions with the str of the expression.

    Parameters
    ----------
    expr : Expr
        The expression to box for repring.
    """
    __slots__ = 'expr',

    def __init__(self, expr):
        self.expr = expr
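
The `complement(flip(isinstance, ...))` construction in isolation, with stand-in types instead of the blaze node classes:

from toolz import complement, flip

is_invalid = complement(flip(isinstance, (int, float)))
print([is_invalid(x) for x in [1, "a", 2.0]])  # [False, True, False]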
Code example #33
File: poser.py Project: deathbeds/poser
    def __getattr__(x, object):
        def partial(*args, **kwargs):
            return x.partial(attribute(object, *args, **kwargs))

        return partial


for binop in "add sub mul matmul truediv floordiv mod eq lt gt ne xor".split():
    for cls in (ThisType, Λ):
        setattr(
            cls,
            f"__{binop}__",
            functools.wraps(getattr(operator, binop))(
                functools.partialmethod(
                    Composition.partial, toolz.flip(getattr(operator, binop))
                )
            ),
        )
        setattr(
            cls,
            f"__i{binop}__",
            functools.wraps(getattr(operator, binop))(
                functools.partialmethod(
                    Composition.partial, toolz.flip(getattr(operator, binop))
                )
            ),
        )
        setattr(
            cls,
            f"__r{binop}__",
            functools.wraps(getattr(operator, binop))(
                functools.partialmethod(
                    # listing truncated here; the reflected variant is
                    # completed from the pattern of the two calls above,
                    # using the unflipped operator since reflection already
                    # swaps the operands
                    Composition.partial, getattr(operator, binop)
                )
            ),
        )