def __init__(self, execute, name=None, provides=None, requires=None,
             auto_extract=True, rebind=None, revert=None, version=None,
             inject=None):
    if not six.callable(execute):
        raise ValueError("Function to use for executing must be"
                         " callable")
    if revert is not None:
        if not six.callable(revert):
            raise ValueError("Function to use for reverting must"
                             " be callable")
    if name is None:
        name = reflection.get_callable_name(execute)
    super(FunctorTask, self).__init__(name, provides=provides,
                                      inject=inject)
    self._execute = execute
    self._revert = revert
    if version is not None:
        self.version = version

    mapping = self._build_arg_mapping(execute, requires, rebind,
                                      auto_extract)
    self.rebind, exec_requires, self.optional = mapping

    if revert:
        revert_mapping = self._build_arg_mapping(revert, requires,
                                                 rebind, auto_extract)
    else:
        revert_mapping = (self.rebind, exec_requires, self.optional)
    (self.revert_rebind, revert_requires,
     self.revert_optional) = revert_mapping

    self.requires = exec_requires.union(revert_requires)
def _select_function(sort, typ):
    if typ in ['F', 'D']:
        if callable(sort):
            # assume the user knows what they're doing
            sfunction = sort
        elif sort == 'lhp':
            sfunction = lambda x, y: (np.real(x/y) < 0.0)
        elif sort == 'rhp':
            sfunction = lambda x, y: (np.real(x/y) >= 0.0)
        elif sort == 'iuc':
            sfunction = lambda x, y: (abs(x/y) <= 1.0)
        elif sort == 'ouc':
            sfunction = lambda x, y: (abs(x/y) > 1.0)
        else:
            raise ValueError("sort parameter must be None, a callable, or "
                             "one of ('lhp','rhp','iuc','ouc')")
    elif typ in ['f', 'd']:
        if callable(sort):
            # assume the user knows what they're doing
            sfunction = sort
        elif sort == 'lhp':
            sfunction = lambda x, y, z: (np.real((x + y*1j)/z) < 0.0)
        elif sort == 'rhp':
            sfunction = lambda x, y, z: (np.real((x + y*1j)/z) >= 0.0)
        elif sort == 'iuc':
            sfunction = lambda x, y, z: (abs((x + y*1j)/z) <= 1.0)
        elif sort == 'ouc':
            sfunction = lambda x, y, z: (abs((x + y*1j)/z) > 1.0)
        else:
            raise ValueError("sort parameter must be None, a callable, or "
                             "one of ('lhp','rhp','iuc','ouc')")
    else:  # to avoid an error later
        raise ValueError("dtype %s not understood" % typ)
    return sfunction
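# A small, self-contained sketch (not from the source) of what the
# complex-typed ('F'/'D') predicates above compute. Each generalized
# eigenvalue is alpha/beta; 'lhp' selects left-half-plane eigenvalues
# and 'iuc' selects those inside the unit circle:
import numpy as np

lhp = lambda x, y: (np.real(x/y) < 0.0)
iuc = lambda x, y: (abs(x/y) <= 1.0)

alpha = np.array([-2.0 + 1.0j, 3.0 + 0.0j, 0.5 + 0.2j])
beta = np.ones(3)
print(lhp(alpha, beta))  # [ True False False]
print(iuc(alpha, beta))  # [False False  True]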
def add_state(self, state, terminal=False, on_enter=None, on_exit=None):
    """Adds a given state to the state machine.

    :param on_enter: callback, if provided will be expected to take two
                     positional parameters, these being the state being
                     entered and the event that is being processed that
                     caused the state transition
    :param on_exit: callback, if provided will be expected to take two
                    positional parameters, these being the state being
                    exited and the event that is being processed that
                    caused the state transition
    :param state: the state to be added
    :type state: string
    """
    if state in self._states:
        raise excp.Duplicate("State '%s' already defined" % state)
    if on_enter is not None:
        if not six.callable(on_enter):
            raise ValueError("On enter callback must be callable")
    if on_exit is not None:
        if not six.callable(on_exit):
            raise ValueError("On exit callback must be callable")
    self._states[state] = {
        'terminal': bool(terminal),
        'reactions': {},
        'on_enter': on_enter,
        'on_exit': on_exit,
    }
    self._transitions[state] = OrderedDict()
def add_state(self, state, on_enter=None, on_exit=None, target=None,
              terminal=None):
    """Adds a given state to the state machine.

    The on_enter and on_exit callbacks, if provided, will be expected
    to take two positional parameters, these being the state being
    exited (for on_exit) or the state being entered (for on_enter) and
    a second parameter which is the event that is being processed that
    caused the state transition.
    """
    if state in self._states:
        raise excp.Duplicate(_("State '%s' already defined") % state)
    if on_enter is not None:
        if not six.callable(on_enter):
            raise ValueError(_("On enter callback must be callable"))
    if on_exit is not None:
        if not six.callable(on_exit):
            raise ValueError(_("On exit callback must be callable"))
    if target is not None and target not in self._states:
        raise excp.InvalidState(_("Target state '%s' does not exist")
                                % target)
    self._states[state] = {
        'terminal': bool(terminal),
        'reactions': {},
        'on_enter': on_enter,
        'on_exit': on_exit,
        'target': target,
    }
    self._transitions[state] = OrderedDict()
def register(self, event_type, callback, args=None, kwargs=None,
             details_filter=None):
    """Register a callback to be called when event of a given type occurs.

    Callback will be called with provided ``args`` and ``kwargs`` and
    when event type occurs (or on any event if ``event_type`` equals to
    :attr:`.ANY`). It will also get additional keyword argument,
    ``details``, that will hold event details provided to the
    :meth:`.notify` method (if a details filter callback is provided then
    the target callback will *only* be triggered if the details filter
    callback returns a truthy value).

    :param event_type: event type to get triggered on
    :param callback: function callback to be registered.
    :param args: non-keyworded arguments
    :type args: list
    :param kwargs: key-value pair arguments
    :type kwargs: dictionary
    """
    if not six.callable(callback):
        raise ValueError("Event callback must be callable")
    if details_filter is not None:
        if not six.callable(details_filter):
            raise ValueError("Details filter must be callable")
    if not self.can_be_registered(event_type):
        raise ValueError("Disallowed event type '%s' can not have a"
                         " callback registered" % event_type)
    if self.is_registered(event_type, callback,
                          details_filter=details_filter):
        raise ValueError("Event callback already registered with"
                         " equivalent details filter")
    if kwargs:
        for k in self.RESERVED_KEYS:
            if k in kwargs:
                raise KeyError("Reserved key '%s' not allowed in "
                               "kwargs" % k)
    self._topics[event_type].append(
        Listener(callback, args=args, kwargs=kwargs,
                 details_filter=details_filter))
def add_state(self, state, terminal=False, on_enter=None, on_exit=None):
    """Adds a given state to the state machine.

    The on_enter and on_exit callbacks, if provided, will be expected
    to take two positional parameters, these being the state being
    exited (for on_exit) or the state being entered (for on_enter) and
    a second parameter which is the event that is being processed that
    caused the state transition.
    """
    if self.frozen:
        raise excp.FrozenMachine()
    if state in self._states:
        raise excp.Duplicate("State '%s' already defined" % state)
    if on_enter is not None:
        if not six.callable(on_enter):
            raise ValueError("On enter callback must be callable")
    if on_exit is not None:
        if not six.callable(on_exit):
            raise ValueError("On exit callback must be callable")
    self._states[state] = {
        'terminal': bool(terminal),
        'reactions': {},
        'on_enter': on_enter,
        'on_exit': on_exit,
    }
    self._transitions[state] = collections.OrderedDict()
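# A hedged usage sketch for add_state() above. The FiniteMachine name is
# an assumption (a taskflow-style state machine), not part of the source;
# the (state, event) callback signature follows the docstring:
def _announce(state, event):
    print("entered '%s' because of event '%s'" % (state, event))

machine = FiniteMachine()  # assumed host class providing add_state()
machine.add_state('working', on_enter=_announce)
machine.add_state('done', terminal=True, on_enter=_announce)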
def _prepare(cls, elem, attr):
    if not isinstance(elem, six.binary_type):
        # Prefer the element's own serialize() hook, then fall back to
        # the attribute class's serializer.
        serialize = getattr(elem, 'serialize', None)
        if six.callable(serialize):
            return serialize()
        attr_class = cls._get_attr_class(attr)
        serialize = getattr(attr_class, 'serialize', None)
        if six.callable(serialize):
            return serialize(elem)
    return elem
def _set_obj_extra_data_key(obj, eng):
    my_value = value
    my_key = key
    # Resolve (possibly nested) callables down to concrete values.
    while callable(my_value):
        my_value = my_value(obj, eng)
    while callable(my_key):
        my_key = my_key(obj, eng)
    obj.extra_data[str(my_key)] = my_value
def _foreach(obj, eng):
    my_list_to_process = []
    step = str(eng.getCurrTaskId())
    try:
        if "_Iterators" not in eng.extra_data:
            eng.extra_data["_Iterators"] = {}
    except KeyError:
        eng.extra_data["_Iterators"] = {}

    # First visit of this task: initialize the iterator state.
    if step not in eng.extra_data["_Iterators"]:
        eng.extra_data["_Iterators"][step] = {}
        if cache_data:
            if callable(get_list_function):
                eng.extra_data["_Iterators"][step]["cache"] = \
                    get_list_function(obj, eng)
            elif isinstance(get_list_function, list):
                eng.extra_data["_Iterators"][step]["cache"] = \
                    get_list_function
            else:
                eng.extra_data["_Iterators"][step]["cache"] = []
            my_list_to_process = eng.extra_data["_Iterators"][step]["cache"]
        if order == "ASC":
            eng.extra_data["_Iterators"][step].update({"value": 0})
        elif order == "DSC":
            eng.extra_data["_Iterators"][step].update(
                {"value": len(my_list_to_process) - 1})
        eng.extra_data["_Iterators"][step]["previous_data"] = obj.data

    if callable(get_list_function):
        if cache_data:
            my_list_to_process = eng.extra_data["_Iterators"][step]["cache"]
        else:
            my_list_to_process = get_list_function(obj, eng)
    elif isinstance(get_list_function, list):
        my_list_to_process = get_list_function
    else:
        my_list_to_process = []

    if order == "ASC" and \
            eng.extra_data["_Iterators"][step]["value"] < \
            len(my_list_to_process):
        obj.data = my_list_to_process[
            eng.extra_data["_Iterators"][step]["value"]]
        if savename is not None:
            obj.extra_data[savename] = obj.data
        eng.extra_data["_Iterators"][step]["value"] += 1
    elif order == "DSC" and \
            eng.extra_data["_Iterators"][step]["value"] > -1:
        obj.data = my_list_to_process[
            eng.extra_data["_Iterators"][step]["value"]]
        if savename is not None:
            obj.extra_data[savename] = obj.data
        eng.extra_data["_Iterators"][step]["value"] -= 1
    else:
        # Iteration finished: restore the data and jump past the loop body.
        obj.data = eng.extra_data["_Iterators"][step]["previous_data"]
        del eng.extra_data["_Iterators"][step]
        coordonatex = len(eng.getCurrTaskId()) - 1
        coordonatey = eng.getCurrTaskId()[coordonatex]
        new_vector = eng.getCurrTaskId()
        new_vector[coordonatex] = coordonatey + 2
        eng.setPosition(eng.getCurrObjId(), new_vector)
def add_listener(self, rule, view_func, **options):
    """Adds a listener to the listeners container; verifies that
    `rule` and `view_func` are callable.

    :raises TypeError: if rule is not callable.
    :raises TypeError: if view_func is not callable
    """
    if not six.callable(rule):
        raise TypeError('rule should be callable')
    if not six.callable(view_func):
        raise TypeError('view_func should be callable')
    self.listeners.append(Listener(rule, view_func, options))
def _get_files_list(obj, eng):
    # Resolve a (possibly nested) callable parameter to a glob pattern.
    unknown = parameter
    while callable(unknown):
        unknown = unknown(obj, eng)
    result = glob.glob1(path, unknown)
    for i in range(0, len(result)):
        result[i] = path + os.sep + result[i]
    return result
def test_namespace_dereferencing(self):
    service = self.object.nested
    assert service is self.object.nested
    assert six.callable(service)
    assert hasattr(service, 'method')
    assert six.callable(service.method)

    service = self.object.nested.deeper
    assert service is self.object.nested.deeper
    assert six.callable(service)
    assert hasattr(service, 'method')
    assert six.callable(service.method)
def register(self, event_type, callback, args=None, kwargs=None,
             details_filter=None, weak=False):
    """Register a callback to be called when event of a given type occurs.

    Callback will be called with provided ``args`` and ``kwargs`` and
    when event type occurs (or on any event if ``event_type`` equals to
    :attr:`.ANY`). It will also get additional keyword argument,
    ``details``, that will hold event details provided to the
    :meth:`.notify` method (if a details filter callback is provided then
    the target callback will *only* be triggered if the details filter
    callback returns a truthy value).

    :param event_type: event type to get triggered on
    :param callback: function callback to be registered.
    :param args: non-keyworded arguments
    :type args: list
    :param kwargs: key-value pair arguments
    :type kwargs: dictionary
    :param weak: if the callback retained should be referenced via a
                 weak reference or a strong reference (defaults to
                 holding a strong reference)
    :type weak: bool

    :returns: the listener that was registered
    :rtype: :py:class:`~.Listener`
    """
    if not six.callable(callback):
        raise ValueError("Event callback must be callable")
    if details_filter is not None:
        if not six.callable(details_filter):
            raise ValueError("Details filter must be callable")
    if not self.can_be_registered(event_type):
        raise ValueError("Disallowed event type '%s' can not have a"
                         " callback registered" % event_type)
    if kwargs:
        for k in self.RESERVED_KEYS:
            if k in kwargs:
                raise KeyError("Reserved key '%s' not allowed in "
                               "kwargs" % k)
    with self._lock:
        if self.is_registered(event_type, callback,
                              details_filter=details_filter):
            raise ValueError("Event callback already registered with"
                             " equivalent details filter")
        listener = Listener(_make_ref(callback, weak=weak),
                            args=args, kwargs=kwargs,
                            details_filter=details_filter,
                            weak=weak)
        listeners = self._topics.setdefault(event_type, [])
        listeners.append(listener)
        return listener
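# A hedged usage sketch for register() above. The Notifier host class and
# the event name are assumptions, not from the source; the details filter
# gates the callback exactly as the docstring describes:
def _only_errors(details):
    return details.get('status') == 'error'

def _on_complete(*args, **kwargs):
    print("completed:", kwargs.get('details'))

notifier = Notifier()  # assumed class exposing register()/notify()
listener = notifier.register('task.complete', _on_complete,
                             details_filter=_only_errors, weak=False)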
def _update_last_update(obj, eng):
    if "_should_last_run_be_update" in obj.extra_data:
        if obj.extra_data["_should_last_run_be_update"]:
            repository_list_to_process = repository_list
            if not isinstance(repository_list_to_process, list):
                if callable(repository_list_to_process):
                    # Resolve nested callables to a concrete value.
                    while callable(repository_list_to_process):
                        repository_list_to_process = \
                            repository_list_to_process(obj, eng)
                else:
                    repository_list_to_process = [
                        repository_list_to_process]
            for repository in repository_list_to_process:
                update_lastrun(repository["id"])
def __init__(self, root, default_renderer='mako',
             template_path='templates', hooks=lambda: [],
             custom_renderers={}, extra_template_vars={},
             force_canonical=True, guess_content_type_from_ext=True,
             context_local_factory=None, **kw):
    self.init_context_local(context_local_factory)

    if isinstance(root, six.string_types):
        root = self.__translate_root__(root)

    self.root = root
    self.renderers = RendererFactory(custom_renderers, extra_template_vars)
    self.default_renderer = default_renderer

    # pre-sort these so we don't have to do it per-request
    if six.callable(hooks):
        hooks = hooks()
    self.hooks = list(sorted(
        hooks,
        key=operator.attrgetter('priority')
    ))
    self.template_path = template_path
    self.force_canonical = force_canonical
    self.guess_content_type_from_ext = guess_content_type_from_ext
def url_encode(obj, charset="utf8", encode_keys=False):
    items = []
    if isinstance(obj, dict):
        for k, v in list(obj.items()):
            items.append((k, v))
    else:
        # Treat non-dict input as an iterable of (key, value) pairs.
        # (The original read `items = list(items)`, which always produced
        # an empty list here.)
        items = list(obj)

    tmp = []
    for k, v in items:
        if encode_keys:
            k = encode(k, charset)

        if not isinstance(v, (tuple, list)):
            v = [v]

        for v1 in v:
            if v1 is None:
                v1 = ''
            elif six.callable(v1):
                v1 = encode(v1(), charset)
            else:
                v1 = encode(v1, charset)
            tmp.append('%s=%s' % (quote(k), quote_plus(v1)))
    return '&'.join(tmp)
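# A hedged example of url_encode() in action (output assumes the encode()
# helper passes ASCII text through unchanged):
params = {'q': 'state machine', 'tags': ['a', 'b'], 'cb': lambda: 'x'}
print(url_encode(params))
# e.g. 'q=state+machine&tags=a&tags=b&cb=x' (pair order follows dict order)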
def power(*args):
    '''Returns the "power" composition of several functions.

    Examples::

       >>> import operator
       >>> f = power(partial(operator.mul, 3), 3)
       >>> f(23) == 3*(3*(3*23))
       True

       >>> power(operator.neg)
       Traceback (most recent call last):
       ...
       TypeError: Function `power` requires at least two arguments

    '''
    try:
        funcs, times = args[:-1], args[-1]
    except IndexError:
        msg = "Function `power` requires at least two arguments"
        raise TypeError(msg)
    if not funcs:
        raise TypeError('Function `power` requires at least two arguments')
    if any(not callable(func) for func in funcs):
        raise TypeError('First arguments of `power` must be callables')
    if not isinstance(times, int):
        raise TypeError('Last argument of `power` must be int')
    if len(funcs) > 1:
        base = (compose(funcs), )
    else:
        base = (funcs[0], )
    return compose(*(base * times))
def __getattr__(self, name):
    attr = getattr(self.obj, name)
    if not six.callable(attr):
        return attr
    return functools.partial(self.proxy_callable, name)
def _func(request, *args, **kwargs):
    user = getattr(request, 'user', None)
    is_authenticated = getattr(user, 'is_authenticated', lambda: False)
    if ((user is not None
         and six.callable(is_authenticated)
         and not is_authenticated())
            or user is None):
        user = None
        try:
            creds = args[:len(authentication_arguments)]
            if len(creds) == 0:
                raise IndexError
            # Django's authenticate() method takes arguments as dict
            user = _authenticate(username=creds[0], password=creds[1],
                                 *creds[2:])
            if user is not None:
                args = args[len(authentication_arguments):]
        except IndexError:
            auth_kwargs = {}
            try:
                for auth_kwarg in authentication_arguments:
                    auth_kwargs[auth_kwarg] = kwargs[auth_kwarg]
            except KeyError:
                raise InvalidParamsError(
                    'Authenticated methods require at least '
                    '[%s] or {%s} arguments' % (authentication_arguments,
                                                authentication_arguments))
            user = _authenticate(**auth_kwargs)
            if user is not None:
                for auth_kwarg in authentication_arguments:
                    kwargs.pop(auth_kwarg)
        if user is None:
            raise InvalidCredentialsError
        request.user = user
    return func(request, *args, **kwargs)
def merge_graphs(graph, *graphs, **kwargs):
    """Merges a bunch of graphs into a new graph.

    If no additional graphs are provided the first graph is
    returned unmodified otherwise the merged graph is returned.
    """
    tmp_graph = graph
    allow_overlaps = kwargs.get('allow_overlaps', False)
    overlap_detector = kwargs.get('overlap_detector')
    if overlap_detector is not None and not six.callable(overlap_detector):
        raise ValueError("Overlap detection callback expected to be callable")
    elif overlap_detector is None:
        overlap_detector = (lambda to_graph, from_graph:
                            len(to_graph.subgraph(from_graph.nodes_iter())))
    for g in graphs:
        # This should ensure that the nodes to be merged do not already
        # exist in the graph that is to be merged into. This could be
        # problematic if there are duplicates.
        if not allow_overlaps:
            # Attempt to induce a subgraph using the to be merged graphs
            # nodes and see if any graph results.
            overlaps = overlap_detector(graph, g)
            if overlaps:
                raise ValueError("Can not merge graph %s into %s since there "
                                 "are %s overlapping nodes (and we do not "
                                 "support merging nodes)" % (g, graph,
                                                             overlaps))
        graph = nx.algorithms.compose(graph, g)
    # Keep the first graphs name.
    if graphs:
        graph.name = tmp_graph.name
    return graph
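# A hedged usage sketch for merge_graphs() (assumes networkx 1.x, to match
# the nodes_iter() call above; graph names are illustrative):
import networkx as nx

g1 = nx.DiGraph(name='left')
g1.add_edge('a', 'b')
g2 = nx.DiGraph(name='right')
g2.add_edge('c', 'd')

merged = merge_graphs(g1, g2)
print(merged.name)                   # 'left' -- the first graph's name wins
print(sorted(merged.nodes_iter()))   # ['a', 'b', 'c', 'd']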
def process_response(self, request, response):
    """Sets the cache, if needed."""
    if (
        not hasattr(request, '_cache_update_cache')
        or not request._cache_update_cache
    ):
        # We don't need to update the cache, just return.
        return response
    if request.method != 'GET':
        # This is a stronger requirement than above. It is needed
        # because of interactions between this middleware and the
        # HTTPMiddleware, which throws the body of a HEAD-request
        # away before this middleware gets a chance to cache it.
        return response
    if not response.status_code == 200:
        return response
    # Try to get the timeout from the "max-age" section of the "Cache-
    # Control" header before reverting to using the default cache_timeout
    # length.
    timeout = get_max_age(response)
    if timeout is None:
        timeout = self.cache_timeout
    elif timeout == 0:
        # max-age was set to 0, don't bother caching.
        return response
    if self.patch_headers:
        patch_response_headers(response, timeout)
    if timeout:
        if callable(self.key_prefix):
            key_prefix = self.key_prefix(request)
        else:
            key_prefix = self.key_prefix
        if self.post_process_response:
            response = self.post_process_response(
                response,
                request
            )
        with RequestPath(request, self.only_get_keys, self.forget_get_keys):
            cache_key = learn_cache_key(
                request,
                response,
                timeout,
                key_prefix
            )
            if self.remember_all_urls:
                self.remember_url(request, cache_key, timeout)
        self.cache.set(cache_key, response, timeout)
    if self.post_process_response_always:
        response = self.post_process_response_always(
            response,
            request
        )
    return response
def handle_key(self, key, check_parent=True):
    '''Handle a key press in this window.'''
    # First try the first responder if this window has one, but don't
    # allow it to check with its parent (False second parameter) so we
    # don't recurse and get a stack overflow
    for first_responder in reversed(self.first_responders):
        if first_responder.handle_key(key, False):
            return True

    # Check our key map to see if we have any actions. Actions don't
    # take any arguments, they must be callable
    if key in self.key_actions:
        key_action = self.key_actions[key]
        key_action['callback']()
        return True
    # Check if there is a wildcard key for any key
    if -1 in self.key_actions:
        key_action = self.key_actions[-1]
        key_action['callback']()
        return True
    # Check if the window delegate wants to handle this key press
    if self.delegate:
        if six.callable(getattr(self.delegate, "handle_key", None)):
            if self.delegate.handle_key(self, key):
                return True
        elif six.callable(self.delegate):
            # Fall back to a delegate that is itself callable (guarded so
            # a non-callable delegate without handle_key doesn't crash).
            if self.delegate(self, key):
                return True
    # Check if we have a parent window and if so, let the parent
    # window handle the key press
    if check_parent and self.parent:
        return self.parent.handle_key(key, True)
    else:
        return False  # Key not handled
def convert(self, value, receiver, context, function_spec, engine,
            *convert_args, **convert_kwargs):
    if six.callable(value) and hasattr(value, '__unwrapped__'):
        value = value.__unwrapped__

    def func(*args, **kwargs):
        function_context = self._find_function_context(
            function_spec, context)
        parent_function_context = function_context.parent
        if parent_function_context is None:
            raise exceptions.NoFunctionRegisteredException(
                function_spec.name)

        new_name = function_spec.name
        if self.with_name:
            new_name = args[0]
            args = args[1:]

        new_receiver = receiver
        if self.method is True:
            new_receiver = args[0]
            args = args[1:]
        elif self.method is False:
            new_receiver = utils.NO_VALUE

        if self.with_context:
            new_context = args[0]
            args = args[1:]
        else:
            new_context = context.create_child_context()

        return parent_function_context(
            new_name, engine, new_receiver,
            new_context)(*args, **kwargs)

    func.__unwrapped__ = value
    return func
def convert(self, value, receiver, context, function_spec, engine,
            *convert_args, **convert_kwargs):
    if six.callable(value) and hasattr(value, '__unwrapped__'):
        value = value.__unwrapped__

    def func(*args, **kwargs):
        name = self.name
        if not name:
            name = args[0]
            args = args[1:]

        new_receiver = utils.NO_VALUE
        if self.method:
            new_receiver = args[0]
            args = args[1:]

        if self.with_context:
            new_context = args[0]
            args = args[1:]
        else:
            new_context = context.create_child_context()

        return new_context(
            name, engine, new_receiver,
            use_convention=self.use_convention)(*args, **kwargs)

    func.__unwrapped__ = value
    return func
def set_bandwidth(self, bw_method=None):
    """Compute the estimator bandwidth with given method.

    The new bandwidth calculated after a call to `set_bandwidth` is used
    for subsequent evaluations of the estimated density.

    Parameters
    ----------
    bw_method : str, scalar or callable, optional
        The method used to calculate the estimator bandwidth.  This can
        be 'scott', 'silverman', a scalar constant or a callable.  If a
        scalar, this will be used directly as `kde.factor`.  If a
        callable, it should take a `gaussian_kde` instance as only
        parameter and return a scalar.  If None (default), nothing
        happens; the current `kde.covariance_factor` method is kept.

    Notes
    -----
    .. versionadded:: 0.11

    Examples
    --------
    >>> x1 = np.array([-7, -5, 1, 4, 5.])
    >>> kde = stats.gaussian_kde(x1)
    >>> xs = np.linspace(-10, 10, num=50)
    >>> y1 = kde(xs)
    >>> kde.set_bandwidth(bw_method='silverman')
    >>> y2 = kde(xs)
    >>> kde.set_bandwidth(bw_method=kde.factor / 3.)
    >>> y3 = kde(xs)

    >>> fig = plt.figure()
    >>> ax = fig.add_subplot(111)
    >>> ax.plot(x1, np.ones(x1.shape) / (4. * x1.size), 'bo',
    ...         label='Data points (rescaled)')
    >>> ax.plot(xs, y1, label='Scott (default)')
    >>> ax.plot(xs, y2, label='Silverman')
    >>> ax.plot(xs, y3, label='Const (1/3 * Silverman)')
    >>> ax.legend()
    >>> plt.show()

    """
    if bw_method is None:
        pass
    elif bw_method == 'scott':
        self.covariance_factor = self.scotts_factor
    elif bw_method == 'silverman':
        self.covariance_factor = self.silverman_factor
    elif np.isscalar(bw_method) and not isinstance(bw_method, string_types):
        self._bw_method = 'use constant'
        self.covariance_factor = lambda: bw_method
    elif callable(bw_method):
        self._bw_method = bw_method
        self.covariance_factor = lambda: self._bw_method(self)
    else:
        msg = "`bw_method` should be 'scott', 'silverman', a scalar " \
              "or a callable."
        raise ValueError(msg)

    self._compute_covariance()
def convert(self, value, receiver, context, function_spec, engine,
            *convert_args, **convert_kwargs):
    super(Lambda, self).convert(
        value, receiver, context, function_spec, engine,
        *convert_args, **convert_kwargs)
    if value is None:
        return None
    elif six.callable(value) and hasattr(value, '__unwrapped__'):
        value = value.__unwrapped__

    def func(*args, **kwargs):
        if self.method and self.with_context:
            new_receiver, new_context = args[:2]
            args = args[2:]
        elif self.method and not self.with_context:
            new_receiver, new_context = \
                args[0], context.create_child_context()
            args = args[1:]
        elif not self.method and self.with_context:
            new_receiver, new_context = utils.NO_VALUE, args[0]
            args = args[1:]
        else:
            new_receiver, new_context = \
                utils.NO_VALUE, context.create_child_context()

        return self._call(value, new_receiver, new_context,
                          engine, args, kwargs)

    func.__unwrapped__ = value
    return func
def add_reaction(self, state, event, reaction, *args, **kwargs):
    """Adds a reaction that may get triggered by the given event & state.

    Reaction callbacks may (depending on how the state machine is run)
    be used after an event is processed (and a transition occurs) to
    cause the machine to react to the newly arrived at stable state.

    These callbacks are expected to accept three default positional
    parameters (although more can be passed in via *args and **kwargs,
    these will automatically get provided to the callback when it is
    activated *ontop* of the three default). The three default parameters
    are the last stable state, the new stable state and the event that
    caused the transition to this new stable state to be arrived at.

    The result of a callback is expected to be a new event that the
    callback wants the state machine to react to. This new event may
    (depending on how the state machine is run) get processed (and this
    process typically repeats) until the state machine reaches a
    terminal state.
    """
    if self.frozen:
        raise excp.FrozenMachine()
    if state not in self._states:
        raise excp.NotFound("Can not add a reaction to event '%s' for an"
                            " undefined state '%s'" % (event, state))
    if not six.callable(reaction):
        raise ValueError("Reaction callback must be callable")
    if event not in self._states[state]['reactions']:
        self._states[state]['reactions'][event] = (reaction, args, kwargs)
    else:
        raise excp.Duplicate("State '%s' reaction to event '%s'"
                             " already defined" % (state, event))
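# A hedged sketch of a reaction callback matching the contract described
# in the docstring above: it takes the prior stable state, the new stable
# state and the triggering event, and returns the next event to process.
# The machine object and the state/event names are illustrative:
def _retry_reaction(old_state, new_state, event, *args, **kwargs):
    print("reacting to %s -> %s (via %s)" % (old_state, new_state, event))
    return 'retry'

machine.add_reaction('failed', 'error', _retry_reaction)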
def valid_int(x, cast=None):
    '''Ensure that an input value is integer-typed.
    This is primarily useful for ensuring integer-valued
    array indices.

    Parameters
    ----------
    x : number
        A scalar value to be cast to int

    cast : function [optional]
        A function to modify `x` before casting.
        Default: `np.floor`

    Returns
    -------
    x_int : int
        `x_int = int(cast(x))`

    Raises
    ------
    TypeError
        If `cast` is provided and is not callable.
    '''

    if cast is None:
        cast = np.floor

    if not six.callable(cast):
        raise TypeError('cast parameter must be callable.')

    return int(cast(x))
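# Two quick examples of valid_int()'s casting behavior (these follow
# directly from the implementation above):
print(valid_int(3.7))                # 3, via the default np.floor
print(valid_int(3.2, cast=np.ceil))  # 4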
def default(self, obj):
    '''Converts an object and returns a ``JSON``-friendly structure.

    :param obj: object or structure to be converted into a
                ``JSON``-ifiable structure

    Considers the following special cases in order:

    * object has a callable __json__() attribute defined
        returns the result of the call to __json__()
    * date and datetime objects
        returns the object cast to str
    * Decimal objects
        returns the object cast to float
    * SQLAlchemy objects
        returns a copy of the object.__dict__ with internal SQLAlchemy
        parameters removed
    * SQLAlchemy ResultProxy objects
        Casts the iterable ResultProxy into a list of tuples containing
        the entire resultset data, returns the list in a dictionary
        along with the resultset "row" count.

        .. note:: {'count': 5,
                   'rows': [('Ed Jones',), ('Pete Jones',),
                            ('Wendy Williams',), ('Mary Contrary',),
                            ('Fred Smith',)]}

    * SQLAlchemy RowProxy objects
        Casts the RowProxy cursor object into a dictionary, probably
        losing its ordered dictionary behavior in the process but
        making it JSON-friendly.
    * webob_dicts objects
        returns webob_dicts.mixed() dictionary, which is guaranteed
        to be JSON-friendly.
    '''
    if hasattr(obj, '__json__') and six.callable(obj.__json__):
        return obj.__json__()
    elif isinstance(obj, (date, datetime)):
        return str(obj)
    elif isinstance(obj, Decimal):
        # XXX What to do about JSONEncoder crappy handling of Decimals?
        # SimpleJSON has better Decimal encoding than the std lib
        # but only in recent versions
        return float(obj)
    elif is_saobject(obj):
        props = {}
        for key in obj.__dict__:
            if not key.startswith('_sa_'):
                props[key] = getattr(obj, key)
        return props
    elif isinstance(obj, ResultProxy):
        props = dict(rows=list(obj), count=obj.rowcount)
        if props['count'] < 0:
            props['count'] = len(props['rows'])
        return props
    elif isinstance(obj, RowProxy):
        return dict(obj)
    elif isinstance(obj, webob_dicts):
        return obj.mixed()
    else:
        return JSONEncoder.default(self, obj)
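# A hedged demo of the __json__() hook that default() above checks first.
# The Point class and the encoder subclass name are stand-ins, not from
# the source:
class Point(object):
    def __init__(self, x, y):
        self.x, self.y = x, y

    def __json__(self):
        return {'x': self.x, 'y': self.y}

# With an encoder subclass whose default() is the method above:
# json.dumps(Point(1, 2), cls=GenericJSON) -> '{"x": 1, "y": 2}'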
def expectedFailure(expected_fn, bugnumber=None):
    def expectedFailure_impl(func):
        if isinstance(func, type) and issubclass(func, unittest2.TestCase):
            raise Exception(
                "Decorator can only be used to decorate a test method")

        @wraps(func)
        def wrapper(*args, **kwargs):
            self = args[0]
            if funcutils.requires_self(expected_fn):
                xfail_reason = expected_fn(self)
            else:
                xfail_reason = expected_fn()
            if xfail_reason is not None:
                if configuration.results_formatter_object is not None:
                    # Mark this test as expected to fail.
                    configuration.results_formatter_object.handle_event(
                        EventBuilder.event_for_mark_test_expected_failure(
                            self))
                xfail_func = unittest2.expectedFailure(func)
                xfail_func(*args, **kwargs)
            else:
                func(*args, **kwargs)
        return wrapper
    # Some decorators can be called both with no arguments
    # (e.g. @expectedFailureWindows) or with arguments
    # (e.g. @expectedFailureWindows(compilers=['gcc'])). When called the
    # first way, the first argument will be the actual function because
    # decorators are weird like that. So this is basically a check that
    # says "which syntax was the original function decorated with?"
    if six.callable(bugnumber):
        return expectedFailure_impl(bugnumber)
    else:
        return expectedFailure_impl
def frame(est):
    """
    Arguments:
        est: either an estimator class or an estimator object. The class
            (or class of the object) should subclass
            :py:class:`ibex.sklearn.base.BaseEstimator`.

    Returns:
        If ``est`` is a class, returns a class; if ``est`` is an object,
        returns an object. Note that the result will subclass ``est``
        and :py:class:`ibex.FrameMixin`

    Example:

        >>> from sklearn import linear_model
        >>> from ibex import frame

        We can use ``frame`` to adapt an object:

        >>> prd = frame(linear_model.LinearRegression())
        >>> prd
        Adapter[LinearRegression](copy_X=True, fit_intercept=True, n_jobs=1, normalize=False)

        We can use ``frame`` to adapt a class:

        >>> PDLinearRegression = frame(linear_model.LinearRegression)
        >>> PDLinearRegression()
        Adapter[LinearRegression](copy_X=True, fit_intercept=True, n_jobs=1, normalize=False)

        >>> PDLinearRegression(fit_intercept=False)
        Adapter[LinearRegression](copy_X=True, fit_intercept=False, n_jobs=1, normalize=False)
    """
    from ._base import FrameMixin

    if isinstance(est, FrameMixin):
        return est

    # Tmp Ami - what about feature union?
    if isinstance(est, pipeline.Pipeline):
        return frame(pipeline.Pipeline)(ests=est.ests)

    if not inspect.isclass(est):
        params = est.get_params()
        f = frame(type(est))(**params)
        return f

    _Adapter = make_adapter(est)

    update_class_wrapper(_Adapter, est)

    _Adapter.__name__ = est.__name__

    for name, func in vars(_Adapter).items():
        if name.startswith('_'):
            continue

        parfunc = getattr(est, name, None)
        if parfunc and getattr(parfunc, '__doc__', None):
            func.__doc__ = parfunc.__doc__

    wrapped = [
        'fit_transform',
        'predict_proba',
        'sample_y',
        'score_samples',
        'score',
        'staged_predict_proba',
        'apply',
        'bic',
        'perplexity',
        'fit',
        'decision_function',
        'aic',
        'partial_fit',
        'predict',
        'radius_neighbors',
        'staged_decision_function',
        'staged_predict',
        'inverse_transform',
        'fit_predict',
        'kneighbors',
        'predict_log_proba',
        'transform',
    ]
    for wrap in wrapped:
        if not hasattr(est, wrap) and hasattr(_Adapter, wrap):
            delattr(_Adapter, wrap)
        elif six.callable(getattr(_Adapter, wrap)):
            try:
                update_method_wrapper(_Adapter, est, wrap)
            except AttributeError:
                pass

    return _Adapter
def istft(stft_matrix, hop_length=None, win_length=None, window=None,
          center=True, dtype=np.float32):
    """
    Inverse short-time Fourier transform (ISTFT).

    Converts a complex-valued spectrogram `stft_matrix` to time-series `y`
    by minimizing the mean squared error between `stft_matrix` and STFT of
    `y` as described in [1]_.

    In general, window function, hop length and other parameters should be
    same as in stft, which mostly leads to perfect reconstruction of a
    signal from unmodified `stft_matrix`.

    .. [1] D. W. Griffin and J. S. Lim,
        "Signal estimation from modified short-time Fourier transform,"
        IEEE Trans. ASSP, vol.32, no.2, pp.236-243, Apr. 1984.

    Parameters
    ----------
    stft_matrix : np.ndarray [shape=(1 + n_fft/2, t)]
        STFT matrix from `stft`

    hop_length : int > 0 [scalar]
        Number of frames between STFT columns.
        If unspecified, defaults to `win_length / 4`.

    win_length : int <= n_fft = 2 * (stft_matrix.shape[0] - 1)
        When reconstructing the time series, each frame is windowed
        and each sample is normalized by the sum of squared window
        according to the `window` function (see below).

        If unspecified, defaults to `n_fft`.

    window : None, function, np.ndarray [shape=(n_fft,)]
        - None (default): use an asymmetric Hann window
        - a window function, such as `scipy.signal.hanning`
        - a user-specified window vector of length `n_fft`

    center : boolean
        - If `True`, `D` is assumed to have centered frames.
        - If `False`, `D` is assumed to have left-aligned frames.

    dtype : numeric type
        Real numeric type for `y`.  Default is 32-bit float.

    Returns
    -------
    y : np.ndarray [shape=(n,)]
        time domain signal reconstructed from `stft_matrix`

    Raises
    ------
    ParameterError
        If `window` is supplied as a vector of length != `n_fft`

    See Also
    --------
    stft : Short-time Fourier Transform

    Examples
    --------
    >>> y, sr = librosa.load(librosa.util.example_audio_file())
    >>> D = librosa.stft(y)
    >>> y_hat = librosa.istft(D)
    >>> y_hat
    array([ -4.812e-06,  -4.267e-06, ...,   6.271e-06,   2.827e-07], dtype=float32)
    """

    n_fft = 2 * (stft_matrix.shape[0] - 1)

    # By default, use the entire frame
    if win_length is None:
        win_length = n_fft

    # Set the default hop, if it's not already specified
    if hop_length is None:
        hop_length = int(win_length / 4)

    if window is None:
        # Default is an asymmetric Hann window.
        ifft_window = scipy.signal.hann(win_length, sym=False)

    elif six.callable(window):
        # User supplied a windowing function
        ifft_window = window(win_length)

    else:
        # User supplied a window vector.
        # Make it into an array
        ifft_window = np.asarray(window)

        # Verify that the shape matches
        if ifft_window.size != n_fft:
            raise ParameterError('Size mismatch between n_fft and window size')

    # Pad out to match n_fft
    ifft_window = util.pad_center(ifft_window, n_fft)

    n_frames = stft_matrix.shape[1]
    expected_signal_len = n_fft + hop_length * (n_frames - 1)
    y = np.zeros(expected_signal_len, dtype=dtype)
    ifft_window_sum = np.zeros(expected_signal_len, dtype=dtype)
    ifft_window_square = ifft_window * ifft_window

    for i in range(n_frames):
        sample = i * hop_length
        spec = stft_matrix[:, i].flatten()
        spec = np.concatenate((spec.conj(), spec[-2:0:-1]), 0)
        ytmp = ifft_window * fft.ifft(spec).real

        y[sample:(sample + n_fft)] = y[sample:(sample + n_fft)] + ytmp
        ifft_window_sum[sample:(sample + n_fft)] += ifft_window_square

    # Normalize by sum of squared window
    approx_nonzero_indices = ifft_window_sum > util.SMALL_FLOAT
    y[approx_nonzero_indices] /= ifft_window_sum[approx_nonzero_indices]

    if center:
        y = y[int(n_fft // 2):-int(n_fft // 2)]

    return y
def logamplitude(S, ref_power=1.0, amin=1e-10, top_db=80.0):
    """Log-scale the amplitude of a spectrogram.

    Parameters
    ----------
    S : np.ndarray [shape=(d, t)]
        input spectrogram

    ref_power : scalar or function
        If scalar, `log(abs(S))` is compared to `log(ref_power)`.

        If a function, `log(abs(S))` is compared to
        `log(ref_power(abs(S)))`.

        This is primarily useful for comparing to the maximum value of `S`.

    amin : float > 0 [scalar]
        minimum amplitude threshold for `abs(S)` and `ref_power`

    top_db : float >= 0 [scalar]
        threshold log amplitude at top_db below the peak:
        ``max(log(S)) - top_db``

    Returns
    -------
    log_S : np.ndarray [shape=(d, t)]
        ``log_S ~= 10 * log10(S) - 10 * log10(abs(ref_power))``

    See Also
    --------
    perceptual_weighting

    Examples
    --------
    Get a power spectrogram from a waveform ``y``

    >>> y, sr = librosa.load(librosa.util.example_audio_file())
    >>> S = np.abs(librosa.stft(y))
    >>> librosa.logamplitude(S**2)
    array([[-33.293, -27.32 , ..., -33.293, -33.293],
           [-33.293, -25.723, ..., -33.293, -33.293],
           ...,
           [-33.293, -33.293, ..., -33.293, -33.293],
           [-33.293, -33.293, ..., -33.293, -33.293]], dtype=float32)

    Compute dB relative to peak power

    >>> librosa.logamplitude(S**2, ref_power=np.max)
    array([[-80.   , -74.027, ..., -80.   , -80.   ],
           [-80.   , -72.431, ..., -80.   , -80.   ],
           ...,
           [-80.   , -80.   , ..., -80.   , -80.   ],
           [-80.   , -80.   , ..., -80.   , -80.   ]], dtype=float32)

    Or compare to median power

    >>> librosa.logamplitude(S**2, ref_power=np.median)
    array([[-0.189,  5.784, ..., -0.189, -0.189],
           [-0.189,  7.381, ..., -0.189, -0.189],
           ...,
           [-0.189, -0.189, ..., -0.189, -0.189],
           [-0.189, -0.189, ..., -0.189, -0.189]], dtype=float32)

    And plot the results

    >>> import matplotlib.pyplot as plt
    >>> plt.figure()
    >>> plt.subplot(2, 1, 1)
    >>> librosa.display.specshow(S**2, sr=sr, y_axis='log', x_axis='time')
    >>> plt.colorbar()
    >>> plt.title('Power spectrogram')
    >>> plt.subplot(2, 1, 2)
    >>> librosa.display.specshow(librosa.logamplitude(S**2, ref_power=np.max),
    ...                          sr=sr, y_axis='log', x_axis='time')
    >>> plt.colorbar(format='%+2.0f dB')
    >>> plt.title('Log-Power spectrogram')
    >>> plt.tight_layout()
    """

    if amin <= 0:
        raise ParameterError('amin must be strictly positive')

    magnitude = np.abs(S)

    if six.callable(ref_power):
        # User supplied a function to calculate reference power
        __ref = ref_power(magnitude)
    else:
        __ref = np.abs(ref_power)

    log_spec = 10.0 * np.log10(np.maximum(amin, magnitude))
    log_spec -= 10.0 * np.log10(np.maximum(amin, __ref))

    if top_db is not None:
        if top_db < 0:
            raise ParameterError('top_db must be non-negative')
        log_spec = np.maximum(log_spec, log_spec.max() - top_db)

    return log_spec
def sinc_window(num_zeros=64, precision=9, window=None, rolloff=0.945):
    '''Construct a windowed sinc interpolation filter

    Parameters
    ----------
    num_zeros : int > 0
        The number of zero-crossings to retain in the sinc filter

    precision : int > 0
        The number of filter coefficients to retain for each zero-crossing

    window : callable
        The window function.  By default, uses Blackman-Harris.

    rolloff : float > 0
        The roll-off frequency (as a fraction of nyquist)

    Returns
    -------
    interp_window: np.ndarray [shape=(num_zeros * num_table + 1)]
        The interpolation window (right-hand side)

    num_bits: int
        The number of bits of precision to use in the filter table

    rolloff : float > 0
        The roll-off frequency of the filter, as a fraction of Nyquist

    Raises
    ------
    TypeError
        if `window` is not callable or `None`
    ValueError
        if `num_zeros < 1`, `precision < 1`,
        or `rolloff` is outside the range `(0, 1]`.

    Examples
    --------
    >>> # A filter with 10 zero-crossings, 32 samples per crossing, and a
    ... # Hann window for tapering.
    >>> halfwin, prec, rolloff = resampy.filters.sinc_window(num_zeros=10, precision=5,
    ...                                                      window=scipy.signal.hann)
    >>> halfwin
    array([  9.450e-01,   9.436e-01, ...,  -7.455e-07,  -0.000e+00])
    >>> prec
    32
    >>> rolloff
    0.945

    >>> # Or using sinc-window filter construction directly in resample
    >>> y = resampy.resample(x, sr_orig, sr_new, filter='sinc_window',
    ...                      num_zeros=10, precision=5,
    ...                      window=scipy.signal.hann)
    '''

    if window is None:
        window = _blackman_harris_window
    elif not six.callable(window):
        raise TypeError('window must be callable, not type(window)={}'.format(
            type(window)))

    if not 0 < rolloff <= 1:
        raise ValueError('Invalid roll-off: rolloff={}'.format(rolloff))

    if num_zeros < 1:
        raise ValueError('Invalid num_zeros: num_zeros={}'.format(num_zeros))

    if precision < 1:
        # Match the documented contract: precision must be a positive int.
        raise ValueError('Invalid precision: precision={}'.format(precision))

    # Generate the right-wing of the sinc
    num_bits = 2**precision
    n = num_bits * num_zeros
    sinc_win = rolloff * np.sinc(
        rolloff * np.linspace(0, num_zeros, num=n + 1, endpoint=True))

    # Build the window function and cut off the left half
    taper = window(2 * n + 1)[n:]

    interp_win = (taper * sinc_win)

    return interp_win, num_bits, rolloff
def get_simple_name(component):
    if six.callable(component):
        return component.__name__
    return str(component)
def default(self, obj):  # pylint: disable=method-hidden
    if hasattr(obj, '__json__') and six.callable(obj.__json__):
        return obj.__json__()
    else:
        return JSONEncoder.default(self, obj)
def on(self, name, callback):
    assert six.callable(callback), 'callback is not callable.'
    if callback in self.__listeners[name]:
        raise DuplicateListenerError()
    self.__listeners[name].append(callback)
def get_name(component):
    if six.callable(component):
        name = getattr(component, "__qualname__", component.__name__)
        return '.'.join([component.__module__, name])
    return str(component)
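# A small demonstration of the two naming helpers above:
import os.path

print(get_simple_name(os.path.join))  # 'join'
print(get_name(os.path.join))         # e.g. 'posixpath.join' on POSIX
print(get_name(42))                   # '42'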
def fn(self):
    skip_for_os = _match_decorator_property(
        lldbplatform.translate(oslist), self.getPlatform())
    skip_for_hostos = _match_decorator_property(
        lldbplatform.translate(hostoslist),
        lldbplatformutil.getHostPlatform())
    skip_for_compiler = _match_decorator_property(
        compiler, self.getCompiler()) and \
        self.expectedCompilerVersion(compiler_version)
    skip_for_arch = _match_decorator_property(
        archs, self.getArchitecture())
    skip_for_debug_info = _match_decorator_property(
        debug_info, self.getDebugInfo())
    skip_for_triple = _match_decorator_property(
        triple, lldb.DBG.GetSelectedPlatform().GetTriple())
    skip_for_remote = _match_decorator_property(
        remote, lldb.remote_platform is not None)

    skip_for_swig_version = (
        (swig_version is None) or
        (not hasattr(lldb, 'swig_version')) or
        (_check_expected_version(
            swig_version[0], swig_version[1], lldb.swig_version)))

    skip_for_py_version = (
        (py_version is None) or _check_expected_version(
            py_version[0], py_version[1], sys.version_info))

    skip_for_macos_version = (macos_version is None) or (
        _check_expected_version(
            macos_version[0], macos_version[1], platform.mac_ver()[0]))

    # For the test to be skipped, all specified (e.g. not None) parameters
    # must be True. An unspecified parameter means "any", so those are
    # marked skip by default. And we skip the final test if all conditions
    # are True.
    conditions = [(oslist, skip_for_os, "target o/s"),
                  (hostoslist, skip_for_hostos, "host o/s"),
                  (compiler, skip_for_compiler, "compiler or version"),
                  (archs, skip_for_arch, "architecture"),
                  (debug_info, skip_for_debug_info, "debug info format"),
                  (triple, skip_for_triple, "target triple"),
                  (swig_version, skip_for_swig_version, "swig version"),
                  (py_version, skip_for_py_version, "python version"),
                  (macos_version, skip_for_macos_version, "macOS version"),
                  (remote, skip_for_remote,
                   "platform locality (remote/local)")]
    reasons = []
    final_skip_result = True
    for this_condition in conditions:
        final_skip_result = final_skip_result and this_condition[1]
        if this_condition[0] is not None and this_condition[1]:
            reasons.append(this_condition[2])
    reason_str = None
    if final_skip_result:
        mode_str = {
            DecorateMode.Skip: "skipping",
            DecorateMode.Xfail: "xfailing"}[mode]
        if len(reasons) > 0:
            reason_str = ",".join(reasons)
            reason_str = "{} due to the following parameter(s): {}".format(
                mode_str, reason_str)
        else:
            reason_str = "{} unconditionally".format(mode_str)
        if bugnumber is not None and not six.callable(bugnumber):
            reason_str = reason_str + " [" + str(bugnumber) + "]"
    return reason_str
def _log_info(obj, eng):
    message_buffer = message
    while callable(message_buffer):
        message_buffer = message_buffer(obj, eng)
    eng.log.info(message_buffer)
def ifgram(y, sr=22050, n_fft=2048, hop_length=None, win_length=None,
           window='hann', norm=False, center=True, ref_power=1e-6,
           clip=True, dtype=np.complex64, pad_mode='reflect'):
    '''Compute the instantaneous frequency (as a proportion of the sampling
    rate) obtained as the time-derivative of the phase of the complex
    spectrum as described by [1]_.

    Calculates regular STFT as a side effect.

    .. [1] Abe, Toshihiko, Takao Kobayashi, and Satoshi Imai.
        "Harmonics tracking and pitch extraction based on instantaneous
        frequency."
        International Conference on Acoustics, Speech, and Signal
        Processing, ICASSP-95., Vol. 1. IEEE, 1995.

    Parameters
    ----------
    y : np.ndarray [shape=(n,)]
        audio time series

    sr : number > 0 [scalar]
        sampling rate of `y`

    n_fft : int > 0 [scalar]
        FFT window size

    hop_length : int > 0 [scalar]
        hop length, number samples between subsequent frames.
        If not supplied, defaults to `win_length / 4`.

    win_length : int > 0, <= n_fft
        Window length. Defaults to `n_fft`.
        See `stft` for details.

    window : string, tuple, number, function, or np.ndarray [shape=(n_fft,)]
        - a window specification (string, tuple, number);
          see `scipy.signal.get_window`
        - a window function, such as `scipy.signal.hanning`
        - a user-specified window vector of length `n_fft`

        See `stft` for details.

        .. see also:: `filters.get_window`

    norm : bool
        Normalize the STFT.

    center : boolean
        - If `True`, the signal `y` is padded so that frame
          `D[:, t]` (and `if_gram`) is centered at `y[t * hop_length]`.
        - If `False`, then `D[:, t]` begins at `y[t * hop_length]`

    ref_power : float >= 0 or callable
        Minimum power threshold for estimating instantaneous frequency.
        Any bin with `np.abs(D[f, t])**2 < ref_power` will receive the
        default frequency estimate.

        If callable, the threshold is set to `ref_power(np.abs(D)**2)`.

    clip : boolean
        - If `True`, clip estimated frequencies to the range
          `[0, 0.5 * sr]`.
        - If `False`, estimated frequencies can be negative or exceed
          `0.5 * sr`.

    dtype : numeric type
        Complex numeric type for `D`.  Default is 64-bit complex.

    pad_mode : string
        If `center=True`, the padding mode to use at the edges of the
        signal.  By default, STFT uses reflection padding.

    Returns
    -------
    if_gram : np.ndarray [shape=(1 + n_fft/2, t), dtype=real]
        Instantaneous frequency spectrogram:
        `if_gram[f, t]` is the frequency at bin `f`, time `t`

    D : np.ndarray [shape=(1 + n_fft/2, t), dtype=complex]
        Short-time Fourier transform

    See Also
    --------
    stft : Short-time Fourier Transform

    Examples
    --------
    >>> y, sr = librosa.load(librosa.util.example_audio_file())
    >>> frequencies, D = librosa.ifgram(y, sr=sr)
    >>> frequencies
    array([[  0.000e+00,   0.000e+00, ...,   0.000e+00,   0.000e+00],
           [  3.150e+01,   3.070e+01, ...,   1.077e+01,   1.077e+01],
           ...,
           [  1.101e+04,   1.101e+04, ...,   1.101e+04,   1.101e+04],
           [  1.102e+04,   1.102e+04, ...,   1.102e+04,   1.102e+04]])
    '''

    if win_length is None:
        win_length = n_fft

    if hop_length is None:
        hop_length = int(win_length // 4)

    # Construct a padded hann window
    fft_window = util.pad_center(
        get_window(window, win_length, fftbins=True), n_fft)

    # Window for discrete differentiation
    freq_angular = np.linspace(0, 2 * np.pi, n_fft, endpoint=False)

    d_window = np.sin(-freq_angular) * np.pi / n_fft

    stft_matrix = stft(y, n_fft=n_fft, hop_length=hop_length,
                       win_length=win_length, window=window,
                       center=center, dtype=dtype, pad_mode=pad_mode)

    diff_stft = stft(y, n_fft=n_fft, hop_length=hop_length,
                     window=d_window, center=center,
                     dtype=dtype, pad_mode=pad_mode).conj()

    # Compute power normalization. Suppress zeros.
    mag, phase = magphase(stft_matrix)

    if six.callable(ref_power):
        ref_power = ref_power(mag**2)
    elif ref_power < 0:
        raise ParameterError('ref_power must be non-negative or callable.')

    # Pylint does not correctly infer the type here, but it's correct.
    # pylint: disable=maybe-no-member
    freq_angular = freq_angular.reshape((-1, 1))
    bin_offset = (-phase * diff_stft).imag / mag

    bin_offset[mag < ref_power**0.5] = 0

    if_gram = freq_angular[:n_fft // 2 + 1] + bin_offset

    if norm:
        stft_matrix = stft_matrix * 2.0 / fft_window.sum()

    if clip:
        np.clip(if_gram, 0, np.pi, out=if_gram)

    if_gram *= float(sr) * 0.5 / np.pi

    return if_gram, stft_matrix
def istft(stft_matrix, hop_length=None, win_length=None, window=None,
          center=True, dtype=np.float32):
    """
    Inverse short-time Fourier transform.

    Converts a complex-valued spectrogram `stft_matrix` to time-series `y`.

    Parameters
    ----------
    stft_matrix : np.ndarray [shape=(1 + n_fft/2, t)]
        STFT matrix from `stft`

    hop_length : int > 0 [scalar]
        Number of frames between STFT columns.
        If unspecified, defaults to `win_length / 4`.

    win_length : int <= n_fft = 2 * (stft_matrix.shape[0] - 1)
        When reconstructing the time series, each frame is windowed
        according to the `window` function (see below).

        If unspecified, defaults to `n_fft`.

    window : None, function, np.ndarray [shape=(n_fft,)]
        - None (default): use an asymmetric Hann window * 2/3
        - a window function, such as `scipy.signal.hanning`
        - a user-specified window vector of length `n_fft`

    center : boolean
        - If `True`, `D` is assumed to have centered frames.
        - If `False`, `D` is assumed to have left-aligned frames.

    dtype : numeric type
        Real numeric type for `y`.  Default is 32-bit float.

    Returns
    -------
    y : np.ndarray [shape=(n,)]
        time domain signal reconstructed from `stft_matrix`

    Raises
    ------
    ParameterError
        If `window` is supplied as a vector of length != `n_fft`

    See Also
    --------
    stft : Short-time Fourier Transform

    Examples
    --------
    >>> y, sr = librosa.load(librosa.util.example_audio_file())
    >>> D = librosa.stft(y)
    >>> y_hat = librosa.istft(D)
    >>> y_hat
    array([ -4.812e-06,  -4.267e-06, ...,   6.271e-06,   2.827e-07], dtype=float32)
    """

    n_fft = 2 * (stft_matrix.shape[0] - 1)

    # By default, use the entire frame
    if win_length is None:
        win_length = n_fft

    # Set the default hop, if it's not already specified
    if hop_length is None:
        hop_length = int(win_length / 4)

    if window is None:
        # Default is an asymmetric Hann window.
        # 2/3 scaling is to make stft(istft(.)) identity for 25% hop
        ifft_window = scipy.signal.hann(win_length, sym=False) * (2.0 / 3)

    elif six.callable(window):
        # User supplied a windowing function
        ifft_window = window(win_length)

    else:
        # User supplied a window vector.
        # Make it into an array
        ifft_window = np.asarray(window)

        # Verify that the shape matches
        if ifft_window.size != n_fft:
            raise ParameterError('Size mismatch between n_fft and window size')

    # Pad out to match n_fft
    ifft_window = pad_center(ifft_window, n_fft)

    n_frames = stft_matrix.shape[1]
    y = np.zeros(n_fft + hop_length * (n_frames - 1), dtype=dtype)

    for i in range(n_frames):
        sample = i * hop_length
        spec = stft_matrix[:, i].flatten()
        spec = np.concatenate((spec.conj(), spec[-2:0:-1]), 0)
        ytmp = ifft_window * fft.ifft(spec).real

        y[sample:(sample + n_fft)] = y[sample:(sample + n_fft)] + ytmp

    if center:
        y = y[int(n_fft // 2):-int(n_fft // 2)]

    return y
def _actionformat(self, action):
    if six.callable(action):
        return self.sublist_template.format(action.__name__, "function")
    else:
        return self.sublist_template.format(action, "command")
def stft(y, n_fft=2048, hop_length=None, win_length=None, window=None,
         center=True, dtype=np.complex64):
    """Short-time Fourier transform (STFT)

    Returns a complex-valued matrix D such that
        `np.abs(D[f, t])` is the magnitude of frequency bin `f`
        at frame `t`

        `np.angle(D[f, t])` is the phase of frequency bin `f`
        at frame `t`

    Parameters
    ----------
    y : np.ndarray [shape=(n,)], real-valued
        the input signal (audio time series)

    n_fft : int > 0 [scalar]
        FFT window size

    hop_length : int > 0 [scalar]
        number of audio frames between STFT columns.
        If unspecified, defaults to `win_length / 4`.

    win_length  : int <= n_fft [scalar]
        Each frame of audio is windowed by `window()`.
        The window will be of length `win_length` and then padded
        with zeros to match `n_fft`.

        If unspecified, defaults to ``win_length = n_fft``.

    window : None, function, np.ndarray [shape=(n_fft,)]
        - None (default): use an asymmetric Hann window
        - a window function, such as `scipy.signal.hanning`
        - a vector or array of length `n_fft`

    center : boolean
        - If `True`, the signal `y` is padded so that frame
          `D[:, t]` is centered at `y[t * hop_length]`.
        - If `False`, then `D[:, t]` begins at `y[t * hop_length]`

    dtype : numeric type
        Complex numeric type for `D`.  Default is 64-bit complex.

    Returns
    -------
    D : np.ndarray [shape=(1 + n_fft/2, t), dtype=dtype]
        STFT matrix

    Raises
    ------
    ParameterError
        If `window` is supplied as a vector of length != `n_fft`.

    See Also
    --------
    istft : Inverse STFT

    ifgram : Instantaneous frequency spectrogram

    Examples
    --------

    >>> y, sr = librosa.load(librosa.util.example_audio_file())
    >>> D = librosa.stft(y)
    >>> D
    array([[  2.576e-03 -0.000e+00j,   4.327e-02 -0.000e+00j, ...,
              3.189e-04 -0.000e+00j,  -5.961e-06 -0.000e+00j],
           [  2.441e-03 +2.884e-19j,   5.145e-02 -5.076e-03j, ...,
             -3.885e-04 -7.253e-05j,   7.334e-05 +3.868e-04j],
           ...,
           [ -7.120e-06 -1.029e-19j,  -1.951e-09 -3.568e-06j, ...,
             -4.912e-07 -1.487e-07j,   4.438e-06 -1.448e-05j],
           [  7.136e-06 -0.000e+00j,   3.561e-06 -0.000e+00j, ...,
             -5.144e-07 -0.000e+00j,  -1.514e-05 -0.000e+00j]],
          dtype=complex64)

    Use left-aligned frames, instead of centered frames

    >>> D_left = librosa.stft(y, center=False)

    Use a shorter hop length

    >>> D_short = librosa.stft(y, hop_length=64)

    Display a spectrogram

    >>> import matplotlib.pyplot as plt
    >>> librosa.display.specshow(librosa.logamplitude(np.abs(D)**2,
    ...                                               ref_power=np.max),
    ...                          y_axis='log', x_axis='time')
    >>> plt.title('Power spectrogram')
    >>> plt.colorbar(format='%+2.0f dB')
    >>> plt.tight_layout()
    """

    # By default, use the entire frame
    if win_length is None:
        win_length = n_fft

    # Set the default hop, if it's not already specified
    if hop_length is None:
        hop_length = int(win_length / 4)

    if window is None:
        # Default is an asymmetric Hann window
        fft_window = scipy.signal.hann(win_length, sym=False)

    elif six.callable(window):
        # User supplied a window function
        fft_window = window(win_length)

    else:
        # User supplied a window vector.
        # Make sure it's an array:
        fft_window = np.asarray(window)

        # validate length compatibility
        if fft_window.size != n_fft:
            raise ParameterError('Size mismatch between n_fft and len(window)')

    # Pad the window out to n_fft size
    fft_window = pad_center(fft_window, n_fft)

    # Reshape so that the window can be broadcast
    fft_window = fft_window.reshape((-1, 1))

    # Pad the time series so that frames are centered
    if center:
        valid_audio(y)
        y = np.pad(y, int(n_fft // 2), mode='reflect')

    # Window the time series.
    y_frames = frame(y, frame_length=n_fft, hop_length=hop_length)

    # Pre-allocate the STFT matrix
    stft_matrix = np.empty((int(1 + n_fft // 2), y_frames.shape[1]),
                           dtype=dtype,
                           order='F')

    # how many columns can we fit within MAX_MEM_BLOCK?
    n_columns = int(MAX_MEM_BLOCK / (stft_matrix.shape[0]
                                     * stft_matrix.itemsize))

    for bl_s in range(0, stft_matrix.shape[1], n_columns):
        bl_t = min(bl_s + n_columns, stft_matrix.shape[1])

        # RFFT and Conjugate here to match phase from DPWE code
        stft_matrix[:, bl_s:bl_t] = fft.fft(
            fft_window * y_frames[:, bl_s:bl_t],
            axis=0)[:stft_matrix.shape[0]].conj()

    return stft_matrix
def istft_noDiv(stft_matrix, hop_length=None, win_length=None, window=None,
                center=True, dtype=np.float32):
    """Inverse short-time Fourier transform (ISTFT).

    Copied from librosa's spectrum.py file, with the division by the squared
    window removed; the author argues it should not be necessary and can
    cause problems in reconstruction.

    Converts a complex-valued spectrogram `stft_matrix` to time-series `y`
    by minimizing the mean squared error between `stft_matrix` and STFT of
    `y` as described in [1]_.

    In general, window function, hop length and other parameters should be
    the same as in stft, which mostly leads to perfect reconstruction of a
    signal from unmodified `stft_matrix`.

    .. [1] D. W. Griffin and J. S. Lim,
        "Signal estimation from modified short-time Fourier transform,"
        IEEE Trans. ASSP, vol.32, no.2, pp.236-243, Apr. 1984.

    Parameters
    ----------
    stft_matrix : np.ndarray [shape=(1 + n_fft/2, t)]
        STFT matrix from `stft`

    hop_length : int > 0 [scalar]
        Number of audio samples between STFT columns.
        If unspecified, defaults to `win_length / 4`.

    win_length : int <= n_fft = 2 * (stft_matrix.shape[0] - 1)
        When reconstructing the time series, each frame is windowed
        according to the `window` function (see below).  Unlike librosa's
        istft, no per-sample normalization by the sum of squared windows
        is applied.

        If unspecified, defaults to `n_fft`.

    window : None, function, np.ndarray [shape=(n_fft,)]
        - None (default): use an asymmetric Hann window
        - a window function, such as `scipy.signal.hanning`
        - a user-specified window vector of length `n_fft`

    center : boolean
        - If `True`, `D` is assumed to have centered frames.
        - If `False`, `D` is assumed to have left-aligned frames.

    dtype : numeric type
        Real numeric type for `y`.  Default is 32-bit float.

    Returns
    -------
    y : np.ndarray [shape=(n,)]
        time domain signal reconstructed from `stft_matrix`

    Raises
    ------
    ParameterError
        If `window` is supplied as a vector of length != `n_fft`

    See Also
    --------
    stft : Short-time Fourier Transform

    Examples
    --------
    >>> y, sr = librosa.load(librosa.util.example_audio_file())
    >>> D = librosa.stft(y)
    >>> y_hat = librosa.istft(D)
    >>> y_hat
    array([ -4.812e-06,  -4.267e-06, ...,   6.271e-06,   2.827e-07],
          dtype=float32)

    Exactly preserving length of the input signal requires explicit padding.
    Otherwise, a partial frame at the end of `y` will not be represented.

    >>> n = len(y)
    >>> n_fft = 2048
    >>> y_pad = librosa.util.fix_length(y, n + n_fft // 2)
    >>> D = librosa.stft(y_pad, n_fft=n_fft)
    >>> y_out = librosa.util.fix_length(librosa.istft(D), n)
    >>> np.max(np.abs(y - y_out))
    1.4901161e-07
    """

    n_fft = 2 * (stft_matrix.shape[0] - 1)

    # By default, use the entire frame
    if win_length is None:
        win_length = n_fft

    # Set the default hop, if it's not already specified
    if hop_length is None:
        hop_length = int(win_length / 4)

    if window is None:
        # Default is an asymmetric Hann window.
        ifft_window = scipy.signal.hann(win_length, sym=False)

    elif six.callable(window):
        # User supplied a windowing function
        ifft_window = window(win_length)

    else:
        # User supplied a window vector.
        # Make it into an array
        ifft_window = np.asarray(window)

        # Verify that the shape matches
        if ifft_window.size != n_fft:
            raise ParameterError('Size mismatch between n_fft and window size')

    # Pad out to match n_fft
    ifft_window = util.pad_center(ifft_window, n_fft)

    # scale the window
    ifft_window = ifft_window * (2.0 / (win_length / hop_length))

    n_frames = stft_matrix.shape[1]
    expected_signal_len = n_fft + hop_length * (n_frames - 1)
    y = np.zeros(expected_signal_len, dtype=dtype)
    ifft_window_sum = np.zeros(expected_signal_len, dtype=dtype)
    ifft_window_square = ifft_window * ifft_window

    for i in range(n_frames):
        sample = i * hop_length
        spec = stft_matrix[:, i].flatten()
        spec = np.concatenate((spec.conj(), spec[-2:0:-1]), 0)
        ytmp = ifft_window * fft.ifft(spec).real

        y[sample:(sample + n_fft)] = y[sample:(sample + n_fft)] + ytmp
        # shouldn't need to do this sum of the squared window:
        # ifft_window_sum[sample:(sample + n_fft)] += ifft_window_square

    # don't do this:
    # # Normalize by sum of squared window
    # approx_nonzero_indices = ifft_window_sum > util.SMALL_FLOAT
    # y[approx_nonzero_indices] /= ifft_window_sum[approx_nonzero_indices]

    if center:
        y = y[int(n_fft // 2):-int(n_fft // 2)]

    return y
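# Round-trip sketch (hedged): for the default Hann window at
# hop = n_fft // 4, the 2.0 / (win_length / hop_length) scaling above makes
# the overlap-added squared window constant in the interior, so an
# unmodified spectrogram reconstructs the input up to a constant factor
# with no per-sample division.  Here we only verify the length bookkeeping:
# expected_signal_len = n_fft + hop * (n_frames - 1), with n_fft // 2
# trimmed from each side when center=True.  Assumes the stft() defined
# above is in scope.
import numpy as np

demo_y = np.random.RandomState(0).randn(22050).astype(np.float32)
D = stft(demo_y, n_fft=2048, hop_length=512)
y_hat = istft_noDiv(D, hop_length=512)
assert len(y_hat) == 512 * (D.shape[1] - 1)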
def skip_if_callable(test, mycallable, reason):
    """Skip `test` with `reason` if `mycallable(test)` is truthy.

    Returns False when no skip was requested.
    """
    if six.callable(mycallable):
        if mycallable(test):
            test.skipTest(reason)
            return True
    return False
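# Usage sketch (hedged): inside a unittest.TestCase, skipTest() raises
# unittest.SkipTest, so when the predicate is truthy execution never reaches
# the code after the call.  DemoTest is a hypothetical example.
import unittest

class DemoTest(unittest.TestCase):
    slow = True

    def test_fast_only(self):
        skip_if_callable(self, lambda t: getattr(t, 'slow', False),
                         'slow tests disabled')
        self.fail('not reached when the predicate returns True')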
def zero_crossings(y, threshold=1e-10, ref_magnitude=None, pad=True,
                   zero_pos=True, axis=-1):
    '''Find the zero-crossings of a signal `y`: indices `i` such that
    `sign(y[i]) != sign(y[i-1])`.

    If `y` is multi-dimensional, then zero-crossings are computed along
    the specified `axis`.

    Parameters
    ----------
    y : np.ndarray
        The input array

    threshold : float > 0 or None
        If specified, values where `-threshold <= y <= threshold` are
        clipped to 0.

    ref_magnitude : float > 0 or callable
        If numeric, the threshold is scaled relative to `ref_magnitude`.

        If callable, the threshold is scaled relative to
        `ref_magnitude(np.abs(y))`.

    pad : boolean
        If `True`, then `y[0]` is considered a valid zero-crossing.

    zero_pos : boolean
        If `True` then the value 0 is interpreted as having positive sign.

        If `False`, then 0, -1, and +1 all have distinct signs.

    axis : int
        Axis along which to compute zero-crossings.

    Returns
    -------
    zero_crossings : np.ndarray [shape=y.shape, dtype=boolean]
        Indicator array of zero-crossings in `y` along the selected axis.

    Notes
    -----
    This function caches at level 20.

    Examples
    --------
    >>> # Generate a time-series
    >>> y = np.sin(np.linspace(0, 4 * 2 * np.pi, 20))
    >>> y
    array([  0.000e+00,   9.694e-01,   4.759e-01,  -7.357e-01,
            -8.372e-01,   3.247e-01,   9.966e-01,   1.646e-01,
            -9.158e-01,  -6.142e-01,   6.142e-01,   9.158e-01,
            -1.646e-01,  -9.966e-01,  -3.247e-01,   8.372e-01,
             7.357e-01,  -4.759e-01,  -9.694e-01,  -9.797e-16])
    >>> # Compute zero-crossings
    >>> z = librosa.zero_crossings(y)
    >>> z
    array([ True, False, False,  True, False,  True, False, False,
            True, False,  True, False,  True, False, False,  True,
           False,  True, False,  True], dtype=bool)
    >>> # Stack y against the zero-crossing indicator
    >>> np.vstack([y, z]).T
    array([[  0.000e+00,   1.000e+00],
           [  9.694e-01,   0.000e+00],
           [  4.759e-01,   0.000e+00],
           [ -7.357e-01,   1.000e+00],
           [ -8.372e-01,   0.000e+00],
           [  3.247e-01,   1.000e+00],
           [  9.966e-01,   0.000e+00],
           [  1.646e-01,   0.000e+00],
           [ -9.158e-01,   1.000e+00],
           [ -6.142e-01,   0.000e+00],
           [  6.142e-01,   1.000e+00],
           [  9.158e-01,   0.000e+00],
           [ -1.646e-01,   1.000e+00],
           [ -9.966e-01,   0.000e+00],
           [ -3.247e-01,   0.000e+00],
           [  8.372e-01,   1.000e+00],
           [  7.357e-01,   0.000e+00],
           [ -4.759e-01,   1.000e+00],
           [ -9.694e-01,   0.000e+00],
           [ -9.797e-16,   1.000e+00]])
    >>> # Find the indices of zero-crossings
    >>> np.nonzero(z)
    (array([ 0,  3,  5,  8, 10, 12, 15, 17, 19]),)
    '''

    # Clip within the threshold
    if threshold is None:
        threshold = 0.0

    if six.callable(ref_magnitude):
        threshold = threshold * ref_magnitude(np.abs(y))
    elif ref_magnitude is not None:
        threshold = threshold * ref_magnitude

    if threshold > 0:
        y = y.copy()
        y[np.abs(y) <= threshold] = 0

    # Extract the sign bit
    if zero_pos:
        y_sign = np.signbit(y)
    else:
        y_sign = np.sign(y)

    # Find the change-points by slicing
    slice_pre = [slice(None)] * y.ndim
    slice_pre[axis] = slice(1, None)

    slice_post = [slice(None)] * y.ndim
    slice_post[axis] = slice(-1)

    # Since we've offset the input by one, pad back onto the front
    padding = [(0, 0)] * y.ndim
    padding[axis] = (1, 0)

    return np.pad((y_sign[tuple(slice_post)] != y_sign[tuple(slice_pre)]),
                  padding,
                  mode='constant',
                  constant_values=pad)
def _crawl(self, url, **kwargs):
    """
    real crawl API

    checking kwargs, and repack them to each sub-dict
    """
    task = {}

    if kwargs.get('callback'):
        callback = kwargs['callback']
        if isinstance(callback, six.string_types) and hasattr(self, callback):
            func = getattr(self, callback)
        elif six.callable(callback) and six.get_method_self(callback) is self:
            func = callback
            kwargs['callback'] = func.__name__
        else:
            raise NotImplementedError("self.%s() not implemented!" % callback)
        if hasattr(func, '_config'):
            for k, v in iteritems(func._config):
                kwargs.setdefault(k, v)

    for k, v in iteritems(self.crawl_config):
        kwargs.setdefault(k, v)

    url = quote_chinese(_build_url(url.strip(), kwargs.get('params')))
    if kwargs.get('files'):
        assert isinstance(kwargs.get('data', {}), dict), \
            "data must be a dict when using with files!"
        content_type, data = _encode_multipart_formdata(
            kwargs.get('data', {}), kwargs.get('files', {}))
        kwargs.setdefault('headers', {})
        kwargs['headers']['Content-Type'] = content_type
        kwargs['data'] = data
    if kwargs.get('data'):
        kwargs['data'] = _encode_params(kwargs['data'])
    if kwargs.get('data'):
        kwargs.setdefault('method', 'POST')

    schedule = {}
    for key in ('priority', 'retries', 'exetime', 'age', 'itag',
                'force_update'):
        if key in kwargs and kwargs[key] is not None:
            schedule[key] = kwargs[key]
    task['schedule'] = schedule

    fetch = {}
    for key in ('method', 'headers', 'data', 'timeout', 'allow_redirects',
                'cookies', 'proxy', 'etag', 'last_modifed', 'save',
                'js_run_at', 'js_script', 'load_images', 'fetch_type'):
        if key in kwargs and kwargs[key] is not None:
            fetch[key] = kwargs[key]
    task['fetch'] = fetch

    process = {}
    for key in ('callback', ):
        if key in kwargs and kwargs[key] is not None:
            process[key] = kwargs[key]
    task['process'] = process

    task['project'] = self.project_name
    task['url'] = url
    task['taskid'] = task.get('taskid') or self.get_taskid(task)

    cache_key = "%(project)s:%(taskid)s" % task
    if cache_key not in self._follows_keys:
        self._follows_keys.add(cache_key)
        self._follows.append(task)

    return task
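# Context sketch (hedged): _crawl is the packing half of pyspider's
# BaseHandler.crawl().  A handler typically drives it like the hypothetical
# project below; the @config decorator attaches the per-callback _config
# dict that _crawl merges into kwargs via setdefault.
from pyspider.libs.base_handler import BaseHandler, config

class DemoHandler(BaseHandler):
    crawl_config = {'headers': {'User-Agent': 'demo-spider/0.1'}}

    def on_start(self):
        # callback may be a bound method (normalized to its name above)
        # or a string naming a method on self
        self.crawl('http://example.com/', callback=self.index_page)

    @config(age=10 * 24 * 60 * 60)  # per-callback default, merged by _crawl
    def index_page(self, response):
        for each in response.doc('a[href^="http"]').items():
            self.crawl(each.attr.href, callback=self.index_page)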
def combine_pipeline(source, pipeline, debugger=None):
    """Build the "pipeline" generator.  Optionally a debugging object may
    be passed in.

    >>> def step(iterator):
    ...     for entry in iterator:
    ...         yield entry

    >>> def remover(iterator):
    ...     for count, entry in enumerate(iterator):
    ...         if count % 2 == 0:
    ...             yield entry

    >>> class Identity(PipelineStep):
    ...     def process(self, iterator):
    ...         for entry in iterator:
    ...             yield entry
    >>> assert hasattr(Identity, '__call__')

    It's possible for iterator-factories to return lists of iterators.
    These will then be flattened into the pipeline in the order of
    occurrence.

    >>> pipeline = [step, [remover, Identity()], step]

    Example: We construct 1000 elements and pass them through our pipeline.
    As the remover removes every second element, we can expect only half as
    many elements out as we sent in.

    >>> gen = combine_pipeline(range(1000), pipeline)
    >>> assert len(list(gen)) == 500

    The `source` can also be a callable.  If it is, `combine_pipeline` will
    also return a callable, which - when called - will pass the supplied
    arguments on to the source callable.

    >>> gen = combine_pipeline(range, pipeline)
    >>> assert len(list(gen(1000))) == 500
    """
    def identity(x):
        return x

    if debugger is not None:
        head = debugger.head
        tail = debugger.tail
        track = debugger.track
        report = debugger.report
    else:
        head = tail = track = report = identity

    gen = source
    if _six.callable(gen):
        # Source is (hopefully) an iterator-factory, so we need
        # to convert all our steps into iterator-factories
        # as well, to enable passing on the arguments passed
        # to our collapsed pipeline.
        defer = defer_call
    else:
        defer = identity

    track = defer(track)
    head = defer(head)
    tail = defer(tail)
    report = defer(report)

    gen = head(gen)
    for step in recursive_flatten(pipeline):
        gen = defer(step)(track(gen))

    return report(tail(track(gen)))
def contains(self, mouseevent):
    """
    Test whether the mouse event occurred on the line.  The pick
    radius determines the precision of the location test (usually
    within five points of the value).  Use
    :meth:`~matplotlib.lines.Line2D.get_pickradius` or
    :meth:`~matplotlib.lines.Line2D.set_pickradius` to view or
    modify it.

    Returns *True* if any values are within the radius along with
    ``{'ind': pointlist}``, where *pointlist* is the set of points
    within the radius.

    TODO: sort returned indices by distance
    """
    if six.callable(self._contains):
        return self._contains(self, mouseevent)

    if not is_numlike(self.pickradius):
        raise ValueError("pick radius should be a distance")

    # Make sure we have data to plot
    if self._invalidy or self._invalidx:
        self.recache()
    if len(self._xy) == 0:
        return False, {}

    # Convert points to pixels
    transformed_path = self._get_transformed_path()
    path, affine = transformed_path.get_transformed_path_and_affine()
    path = affine.transform_path(path)
    xy = path.vertices
    xt = xy[:, 0]
    yt = xy[:, 1]

    # Convert pick radius from points to pixels
    if self.figure is None:
        warnings.warn('no figure set when checking if mouse is on line')
        pixels = self.pickradius
    else:
        pixels = self.figure.dpi / 72. * self.pickradius

    # The math involved in checking for containment (here and inside of
    # segment_hits) assumes that it is OK to overflow.  In case the
    # application has set the error flags such that an exception is raised
    # on overflow, we temporarily set the appropriate error flags here and
    # set them back when we are finished.
    olderrflags = np.seterr(all='ignore')
    try:
        # Check for collision
        if self._linestyle in ['None', None]:
            # If no line, return the nearby point(s)
            d = (xt - mouseevent.x)**2 + (yt - mouseevent.y)**2
            ind, = np.nonzero(np.less_equal(d, pixels**2))
        else:
            # If line, return the nearby segment(s)
            ind = segment_hits(mouseevent.x, mouseevent.y, xt, yt, pixels)
    finally:
        np.seterr(**olderrflags)

    ind += self.ind_offset

    # Debugging message
    if False and self._label != '':
        print("Checking line", self._label,
              "at", mouseevent.x, mouseevent.y)
        print('xt', xt)
        print('yt', yt)
        # print('dx,dy', (xt - mouseevent.x)**2., (yt - mouseevent.y)**2.)
        print('ind', ind)

    # Return the point(s) within radius
    return len(ind) > 0, dict(ind=ind)
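# Usage sketch (hedged): exercising Line2D.contains() with a synthetic
# MouseEvent on a headless backend.  The event coordinates are display
# pixels, which is what the method compares against after transforming the
# line's data.
import matplotlib
matplotlib.use('Agg')  # headless backend for the sketch
import matplotlib.pyplot as plt
from matplotlib.backend_bases import MouseEvent

fig, ax = plt.subplots()
line, = ax.plot([0, 1], [0, 1])
line.set_pickradius(5)  # pick radius in points
fig.canvas.draw()       # make sure transforms are current

# Build a mouse event at the display coordinates of the data point (0.5, 0.5)
px, py = ax.transData.transform((0.5, 0.5))
hit, info = line.contains(MouseEvent('motion_notify_event',
                                     fig.canvas, px, py))
print(hit, info.get('ind'))  # True plus the indices of nearby vertices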
def _crawl(self, url, **kwargs):
    """
    real crawl API

    checking kwargs, and repack them to each sub-dict
    """
    task = {}

    assert len(url) < 1024, "Maximum (1024) URL length error."

    if kwargs.get('callback'):
        callback = kwargs['callback']
        if isinstance(callback, six.string_types) and hasattr(self, callback):
            func = getattr(self, callback)
        elif six.callable(callback) and six.get_method_self(callback) is self:
            func = callback
            kwargs['callback'] = func.__name__
        else:
            raise NotImplementedError("self.%s() not implemented!" % callback)
        if hasattr(func, '_config'):
            for k, v in iteritems(func._config):
                if isinstance(v, dict) and isinstance(kwargs.get(k), dict):
                    kwargs[k].update(v)
                else:
                    kwargs.setdefault(k, v)

    url = quote_chinese(_build_url(url.strip(), kwargs.pop('params', None)))
    if kwargs.get('files'):
        assert isinstance(kwargs.get('data', {}), dict), \
            "data must be a dict when using with files!"
        content_type, data = _encode_multipart_formdata(
            kwargs.pop('data', {}), kwargs.pop('files', {}))
        kwargs.setdefault('headers', {})
        kwargs['headers']['Content-Type'] = content_type
        kwargs['data'] = data
    if kwargs.get('data'):
        kwargs['data'] = _encode_params(kwargs['data'])
    if kwargs.get('data'):
        kwargs.setdefault('method', 'POST')
    if kwargs.get('user_agent'):
        kwargs.setdefault('headers', {})
        kwargs['headers']['User-Agent'] = kwargs.get('user_agent')

    schedule = {}
    for key in self.schedule_fields:
        if key in kwargs:
            schedule[key] = kwargs.pop(key)
        elif key in self.crawl_config:
            schedule[key] = self.crawl_config[key]
    task['schedule'] = schedule

    fetch = {}
    for key in self.fetch_fields:
        if key in kwargs:
            fetch[key] = kwargs.pop(key)
    task['fetch'] = fetch

    process = {}
    for key in self.process_fields:
        if key in kwargs:
            process[key] = kwargs.pop(key)
    task['process'] = process

    task['project'] = self.project_name
    task['url'] = url
    if 'taskid' in kwargs:
        task['taskid'] = kwargs.pop('taskid')
    else:
        task['taskid'] = self.get_taskid(task)

    if kwargs:
        raise TypeError('crawl() got unexpected keyword argument: %s'
                        % kwargs.keys())

    if self.is_debugger():
        task = self.task_join_crawl_config(task, self.crawl_config)

    cache_key = "%(project)s:%(taskid)s" % task
    if cache_key not in self._follows_keys:
        self._follows_keys.add(cache_key)
        self._follows.append(task)

    return task
def wraps(callable_thing):
    # Plain functions get functools.wraps; any other callable (e.g. a class
    # instance with __call__) falls back to identity_step.
    if isinstance(callable_thing, _types.FunctionType):
        return _functools.wraps(callable_thing)
    else:
        assert _six.callable(callable_thing)
        return identity_step
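# Usage sketch (hedged): for a plain function, wraps() above behaves exactly
# like functools.wraps, copying __name__ and __doc__ onto the wrapper; for
# other callables it returns identity_step, assumed to be a pass-through
# decorator.
def demo():
    """Original docstring."""

@wraps(demo)
def wrapper(*args, **kwargs):
    return demo()

assert wrapper.__doc__ == 'Original docstring.'
assert wrapper.__name__ == 'demo'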
def replica_device_setter(ps_tasks=0, ps_device="/job:ps",
                          worker_device="/job:worker", merge_devices=True,
                          cluster=None, ps_ops=None, ps_strategy=None):
    """Return a `device function` to use when building a Graph for replicas.

    Device Functions are used in `with tf.device(device_function):` statements
    to automatically assign devices to `Operation` objects as they are
    constructed.  Device constraints are added from the inner-most context
    first, working outwards.  The merging behavior adds constraints to fields
    that are yet unset by a more inner context.  Currently the fields are
    (job, task, cpu/gpu).

    If `cluster` is `None`, and `ps_tasks` is 0, the returned function is a
    no-op.  Otherwise, the value of `ps_tasks` is derived from `cluster`.

    By default, only Variable ops are placed on ps tasks, and the placement
    strategy is round-robin over all ps tasks.  A custom `ps_strategy` may
    be used to do more intelligent placement, such as
    `tf.contrib.training.GreedyLoadBalancingStrategy`.

    For example,

    ```python
    # To build a cluster with two ps jobs on hosts ps0 and ps1, and 3 worker
    # jobs on hosts worker0, worker1 and worker2.
    cluster_spec = {
        "ps": ["ps0:2222", "ps1:2222"],
        "worker": ["worker0:2222", "worker1:2222", "worker2:2222"]}
    with tf.device(tf.compat.v1.train.replica_device_setter(cluster=cluster_spec)):
      # Build your graph
      v1 = tf.Variable(...)  # assigned to /job:ps/task:0
      v2 = tf.Variable(...)  # assigned to /job:ps/task:1
      v3 = tf.Variable(...)  # assigned to /job:ps/task:0
    # Run compute
    ```

    Args:
      ps_tasks: Number of tasks in the `ps` job.  Ignored if `cluster` is
        provided.
      ps_device: String.  Device of the `ps` job.  If empty no `ps` job is
        used.  Defaults to `ps`.
      worker_device: String.  Device of the `worker` job.  If empty no
        `worker` job is used.
      merge_devices: `Boolean`.  If `True`, merges device specifications
        rather than overriding them: a field is only set where the existing
        device constraint leaves it completely unset.
      cluster: `ClusterDef` proto or `ClusterSpec`.
      ps_ops: List of strings representing `Operation` types that need to be
        placed on `ps` devices.  If `None`, defaults to `STANDARD_PS_OPS`.
      ps_strategy: A callable invoked for every ps `Operation` (i.e. matched
        by `ps_ops`), that takes the `Operation` and returns the ps task
        index to use.  If `None`, defaults to a round-robin strategy across
        all `ps` devices.

    Returns:
      A function to pass to `tf.device()`.

    Raises:
      TypeError if `cluster` is not a dictionary or `ClusterDef` protocol
      buffer, or if `ps_strategy` is provided but not a callable.
    """
    if cluster is not None:
        if isinstance(cluster, server_lib.ClusterSpec):
            cluster_spec = cluster.as_dict()
        else:
            cluster_spec = server_lib.ClusterSpec(cluster).as_dict()
        # Get ps_job_name from ps_device by stripping "/job:".
        ps_job_name = pydev.DeviceSpec.from_string(ps_device).job
        if ps_job_name not in cluster_spec or cluster_spec[ps_job_name] is None:
            return None
        ps_tasks = len(cluster_spec[ps_job_name])

    if ps_tasks == 0:
        return None

    if ps_ops is None:
        # TODO(sherrym): Variables in the LOCAL_VARIABLES collection should
        # not be placed in the parameter server.
        ps_ops = list(STANDARD_PS_OPS)

    if not merge_devices:
        logging.warning(
            "DEPRECATION: It is recommended to set merge_devices=true in "
            "replica_device_setter")
    if ps_strategy is None:
        ps_strategy = _RoundRobinStrategy(ps_tasks)
    if not six.callable(ps_strategy):
        raise TypeError("ps_strategy must be callable")
    chooser = _ReplicaDeviceChooser(ps_tasks, ps_device, worker_device,
                                    merge_devices, ps_ops, ps_strategy)
    return chooser.device_function
def _crawl(self, url, **kwargs):
    """
    real crawl API

    checking kwargs, and repack them to each sub-dict
    """
    task = {}

    assert len(url) < 1024, "Maximum (1024) URL length error."

    if kwargs.get('callback'):
        callback = kwargs['callback']
        if isinstance(callback, six.string_types) and hasattr(self, callback):
            func = getattr(self, callback)
        elif six.callable(callback) and six.get_method_self(callback) is self:
            func = callback
            kwargs['callback'] = func.__name__
        else:
            raise NotImplementedError("self.%s() not implemented!" % callback)
        if hasattr(func, '_config'):
            for k, v in iteritems(func._config):
                if isinstance(v, dict) and isinstance(kwargs.get(k), dict):
                    kwargs[k].update(v)
                else:
                    kwargs.setdefault(k, v)

    for k, v in iteritems(self.crawl_config):
        if isinstance(v, dict) and isinstance(kwargs.get(k), dict):
            kwargs[k].update(v)
        else:
            kwargs.setdefault(k, v)

    url = quote_chinese(_build_url(url.strip(), kwargs.pop('params', None)))
    if kwargs.get('files'):
        assert isinstance(kwargs.get('data', {}), dict), \
            "data must be a dict when using with files!"
        content_type, data = _encode_multipart_formdata(
            kwargs.pop('data', {}), kwargs.pop('files', {}))
        kwargs.setdefault('headers', {})
        kwargs['headers']['Content-Type'] = content_type
        kwargs['data'] = data
    if kwargs.get('data'):
        kwargs['data'] = _encode_params(kwargs['data'])
    if kwargs.get('data'):
        kwargs.setdefault('method', 'POST')

    schedule = {}
    for key in ('priority', 'retries', 'exetime', 'age', 'itag',
                'force_update', 'auto_recrawl', 'cancel'):
        if key in kwargs:
            schedule[key] = kwargs.pop(key)
    task['schedule'] = schedule

    fetch = {}
    for key in ('method', 'headers', 'data', 'connect_timeout', 'timeout',
                'allow_redirects', 'cookies', 'proxy', 'etag',
                'last_modifed', 'last_modified', 'save', 'js_run_at',
                'js_script', 'js_viewport_width', 'js_viewport_height',
                'load_images', 'fetch_type', 'use_gzip', 'validate_cert',
                'max_redirects', 'robots_txt'):
        if key in kwargs:
            fetch[key] = kwargs.pop(key)
    task['fetch'] = fetch

    process = {}
    for key in ('callback', ):
        if key in kwargs:
            process[key] = kwargs.pop(key)
    task['process'] = process

    task['project'] = self.project_name
    task['url'] = url
    if 'taskid' in kwargs:
        task['taskid'] = kwargs.pop('taskid')
    else:
        task['taskid'] = self.get_taskid(task)

    if kwargs:
        raise TypeError('crawl() got unexpected keyword argument: %s'
                        % kwargs.keys())

    cache_key = "%(project)s:%(taskid)s" % task
    if cache_key not in self._follows_keys:
        self._follows_keys.add(cache_key)
        self._follows.append(task)

    return task
def css_raw(self):
    if six.callable(self._css_raw):
        self._css_raw = self._css_raw()
    return self._css_raw
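# Pattern sketch (hedged): css_raw implements lazy, call-once evaluation;
# a callable _css_raw is invoked on first access and replaced by its
# result.  DemoStyles is a hypothetical host class for the accessor above.
class DemoStyles(object):
    def __init__(self, css):
        self._css_raw = css

    css_raw = property(css_raw)  # reuse the accessor defined above

styles = DemoStyles(lambda: 'body { margin: 0; }')
assert styles.css_raw == 'body { margin: 0; }'  # callable invoked once
assert styles.css_raw == 'body { margin: 0; }'  # cached string thereafter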
def fBmnd(shape, power_spectrum, unit_length=1, seed=None,
          statistic=np.random.normal, fft=np.fft, fft_args=dict()):
    """
    Generates a field given a statistic and a power spectrum.

    author: A. Marchal, based on FieldGenerator (C. Cadiou) and MAMDLIB

    Parameters
    ----------
    shape: tuple
        The shape of the output field
    power_spectrum: callable
        P(k), evaluated on the norm of the k-grid
    unit_length: float
        How much physical length one pixel represents.  For example, a
        value of 10 means that each pixel stands for 10 physical units.
        It has the dimension of physical_unit/pixel.
    seed: int, optional
        Seed passed to np.random.seed
    statistic: callable
        A function drawing a random sample of a given shape: it is called
        as `statistic(size=shape)` and must return an array of shape
        `shape`.
    fft: a numpy-like fft API
    fft_args: dict
        a dictionary of kwargs to pass to the FFT calls

    Returns
    -------
    field: a real array of shape `shape` following the statistic with the
        given power_spectrum
    """
    if seed is not None:
        np.random.seed(seed)

    if not six.callable(statistic):
        raise ValueError('`statistic` should be callable')

    # Draw a random sample
    normal = statistic(size=shape)

    # Compute the FFT of the field and take the phase
    phase = np.angle(fft.fftn(normal, **fft_args))

    try:
        fftfreq = fft.fftfreq
    except AttributeError:
        # Fall back on numpy for the frequencies
        fftfreq = np.fft.fftfreq

    # Compute the k grid
    ks = [np.fft.fftshift(fftfreq(s, d=unit_length)) for s in shape]

    if len(ks) == 3:
        kgrid = np.meshgrid(ks[1], ks[0], ks[2])
    else:
        kgrid = np.meshgrid(*ks)

    knorm = np.sqrt(np.sum(np.power(kgrid, 2), axis=0))

    # Impose the target power spectrum on the random phases; the k = 0
    # (mean) mode is zeroed out.
    power_k = np.where(knorm == 0, 0, np.sqrt(power_spectrum(knorm)))
    imfft = np.zeros(shape, dtype=complex)
    imfft.real = power_k * np.cos(phase)
    imfft.imag = power_k * np.sin(phase)

    return fft.ifftn(np.fft.ifftshift(imfft)).real
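# Usage sketch (hedged): a 2-D field with an illustrative power-law
# spectrum P(k) ~ k**-3 (the exponent is a demo choice, not mandated by
# fBmnd).  np.where evaluates both branches, so the k = 0 bin would emit a
# divide-by-zero warning before being masked; we silence it locally.
import numpy as np

def power_law(k, beta=3.0):
    with np.errstate(divide='ignore'):
        return k ** -beta

field = fBmnd((128, 128), power_law, unit_length=1, seed=42)
assert field.shape == (128, 128)
print(field.mean(), field.std())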
def get_window(window, Nx, fftbins=True):
    '''Compute a window function.

    This is a wrapper for `scipy.signal.get_window` that additionally
    supports callable or pre-computed windows.

    Parameters
    ----------
    window : string, tuple, number, callable, or list-like
        The window specification:

        - If string, it's the name of the window function (e.g., `'hann'`)
        - If tuple, it's the name of the window function and any parameters
          (e.g., `('kaiser', 4.0)`)
        - If numeric, it is treated as the beta parameter of the `'kaiser'`
          window, as in `scipy.signal.get_window`.
        - If callable, it's a function that accepts one integer argument
          (the window length)
        - If list-like, it's a pre-computed window of the correct length `Nx`

    Nx : int > 0
        The length of the window

    fftbins : bool, optional
        If True (default), create a periodic window for use with FFT
        If False, create a symmetric window for filter design applications.

    Returns
    -------
    get_window : np.ndarray
        A window of length `Nx` and type `window`

    See Also
    --------
    scipy.signal.get_window

    Notes
    -----
    This function caches at level 10.

    Raises
    ------
    ParameterError
        If `window` is supplied as a vector of length != `Nx`,
        or is otherwise mis-specified.
    '''
    if six.callable(window):
        return window(Nx)

    elif (isinstance(window, (six.string_types, tuple)) or
          np.isscalar(window)):
        # TODO: if we add custom window functions in librosa, call them here
        return scipy.signal.get_window(window, Nx, fftbins=fftbins)

    elif isinstance(window, (np.ndarray, list)):
        if len(window) == Nx:
            return np.asarray(window)

        raise ParameterError('Window size mismatch: '
                             '{:d} != {:d}'.format(len(window), Nx))
    else:
        raise ParameterError('Invalid window specification: {}'.format(window))
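# Usage sketch (hedged): the accepted window specifications.  Note that
# fftbins only reaches the string/tuple/scalar path; callables and
# precomputed vectors are used as given.
import numpy as np
import scipy.signal

w_name = get_window('hann', 32)                  # by name
w_tuple = get_window(('kaiser', 4.0), 32)        # name plus parameter
w_call = get_window(scipy.signal.blackman, 32)   # callable, called as f(Nx)
w_array = get_window(np.ones(32), 32)            # precomputed vector
assert all(len(w) == 32 for w in (w_name, w_tuple, w_call, w_array))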
def _maybe_model_repr(obj):
    if hasattr(obj, '_repr_model_') and six.callable(obj._repr_model_):
        return obj._repr_model_()
    return obj
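# Usage sketch (hedged): objects exposing a callable _repr_model_ hook are
# rendered through it; everything else passes through unchanged.  DemoModel
# is hypothetical.
class DemoModel(object):
    def _repr_model_(self):
        return 'DemoModel(x -> y)'

assert _maybe_model_repr(DemoModel()) == 'DemoModel(x -> y)'
assert _maybe_model_repr(42) == 42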
def power_to_db(S, ref=1.0, amin=1e-10, top_db=80.0):
    """Convert a power spectrogram (amplitude squared) to decibel (dB) units

    This computes the scaling ``10 * log10(S / ref)`` in a numerically
    stable way.

    Parameters
    ----------
    S : np.ndarray
        input power

    ref : scalar or callable
        If scalar, the amplitude `abs(S)` is scaled relative to `ref`:
        `10 * log10(S / ref)`.
        Zeros in the output correspond to positions where `S == ref`.

        If callable, the reference value is computed as `ref(S)`.

    amin : float > 0 [scalar]
        minimum threshold for `abs(S)` and `ref`

    top_db : float >= 0 [scalar]
        threshold the output at `top_db` below the peak:
        ``max(10 * log10(S)) - top_db``

    Returns
    -------
    S_db : np.ndarray
        ``S_db ~= 10 * log10(S) - 10 * log10(ref)``

    See Also
    --------
    perceptual_weighting
    db_to_power
    amplitude_to_db
    db_to_amplitude

    Notes
    -----
    This function caches at level 30.

    Examples
    --------
    Get a power spectrogram from a waveform ``y``

    >>> y, sr = librosa.load(librosa.util.example_audio_file())
    >>> S = np.abs(librosa.stft(y))
    >>> librosa.power_to_db(S**2)
    array([[-33.293, -27.32 , ..., -33.293, -33.293],
           [-33.293, -25.723, ..., -33.293, -33.293],
           ...,
           [-33.293, -33.293, ..., -33.293, -33.293],
           [-33.293, -33.293, ..., -33.293, -33.293]], dtype=float32)

    Compute dB relative to peak power

    >>> librosa.power_to_db(S**2, ref=np.max)
    array([[-80.   , -74.027, ..., -80.   , -80.   ],
           [-80.   , -72.431, ..., -80.   , -80.   ],
           ...,
           [-80.   , -80.   , ..., -80.   , -80.   ],
           [-80.   , -80.   , ..., -80.   , -80.   ]], dtype=float32)

    Or compare to median power

    >>> librosa.power_to_db(S**2, ref=np.median)
    array([[-0.189,  5.784, ..., -0.189, -0.189],
           [-0.189,  7.381, ..., -0.189, -0.189],
           ...,
           [-0.189, -0.189, ..., -0.189, -0.189],
           [-0.189, -0.189, ..., -0.189, -0.189]], dtype=float32)

    And plot the results

    >>> import matplotlib.pyplot as plt
    >>> plt.figure()
    >>> plt.subplot(2, 1, 1)
    >>> librosa.display.specshow(S**2, sr=sr, y_axis='log')
    >>> plt.colorbar()
    >>> plt.title('Power spectrogram')
    >>> plt.subplot(2, 1, 2)
    >>> librosa.display.specshow(librosa.power_to_db(S**2, ref=np.max),
    ...                          sr=sr, y_axis='log', x_axis='time')
    >>> plt.colorbar(format='%+2.0f dB')
    >>> plt.title('Log-Power spectrogram')
    >>> plt.tight_layout()
    """

    S = np.asarray(S)

    if amin <= 0:
        raise ParameterError('amin must be strictly positive')

    if np.issubdtype(S.dtype, np.complexfloating):
        warnings.warn('power_to_db was called on complex input so phase '
                      'information will be discarded. To suppress this '
                      'warning, call power_to_db(magphase(D, power=2)[0]) '
                      'instead.')
        magnitude = np.abs(S)
    else:
        magnitude = S

    if six.callable(ref):
        # User supplied a function to calculate reference power
        ref_value = ref(magnitude)
    else:
        ref_value = np.abs(ref)

    log_spec = 10.0 * np.log10(np.maximum(amin, magnitude))
    log_spec -= 10.0 * np.log10(np.maximum(amin, ref_value))

    if top_db is not None:
        if top_db < 0:
            raise ParameterError('top_db must be non-negative')
        log_spec = np.maximum(log_spec, log_spec.max() - top_db)

    return log_spec
def getConsumer(self, consumerId, username=None, password=None):
    if hasattr(self, 'consumer') and self.consumer:
        return self.consumer
    if six.callable(self.registered_consumer_info):
        return self.registered_consumer_info()
    return self.registered_consumer_info
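# Usage sketch (hedged): the lookup order is a cached `consumer` attribute
# first, then a callable registered_consumer_info factory, then the raw
# value.  DemoStore is hypothetical; getConsumer is called unbound here.
class DemoStore(object):
    consumer = None

    def __init__(self, info):
        self.registered_consumer_info = info

store = DemoStore(lambda: {'key': 'k', 'secret': 's'})
assert getConsumer(store, 'consumer-id') == {'key': 'k', 'secret': 's'}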