Пример #1
2
 def debug(self, msg, traceback=1):
     """Log *msg* line by line, prefixed with the caller's method name and line.

     :param msg: message to log (converted to ``str``; may contain newlines)
     :param traceback: number of frames to walk up the stack to find the
         caller (Python 2 ``inspect.currentframe(depth)`` API)
     """
     msg = str(msg)
     curframe = inspect.currentframe(traceback)
     lineno = curframe.f_lineno
     self.curframe = curframe
     frame_code = curframe.f_code
     frame_globals = curframe.f_globals
     functype = type(lambda: 0)
     funcs = []
     # Identify the function object that owns the caller's code object by
     # scanning its GC referrers.  More than one match means the call site
     # is ambiguous, so we keep the original behavior of logging nothing.
     for func in gc.get_referrers(frame_code):
         if type(func) is functype:
             if getattr(func, "func_code", None) is frame_code:
                 if getattr(func, "func_globals", None) is frame_globals:
                     funcs.append(func)
                     if len(funcs) > 1:
                         return None
     # Bug fix: compute the method name once, after the scan.  The original
     # assigned this inside the for-loop, raising NameError at the log call
     # below whenever gc.get_referrers() yielded nothing.
     cur_method = funcs[0].func_name if funcs else ""
     for line in msg.split("\n"):
         self.tester.debug("(" + str(cur_method) + ":" + str(lineno) + "): " + line.strip())
Пример #2
1
    def compare(self, static, moving, event_index=None, result_index=None):
        """Compare a static regression result against a moving-window one.

        When *event_index* is given, the moving residual and fitted value at
        that index must match the last raw values of the static result.  The
        raw arrays for every field in ``self.FIELDS`` are then compared,
        with the moving side optionally sliced by *result_index*.
        """
        index = moving._index

        if event_index is not None:
            # Resolve the event position to its label in the moving index.
            label = index[event_index]

            assert_almost_equal(static._resid_raw[-1], moving.resid[label])
            assert_almost_equal(static._y_fitted_raw[-1], moving.y_fitted[label])

        # Every configured raw field must agree between the two results.
        for field in self.FIELDS:
            raw_name = "_%s_raw" % field
            expected = getattr(static, raw_name)
            actual = getattr(moving, raw_name)
            if result_index is not None:
                actual = actual[result_index]
            assert_almost_equal(expected, actual)
Пример #3
1
    def baseline_recovery_test(self, model):
        """Verify an assembly-biased model recovers its baseline on average.

        Splits the toy halo table on ``halo_zform_percentile``, checks the
        decorated method differs between old/young subpopulations (in the
        direction set by the assembly-bias parameter), then checks that the
        split-weighted combination of the two subpopulation means reproduces
        the baseline mean to a relative tolerance of 1e-3.
        """

        baseline_method = getattr(model, "baseline_" + model._method_name_to_decorate)
        baseline_result = baseline_method(halo_table=self.toy_halo_table2)

        method = getattr(model, model._method_name_to_decorate)
        result = method(halo_table=self.toy_halo_table2)

        # "Old" halos are at/above the first split ordinate.
        mask = self.toy_halo_table2["halo_zform_percentile"] >= model._split_ordinates[0]
        oldmean = result[mask].mean()
        youngmean = result[np.invert(mask)].mean()
        baseline_mean = baseline_result.mean()
        # Decoration must actually move both subpopulations off the baseline.
        assert oldmean != youngmean
        assert oldmean != baseline_mean
        assert youngmean != baseline_mean

        # The sign of the assembly-bias parameter fixes which subpopulation
        # is enhanced relative to the other.
        param_key = model._get_assembias_param_dict_key(0)
        param = model.param_dict[param_key]
        if param > 0:
            assert oldmean > youngmean
        elif param < 0:
            assert oldmean < youngmean
        else:
            assert oldmean == youngmean

        # Weight each subpopulation mean by its split fraction; the combined
        # mean must recover the baseline mean.
        split = model.percentile_splitting_function(halo_table=self.toy_halo_table2)
        split = np.where(mask, split, 1 - split)
        derived_result = split * oldmean
        derived_result[np.invert(mask)] = split[np.invert(mask)] * youngmean
        derived_mean = derived_result[mask].mean() + derived_result[np.invert(mask)].mean()
        baseline_mean = baseline_result.mean()
        np.testing.assert_allclose(baseline_mean, derived_mean, rtol=1e-3)
Пример #4
1
    def get_aliases(self):
        """
        Get a dict mapping *fullname* -> *alias* for each *alias* in
        the :class:`~matplotlib.artist.ArtistInspector`.

        Eg., for lines::

          {'markerfacecolor': 'mfc',
           'linewidth'      : 'lw',
          }

        """
        aliases = {}
        for attr_name in dir(self.o):
            if not (attr_name.startswith("set_") or attr_name.startswith("get_")):
                continue
            candidate = getattr(self.o, attr_name)
            if not callable(candidate) or not self.is_alias(candidate):
                continue
            # Alias docstrings read "alias for <fullname>"; drop that prefix,
            # then strip the set_/get_ prefixes from both names.
            fullname = candidate.__doc__[10:]
            aliases.setdefault(fullname[4:], {})[attr_name[4:]] = None
        return aliases
Пример #5
1
    def _find_actions(self, subparsers, actions_module, version, do_help):
        """Register a subparser for every ``do_*`` function in *actions_module*.

        Handles API-versioned actions: selects the substitution matching
        *version* and, in help mode, appends the supported version range to
        descriptions and argument help texts.
        """
        msg = _(" (Supported by API versions '%(start)s' - '%(end)s')")
        for attr in (a for a in dir(actions_module) if a.startswith("do_")):
            # I prefer to be hyphen-separated instead of underscores.
            command = attr[3:].replace("_", "-")
            callback = getattr(actions_module, attr)
            desc = callback.__doc__ or ""
            if hasattr(callback, "versioned"):
                additional_msg = ""
                subs = api_versions.get_substitutions(utils.get_function_name(callback))
                if do_help:
                    # Show the full supported range in --help output.
                    additional_msg = msg % {
                        "start": subs[0].start_version.get_string(),
                        "end": subs[-1].end_version.get_string(),
                    }
                    if version.is_latest():
                        additional_msg += HINT_HELP_MSG
                # Keep only the substitutions valid for the requested version.
                subs = [
                    versioned_method
                    for versioned_method in subs
                    if version.matches(versioned_method.start_version, versioned_method.end_version)
                ]
                if subs:
                    # use the "latest" substitution
                    callback = subs[-1].func
                else:
                    # there is no proper versioned method
                    continue
                desc = callback.__doc__ or desc
                desc += additional_msg

            action_help = desc.strip()
            arguments = getattr(callback, "arguments", [])

            subparser = subparsers.add_parser(
                command, help=action_help, description=desc, add_help=False, formatter_class=OpenStackHelpFormatter
            )
            subparser.add_argument("-h", "--help", action="help", help=argparse.SUPPRESS)
            self.subcommands[command] = subparser
            for (args, kwargs) in arguments:
                # Individual arguments may be restricted to a version range.
                start_version = kwargs.get("start_version", None)
                if start_version:
                    start_version = api_versions.APIVersion(start_version)
                    end_version = kwargs.get("end_version", None)
                    if end_version:
                        end_version = api_versions.APIVersion(end_version)
                    else:
                        # Open-ended range: valid through the latest minor
                        # release of the same major version.
                        end_version = api_versions.APIVersion("%s.latest" % start_version.ver_major)
                    if do_help:
                        # Help mode keeps the argument but annotates its range.
                        kwargs["help"] = kwargs.get("help", "") + (
                            msg % {"start": start_version.get_string(), "end": end_version.get_string()}
                        )
                    else:
                        if not version.matches(start_version, end_version):
                            continue
                # start_version/end_version are our metadata, not argparse's.
                kw = kwargs.copy()
                kw.pop("start_version", None)
                kw.pop("end_version", None)
                subparser.add_argument(*args, **kw)
            subparser.set_defaults(func=callback)
Пример #6
1
    def imhorner(self, x, y, coeff):
        """Evaluate a 2-D polynomial via a multivariate Horner-like scheme.

        :param x: first input coordinate, forwarded to ``self._fcache``
        :param y: second input coordinate, forwarded to ``self._fcache``
        :param coeff: flat coefficient sequence; three zero coefficients are
            appended as padding for the recurrence
        :return: the accumulated polynomial value

        NOTE(review): relies on ``self._alpha()`` (exponent table) and
        ``self._fcache`` (cached basis terms) defined elsewhere; partial sums
        are stored as attributes ``self.r1 .. self.rN`` as a side effect —
        TODO confirm that is intentional rather than using locals.
        """
        _coeff = list(coeff)
        _coeff.extend([0, 0, 0])
        alpha = self._alpha()
        r0 = _coeff[0]
        nalpha = len(alpha)

        # Differences between consecutive exponent rows determine which
        # partial-sum register r_k each step updates.
        karr = np.diff(alpha, axis=0)
        kfunc = self._fcache(x, y)
        x_terms = self.x_degree + 1
        y_terms = self.y_degree + 1
        nterms = x_terms + y_terms
        # Zero the partial-sum registers r1 .. r(nterms+3).
        for n in range(1, nterms + 1 + 3):
            setattr(self, "r" + str(n), 0.0)

        for n in range(1, nalpha):
            # Index of the highest exponent that changed between rows.
            k = karr[n - 1].nonzero()[0].max() + 1
            rsum = 0
            for i in range(1, k + 1):
                rsum = rsum + getattr(self, "r" + str(i))
            val = kfunc[k - 1] * (r0 + rsum)
            setattr(self, "r" + str(k), val)
            r0 = _coeff[n]
            # Registers below k are consumed by the update and reset.
            for i in range(1, k):
                setattr(self, "r" + str(i), 0.0)
        # Final value: remaining coefficient plus all surviving registers.
        result = r0
        for i in range(1, nterms + 1 + 3):
            result = result + getattr(self, "r" + str(i))
        return result
Пример #7
1
    def test_edit_collection_name_and_description_multiple_translations(self):
        """Editing name/description with several locales must store every
        translation and return the active-locale value for each field."""
        self.make_publisher()
        updates = {
            "name": {
                "en-US": u"Basta the potato",
                "fr": u"Basta la pomme de terre",
                "es": u"Basta la pâtätà",
                "it": u"Basta la patata",
            },
            "description": {
                "en-US": "Basta likes potatoes and Le Boulanger",
                "fr": "Basta aime les patates et Le Boulanger",
                "es": "Basta gusta las patatas y Le Boulanger",
                "it": "Basta ama patate e Le Boulanger",
            },
        }
        res, data = self.edit_collection(self.client, **updates)
        eq_(res.status_code, 200)
        self.collection = Collection.objects.get(pk=self.collection.pk)
        for key in updates:
            eq_(getattr(self.collection, key), updates[key]["en-US"])

        # Bug fix: the original asserted only on the loop variable leaked
        # from the for-loop above, i.e. a single dict-order-dependent field.
        # Check every updated field in each overridden locale.
        with translation.override("es"):
            collection_in_es = Collection.objects.get(pk=self.collection.pk)
            for key in updates:
                eq_(getattr(collection_in_es, key), updates[key]["es"])

        with translation.override("fr"):
            collection_in_fr = Collection.objects.get(pk=self.collection.pk)
            for key in updates:
                eq_(getattr(collection_in_fr, key), updates[key]["fr"])
    def process_request(self, request):
        """Install a toolbar-wrapping URLconf for this request and let each
        toolbar panel see the request (Python 2 era: ``basestring``, ``imp``,
        ``thread``)."""
        # Hide this frame from Django debug tracebacks.
        __traceback_hide__ = True
        if self.show_toolbar(request):

            urlconf = getattr(request, "urlconf", settings.ROOT_URLCONF)
            if isinstance(urlconf, basestring):
                # Dotted-path URLconfs must be imported before wrapping.
                urlconf = import_module(getattr(request, "urlconf", settings.ROOT_URLCONF))

            if urlconf not in self._urlconfs:
                # Build (and cache) a synthetic module whose patterns put the
                # toolbar's URLs in front of the project's.
                new_urlconf = imp.new_module("urlconf")
                new_urlconf.urlpatterns = debug_toolbar.urls.urlpatterns + patterns("", ("", include(urlconf)))

                # Preserve the project's custom error handlers, if any.
                if hasattr(urlconf, "handler404"):
                    new_urlconf.handler404 = urlconf.handler404
                if hasattr(urlconf, "handler500"):
                    new_urlconf.handler500 = urlconf.handler500

                self._urlconfs[urlconf] = new_urlconf

            request.urlconf = self._urlconfs[urlconf]

            toolbar = DebugToolbar(request)
            for panel in toolbar.panels:
                panel.process_request(request)
            # One toolbar instance tracked per OS thread id.
            self.__class__.debug_toolbars[thread.get_ident()] = toolbar
Пример #9
0
    def object_to_flat_dict(cls, inst_cls, value, hier_delim="_", retval=None, prefix=None, parent=None):
        """Converts a native python object to a flat dict.

        See :func:`spyne.model.complex.ComplexModelBase.get_flat_type_info`.

        :param inst_cls: model class describing the fields of *value*
        :param value: instance to flatten (attributes read via ``getattr``)
        :param hier_delim: string joining nested attribute names into keys
        :param retval: accumulator dict (created when ``None``)
        :param prefix: list of ancestor key parts (created when ``None``)
        :param parent: enclosing model class, forwarded on recursion
        :raises ValueError: when two fields flatten to the same key
        """

        if retval is None:
            retval = {}
        if prefix is None:
            prefix = []

        fti = inst_cls.get_flat_type_info(inst_cls)
        for k, v in fti.items():
            new_prefix = list(prefix)
            new_prefix.append(k)
            subvalue = getattr(value, k, None)
            if getattr(v, "get_flat_type_info", None) is None:  # Not a ComplexModel
                key = hier_delim.join(new_prefix)

                if retval.get(key, None) is not None:
                    raise ValueError("%r.%s conflicts with previous value %r" % (inst_cls, k, retval[key]))

                # Bug fix: this was a bare ``except:`` which also swallowed
                # SystemExit/KeyboardInterrupt; assignment can only fail for
                # exotic mapping types, so catch Exception only.
                try:
                    retval[key] = subvalue
                except Exception:
                    retval[key] = None

            else:
                # Nested complex model: recurse with the extended prefix.
                cls.object_to_flat_dict(fti[k], subvalue, hier_delim, retval, new_prefix, parent=inst_cls)

        return retval
Пример #10
0
def sequence_order(self):
    """
    Add a strict sequential constraint between the tasks generated by task
    generators; relies on task generators being posted in order.  It will not
    chain objects belonging to other folders, and re-queues itself so it
    always executes last.

    To use::

        bld(features='javac seq')
        bld(features='jar seq')

    To start a new sequence, set the attribute ``seq_start``::

        obj.seq_start = True
    """
    # Ensure this method runs last: if it is not already the final queued
    # method, push it to the end and bail out for now.
    if self.meths and self.meths[-1] != "sequence_order":
        self.meths.append("sequence_order")
        return

    # A fresh sequence starts here; nothing earlier to wait on.
    if getattr(self, "seq_start", None):
        return

    previous = getattr(self.bld, "prev", None)
    if previous:
        # Every task declared by the previous generator must run before ours.
        previous.post()
        for prev_task in previous.tasks:
            for own_task in self.tasks:
                own_task.set_run_after(prev_task)

    self.bld.prev = self
Пример #11
0
    def _default_arguments(self, obj):
        """Return the list of default arguments of obj if it is callable,
        or empty list otherwise."""
        call_obj = obj
        ret = []
        if inspect.isbuiltin(obj):
            # Builtins expose no argspec; rely solely on docstring parsing.
            pass
        elif not (inspect.isfunction(obj) or inspect.ismethod(obj)):
            if inspect.isclass(obj):
                # For cython embedsignature=True the constructor docstring
                # belongs to the object itself, not __init__.
                ret += self._default_arguments_from_docstring(getattr(obj, "__doc__", ""))
                # Classes are invoked through __init__ or __new__.
                call_obj = getattr(obj, "__init__", None) or getattr(obj, "__new__", None)
            elif hasattr(obj, "__call__"):
                # Other callable objects dispatch through __call__.
                call_obj = obj.__call__

        ret += self._default_arguments_from_docstring(getattr(call_obj, "__doc__", ""))

        try:
            spec_args, _varargs, _varkw, spec_defaults = inspect.getargspec(call_obj)
        except TypeError:
            # Object not introspectable by getargspec; docstring results only.
            pass
        else:
            if spec_defaults:
                # Defaults align with the trailing positional arguments.
                ret += spec_args[-len(spec_defaults):]

        return list(set(ret))
Пример #12
0
 def offline_cluster_upgrade_with_reinstall(self):
     """Offline-upgrade scenario where some stopped nodes are upgraded while
     the first ``num_nodes_reinstall`` nodes are force-reinstalled instead.

     NOTE(review): pure test-harness side effects (installs, sleeps, remote
     shells); nothing here is unit-testable in isolation.
     """
     self._install(self.servers[: self.nodes_init])
     self.operations(self.servers[: self.nodes_init])
     if self.ddocs_num:
         self.create_ddocs_and_views()
     if self.during_ops:
         # Run any configured mid-test operations, looked up by name.
         for opn in self.during_ops:
             getattr(self, opn)()
     num_nodes_reinstall = self.input.param("num_nodes_reinstall", 1)
     # Nodes stopped for the upgrade vs. nodes wiped and reinstalled.
     stoped_nodes = self.servers[self.nodes_init - (self.nodes_init - num_nodes_reinstall) : self.nodes_init]
     nodes_reinstall = self.servers[:num_nodes_reinstall]
     for upgrade_version in self.upgrade_versions:
         self.sleep(
             self.sleep_time,
             "Pre-setup of old version is done. Wait for upgrade to {0} version".format(upgrade_version),
         )
         for server in stoped_nodes:
             remote = RemoteMachineShellConnection(server)
             remote.stop_server()
             remote.disconnect()
         self.sleep(self.sleep_time)
         # Upgrade asynchronously while the reinstall runs in the foreground.
         upgrade_threads = self._async_update(upgrade_version, stoped_nodes)
         self.force_reinstall(nodes_reinstall)
         for upgrade_thread in upgrade_threads:
             upgrade_thread.join()
         # Worker threads report per-node success flags through the queue.
         success_upgrade = True
         while not self.queue.empty():
             success_upgrade &= self.queue.get()
         if not success_upgrade:
             self.fail("Upgrade failed!")
         self.dcp_rebalance_in_offline_upgrade_from_version2_to_version3()
         self.verification(self.servers[: self.nodes_init])
Пример #13
0
    def testMethod(self):
        """Create content of *portal_type* inside *module_id*, render
        *view_name* on it, and assert the produced XHTML validates."""
        target_module = getattr(self.portal, module_id)
        type_chain = portal_type.split("/")

        # Renamed from ``object`` to avoid shadowing the builtin.
        document = createSubContent(target_module, type_chain)
        view = getattr(document, view_name)
        self.assert_(*validate_xhtml(validator=validator, source=view(), view_name=view_name, bt_name=bt_name))
Пример #14
0
    def compare(self, static, moving, event_index=None, result_index=None):
        """Check that a moving-window regression reproduces the static one.

        With *event_index*, the raw residual / fitted slices of both results
        are compared period-to-period; then every field in ``self.FIELDS``
        is compared, the moving side optionally indexed by *result_index*.
        """
        if event_index is not None:
            # Align the final period of the static fit with the requested
            # period of the moving fit.
            static_slice = _period_slice(static, -1)
            moving_slice = _period_slice(moving, event_index)

            assert_almost_equal(static._resid_raw[static_slice], moving._resid_raw[moving_slice])
            assert_almost_equal(static._y_fitted_raw[static_slice], moving._y_fitted_raw[moving_slice])

        # All raw fields must agree between the two results.
        for field in self.FIELDS:
            attr_name = "_%s_raw" % field
            expected = getattr(static, attr_name)
            actual = getattr(moving, attr_name)
            if result_index is not None:
                actual = actual[result_index]
            assert_almost_equal(expected, actual)
Пример #15
0
    def test_wls_panel(self):
        """Weighted LS on panel data must match the same regression run on
        the stacked (long) representation.

        NOTE(review): relies on long-removed pandas APIs (``Panel``, ``.ix``,
        ``_tuple_index``, ``ols``); kept as-is for the legacy suite.
        """
        y = tm.makeTimeDataFrame()
        x = Panel({"x1": tm.makeTimeDataFrame(), "x2": tm.makeTimeDataFrame()})

        # Punch NaN holes so missing-data alignment is exercised too.
        y.ix[[1, 7], "A"] = np.nan
        y.ix[[6, 15], "B"] = np.nan
        y.ix[[3, 20], "C"] = np.nan
        y.ix[[5, 11], "D"] = np.nan

        stack_y = y.stack()
        stack_x = DataFrame(dict((k, v.stack()) for k, v in compat.iteritems(x)))

        # Weights: cross-item standard deviations, stacked to long form.
        weights = x.std("items")
        stack_weights = weights.stack()

        stack_y.index = stack_y.index._tuple_index
        stack_x.index = stack_x.index._tuple_index
        stack_weights.index = stack_weights.index._tuple_index

        result = ols(y=y, x=x, weights=1 / weights)
        expected = ols(y=stack_y, x=stack_x, weights=1 / stack_weights)

        assert_almost_equal(result.beta, expected.beta)

        # Residuals and fitted values must also agree element-wise.
        for attr in ["resid", "y_fitted"]:
            rvals = getattr(result, attr).stack().values
            evals = getattr(expected, attr).values
            assert_almost_equal(rvals, evals)
Пример #16
0
    def call(self, context, method, *args, **kwargs):
        """Call a glance client method.

        If we get a connection error,
        retry the request according to CONF.glance_num_retries.
        """
        version = kwargs.pop("version", self.version)

        retry_excs = (
            glanceclient.exc.ServiceUnavailable,
            glanceclient.exc.InvalidEndpoint,
            glanceclient.exc.CommunicationError,
        )
        num_attempts = 1 + CONF.glance_num_retries

        for attempt in range(1, num_attempts + 1):
            client = self.client or self._create_onetime_client(context, version)
            try:
                controller_name = kwargs.pop("controller", "images")
                controller = getattr(client, controller_name)
                return getattr(controller, method)(*args, **kwargs)
            except retry_excs as exc:
                error_msg = _LE("Error contacting glance server " "'%(netloc)s' for '%(method)s', " "%(extra)s.")
                last_attempt = attempt == num_attempts
                extra = "done trying" if last_attempt else "retrying"
                LOG.exception(error_msg, {"netloc": self.netloc, "method": method, "extra": extra})
                if last_attempt:
                    # Retries exhausted: surface the connection failure.
                    raise exception.GlanceConnectionFailed(reason=exc)
                time.sleep(1)
            except glanceclient.exc.HTTPOverLimit as exc:
                raise exception.ImageLimitExceeded(exc)
Пример #17
0
    def _translate_from_glance(self, context, image):
        """Get image metadata from glance image.

        Extract metadata from image and convert it's properties
        to type cinder expected.

        :param image: glance image object
        :return: image metadata dictionary
        """
        if CONF.glance_api_version == 2:
            # Fetch and cache the image schema once; it distinguishes base
            # properties from custom ones.
            if self._image_schema is None:
                self._image_schema = self._client.call(
                    context, "get", controller="schemas", schema_name="image", version=2
                )
            schema = self._image_schema

            base_meta = {}
            custom_props = {}
            for key in image.keys():
                # NOTE(aarefiev): get base image property, store image
                # 'schema' is redundant, so ignore it.
                if schema.is_base_property(key) is True and key != "schema":
                    base_meta[key] = getattr(image, key)
                # NOTE(aarefiev): nova is expected that all image properties
                # (custom or defined in schema-image.json) stores in
                # 'properties' key.
                if schema.is_base_property(key) is False:
                    custom_props[key] = getattr(image, key)
            image_meta = base_meta
            image_meta["properties"] = custom_props
        else:
            image_meta = _extract_attributes(image)

        image_meta = _convert_timestamps_to_datetimes(image_meta)
        image_meta = _convert_from_string(image_meta)
        return image_meta
Пример #18
0
    def process_entry(self, e, appid):
        """Convert one feed entry into a published BlogPost, skipping entries
        whose slug already exists.

        :param e: a feedparser entry object
        :param appid: app config id the post belongs to
        """
        title = e.title
        allura_base.log.info(" ...entry '%s'", title)
        # Prefer full content blocks; fall back to the summary detail.
        parsed_content = filter(None, e.get("content") or [e.get("summary_detail")])
        if parsed_content:
            content = u""
            for ct in parsed_content:
                if ct.type != "text/html":
                    content += plain2markdown(ct.value)
                else:
                    # HTML parts are converted to markdown, resolving
                    # relative links against the entry's own URL.
                    html2md = html2text.HTML2Text(baseurl=e.link)
                    html2md.escape_snob = True
                    markdown_content = html2md.handle(ct.value)
                    content += markdown_content
        else:
            # No content at all: fall back through summary -> subtitle -> title.
            content = plain2markdown(getattr(e, "summary", getattr(e, "subtitle", getattr(e, "title"))))

        content += u" [link](%s)" % e.link
        updated = datetime.utcfromtimestamp(calendar.timegm(e.updated_parsed))

        # Only create the post when no post with this slug exists yet.
        base_slug = BM.BlogPost.make_base_slug(title, updated)
        b_count = BM.BlogPost.query.find(dict(slug=base_slug, app_config_id=appid)).count()
        if b_count == 0:
            post = BM.BlogPost(title=title, text=content, timestamp=updated, app_config_id=appid, state="published")
            post.neighborhood_id = c.project.neighborhood_id
            post.make_slug()
            post.commit()
Пример #19
0
 def post(self):
     """Accept a JSON report for the named module table and store it.

     Responds 204 (no content) on success.
     """
     module_name = self.get_argument("module")
     payload = json.loads(str(self.get_argument("data")))
     report_date = self.parse_date(self.get_argument("date", None))
     # Dispatch to the matching table object on the db module.
     getattr(db, module_name).load_report(payload, report_date)
     self.set_status(204)
    def _perform_date_checks(self, date_checks):
        """Collect unique-for-date violations for this instance.

        For each ``(model_class, lookup_type, field, unique_for)`` tuple,
        look for another row with the same field value within the same
        day/month/year (or other lookup) and record an error message per
        violating field.
        """
        errors = {}
        for model_class, lookup_type, field, unique_for in date_checks:
            date = getattr(self, unique_for)
            if date is None:
                # No date set: nothing to check against.
                continue
            filters = {}
            # there's a ticket to add a date lookup, we can remove this special
            # case if that makes it's way in
            if lookup_type == "date":
                filters["%s__day" % unique_for] = date.day
                filters["%s__month" % unique_for] = date.month
                filters["%s__year" % unique_for] = date.year
            else:
                filters["%s__%s" % (unique_for, lookup_type)] = getattr(date, lookup_type)
            filters[field] = getattr(self, field)

            clashes = model_class._default_manager.filter(**filters)
            # When editing an existing row, it must not collide with itself.
            if not self._state.adding and self.pk is not None:
                clashes = clashes.exclude(pk=self.pk)

            if clashes.exists():
                errors.setdefault(field, []).append(self.date_error_message(lookup_type, field, unique_for))
        return errors
Пример #21
0
    def do_mouseDown(self, event):
        """Dispatch a classic-Mac mouse-down event to a part-code handler.

        The window part code selects a ``do_<partname>`` handler, looked up
        on the application (no window hit) or on the clicked window.
        """
        what, message, when, where, modifiers = event
        partcode, wid = FindWindow(where)
        # Map the part code to a handler name, falling back to the numeric code.
        if partcode in partname:
            name = "do_" + partname[partcode]
        else:
            name = "do_%d" % partcode
        if wid is None:
            # Click hit no window: try an application-level handler, else
            # defer to the system event handler and stop.
            try:
                handler = getattr(self, name)
            except AttributeError:
                if hasattr(MacOS, "HandleEvent"):
                    MacOS.HandleEvent(event)
                return

        elif wid in self._windows:
            # Click in one of our windows: prefer the window's own handler.
            window = self._windows[wid]
            try:
                handler = getattr(window, name)
            except AttributeError:
                handler = self.do_unknownpartcode

        else:
            # A window we don't manage.
            handler = self.do_unknownwindow
        handler(partcode, wid, event)
        return
Пример #22
0
    def translate_connect_args(self, names=None, **kw):
        """Translate url attributes into a dictionary of connection arguments.

        Returns attributes of this url (`host`, `database`, `username`,
        `password`, `port`) as a plain dictionary.  The attribute names are
        used as the keys by default.  Unset or false attributes are omitted
        from the final dictionary.

        :param \**kw: Optional, alternate key names for url attributes.

        :param names: Deprecated.  Same purpose as the keyword-based alternate names,
            but correlates the name to the original positionally.
        """
        # Bug fix: the original signature used a shared mutable default
        # (``names=[]``) and consumed it with ``names.pop(0)``, mutating the
        # caller's list as a side effect.  Work on a private copy instead.
        remaining = list(names) if names else []

        translated = {}
        for sname in ("host", "database", "username", "password", "port"):
            if remaining:
                # Positional alternate names correlate in declaration order.
                name = remaining.pop(0)
            elif sname in kw:
                name = kw[sname]
            else:
                name = sname
            # Skip suppressed (name None) and unset/false attributes.
            if name is not None and getattr(self, sname, False):
                translated[name] = getattr(self, sname)
        return translated
Пример #23
0
    def pprint_getters(self):
        """
        Return the getters and actual values as list of strings.

        Each line reads ``    <alias-or-name> = <value>``; long values are
        truncated and newlines flattened so every entry fits on one line.
        """

        o = self.oorig
        getters = [name for name in dir(o) if name.startswith("get_") and callable(getattr(o, name))]
        getters.sort()
        lines = []
        for name in getters:
            func = getattr(o, name)
            # Aliases are reported under their canonical name only.
            if self.is_alias(func):
                continue

            try:
                val = func()
            except Exception:
                # Bug fix: was a bare ``except:``, which also swallowed
                # SystemExit/KeyboardInterrupt.  Getters that fail to be
                # called with no arguments are simply skipped.
                continue
            # Array-likes are previewed by their first six elements.
            if getattr(val, "shape", ()) != () and len(val) > 6:
                s = str(val[:6]) + "..."
            else:
                s = str(val)
            s = s.replace("\n", " ")
            if len(s) > 50:
                s = s[:50] + "..."
            name = self.aliased_name(name[4:])
            lines.append("    %s = %s" % (name, s))
        return lines
Пример #24
0
    def test_duplicate_has_perms(self):
        """Duplicating a collection must create a new pk/slug while copying
        all metadata and translated name/description (Python 2 era:
        ``dict.keys()`` returns a list here)."""
        self.make_publisher()
        original = self.collection

        res, data = self.duplicate(self.client)
        eq_(res.status_code, 201)
        new_collection = Collection.objects.get(pk=data["id"])
        # The duplicate is a distinct row and gets its own slug.
        ok_(new_collection.pk != original.pk)
        ok_(new_collection.slug)
        ok_(new_collection.slug != original.slug)

        # Verify that the collection metadata is correct. We duplicated
        # self.collection, which was created with self.collection_data, so
        # use that.
        original = self.collection
        keys = self.collection_data.keys()
        # Translated / per-instance fields are checked separately below.
        keys.remove("name")
        keys.remove("description")
        keys.remove("slug")
        for field in keys:
            eq_(data[field], self.collection_data[field])
            eq_(getattr(new_collection, field), self.collection_data[field])
            eq_(getattr(new_collection, field), getattr(original, field))

        # Test name and description separately as we return the whole dict
        # with all translations.
        eq_(data["name"], self.collection_data["name"])
        eq_(new_collection.name, data["name"]["en-US"])
        eq_(new_collection.name, original.name)

        eq_(data["description"], self.collection_data["description"])
        eq_(new_collection.description, data["description"]["en-US"])
        eq_(new_collection.description, original.description)
Пример #25
0
def setInitList(_InstanceVariable, _DoStr, _TagStr):
    """Normalize a Do/Tag variable into a list stored as ``...FloatsList``.

    Reads the attribute named ``DoStrToDoingStrOrderedDict[_DoStr] + _TagStr +
    "Variable"``, stores it (scalars wrapped in a one-element list) under
    ``DoStrToDoneStrOrderedDict[_DoStr] + _TagStr + "FloatsList"``, and grows
    ``DoUnitsInt`` when the new list is longer.
    """

    # import (kept function-local, matching the original module's style)
    import numpy as np

    # get the source variable
    variable = getattr(_InstanceVariable, DoStrToDoingStrOrderedDict[_DoStr] + _TagStr + "Variable")

    # destination attribute name
    set_key_str = DoStrToDoneStrOrderedDict[_DoStr] + _TagStr + "FloatsList"

    # Bug fix: the original tested ``type(Variable) in [list, np.array]``,
    # but ``np.array`` is a factory *function*, never a type, so numpy
    # arrays always fell into the scalar branch and were wrapped as
    # ``[array]``.  Compare against ``np.ndarray`` instead.
    if isinstance(variable, (list, np.ndarray)):
        # sequence: convert to a plain list
        setattr(_InstanceVariable, set_key_str, list(variable))
    else:
        # scalar: wrap in a one-element list
        setattr(_InstanceVariable, set_key_str, [variable])

    # keep DoUnitsInt in sync with the longest list seen so far
    NewDoUnitsInt = len(getattr(_InstanceVariable, set_key_str))
    if _InstanceVariable.DoUnitsInt < NewDoUnitsInt:
        _InstanceVariable.DoUnitsInt = NewDoUnitsInt
Пример #26
0
    def getMetricDependencies(self, metric):
        """Build the ``filetype -> (mapkey -> field(s))`` map a metric needs.

        For each catalog type the metric requires, walk the metric's map keys
        (plus the catalog's own necessary keys) through the catalog's field
        map, recording which file fields satisfy each key per file type.

        :param metric: metric object declaring ``catalog_type`` and ``mapkeys``
        :return: nested dict ``{filetype: {mapkey: field or [fields]}}``
        :raises ValueError: if a required catalog type is missing
        :raises Exception: if some map keys cannot be satisfied at all
        """

        fieldmap = {}
        # Tracks, per map key, whether any catalog satisfied it.
        valid = {}
        for ctype in metric.catalog_type:
            if getattr(self, ctype) is None:
                raise ValueError(
                    "This Ministry does not have "
                    "a catalog of type {0} as required "
                    "by {1}".format(ctype, metric.__class__.__name__)
                )
            else:
                cat = getattr(self, ctype)

            # go through metric dependencies, checking if
            # this catalog satisfies them. If it does, create
            # the map from the metric dependency to the catalog
            # fields as specified by the catalog's field map

            mk = copy(metric.mapkeys)
            mk.extend(cat.necessaries)

            for mapkey in mk:
                if mapkey not in valid.keys():
                    valid[mapkey] = False

                # Keys absent from this catalog's field map may still be
                # satisfied by another catalog type.
                if mapkey in cat.fieldmap.keys():
                    fileinfo = cat.fieldmap[mapkey]
                else:
                    continue

                for field in fileinfo.keys():
                    filetypes = fileinfo[field]
                    for ft in filetypes:
                        if ft in cat.filetypes:
                            # if already have one field
                            # make a list of fields
                            if ft not in fieldmap.keys():
                                fieldmap[ft] = {}
                            if mapkey in fieldmap[ft].keys():
                                if hasattr(fieldmap[ft][mapkey], "__iter__"):
                                    fieldmap[ft][mapkey].append(field)
                                else:
                                    fieldmap[ft][mapkey] = [fieldmap[ft][mapkey], field]
                            else:
                                fieldmap[ft][mapkey] = field

                            valid[mapkey] = True

        # Any key never satisfied by any catalog is a hard error.
        notavail = []
        for key in valid.keys():
            if not valid[key]:
                notavail.append(key)

        if len(notavail) > 0:
            raise Exception(
                "Mapkeys {0} are not available. Required by {1}!".format(notavail, metric.__class__.__name__)
            )

        return fieldmap
Пример #27
0
    def compose(a, b):
        """Return the composition of two stages: a callable ``z -> b(a(z))``.

        The result carries a readable ``name`` ("outer(inner)") and the
        concatenated ``params`` of both stages, outer stage first.
        """

        def composed(z):
            return b(a(z))

        composed.name = "%s(%s)" % (b.name, a.name)
        composed.params = getattr(b, "params", []) + getattr(a, "params", [])
        return composed
Пример #28
0
Файл: cards.py Проект: bqv/anki
 def rebuildQA(self, deck, media=True):
     """Regenerate this card's question/answer text and update media counts.

     :param deck: owning deck, used for formatting and media bookkeeping
     :param media: when True, per-file media reference counts in the deck
         are updated by the net delta computed below
     """
     # format qa
     d = {}
     for f in self.fact.model.fieldModels:
         d[f.name] = (f.id, self.fact[f.name])
     qa = formatQA(None, self.fact.modelId, d, self.splitTags(), self.cardModel, deck)
     # find old media references (count -1 for each pre-rebuild reference)
     files = {}
     for type in ("question", "answer"):
         for f in mediaFiles(getattr(self, type) or ""):
             if f in files:
                 files[f] -= 1
             else:
                 files[f] = -1
     # update q/a
     self.question = qa["question"]
     self.answer = qa["answer"]
     # determine media delta (+1 per post-rebuild reference, so the dict now
     # holds the net change per file)
     for type in ("question", "answer"):
         for f in mediaFiles(getattr(self, type)):
             if f in files:
                 files[f] += 1
             else:
                 files[f] = 1
     # update media counts if we're attached to deck
     if media:
         for (f, cnt) in files.items():
             updateMediaCount(deck, f, cnt)
     self.setModified()
Пример #29
0
    def load_data(self, readonly=False):
        """Populate the table data with one row per nutritional input for
        every cap section of the report, plus a trailing totals row.

        :param readonly: unused here — presumably honoured by the cell
            classes or callers; TODO confirm.
        """
        self.data = []
        cols = len(self.hheaders)

        def blank_line():
            # One blank cell per visible column.
            return [BlankCell(self) for x in range(0, cols)]

        for cap in self.report.caps():

            # add a blank line for the section header
            self.data.append(blank_line())

            # Per-cap consumption and order sub-reports, looked up by name.
            cap_cons_report = getattr(self.report, "cons_%s_report" % cap)
            cap_order_report = getattr(self.report, "order_%s_report" % cap)

            for icr in cap_cons_report.nutinput_reports:

                # Matching order line for this nutritional input.
                ior = InputOrderReport.filter(order_report=cap_order_report, nut_input=icr.nut_input).get()

                self.vheaders.append(icr.nut_input.name.upper())
                cells = []
                cells.append(ReportValueEditItem(self, icr, "initial", None))
                cells.append(ReportValueEditItem(self, icr, "received", None))
                cells.append(ReportConsUsedValueEditItem(self, icr, "used", None))
                cells.append(ReportValueEditItem(self, icr, "lost", None))
                cells.append(ReportAutoQuantitiesLeft(self, icr, "left", None))
                cells.append(ReportOrderValueEditItem(self, ior, "quantity", None))

                self.data.append(cells)
        # add total line
        self.data.append([ColumnSumItem(self, None, None) for x in range(0, cols)])

        self.rows_for_cap("mam")
        self.rows_for_cap("sam")
Пример #30
0
    def sql_indexes_for_model(self, model, *args, **kwargs):
        """Creates ``model`` indexes.

        :param model: The model containing the fields inside group.
        :param \*args: Extra args not used in this engine.
        :param \*\*kwargs: Extra kwargs not used in this engine.
        """
        # Unmanaged and proxy models own no tables of their own.
        if not model._meta.managed or model._meta.proxy:
            return []
        fields = [f for f in model._meta.local_fields if f.db_index]
        if not fields and not hasattr(model._meta, "index_together") and not hasattr(model._meta, "unique_together"):
            return []
        # Fix: use the function form of print so this module also parses on
        # Python 3 (identical output on Python 2 for a single argument).
        print("Installing index for %s.%s model" % (model._meta.app_label, model._meta.object_name))
        for field in fields:
            self.sql_indexes_for_field(model, field)
        for group in getattr(model._meta, "index_together", []):
            self.index_fields_group(model, group)

        # unique_together support
        unique_together = getattr(model._meta, "unique_together", [])
        # Django should do this, I just wanted to be REALLY sure.
        # A single flat tuple of field names is normalized to a 1-tuple.
        if len(unique_together) > 0 and isinstance(unique_together[0], basestring):
            unique_together = (unique_together,)
        for fields in unique_together:
            group = {"fields": fields, "unique": True}
            self.index_fields_group(model, group)
        return []