Example #1
 def _augment_model(model):
     # type: (type) -> type
     try:
         Webhooks = model.webhooks
     except AttributeError:
         Webhooks, attrs, handlers = None, {}, {}
     else:
         if not inspect.isclass(Webhooks):
             raise TypeError(
                 'Model.webhooks is not a class, but {0!r}'.format(
                     type(Webhooks)))
         # get all the attributes from the webhooks class description.
         attrs = dict(Webhooks.__dict__)
         # extract all the `on_*` event handlers.
         handlers = {k: v for k, v in items(attrs) if k.startswith('on_')}
         # preserve non-event handler attributes.
         attrs = {k: v for k, v in items(attrs) if k not in handlers}
     if Webhooks is None or not issubclass(Webhooks, WebhookCapable):
         # Model.webhooks should inherit from WebhookCapable.
         # Note that we keep this attribute for introspection purposes
         # only (kind of like Model._meta), it doesn't implement
         # any functionality at this point.
         Webhooks = type(bytes_if_py2('webhooks'), (WebhookCapable, ),
                         attrs)
     model_webhooks = Webhooks(**dict(handlers, **kwargs))
     return model_webhooks.contribute_to_model(model)
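The comprehensions above split the webhooks class description into its on_* event handlers and the remaining attributes. A minimal stand-alone sketch of that split (hypothetical Webhooks class, plain dict.items in place of the compat items helper, no thorn dependency):

class Webhooks(object):
    timeout = 3.0                        # plain attribute: stays in attrs
    def on_create(self, instance): pass
    def on_change(self, instance): pass

attrs = dict(Webhooks.__dict__)
handlers = {k: v for k, v in attrs.items() if k.startswith('on_')}
attrs = {k: v for k, v in attrs.items() if k not in handlers}

assert set(handlers) == {'on_create', 'on_change'}
assert 'timeout' in attrs and 'on_create' not in attrs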
Example #2
 def _augment_model(model):
     # type: (type) -> type
     try:
         Webhooks = model.webhooks
     except AttributeError:
         Webhooks, attrs, handlers = None, {}, {}
     else:
         if not inspect.isclass(Webhooks):
             raise TypeError(
                 'Model.webhooks is not a class, but {0!r}'.format(
                     type(Webhooks)))
         # get all the attributes from the webhooks class description.
         attrs = dict(Webhooks.__dict__)
         # extract all the `on_*` event handlers.
         handlers = {k: v for k, v in items(attrs) if k.startswith('on_')}
         # preserve non-event handler attributes.
         attrs = {k: v for k, v in items(attrs) if k not in handlers}
     if Webhooks is None or not issubclass(Webhooks, WebhookCapable):
         # Model.webhooks should inherit from WebhookCapable.
         # Note that we keep this attribute for introspection purposes
         # only (kind of like Model._meta), it doesn't implement
         # any functionality at this point.
         Webhooks = type(
             bytes_if_py2('webhooks'), (WebhookCapable,), attrs)
     model_webhooks = Webhooks(**dict(handlers, **kwargs))
     return model_webhooks.contribute_to_model(model)
Example #3
def assign_proto(proto, name, val):
    """Assign a Python object to a protobuf message, based on the Python
    type (in recursive fashion). Lists become repeated fields/messages, dicts
    become messages, and other types are assigned directly. For convenience,
    repeated fields whose values are not lists are converted to single-element
    lists; e.g., `my_repeated_int_field=3` is converted to
    `my_repeated_int_field=[3]`."""

    is_repeated_field = hasattr(getattr(proto, name), 'extend')
    if is_repeated_field and not isinstance(val, list):
        val = [val]
    if isinstance(val, list):
        if isinstance(val[0], dict):
            for item in val:
                proto_item = getattr(proto, name).add()
                # for k, v in six.iteritems(item):
                for k, v in six.items(item):  # lyq
                    assign_proto(proto_item, k, v)
        else:
            getattr(proto, name).extend(val)
    elif isinstance(val, dict):
        # for k, v in six.iteritems(val):
        for k, v in six.items(val):  # lyq
            assign_proto(getattr(proto, name), k, v)
    else:
        setattr(proto, name, val)
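The docstring's single-element-list rule can be shown in isolation with a small stand-in object (illustrative only, not a caffe message):

class FakeMsg(object):
    def __init__(self):
        # a plain list stands in for a repeated field: it has .extend()
        self.my_repeated_int_field = []

proto, val = FakeMsg(), 3
if hasattr(getattr(proto, 'my_repeated_int_field'), 'extend') and not isinstance(val, list):
    val = [val]                          # my_repeated_int_field=3 becomes [3]
getattr(proto, 'my_repeated_int_field').extend(val)
assert proto.my_repeated_int_field == [3]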
Example #4
 def to_proto(self):
     names = {v: k for k, v in six.items(self.tops)}
     autonames = Counter()
     layers = OrderedDict()
     for name, top in six.items(self.tops):
         top._to_proto(layers, names, autonames)
     net = caffe_pb2.NetParameter()
     net.layer.extend(layers.values())
     return net
Example #5
    def flush(self):
        """
        Save dictionary with metadata, move to new chunk,
        and prepare it for writing.
        """
        self.meta_file_.keynames = list(self.data_.keys())
        self.meta_file_.chunks.append(
            AttrDict((k, len(v)) for k, v in six.items(self.data_)))
        self.meta_file_.save(self.meta_filename_)
        for k, v in six.items(self.data_):
            self.lengths_[k] += len(v)

        self.data_.save(self.get_chunk_filename(self.chunk_idx_))
        self._next_chunk_to_write()
        print('Flushing', self.meta_file_.chunks[-1])
Example #6
def varmap(func, var, context=None, name=None):
    """
    Executes ``func(key_name, value)`` on all values,
    recursively discovering dict- and list-scoped
    values.
    """
    if context is None:
        context = set()

    objid = id(var)
    if objid in context:
        return func(name, '<...>')
    context.add(objid)

    if isinstance(var, dict):
        ret = dict((k, varmap(func, v, context, k)) for k, v in six.items(var))
    elif isinstance(var, (list, tuple)):
        # treat it like a mapping
        if all(isinstance(v, (list, tuple)) and len(v) == 2 for v in var):
            ret = [[k, varmap(func, v, context, k)] for k, v in var]
        else:
            ret = [varmap(func, f, context, name) for f in var]
    else:
        ret = func(name, var)
    context.remove(objid)
    return ret
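A usage sketch with hypothetical data. Note that six.items is not part of stock six, so this assumes the snippet's compat alias behaves like dict.items:

data = {'password': 'hunter2', 'nested': {'token': 'abc'}}
data['self'] = data                      # deliberate cycle

masked = varmap(lambda k, v: '***' if k in ('password', 'token') else v, data)
# every sensitive value is masked and the cyclic reference collapses to '<...>'
assert masked == {'password': '***', 'nested': {'token': '***'}, 'self': '<...>'}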
Example #7
    def _to_proto(self, layers, names, autonames):
        if self in layers:
            return
        bottom_names = []
        for inp in self.inputs:
            inp._to_proto(layers, names, autonames)
            bottom_names.append(layers[inp.fn].top[inp.n])
        layer = caffe_pb2.LayerParameter()
        layer.type = self.type_name
        layer.bottom.extend(bottom_names)

        if self.in_place:
            layer.top.extend(layer.bottom)
        else:
            for top in self.tops:
                layer.top.append(self._get_top_name(top, names, autonames))
        layer.name = self._get_name(names, autonames)

        # for k, v in six.iteritems(self.params):
        for k, v in six.items(self.params):  # lyq
            # special case to handle generic *params
            if k.endswith('param'):
                assign_proto(layer, k, v)
            else:
                try:
                    assign_proto(
                        getattr(layer,
                                _param_names[self.type_name] + '_param'), k, v)
                except (AttributeError, KeyError):
                    assign_proto(layer, k, v)

        layers[self] = layer
Example #8
File: case.py Project: celery/case
    def assertDictContainsSubset(self, expected, actual, msg=None):
        missing, mismatched = [], []

        for key, value in items(expected):
            if key not in actual:
                missing.append(key)
            elif value != actual[key]:
                mismatched.append('%s, expected: %s, actual: %s' % (
                    safe_repr(key), safe_repr(value),
                    safe_repr(actual[key])))

        if not (missing or mismatched):
            return

        standard_msg = ''
        if missing:
            standard_msg = 'Missing: %s' % ','.join(map(safe_repr, missing))

        if mismatched:
            if standard_msg:
                standard_msg += '; '
            standard_msg += 'Mismatched values: %s' % (
                ','.join(mismatched))

        self.fail(self._formatMessage(msg, standard_msg))
Example #9
    def __init__(self, name, *args, **kwargs):
        # type: (str, *Any, **Any) -> None
        super(ModelEvent, self).__init__(name, **kwargs)
        self._kwargs = kwargs
        self._kwargs.pop('app', None)  # don't use app in __reduce__
        self._filterargs = args

        self.models = WeakSet()

        # initialize the filter fields: {field}__{op}
        self.filter_fields = {
            k: v for k, v in items(kwargs) if '__' in k
        }
        # optimization: Django: Transition operators require the unchanged
        # database fields before saving, a pre_save signal
        # handles this, but we want to avoid the extra database hit
        # when they are not in use.
        self.use_transitions = any('__now_' in k for k in keys(self.filter_fields))

        # _filterargs is set by __reduce__ to restore *args
        restored_args = kwargs.get('_filterargs') or ()
        self._init_attrs(**kwargs)
        self._filter_predicate = (
            Q(*args + restored_args, **self.filter_fields)
            if args or self.filter_fields else _true)
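A minimal illustration (hypothetical kwargs, plain dict.items in place of the compat helpers) of the filter-field split and the transition-operator check described in the comments above:

kwargs = {'state__eq': 'PUBLISHED', 'title__now_eq': 'draft', 'app': None}
filter_fields = {k: v for k, v in kwargs.items() if '__' in k}
use_transitions = any('__now_' in k for k in filter_fields)

assert filter_fields == {'state__eq': 'PUBLISHED', 'title__now_eq': 'draft'}
assert use_transitions is True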
Example #10
def b64img(preprocessor, tag, markup):
    attrs = None

    # Parse the markup string
    match = ReImg.search(markup)
    if match:
        attrs = dict([(key, val.strip())
                      for (key, val) in six.items(match.groupdict()) if val])
    else:
        raise ValueError('Error processing input. '
                         'Expected syntax: {0}'.format(SYNTAX))

    # Check if alt text is present -- if so, split it from title
    if 'title' in attrs:
        match = ReTitleAlt.search(attrs['title'])
        if match:
            attrs.update(match.groupdict())
        if not attrs.get('alt'):
            attrs['alt'] = attrs['title']

    attrs['src'] = 'data:;base64,{}'.format(base64image(attrs['src']))

    # Return the formatted text
    return "<img {0}>".format(' '.join('{0}="{1}"'.format(key, val)
                                       for (key, val) in six.iteritems(attrs)))
Example #11
    def flush(self, data=None, group=None, table=None, force=True):
        if data is None:
            data = self.data
        if table is None:
            table = self.tables
        if group is None:
            group = self.groups

        # print 'Keys: ', data.keys()
        for k, v in six.items(data):
            # print 'key,val', k,v, type(v)

            if not isinstance(k, str):
                self.log.debug('Cannot save to DB, key is not string %s ' % k)
                continue

            # Clean up before writing
            if force:
                try:
                    self.h5f.remove_node(self.get_node(
                        group._gp, k), recursive=True)
                except tb.NoSuchNodeError:
                    pass

            # print 'In Group: ', group, k, v
            if isinstance(v, dict):
                self.log.debug('Attempting to save dict type')
                # assert(k not in table);
                table[k] = AttrDict()
                group[k] = AttrDict()
                group[k]._gp = self.h5f.create_group(group._gp, k)
                self.h5f.flush()
                self.log.debug('Out Group: %s' % group[k])
                self.flush(data=v, group=group[k], table=table[k])
            elif isinstance(v, np.ndarray):
                self.log.debug('Attempting to save ndarray %s' % type(v))
                table[k] = self.h5f.create_array(group._gp, k, v)
                self.log.debug('Out Table: %s' % table[k])
            # elif isinstance(v,io_utils.TableWriter):
            #     self.log.debug('Attempting to save with custom writer')
            #     table[k] = self.h5f.createTable(group._gp, name=k,
            #                                     description=v.description,
            #                                     title='%s-data' % (k) )
            #     v.write(table[k])
            #     # print 'Creating table with group:%s name:%s desc:%s' % (group._gp, k, writer.description)
            #     # print 'Out Table: ', table[k]
            else:
                self.log.debug(
                    'Attempting to save arbitrary type %s' % type(v))
                try:
                    assert v is not None
                    table[k] = self.h5f.create_carray(group._gp, k, obj=v)
                except (TypeError, ValueError, AssertionError):
                    v = 'OBJ_' + cPickle.dumps(v, -1)
                    table[k] = self.h5f.create_array(group._gp, k, v)
                    # print 'TypeError', v
                finally:
                    self.h5f.flush()
        return
Example #12
 def __call__(self, instance, app=None, **kw):
     return app_or_default(app).reverse(
         self.view_name,
         args=[attrgetter(arg)(instance) for arg in self.args],
         kwargs={
             key: attrgetter(value)(instance)
             for key, value in items(self.kwargs)
         },
         **kw)
Example #13
def legacy_export_as_marc(json, tabsize=4):
    """Create the MARCXML representation using the producer rules."""
    def encode_for_marcxml(value):
        from xml.sax.saxutils import escape
        if isinstance(value, unicode):
            value = value.encode('utf8')
        return escape(str(value))

    export = ['<record>\n']

    for key, value in sorted(six.items(json)):
        if not value:
            continue
        if key.startswith('00') and len(key) == 3:
            # Controlfield
            if isinstance(value, list):
                value = value[0]
            export += [
                '\t<controlfield tag="%s">%s'
                '</controlfield>\n'.expandtabs(tabsize) %
                (key, encode_for_marcxml(value))
            ]
        else:
            tag = key[:3]
            try:
                ind1 = key[3].replace("_", "")
            except IndexError:
                ind1 = ""
            try:
                ind2 = key[4].replace("_", "")
            except IndexError:
                ind2 = ""
            if isinstance(value, dict):
                value = [value]
            for field in value:
                export += [
                    '\t<datafield tag="%s" ind1="%s" '
                    'ind2="%s">\n'.expandtabs(tabsize) % (tag, ind1, ind2)
                ]
                for code, subfieldvalue in six.iteritems(field):
                    if subfieldvalue:
                        if isinstance(subfieldvalue, list):
                            for val in subfieldvalue:
                                export += [
                                    '\t\t<subfield code="%s">%s'
                                    '</subfield>\n'.expandtabs(tabsize) %
                                    (code, encode_for_marcxml(val))
                                ]
                        else:
                            export += [
                                '\t\t<subfield code="%s">%s'
                                '</subfield>\n'.expandtabs(tabsize) %
                                (code, encode_for_marcxml(subfieldvalue))
                            ]
                export += ['\t</datafield>\n'.expandtabs(tabsize)]
    export += ['</record>\n']
    return "".join(export)
Example #14
def _reset_signals(wanted=None):
    _sigstate = {
        sig: _reset_signal(sig, []) for sig in wanted or DEFAULT_SIGNALS
    }
    try:
        yield
    finally:
        for signal, receivers in items(_sigstate):
            _reset_signal(signal, receivers)
Example #15
def _reset_signals(wanted=None):
    _sigstate = {
        sig: _reset_signal(sig, [])
        for sig in wanted or DEFAULT_SIGNALS
    }
    try:
        yield
    finally:
        for signal, receivers in items(_sigstate):
            _reset_signal(signal, receivers)
Example #16
File: mock.py Project: celery/case
 def __new__(cls, *args, **kwargs):
     r = Mock(name=cls.__name__)
     _get_class_fun(cls.__init__)(r, *args, **kwargs)
     for key, value in items(vars(cls)):
         if key not in ('__dict__', '__weakref__', '__new__', '__init__'):
             if inspect.ismethod(value) or inspect.isfunction(value):
                 r.__getattr__(key).side_effect = _bind(value, r)
             else:
                 r.__setattr__(key, value)
     return r
Example #17
 def to_proto(self):
     if '3' == Python_Main_Version:
         names = {v: k for k, v in six.items(self.tops)}  # python3 # iter
         autonames = Counter()
         layers = OrderedDict()
         for name, top in six.items(self.tops):  # python3 # iter
             top._to_proto(layers, names, autonames)
         net = caffe_pb2.NetParameter()
         net.layer.extend(layers.values())
     else:
         names = {v: k
                  for k, v in six.iteritems(self.tops)}  # python2 # iter
         autonames = Counter()
         layers = OrderedDict()
         for name, top in six.iteritems(self.tops):  # python2 # iter
             top._to_proto(layers, names, autonames)
         net = caffe_pb2.NetParameter()
         net.layer.extend(layers.values())
     return net
Example #18
 def __new__(cls, *args, **kwargs):
     r = Mock(name=cls.__name__)
     _get_class_fun(cls.__init__)(r, *args, **kwargs)
     for key, value in items(vars(cls)):
         if key not in ('__dict__', '__weakref__', '__new__', '__init__'):
             if inspect.ismethod(value) or inspect.isfunction(value):
                 r.__getattr__(key).side_effect = _bind(value, r)
             else:
                 r.__setattr__(key, value)
     return r
Example #19
 def wrapper(*args, **kwargs):
     try:
         return f(*args, **kwargs)
     except tuple(keys(defaults)) as e:
         if logger:
             logger.error(e)
         return next(
             d() if callable(d) else d
             for ex_type, d in items(defaults) if isinstance(e, ex_type)
         )
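A self-contained sketch of the same pattern under hypothetical names: map exception classes to fallback values and return the matching default when the wrapped call raises:

def with_defaults(f, defaults):
    def wrapper(*args, **kwargs):
        try:
            return f(*args, **kwargs)
        except tuple(defaults) as e:
            # pick the default registered for the exception type that matched
            return next(d() if callable(d) else d
                        for ex_type, d in defaults.items()
                        if isinstance(e, ex_type))
    return wrapper

safe_div = with_defaults(lambda a, b: a / b, {ZeroDivisionError: 0.0, KeyError: dict})
assert safe_div(1, 0) == 0.0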
Example #20
def dmap(f, d, *path):
    key, path_tail = path[0], path[1:]
    if path_tail:
        if key is None:
            func = lambda k, v: dmap(f, v, *path_tail)
        else:
            func = lambda k, v: dmap(f, v, *path_tail) if k == key else v
    else:
        func = lambda k, v: f(v) if k == key or key is None else v
    return {k: func(k, v) for k, v in items(d)}
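A usage sketch with hypothetical data (assuming items is the dict.items compat helper the snippet imports): f is applied only along the given key path, with None acting as a wildcard:

cfg = {'db': {'timeout': 10, 'name': 'x'}, 'cache': {'timeout': 5}}

assert dmap(lambda v: v * 2, cfg, 'db', 'timeout') == \
    {'db': {'timeout': 20, 'name': 'x'}, 'cache': {'timeout': 5}}
assert dmap(lambda v: v * 2, cfg, None, 'timeout') == \
    {'db': {'timeout': 20, 'name': 'x'}, 'cache': {'timeout': 10}}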
Example #21
 def __call__(self, instance, app=None, **kw):
     return app_or_default(app).reverse(
         self.view_name,
         args=[attrgetter(arg)(instance) for arg in self.args],
         kwargs={
             key: attrgetter(value)(instance)
             for key, value in items(self.kwargs)
         },
         **kw
     )
Example #22
def patch_modules(*modules):
    prev = {}
    for mod in modules:
        prev[mod] = sys.modules.get(mod)
        sys.modules[mod] = types.ModuleType(module_name_t(mod))
    try:
        yield
    finally:
        for name, mod in items(prev):
            if mod is None:
                sys.modules.pop(name, None)
            else:
                sys.modules[name] = mod
Example #23
    def __init__(self,
                 on_create=None, on_change=None, on_delete=None,
                 reverse=None, sender_field=None, **kwargs):
        self.reverse = reverse
        self.sender_field = sender_field
        self.events = {}

        self.update_events(
            {k: v for k, v in items(kwargs) if k.startswith('on_')},
            on_create=on_create and on_create.dispatches_on_create(),
            on_change=on_change and on_change.dispatches_on_change(),
            on_delete=on_delete and on_delete.dispatches_on_delete(),
        )
Example #24
def patch_modules(*modules):
    prev = {}
    for mod in modules:
        prev[mod] = sys.modules.get(mod)
        sys.modules[mod] = types.ModuleType(module_name_t(mod))
    try:
        yield
    finally:
        for name, mod in items(prev):
            if mod is None:
                sys.modules.pop(name, None)
            else:
                sys.modules[name] = mod
Example #25
 def __init__(self,
              on_create=None, on_change=None, on_delete=None,
              reverse=None, sender_field=None, **kwargs):
     # type: (Event, Event, Event, model_reverser, str, **Any) -> None
     self.events = {}
     if reverse is not None:
         self.reverse = reverse
     if sender_field is not None:
         self.sender_field = sender_field
     self.update_events(
         {k: v for k, v in items(kwargs) if k.startswith('on_')},
         on_create=on_create and on_create.dispatches_on_create(),
         on_change=on_change and on_change.dispatches_on_change(),
         on_delete=on_delete and on_delete.dispatches_on_delete(),
     )
Example #26
    def __init__(self,
                 on_create=None,
                 on_change=None,
                 on_delete=None,
                 reverse=None,
                 sender_field=None,
                 **kwargs):
        self.reverse = reverse
        self.sender_field = sender_field
        self.events = {}

        self.update_events(
            {k: v
             for k, v in items(kwargs) if k.startswith('on_')},
            on_create=on_create and on_create.dispatches_on_create(),
            on_change=on_change and on_change.dispatches_on_change(),
            on_delete=on_delete and on_delete.dispatches_on_delete(),
        )
Example #27
def combine_dict(one, two, three, four, five, six, seven, imageName):
    one['file'] = imageName

    for k, v in two.items():
        if k not in one.keys():
            one[k] = v
        else:
            one["{}_2".format(k)] = v

    for k, v in three.items():
        if k not in one.keys():
            one[k] = v
        else:
            one["{}_3".format(k)] = v

    for k, v in four.items():
        if k not in one.keys():
            one[k] = v
        else:
            one["{}_4".format(k)] = v

    for k, v in five.items():
        if k not in one.keys():
            one[k] = v
        else:
            print(k, '4')
            one["{}_5".format(k)] = v

    for k, v in six.items():
        if k not in one.keys():
            one[k] = v
        else:
            one["{}_6".format(k)] = v

    for k, v in seven.items():
        if k not in one.keys():
            one[k] = v
        else:
            print(k, '6')
            one["{}_7".format(k)] = v

    return one
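An illustrative call with hypothetical data: colliding keys from later dictionaries are kept under a numeric suffix instead of overwriting the first value, and the first dict is modified in place:

a, b = {'score': 1}, {'score': 2}
merged = combine_dict(a, b, {}, {}, {}, {}, {}, 'img_001.png')
assert merged == {'score': 1, 'file': 'img_001.png', 'score_2': 2}
assert merged is a                       # the first argument is mutated and returned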
Example #28
    def __init__(self, name, *args, **kwargs):
        super(ModelEvent, self).__init__(name, **kwargs)
        self._kwargs = kwargs
        self._filterargs = args

        self.models = WeakSet()

        # initialize the filter fields: {field}__{op}
        self.filter_fields = {
            k: v for k, v in items(kwargs) if '__' in k
        }
        # optimization: Django: Transition operators require the unchanged
        # database fields before saving, a pre_save signal
        # handles this, but we want to avoid the extra database hit
        # when they are not in use.
        self.use_transitions = any(
            '__now_' in k for k in keys(self.filter_fields)
        )
        # _filterargs is set by __reduce__ to restore *args
        restored_args = kwargs.get('_filterargs') or ()
        self._init_attrs(**kwargs)
        self._filter_predicate = (
            Q(*args + restored_args, **self.filter_fields)
            if args or self.filter_fields else _true)
Example #29
    def assertDictContainsSubset(self, expected, actual, msg=None):
        missing, mismatched = [], []

        for key, value in items(expected):
            if key not in actual:
                missing.append(key)
            elif value != actual[key]:
                mismatched.append(
                    '%s, expected: %s, actual: %s' %
                    (safe_repr(key), safe_repr(value), safe_repr(actual[key])))

        if not (missing or mismatched):
            return

        standard_msg = ''
        if missing:
            standard_msg = 'Missing: %s' % ','.join(map(safe_repr, missing))

        if mismatched:
            if standard_msg:
                standard_msg += '; '
            standard_msg += 'Mismatched values: %s' % (','.join(mismatched))

        self.fail(self._formatMessage(msg, standard_msg))
Example #30
def replace(dct, key, new_value):
    return dict(chain(items(dct), ((key, new_value),)))
Example #31
 def connect(self, sender=None, weak=False, **kwargs):
     [
         self._connect_signal(signal, handler, sender, weak, **kwargs)
         for signal, handler in items(self.signals)
     ]
Example #32
 def load_signals(self, signals):
     return {
         symbol_by_name(sig): handler
         for sig, handler in items(signals)
     }
Example #33
 def assert_log_matches(self, log, **expected):
     for k, v in items(expected):
         assert log[k] == v, 'key={0} expected {1} == {2}'.format(
             k, v, log[k])
Example #34
 def assert_log_matches(self, log, **expected):
     for k, v in items(expected):
         assert log[k] == v, 'key={0} expected {1} == {2}'.format(
             k, v, log[k])
Example #35
 def load_signals(self, signals):
     return {
         symbol_by_name(sig): handler
         for sig, handler in items(signals)
     }
Example #36
 def connect_events(self, events, **kwargs):
     # type: (Mapping[str, Event], **Any) -> Mapping[str, Event]
     return {
         k: self.contribute_to_event(v)
         for k, v in chain(items(events), items(kwargs))
     }
Example #37
 def __init__(self, *args, **kwargs):
     attrs = kwargs.pop('attrs', None) or {}
     super(Mock, self).__init__(*args, **kwargs)
     for attr_name, attr_value in items(attrs):
         setattr(self, attr_name, attr_value)
Example #38
 def connect_events(self, events, **kwargs):
     # type: (Mapping[str, Event], **Any) -> Mapping[str, Event]
     return {
         k: self.contribute_to_event(v)
         for k, v in chain(items(events), items(kwargs))
     }
Example #39
def avg_checkpoints(model_dir, num_last_checkpoints, global_step,
                    global_step_name):
    """Average the last N checkpoints in the model_dir."""
    checkpoint_state = tf.train.get_checkpoint_state(model_dir)
    if not checkpoint_state:
        utils.print_out("# No checkpoint file found in directory: %s" %
                        model_dir)
        return None

    # Checkpoints are ordered from oldest to newest.
    checkpoints = (
        checkpoint_state.all_model_checkpoint_paths[-num_last_checkpoints:])

    if len(checkpoints) < num_last_checkpoints:
        utils.print_out(
            "# Skipping averaging checkpoints because not enough checkpoints is "
            "avaliable.")
        return None

    avg_model_dir = os.path.join(model_dir, "avg_checkpoints")
    if not tf.gfile.Exists(avg_model_dir):
        utils.print_out(
            "# Creating new directory %s for saving averaged checkpoints." %
            avg_model_dir)
        tf.gfile.MakeDirs(avg_model_dir)

    utils.print_out("# Reading and averaging variables in checkpoints:")
    var_list = tf.contrib.framework.list_variables(checkpoints[0])
    var_values, var_dtypes = {}, {}
    for (name, shape) in var_list:
        if name != global_step_name:
            var_values[name] = np.zeros(shape)

    for checkpoint in checkpoints:
        utils.print_out("    %s" % checkpoint)
        reader = tf.contrib.framework.load_checkpoint(checkpoint)
        for name in var_values:
            tensor = reader.get_tensor(name)
            var_dtypes[name] = tensor.dtype
            var_values[name] += tensor

    for name in var_values:
        var_values[name] /= len(checkpoints)

    # Build a graph with same variables in the checkpoints, and save the averaged
    # variables into the avg_model_dir.
    with tf.Graph().as_default():
        tf_vars = [
            tf.get_variable(v,
                            shape=var_values[v].shape,
                            dtype=var_dtypes[v]) for v in var_values
        ]

        placeholders = [
            tf.placeholder(v.dtype, shape=v.shape) for v in tf_vars
        ]
        assign_ops = [tf.assign(v, p) for (v, p) in zip(tf_vars, placeholders)]
        global_step_var = tf.Variable(global_step,
                                      name=global_step_name,
                                      trainable=False)
        saver = tf.train.Saver(tf.all_variables())

        with tf.Session() as sess:
            sess.run(tf.initialize_all_variables())
            for p, assign_op, (name, value) in zip(placeholders, assign_ops,
                                                   six.items(var_values)):
                sess.run(assign_op, {p: value})

            # Use the built saver to save the averaged checkpoint. Only keep 1
            # checkpoint and the best checkpoint will be moved to avg_best_metric_dir.
            saver.save(sess, os.path.join(avg_model_dir, "translate.ckpt"))

    return avg_model_dir
Example #40
 def on_teardown_reset_signals(self):
     for signal, receivers in items(self._sigstate):
         _reset_signal(signal, receivers)
Example #41
    def _filter(self, **kwargs):
        params = {}
        headers = None
        uri = '/'.join([self.base_url, self.name])

        if kwargs:
            if 'since' in kwargs:
                val = kwargs['since']
                headers = self.prepare_filtering_date(val)
                del kwargs['since']

            def get_filter_params(key, value):
                last_key = key.split('_')[-1]
                if last_key.upper().endswith('ID'):
                    return 'Guid("%s")' % six.text_type(value)
                if key in self.BOOLEAN_FIELDS:
                    return 'true' if value else 'false'
                elif key in self.DATE_FIELDS:
                    return 'DateTime(%s,%s,%s)' % (value.year, value.month,
                                                   value.day)
                elif key in self.DATETIME_FIELDS:
                    return value.isoformat()
                else:
                    return '"%s"' % six.text_type(value)

            def generate_param(key, value):
                parts = key.split("__")
                field = key.replace('_', '.')
                fmt = '%s==%s'
                if len(parts) == 2:
                    # support filters:
                    # Name__Contains=John becomes Name.Contains("John")
                    if parts[1] in ["contains", "startswith", "endswith"]:
                        field = parts[0]
                        fmt = ''.join(['%s.', parts[1], '(%s)'])
                    elif parts[1] in self.OPERATOR_MAPPINGS:
                        field = parts[0]
                        key = field
                        fmt = '%s' + self.OPERATOR_MAPPINGS[parts[1]] + '%s'
                    elif parts[1] in ["isnull"]:
                        sign = '=' if value else '!'
                        return '%s%s=null' % (parts[0], sign)
                return fmt % (field, get_filter_params(key, value))

            # Move any known parameter names to the query string
            KNOWN_PARAMETERS = ['order', 'offset', 'page']
            for param in KNOWN_PARAMETERS:
                if param in kwargs:
                    params[param] = kwargs.pop(param)

            # Treat any remaining arguments as filter predicates
            # Xero will break if you search without a check for null in the first position:
            # http://developer.xero.com/documentation/getting-started/http-requests-and-responses/#title3
            sortedkwargs = sorted(six.items(kwargs),
                                  key=lambda item: -1
                                  if 'isnull' in item[0] else 0)
            filter_params = [
                generate_param(key, value) for key, value in sortedkwargs
            ]
            if filter_params:
                params['where'] = '&&'.join(filter_params)

        return uri, params, 'get', None, headers, False
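A stand-alone mirror (hypothetical and simplified from generate_param above) of how double-underscore filter keywords become Xero where-clause fragments:

def translate(key, value):
    parts = key.split('__')
    if len(parts) == 2 and parts[1] in ('contains', 'startswith', 'endswith'):
        return '%s.%s("%s")' % (parts[0], parts[1], value)
    return '%s=="%s"' % (key.replace('_', '.'), value)

assert translate('Name__contains', 'John') == 'Name.contains("John")'
assert translate('Status', 'ACTIVE') == 'Status=="ACTIVE"'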
Example #42
 def __init__(self, *args, **kwargs):
     attrs = kwargs.pop('attrs', None) or {}
     super(Mock, self).__init__(*args, **kwargs)
     for attr_name, attr_value in items(attrs):
         setattr(self, attr_name, attr_value)
Example #43
 def on_teardown_reset_signals(self):
     for signal, receivers in items(self._sigstate):
         _reset_signal(signal, receivers)
Example #44
 def connect(self, sender=None, weak=False, **kwargs):
     for signal, handler in items(self.signals):
         signal.connect(handler, sender=sender, weak=weak, **kwargs)
Example #45
 def _mock_update_attributes(self, attrs={}, **kwargs):
     for key, value in items(attrs):
         setattr(self, key, value)
Example #46
 def connect_events(self, events, **kwargs):
     return {
         k: self.contribute_to_event(v)
         for k, v in chain(items(events), items(kwargs))
     }
Example #47
 def _value_or_mock(self, value, new, name, path, **kwargs):
     if value is sentinel:
         value = new(name=name or path.rpartition('.')[2])
     for k, v in items(kwargs):
         setattr(value, k, v)
     return value
Example #48
File: mock.py Project: celery/case
 def _mock_update_attributes(self, attrs={}, **kwargs):
     for key, value in items(attrs):
         setattr(self, key, value)
Example #49
 def connect(self, sender=None, weak=False, **kwargs):
     [self._connect_signal(signal, handler, sender, weak, **kwargs)
      for signal, handler in items(self.signals)]
Example #50
 def connect_events(self, events, **kwargs):
     return {
         k: self.contribute_to_event(v)
         for k, v in chain(items(events), items(kwargs))
     }
Example #51
def to_primitive(value, convert_instances=False, convert_datetime=True,
                 level=0, max_depth=3):
    """Convert a complex object into primitives.

    Handy for JSON serialization. We can optionally handle instances,
    but since this is a recursive function, we could have cyclical
    data structures.

    To handle cyclical data structures we could track the actual objects
    visited in a set, but not all objects are hashable. Instead we just
    track the depth of the object inspections and don't go too deep.

    Therefore, convert_instances=True is lossy ... be aware.

    """
    # handle obvious types first - order of basic types determined by running
    # full tests on nova project, resulting in the following counts:
    # 572754 <type 'NoneType'>
    # 460353 <type 'int'>
    # 379632 <type 'unicode'>
    # 274610 <type 'str'>
    # 199918 <type 'dict'>
    # 114200 <type 'datetime.datetime'>
    #  51817 <type 'bool'>
    #  26164 <type 'list'>
    #   6491 <type 'float'>
    #    283 <type 'tuple'>
    #     19 <type 'long'>
    if isinstance(value, _simple_types):
        return value

    if isinstance(value, datetime.datetime):
        if convert_datetime:
            return timeutils.strtime(value)
        else:
            return value

    # value of itertools.count doesn't get caught by nasty_type_tests
    # and results in infinite loop when list(value) is called.
    if type(value) == itertools.count:
        return six.text_type(value)

    # FIXME(vish): Workaround for LP bug 852095. Without this workaround,
    #              tests that raise an exception in a mocked method that
    #              has a @wrap_exception with a notifier will fail. If
    #              we up the dependency to 0.5.4 (when it is released) we
    #              can remove this workaround.
    if getattr(value, '__module__', None) == 'mox':
        return 'mock'

    if level > max_depth:
        return '?'

    # The try block may not be necessary after the class check above,
    # but just in case ...
    try:
        recursive = functools.partial(to_primitive,
                                      convert_instances=convert_instances,
                                      convert_datetime=convert_datetime,
                                      level=level,
                                      max_depth=max_depth)
        if isinstance(value, dict):
            return dict((k, recursive(v)) for k, v in six.items(value))
        elif isinstance(value, (list, tuple)):
            return [recursive(lv) for lv in value]

        # It's not clear why xmlrpclib created their own DateTime type, but
        # for our purposes, make it a datetime type which is explicitly
        # handled
        if isinstance(value, xmlrpc.client.DateTime):
            value = datetime.datetime(*tuple(value.timetuple())[:6])

        if convert_datetime and isinstance(value, datetime.datetime):
            return timeutils.strtime(value)
        elif isinstance(value, gettextutils.Message):
            return value.data
        elif hasattr(value, 'items'):
            return recursive(dict(iter(value.items())), level=level + 1)
        elif hasattr(value, '__iter__'):
            return recursive(list(value))
        elif convert_instances and hasattr(value, '__dict__'):
            # Likely an instance of something. Watch for cycles.
            # Ignore class member vars.
            return recursive(value.__dict__, level=level + 1)
        elif netaddr and isinstance(value, netaddr.IPAddress):
            return six.text_type(value)
        else:
            if any(test(value) for test in _nasty_type_tests):
                return six.text_type(value)
            return value
    except TypeError:
        # Class objects are tricky since they may define something like
        # __iter__ defined but it isn't callable as list().
        return six.text_type(value)
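A stand-alone demonstration of the depth-tracking idea from the docstring (hypothetical helper, not the oslo function itself): cyclic structures stop at max_depth instead of recursing forever:

def shallow(value, level=0, max_depth=3):
    if level > max_depth:
        return '?'
    if isinstance(value, dict):
        return dict((k, shallow(v, level + 1, max_depth)) for k, v in value.items())
    return value

d = {}
d['self'] = d                            # cyclic dict
assert shallow(d) == {'self': {'self': {'self': {'self': '?'}}}}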
Example #52
 def _value_or_mock(self, value, new, name, path, **kwargs):
     if value is sentinel:
         value = new(name=name or path.rpartition('.')[2])
     for k, v in items(kwargs):
         setattr(value, k, v)
     return value