Example #1
    def trigger_attribute_change_events(object_, action):
        from sqlalchemy import inspect
        from sqlalchemy.orm import object_mapper, ColumnProperty

        if object_mapper(object_).class_ not in registry:
            return False

        for mapper_property in object_mapper(object_).iterate_properties:
            if isinstance(mapper_property, ColumnProperty) and \
                    mapper_property.class_attribute in registry[object_mapper(object_).class_]:

                an_index = (object_mapper(object_).class_, mapper_property.class_attribute)

                key = mapper_property.key
                attribute_state = inspect(object_).attrs.get(key)
                new_value = attribute_state.value
                old_value = get_old_value(attribute_state)
                if action == 'insert':
                    old_value = None
                if action == 'delete':
                    new_value = None
                if action == 'update':
                    if not attribute_state.history.has_changes():
                        new_value = ''
                        old_value = ''

                g.functions_to_call_after_commit[an_index] = []
                if new_value != old_value:
                    for f in registry[object_mapper(object_).class_][mapper_property.class_attribute]:
                        add = f(object_, new_value, old_value, action)
                        if add:
                            g.functions_to_call_after_commit[an_index].append(add)
Example #2
 def __init__(self, user=None, service=None, action=None,
              field_name=None, old_value=None, new_value=None, **kw):
     """
     The *service* argument should be a string such as 'Scheduler' or 
     'XMLRPC', describing the means by which the change has been made. This 
     constructor will override it with something more specific (such as the 
     name of an external service) if appropriate.
     """
     super(Activity, self).__init__(**kw)
     self.user = user
     self.service = service
     try:
         if identity.current.proxied_by_user is not None:
             self.service = identity.current.proxied_by_user.user_name
     except identity.RequestRequiredException:
         pass
     self.field_name = field_name
     self.action = action
     # These values are likely to be truncated by MySQL, so let's make sure 
     # we don't end up with invalid UTF-8 chars at the end
     if old_value and isinstance(old_value, unicode):
         old_value = unicode_truncate(old_value,
             bytes_length=object_mapper(self).c.old_value.type.length)
     if new_value and isinstance(new_value, unicode):
         new_value = unicode_truncate(new_value,
             bytes_length=object_mapper(self).c.new_value.type.length)
     self.old_value = old_value
     self.new_value = new_value
Example #3
	def copy(self, obj_source):
		pk_keys = set([c.key for c in object_mapper(obj_source).primary_key])
		#pk_keys = []
		keys = [p.key for p in object_mapper(obj_source).iterate_properties if p.key not in pk_keys and isinstance(p, sqlalchemy.orm.ColumnProperty)]

		obj_dest = obj_source.__class__.__new__(obj_source.__class__)
		obj_dest.__init__()

		if app.verbose:
			src = "src(" + str(type(obj_source)) + " " + str(obj_source)
			dst = "dst(" + str(type(obj_dest)) + " " + str(obj_dest) + ")"

		for k in keys:
			v = getattr(obj_source, k)
			if (k == "password") & (obj_source != app.user):
				v = "hidden_password"
			else:
				if  type(v) is str:
					v = self.unicode(v)
			if app.verbose:
				src += ", " + str(k) + ": " + str(type(v)) + " " + str(v)
			setattr(obj_dest, k, v)

		if app.verbose:
			src += ")"
			dst += ")"
			print src + "->" + dst

		return obj_dest
Example #4
 def as_json(self):
     date_formatter = date.getLocaleFormatter(common.get_request(), "date",
         "medium"
     )
     items = [
         dict(
             item_type = self.item_type,
             item_id = orm.object_mapper(item).primary_key_from_instance(
                 item
             )[0],
             item_title = IDCDescriptiveProperties(item).title,
             status = IWorkflow(item).get_state(item.status).title,
             status_date = ( date_formatter.format(item.submission_date) 
                 if (hasattr(item, "submission_date") and 
                     getattr(item, "submission_date")
                 )
                 else None
             ),
             registry_number = ( item.registry_number if
                 hasattr(item, "registry_number") else None
             ),
             item_mover = ( IDCDescriptiveProperties(item.owner).title if
                 hasattr(item, "owner") else None
             ),
             item_uri = "%s-%d" % (self.item_type,
                 orm.object_mapper(item).primary_key_from_instance(item)[0]
             )
         )
         for item in self.query()
     ]
     items = sorted(items, key=lambda item:item.get("status_date"),
         reverse=True
     )
     return json.dumps(dict(items=items))
Example #5
 def as_json(self):
     is_text = IScheduleText.implementedBy(self.domain_class)
     date_formatter = date.getLocaleFormatter(common.get_request(), "date",
         "medium"
     )
     items = [
         dict(
             item_type = self.item_type,
             item_id = orm.object_mapper(item).primary_key_from_instance(
                 item
             )[0],
             item_title = item.text if \
                 is_text else IDCDescriptiveProperties(item).title,
             status = IWorkflow(item).get_state(item.status).title if not \
                 is_text else None,
             status_date = date_formatter.format(item.submission_date) if \
                 getattr(item, "submission_date", None) else None,
             registry_number = item.registry_number if \
                 hasattr(item, "registry_number") else None,
             item_mover = IDCDescriptiveProperties(item.owner).title if \
                 hasattr(item, "owner") else None,
             item_uri = "%s-%d" % (self.item_type,
                 orm.object_mapper(item).primary_key_from_instance(item)[0]
             )
         )
         for item in self.query()
     ]
     items = sorted(items, key=lambda item:item.get("status_date"),
         reverse=True
     )
     return json.dumps(dict(items=items))
Example #6
    def to_dict(self, deep={}, exclude=[]):
        """Generate a JSON-style nested dict/list structure from an selfect."""
        col_prop_names = [p.key for p in object_mapper(self).iterate_properties \
                                      if isinstance(p, ColumnProperty)]
        data = dict([(name, getattr(self, name))
                     for name in col_prop_names if name not in exclude])

        # objects can provide a default view
        if not deep:
            deep = self.__default_deep__
        if not exclude:
            exclude = self.__default_exclude__

        if deep:
            for rname, rdeep in deep.iteritems():
                dbdata = getattr(self, rname)
                #FIXME: use attribute names (ie coltoprop) instead of column names
                fks = object_mapper(self).get_property(rname).remote_side
                exclude = [c.name for c in fks]
                if dbdata is None:
                    data[rname] = None
                elif isinstance(dbdata, list):
                    data[rname] = [o.to_dict(rdeep, exclude) for o in dbdata]
                else:
                    data[rname] = dbdata.to_dict(rdeep, exclude)
        return data
Example #7
    def __init__(self, user=None, service=None, action=None,
                 field_name=None, old_value=None, new_value=None, **kw):
        """
        The *service* argument should be a string such as 'Scheduler' or 
        'XMLRPC', describing the means by which the change has been made. This 
        constructor will override it with something more specific (such as the 
        name of an external service) if appropriate.
        """
        super(Activity, self).__init__(**kw)
        self.user = user
        self.service = service
        try:
            if identity.current.proxied_by_user is not None:
                self.service = identity.current.proxied_by_user.user_name
        except identity.RequestRequiredException:
            pass

        field_name_value_max_length = object_mapper(self).c.field_name.type.length
        old_value_max_length        = object_mapper(self).c.old_value.type.length
        new_value_max_length        = object_mapper(self).c.new_value.type.length
        self.field_name = field_name[:field_name_value_max_length]
        self.action = action

        if old_value is not None:
            old_value = unicode(old_value)[:old_value_max_length]
        if new_value is not None:
            new_value = unicode(new_value)[:new_value_max_length]

        self.old_value = old_value
        self.new_value = new_value
Example #8
 def __init__(self, context):
     self.context = context
     session = Session()
     trusted = removeSecurityProxy(context)        
     session.merge(trusted)
     try:
         self.oid = orm.object_mapper( trusted ).primary_key_from_instance(trusted)[0]
     except UnboundExecutionError:
         session.add(trusted)     
         self.oid = orm.object_mapper( trusted ).primary_key_from_instance(trusted)[0]         
     self.object_type = context.__class__.__name__.lower()
Example #9
def _to_dict(instance, deep=None, exclude=None):
    """Returns a dictionary representing the fields of the specified `instance`
    of a SQLAlchemy model.

    `deep` is a dictionary containing a mapping from a relation name (for a
    relation of `instance`) to either a list or a dictionary. This is a
    recursive structure which represents the `deep` argument when calling
    `_to_dict` on related instances. When an empty list is encountered,
    `_to_dict` returns a list of the string representations of the related
    instances.

    `exclude` specifies the columns which will *not* be present in the returned
    dictionary representation of the object.

    """
    deep = deep or {}
    exclude = exclude or ()
    # create the dictionary mapping column name to value
    columns = (p.key for p in object_mapper(instance).iterate_properties
               if isinstance(p, ColumnProperty))
    result = dict((col, getattr(instance, col)) for col in columns)
    # Convert datetime and date objects to ISO 8601 format.
    #
    # TODO We can get rid of this when issue #33 is resolved.
    for key, value in result.items():
        if isinstance(value, datetime.date):
            result[key] = value.isoformat()
    # recursively call _to_dict on each of the `deep` relations
    for relation, rdeep in deep.iteritems():
        # exclude foreign keys of the related object for the recursive call
        relationproperty = object_mapper(instance).get_property(relation)
        newexclude = (key.name for key in relationproperty.remote_side)
        # Get the related value so we can see if it is None, a list, a query
        # (as specified by a dynamic relationship loader), or an actual
        # instance of a model.
        relatedvalue = getattr(instance, relation)
        # HACK: In case the relatedvalue is a dynamically loaded
        # relationship, we need to resolve the query into a concrete
        # list of objects; see issue #89. We should also check to see
        # if relatedvalue is a many-to-one relationship, in order to
        # call relatedvalue.one() or something, but I don't know how
        # to do that.
        if isinstance(relatedvalue, (AppenderMixin, Query)):
            relatedvalue = relatedvalue.all()
        if relatedvalue is None:
            result[relation] = None
        elif isinstance(relatedvalue, list):
            result[relation] = [_to_dict(inst, rdeep, newexclude)
                                for inst in relatedvalue]
        else:
            result[relation] = _to_dict(relatedvalue, rdeep, newexclude)
    return result
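
A short usage sketch for the function above (the `Person` model, its `computers` relationship, and the `session` are assumed here purely for illustration). Each key in `deep` names a relation to recurse into, and its value becomes the `deep` argument for the related instances:

person = session.query(Person).first()
# Recurse one level into the "computers" relation; the empty dict means the
# related rows are serialized flat, with their foreign keys back to Person
# excluded automatically via `newexclude`.
result = _to_dict(person, deep={'computers': {}}, exclude=('birth_date',))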
Example #10
 def create(self, message, manual=False):
     """Store the existing state of the adapted context as a new version.
     """
     context = self.__parent__
     if manual:
         if not self.has_write_permission(context):
             raise Unauthorized
     version = self.domain_model()
     trusted = removeSecurityProxy(context)
     
     # set values on version from context
     self._copyFields(trusted, version)
     
     # content domain ids are typically not in the interfaces
     # manually inspect and look for one, by hand to save on the new version
     mapper = orm.object_mapper(trusted)
     version.content_id = mapper.primary_key_from_instance(trusted)[0]
     version.status = None
     version.manual = manual
     
     # we rely on change handler to attach the change object to the version
     event.notify(
         interfaces.VersionCreated(context, self, version, message))
     
     session = Session()
     session.add(version)
     
     version.context = context
     event.notify(ObjectCreatedEvent(version))
     
     return version
Example #11
def atomic_add(obj, column, delta, expire=False):
    """Performs an atomic add (or subtract) of the given column on the
    object.  This updates the object in place for reflection but does
    the real add on the server to avoid race conditions.  This assumes
    that the database's '+' operation is atomic.

    If `expire` is set to `True`, the value is expired and reloaded instead
    of the delta being applied to the local value.  This is a good idea if
    the value should be used for reflection.
    """
    sess = orm.object_session(obj) or session
    mapper = orm.object_mapper(obj)
    pk = mapper.primary_key_from_instance(obj)
    assert len(pk) == 1, 'atomic_add not supported for classes with ' \
                         'more than one primary key'

    val = orm.attributes.get_attribute(obj, column)
    if expire:
        orm.attributes.instance_state(obj).expire_attributes([column])
    else:
        orm.attributes.set_committed_value(obj, column, val + delta)

    table = mapper.tables[0]
    stmt = sql.update(table, mapper.primary_key[0] == pk[0], {
        column:     table.c[column] + delta
    })
    sess.execute(stmt)
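
A usage note: because the UPDATE computes `column + delta` on the server, concurrent transactions cannot lose increments the way a Python-side read-modify-write could. A minimal sketch, assuming an `Article` model with an integer `view_count` column and a bound `session`:

article = session.query(Article).get(article_id)
# Server-side increment; the in-memory attribute is patched (or expired when
# expire=True) so later reads in this session stay consistent.
atomic_add(article, 'view_count', 1)
session.commit()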
Example #12
    def createAndAdd(self, data):
        domain_model = self.domain_model
        # create the object, inspect data for constructor args
        try:
            ob = createInstance(domain_model, data)
        except TypeError:
            ob = domain_model()

        # apply any context values
        self.finishConstruction(ob)

        # apply extra form values
        form.applyChanges(ob, self.form_fields, data, self.adapters)

        # save the object, id is generated by db on flush
        self.context[""] = ob

        # flush so we have database id
        bungeni.alchemist.Session().flush()

        # fire an object created event
        notify(ObjectCreatedEvent(ob))

        # signal to add form machinery to go to next url
        self._finished_add = True

        mapper = orm.object_mapper(ob)

        # TODO single primary key (need changes to base container)
        oid = mapper.primary_key_from_instance(ob)

        # retrieve the object with location and security information
        return self.context[oid]
Example #13
    def polymorphic_config(self):
        """ Returns a namedtuple with the following:

        pm : a filtered polymorphic map (if polymorphic_children is found), an
        empty dict (if polymorphic_loading is False), or the base mapper's
        polymorphic map (if polymorphic_loading is True).

        base : the base-most Mapper in the inheritance chain.

        cls : classes which are found in the polymorphic map """

        base_mapper = orm.object_mapper(self).base_mapper

        # If polymorphic loading is enabled for this folder then "pm" will
        # contain the mappers that should be joined, if not then "pm" will be
        # empty.
        pm = base_mapper.polymorphic_map if self.polymorphic_loading else {}

        if pm and self.polymorphic_children:
            # Remove mappers from the original polymorphic_map for which
            # identity is not found in polymorphic_children.
            identities = {i.id for i in self.polymorphic_children}
            pm = {k: v for (k, v) in pm.items() if k in identities}

        # Classes that should be automatically joined in the polymorphic
        # loading. It can be directly used with orm.with_polymorphic()
        cls = [m.class_ for m in pm.values()]

        return PolymorphicConfig(pm, base_mapper, cls)
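
The returned tuple is intended to be fed to `orm.with_polymorphic()` so that only the whitelisted subclasses are joined when loading children. A rough sketch, assuming `PolymorphicConfig` is a `(pm, base, cls)` namedtuple as used above and that the method is invoked on a folder-like node:

config = folder.polymorphic_config()  # may be a cached property in the original code
if config.cls:
    # Join only the subclasses selected by polymorphic_children.
    entity = orm.with_polymorphic(config.base.class_, config.cls)
else:
    entity = config.base.class_
# "parent_id" is assumed for illustration.
children = session.query(entity).filter_by(parent_id=folder.id).all()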
Example #14
def stringKey(obj):
    """Get a string identifier for an item conatined in this container.
    
    Note that the primary_key is no longer determined by 
    sqlalchemy.orm.mapper.primary_key_from_instance(obj) but by doing the 
    logically equivalent (but a little more laborious) 
    [ getattr(instance, c.name) for c in mapper.primary_key ].
    
    This is because, in some hard-to-debug cases, the previous was returning 
    None to all pk values e.g. for objects on which checkPermission() has not
    been called. Using this version, the primary_key is correctly determined
    irrespective of whether checkPermission() had previously been called on
    the object.
    """
    unproxied = proxy.removeSecurityProxy(obj)
    #!+STRING_KEY experimental, to allow for a more useful string key for
    # instances, that would be independent of db PK identity but still uniquely
    # identifies the instance (at least within the scope of the container).
    # Note this key is part of public URLs, so part of public API.
    # !+valueKey reverse considerations?
    #
    # use the obj's preferred string_key formulation, if obj defines one
    if hasattr(obj, "string_key"):
        return obj.string_key()
    mapper = orm.object_mapper(unproxied)
    #primary_key = mapper.primary_key_from_instance(unproxied)
    identity_values = [ getattr(unproxied, c.name) for c in mapper.primary_key ]
    identity_key = "-".join(map(str, identity_values))
    return "obj-%s" % (identity_key)
Example #15
    def to_dict(self, includes=(), excludes=()):
        """ Dictify entity.
        """
        if '__class__' not in excludes:
            dict_ = {'__class__': {'name': self.__class__.__name__,
                                   'module': self.__class__.__module__}}
        else:
            dict_ = {}

        for property_ in object_mapper(self).iterate_properties:

            if property_.key in excludes:
                continue

            if isinstance(property_, ColumnProperty):

                dict_[property_.key] = getattr(self, property_.key)

            elif isinstance(property_, RelationshipProperty) and \
                 property_.key in includes:

                if property_.uselist:
                    dict_[property_.key] = [item.to_dict()
                                            for item in getattr(self,
                                                                property_.key)]
                else:
                    dict_[property_.key] = getattr(self, property_.key).to_dict()

        return dict_
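
In the method above, `excludes` drops keys outright while `includes` opts relationship properties into the output (each related object is dictified with its own defaults). A small usage sketch, with the `order`/`items` names assumed:

order.to_dict()                                  # columns plus the __class__ stanza
order.to_dict(includes=('items',))               # also expand the "items" relationship
order.to_dict(excludes=('__class__', 'notes'))   # drop the class info and a column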
Example #16
    def format_node_to_restore(cls, node):
        """Convert node to dict for restoring, works only in fake mode.

        Fake mode can optionally restore the removed node (this simulates
        the node being rediscovered). This method creates the appropriate
        input for that procedure.

        :param node: the node being removed
        :return: dict
        """
        # only fake tasks
        if cls.use_fake():
            new_node = {}
            reset_attrs = (
                'id',
                'cluster_id',
                'roles',
                'pending_deletion',
                'pending_addition',
                'group_id',
            )
            for prop in object_mapper(node).iterate_properties:
                if isinstance(
                    prop, ColumnProperty
                ) and prop.key not in reset_attrs:
                    new_node[prop.key] = getattr(node, prop.key)
            return new_node
Example #17
 def as_json(self):
     date_formatter = date.getLocaleFormatter(common.get_request(), "date",
         "medium"
     )
     items_json = dict(
         items = [
             dict(
                 item_type = self.item_type,
                 item_id = orm.object_mapper(item).primary_key_from_instance(
                     item
                 )[0],
                 item_title = IDCDescriptiveProperties(item).title,
                 status = IWorkflow(item).get_state(item.status).title,
                 status_date = ( date_formatter.format(item.submission_date) 
                     if hasattr(item, "submission_date") else None
                 ),
                 registry_number = ( item.registry_number if
                     hasattr(item, "registry_number") else None
                 ),
                 item_mover = ( IDCDescriptiveProperties(item.owner).title if
                     hasattr(item, "owner") else None
                 ),
                 item_uri = IDCDescriptiveProperties(item).uri
             )
             for item in self.query()
         ]
     )
     return json.dumps(items_json)
Example #18
def store(obj):
	if not LDAPConn:
		return
	cls = obj.__class__
	callback = getattr(cls, '_ldap_store', None)
	if callable(callback):
		return callback(object_mapper(obj), None, obj)
Example #19
def create_version(obj, session, deleted = False):
    obj_mapper = object_mapper(obj)
    history_mapper = obj.__history_mapper__
    history_cls = history_mapper.class_
    
    obj_state = attributes.instance_state(obj)
    
    attr = {}

    obj_changed = False
    
    for om, hm in zip(obj_mapper.iterate_to_root(), history_mapper.iterate_to_root()):
        if hm.single:
            continue
    
        for hist_col in hm.local_table.c:
            if hist_col.key == 'version':
                continue
                
            obj_col = om.local_table.c[hist_col.key]

            # get the value of the
            # attribute based on the MapperProperty related to the
            # mapped column.  this will allow usage of MapperProperties
            # that have a different keyname than that of the mapped column.
            try:
                prop = obj_mapper.get_property_by_column(obj_col)
            except UnmappedColumnError:
                # in the case of single table inheritance, there may be 
                # columns on the mapped table intended for the subclass only.
                # the "unmapped" status of the subclass column on the 
                # base class is a feature of the declarative module as of sqla 0.5.2.
                continue
                
            # expired object attributes and also deferred cols might not be in the
            # dict.  force it to load no matter what by using getattr().
            if prop.key not in obj_state.dict:
                getattr(obj, prop.key)

            a, u, d = attributes.get_history(obj, prop.key)

            if d:
                attr[hist_col.key] = d[0]
                obj_changed = True
            elif u:
                attr[hist_col.key] = u[0]
            else:
                # if the attribute had no value.
                attr[hist_col.key] = a[0]
                obj_changed = True
                
    if not obj_changed and not deleted:            
        return

    attr['version'] = obj.version
    hist = history_cls()
    for key, value in attr.iteritems():
        setattr(hist, key, value)
    session.add(hist)
    obj.version += 1
Example #20
    def with_parent(self, instance, property=None):
        """add a join criterion corresponding to a relationship to the given parent instance.

            instance
                a persistent or detached instance which is related to class represented
                by this query.

            property
                string name of the property which relates this query's class to the 
                instance.  if None, the method will attempt to find a suitable property.

        currently, this method only works with immediate parent relationships, but in the
        future may be enhanced to work across a chain of parent mappers.    
        """

        from sqlalchemy.orm import properties
        mapper = object_mapper(instance)
        if property is None:
            for prop in mapper.iterate_properties:
                if isinstance(prop, properties.PropertyLoader) and prop.mapper is self.mapper:
                    break
            else:
                raise exceptions.InvalidRequestError("Could not locate a property which relates instances of class '%s' to instances of class '%s'" % (self.mapper.class_.__name__, instance.__class__.__name__))
        else:
            prop = mapper.get_property(property, resolve_synonyms=True)
        return self.filter(Query._with_lazy_criterion(instance, prop))
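
The generated criterion is the relationship's lazy-load clause with the parent's primary key values bound in, so no join against the parent table is emitted. A usage sketch against hypothetical `User`/`Address` models, mirroring the old-style `Query.with_parent()` shown above:

user = session.query(User).get(5)
# Locate the relating property automatically...
addresses = session.query(Address).with_parent(user).all()
# ...or name it explicitly when more than one relationship could match.
addresses = session.query(Address).with_parent(user, property='addresses').all()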
Example #21
 def get_audit_trail(cls, obj):
     from sqlalchemy.orm import object_mapper
     session = object_session(obj)
     obj_mapper = object_mapper(obj)
     primary_key = cls._get_instance_pk(obj, obj_mapper)
     return session.query(cls).filter(and_(cls.type==obj.__class__.__name__,
                                           cls.ref==utils.dumps(primary_key)))
Example #22
    def _record(self, mapper, model, operation):
        pk = tuple(mapper.primary_key_from_instance(model))
        #orm.object_session(model)._model_changes[pk] = (model, operation)
        changes = {}
        
        for prop in object_mapper(model).iterate_properties:
            if not isinstance(prop, RelationshipProperty):
                try:
                    history = attributes.get_history(model, prop.key)
                except:
                    continue

                added, unchanged, deleted = history

                newvalue = added[0] if added else None

                if operation=='delete':
                    oldvalue = unchanged[0] if unchanged else None
                else:
                    oldvalue = deleted[0] if deleted else None

                if newvalue or oldvalue:
                    changes[prop.key] = (oldvalue, newvalue)
        
        orm.object_session(model)._model_changes[pk] = (model.__tablename__, pk[0], changes, operation)
        return EXT_CONTINUE
Example #23
    def cascade_iterator(self, type_, state, visited_instances, halt_on=None):
        if not type_ in self.cascade:
            return

        # only actively lazy load on the 'delete' cascade
        if type_ != "delete" or self.passive_deletes:
            passive = attributes.PASSIVE_NO_INITIALIZE
        else:
            passive = attributes.PASSIVE_OFF

        mapper = self.mapper.primary_mapper()
        instances = state.value_as_iterable(self.key, passive=passive)
        if instances:
            for c in instances:
                if c is not None and c not in visited_instances and (halt_on is None or not halt_on(c)):
                    if not isinstance(c, self.mapper.class_):
                        raise AssertionError(
                            "Attribute '%s' on class '%s' doesn't handle objects of type '%s'"
                            % (self.key, str(self.parent.class_), str(c.__class__))
                        )
                    visited_instances.add(c)

                    # cascade using the mapper local to this object, so that its individual properties are located
                    instance_mapper = object_mapper(c)
                    yield (c, instance_mapper, attributes.instance_state(c))
Example #24
    def update(self, entity, params, omit_fields=None):
        params = self._modify_params_for_dates(entity, params)
        params = self._modify_params_for_relationships(entity, params)
        obj = self._get_obj(entity, params)
        relations = self.get_relations(entity)
        mapper = object_mapper(obj)
        for key, value in params.items():
            if omit_fields and key in omit_fields:
                continue

            if isinstance(value, FieldStorage):
                value = value.file.read()

            # this is done to cast any integer columns into ints before they are
            # sent off to the interpreter.  Oracle really needs this.
            try:
                if key not in relations and value:
                    value = self._adapt_type(value, mapper.columns[key])
            except KeyError:
                pass
            setattr(obj, key, value)

        self._remove_related_empty_params(obj, params, omit_fields)
        self.session.flush()
        return obj
Example #25
def copy(self, **kwargs):
    """ Fix problem with multilingual and copy&paste.

        If you paste objects after a copy action, the
        translation_targets and translation_source properties
        should be omitted from copy.
    """
    request = get_current_request()
    action = request.session.get('kotti.paste', (None, None))[1]
    if not action or action == 'copy':
        children = list(self.children)
        copy = self.__class__()
        for prop in object_mapper(self).iterate_properties:
            black_list = tuple(
                list(self.copy_properties_blacklist) +
                TRANSLATE_BLACKLIST
                )
            if prop.key not in black_list:
                setattr(copy, prop.key, getattr(self, prop.key))
        for key, value in kwargs.items():
            setattr(copy, key, value)
        for child in children:
            copy.children.append(child.copy())

        return copy
    else:
        return self.__original_copy(**kwargs)
Example #26
def queue_notification(document, event):
    connection = get_mq_connection()
    if not connection:
        return
    mq_utility = component.getUtility(IMessageQueueConfig)
    domain_class = document.__class__
    unproxied = removeSecurityProxy(document)
    mapper = orm.object_mapper(unproxied)
    primary_key = mapper.primary_key_from_instance(unproxied)[0]
    notifications_utility = component.getUtility(INotificationsUtility)
    message = {
        "document_id": primary_key,
        "document_type": notifications_utility.get_type(domain_class),
        "source": event.source,
        "destination": event.destination
    }
    kwargs = dict(
        exchange=str(mq_utility.get_task_exchange()),
        routing_key=str(mq_utility.get_task_queue()),
        body=simplejson.dumps(message),
        properties=pika.BasicProperties(
            content_type="text/plain", delivery_mode=1
        )
    )
    txn = transaction.get()
    txn.addAfterCommitHook(post_commit_publish, (), kwargs)
Example #27
    def _move(self, direction, context):
        """Swap a line with another line.

        If ``direction`` is ``'up'``, swap with the previous line.
        If ``direction`` is ``'down'``, swap with the next line.

        """
        cond = None
        pkey = object_mapper(self).primary_key[0].key

        if direction == 'up':
            if self._order != 1:
                cond = self._order_column == (self._order - 1)
                values = {self._order_column: self._order}
                self._set_order(self._order - 1)
        elif direction == 'down':
            if self._order < self._max_order(context):
                cond = self._order_column == (self._order + 1)
                values = {self._order_column: self._order}
                self._set_order(self._order + 1)

        if cond is not None and values:
            # Flush it now, so that it works
            self.query.session.flush()
            (self.__class__
             .query.filter(cond).filter(context)
             .filter(getattr(self.__class__, pkey) != getattr(self, pkey))
             .update(values))
Example #28
def update(session, model, oldinstance, newinstance, skipcolumns=[]):
#----------------------------------------------------------------------
    '''
    update an existing element, copying changed column values from a new instance
    
    :param session: session within which update occurs
    :param model: table model
    :param oldinstance: instance of table model which was found in the db
    :param newinstance: instance of table model with updated fields
    :param skipcolumns: list of column names to skip during the update
    :rtype: boolean indicating whether any fields have changed
    '''

    updated = False
    
    # update all columns except those we were told to skip
    for col in object_mapper(newinstance).columns:
        # skip indicated keys
        if col.key in skipcolumns: continue
        
        # if any columns are different, update those columns
        # and return to the caller that it's been updated
        if getattr(oldinstance,col.key) != getattr(newinstance,col.key):
            setattr(oldinstance,col.key,getattr(newinstance,col.key))
            updated = True
    
    return updated
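
A brief usage sketch, with the `Runner` model and its columns assumed: only columns whose values differ are copied onto the persistent instance, and the boolean return tells the caller whether a commit is worthwhile.

existing = session.query(Runner).filter_by(name='Jo').one()
incoming = Runner(name='Jo', club='New Club')
if update(session, Runner, existing, incoming, skipcolumns=['id']):
    session.commit()  # at least one column changed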
Example #29
    def duplicate(self, code=None, session=None):
        """
        Return a Plant that is a duplicate of this Plant with attached
        notes, changes and propagations.
        """
        plant = Plant()
        if not session:
            session = object_session(self)
            if session:
                session.add(plant)

        ignore = ('id', 'changes', 'notes', 'propagations')
        properties = filter(lambda p: p.key not in ignore,
                            object_mapper(self).iterate_properties)
        for prop in properties:
            setattr(plant, prop.key, getattr(self, prop.key))

        plant.code = code

        # duplicate notes
        for note in self.notes:
            new_note = PlantNote()
            for prop in object_mapper(note).iterate_properties:
                setattr(new_note, prop.key, getattr(note, prop.key))

            new_note.id = None
            new_note.plant = plant

        # duplicate changes
        for change in self.changes:
            new_change = PlantChange()
            for prop in object_mapper(change).iterate_properties:
                setattr(new_change, prop.key, getattr(change, prop.key))

            new_change.id = None
            new_change.plant = plant

        # duplicate propagations
        for propagation in self.propagations:
            new_propagation = PlantPropagation()
            for prop in object_mapper(propagation).iterate_properties:
                setattr(new_propagation, prop.key,
                        getattr(propagation, prop.key))

            new_propagation.id = None
            new_propagation.plant = plant
        return plant
Example #30
 def __iter__(self):
     columns = dict(object_mapper(self).columns).keys()
     # NOTE(russellb): Allow models to specify other keys that can be looked
     # up, beyond the actual db columns.  An example would be the 'name'
     # property for an Instance.
     columns.extend(self._get_extra_keys())
     self._i = iter(columns)
     return self
Example #31
def _create_version(obj, session, type_=None, force=False):
    """Create a new version for the given :attr:`obj`.

    :param obj: SQLAlchemy model object.
    :param session: SQLAlchemy session object.
    :param type_: Type of a change.
    :param force: Flag to always create version.

    :type obj: sqlalchemy.ext.declarative.api.Base
    :type session: sqlalchemy.orm.scoping.scoped_session
    :type type_: string
    :type force: bool
    """
    obj_mapper = object_mapper(obj)
    history_mapper = obj.__history_mapper__
    history_class = history_mapper.class_

    obj_state = attributes.instance_state(obj)
    obj_changed = False
    attr = {}

    for obj_mapper_, history_mapper_ in zip(
            obj_mapper.iterate_to_root(),
            history_mapper.iterate_to_root()):

        if history_mapper_.single:
            continue

        for history_column in history_mapper_.local_table.c:
            if _is_versioning_column(history_column):
                continue

            obj_column = obj_mapper_.local_table.c[history_column.key]

            try:
                prop = obj_mapper.get_property_by_column(obj_column)
            except UnmappedColumnError:
                continue

            # Force deferred columns to load.
            if prop.key not in obj_state.dict:
                getattr(obj, prop.key)

            added_, unchanged_, deleted_ = attributes.get_history(obj, prop.key)

            if deleted_:
                attr[prop.key] = deleted_[0]
                obj_changed = True
            elif unchanged_:
                attr[prop.key] = unchanged_[0]
            elif added_:
                obj_changed = True

    if not obj_changed:
        for prop in obj_mapper.iterate_properties:
            if isinstance(prop, RelationshipProperty) and \
               attributes.get_history(
                   obj,
                   prop.key,
                   passive=attributes.PASSIVE_NO_INITIALIZE).has_changes():
                for p in prop.local_columns:
                    if p.foreign_keys:
                        obj_changed = True
                        break
                if obj_changed is True:
                    break

    if not obj_changed and not force:
        return

    attr['version'] = obj.version
    attr['change_type'] = type_
    history = history_class()
    for key, value in attr.items():
        setattr(history, key, value)

    session.add(history)
    obj.version += 1
Example #32
    def execute(self, task, respond_to='remove_nodes_resp'):
        logger.debug("DeletionTask.execute(task=%s)" % task.uuid)
        task_uuid = task.uuid
        logger.debug("Nodes deletion task is running")
        nodes_to_delete = []
        nodes_to_delete_constant = []
        nodes_to_restore = []

        USE_FAKE = settings.FAKE_TASKS or settings.FAKE_TASKS_AMQP

        # no need to call astute if there are no nodes in cluster
        if respond_to == 'remove_cluster_resp' and \
                not list(task.cluster.nodes):
            rcvr = rpc.receiver.NailgunReceiver()
            rcvr.remove_cluster_resp(task_uuid=task_uuid,
                                     status='ready',
                                     progress=100)
            return

        for node in task.cluster.nodes:
            if node.pending_deletion:
                nodes_to_delete.append({
                    'id': node.id,
                    'uid': node.id,
                    'roles': node.roles,
                    'slave_name': TaskHelper.make_slave_name(node.id)
                })

                if USE_FAKE:
                    # only fake tasks
                    new_node = {}
                    keep_attrs = ('id', 'cluster_id', 'roles',
                                  'pending_deletion', 'pending_addition')
                    for prop in object_mapper(node).iterate_properties:
                        if isinstance(
                                prop,
                                ColumnProperty) and prop.key not in keep_attrs:
                            new_node[prop.key] = getattr(node, prop.key)
                    nodes_to_restore.append(new_node)
                    # /only fake tasks

        # this variable is used to iterate over it
        # and be able to delete node from nodes_to_delete safely
        nodes_to_delete_constant = list(nodes_to_delete)

        for node in nodes_to_delete_constant:
            node_db = db().query(Node).get(node['id'])

            slave_name = TaskHelper.make_slave_name(node['id'])
            logger.debug(
                "Removing node from database and pending it "
                "to clean its MBR: %s", slave_name)
            if node_db.status == 'discover':
                logger.info("Node is not deployed yet,"
                            " can't clean MBR: %s", slave_name)
                db().delete(node_db)
                db().commit()

                nodes_to_delete.remove(node)

        msg_delete = make_astute_message(
            'remove_nodes', respond_to, {
                'task_uuid': task.uuid,
                'nodes': nodes_to_delete,
                'engine': {
                    'url': settings.COBBLER_URL,
                    'username': settings.COBBLER_USER,
                    'password': settings.COBBLER_PASSWORD,
                    'master_ip': settings.MASTER_IP,
                }
            })
        # only fake tasks
        if USE_FAKE and nodes_to_restore:
            msg_delete['args']['nodes_to_restore'] = nodes_to_restore
        # /only fake tasks
        logger.debug("Calling rpc remove_nodes method")
        rpc.cast('naily', msg_delete)
Example #33
def stringKey(instance):
    unproxied = removeSecurityProxy(instance)
    mapper = orm.object_mapper(unproxied)
    primary_key = mapper.primary_key_from_instance(unproxied)
    identity_key = '-'.join(map(str, primary_key))
    return "obj-%s" % identity_key
Example #34
 def dictify_relationship(self, obj):
     dict_ = {}
     for col in object_mapper(obj).primary_key:
         dict_[col.name] = getattr(obj, col.name)
     return dict_
Example #35
 def to_dict(self):
     mapper = object_mapper(self)
     return odict(
         (key, getattr(self, key))
         for key in mapper.columns.keys()
     )
Example #36
 def to_dict(self):
     return {
         col.name: getattr(self, col.name)
         for col in object_mapper(self).mapped_table.c
     }
Example #37
def create_version(obj, session, deleted=False):
    obj_mapper = object_mapper(obj)
    history_mapper = obj.__history_mapper__
    history_cls = history_mapper.class_

    obj_state = attributes.instance_state(obj)

    attr = {}

    obj_changed = False

    for omi, hmi in zip(obj_mapper.iterate_to_root(),
                        history_mapper.iterate_to_root()):
        if hmi.single:
            continue

        for hist_col in hmi.local_table.c:

            obj_col = omi.local_table.c[hist_col.key]

            # get the value of the
            # attribute based on the MapperProperty related to the
            # mapped column.  this will allow usage of MapperProperties
            # that have a different keyname than that of the mapped column.
            try:
                prop = obj_mapper.get_property_by_column(obj_col)
            except UnmappedColumnError:
                # in the case of single table inheritance, there may be
                # columns on the mapped table intended for the subclass only.
                # the "unmapped" status of the subclass column on the
                # base class is a feature of the declarative module as of sqla 0.5.2.
                continue

            # expired object attributes and also deferred cols might not be in the
            # dict.  force it to load no matter what by using getattr().
            if prop.key not in obj_state.dict:
                getattr(obj, prop.key)

            a, u, d = attributes.get_history(obj, prop.key)

            if d:
                attr[hist_col.key] = d[0]
                obj_changed = True
            elif u:
                attr[hist_col.key] = u[0]
            else:
                # if the attribute had no value.
                attr[hist_col.key] = a[0]
                obj_changed = True

    if not obj_changed:
        # not changed, but we have relationships.  OK
        # check those too
        for prop in obj_mapper.iterate_properties:
            if isinstance(prop,
                          RelationshipProperty) and attributes.get_history(
                              obj, prop.key).has_changes():
                obj_changed = True
                break

    # if not obj_changed and not deleted:
    #    return

    hist = history_cls()
    for key, value in attr.iteritems():
        setattr(hist, key, value)
    session.add(hist)
Example #38
 def __iter__(self):
     columns = list(dict(object_mapper(self).columns).keys())
     return ModelIterator(self, iter(columns))
Example #39
def create_id(event):
    """Create an event (sitting or session) identifier of the form <type>-<id>
    """
    mapper = orm.object_mapper(event)
    return "%s-%d" % (naming.polymorphic_identity(
        event.__class__), mapper.primary_key_from_instance(event)[0])
Example #40
 def sa_mapper(self):
     return object_mapper(self)
Example #41
 def __iter__(self):
     return ((c.name, getattr(self, c.name))
             for c in object_mapper(self).columns)
Example #42
def retrieve_private_key(sa_variant):
    """ Return the Private Key name of a given object/class \'sa_variant\' """
    mapper = class_mapper(sa_variant) if inspect.isclass(sa_variant) \
        else object_mapper(sa_variant)

    return mapper.primary_key[0].key
Example #43
def _to_dict(instance,
             deep=None,
             exclude=None,
             include=None,
             exclude_relations=None,
             include_relations=None):
    """Returns a dictionary representing the fields of the specified `instance`
    of a SQLAlchemy model.

    `deep` is a dictionary containing a mapping from a relation name (for a
    relation of `instance`) to either a list or a dictionary. This is a
    recursive structure which represents the `deep` argument when calling
    :func:`!_to_dict` on related instances. When an empty list is encountered,
    :func:`!_to_dict` returns a list of the string representations of the
    related instances.

    If either `include` or `exclude` is not ``None``, exactly one of them must
    be specified. If both are not ``None``, then this function will raise a
    :exc:`ValueError`. `exclude` must be a list of strings specifying the
    columns which will *not* be present in the returned dictionary
    representation of the object (in other words, it is a
    blacklist). Similarly, `include` specifies the only columns which will be
    present in the returned dictionary (in other words, it is a whitelist).

    .. note::

       If `include` is an iterable of length zero (like the empty tuple or the
       empty list), then the returned dictionary will be empty. If `include` is
       ``None``, then the returned dictionary will include all columns not
       excluded by `exclude`.

    `include_relations` is a dictionary mapping strings representing relation
    fields on the specified `instance` to a list of strings representing the
    names of fields on the related model which should be included in the
    returned dictionary; `exclude_relations` is similar.

    """
    if (exclude is not None or exclude_relations is not None) and \
            (include is not None or include_relations is not None):
        raise ValueError('Cannot specify both include and exclude.')
    # create the dictionary mapping column name to value
    columns = (p.key for p in object_mapper(instance).iterate_properties
               if isinstance(p, ColumnProperty))
    # filter the columns based on exclude and include values
    if exclude is not None:
        columns = (c for c in columns if c not in exclude)
    elif include is not None:
        columns = (c for c in columns if c in include)
    result = dict((col, getattr(instance, col)) for col in columns)
    # Convert datetime and date objects to ISO 8601 format.
    #
    # TODO We can get rid of this when issue #33 is resolved.
    for key, value in result.items():
        if isinstance(value, datetime.date):
            result[key] = value.isoformat()
    # recursively call _to_dict on each of the `deep` relations
    deep = deep or {}
    for relation, rdeep in deep.iteritems():
        # Get the related value so we can see if it is None, a list, a query
        # (as specified by a dynamic relationship loader), or an actual
        # instance of a model.
        relatedvalue = getattr(instance, relation)
        if relatedvalue is None:
            result[relation] = None
            continue
        # Determine the included and excluded fields for the related model.
        newexclude = None
        newinclude = None
        if exclude_relations is not None and relation in exclude_relations:
            newexclude = exclude_relations[relation]
        elif (include_relations is not None and relation in include_relations):
            newinclude = include_relations[relation]
        if isinstance(relatedvalue, list):
            result[relation] = [
                _to_dict(inst, rdeep, exclude=newexclude, include=newinclude)
                for inst in relatedvalue
            ]
        else:
            result[relation] = _to_dict(relatedvalue.one(),
                                        rdeep,
                                        exclude=newexclude,
                                        include=newinclude)
    return result
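
`include` and `exclude` act as mutually exclusive whitelist/blacklist filters, and `include_relations`/`exclude_relations` apply the same idea one level down during the `deep` recursion. A hedged sketch with a hypothetical `person` instance and its `computers` relation:

_to_dict(person, include=['id', 'name'])          # whitelist: only these columns
_to_dict(person,
         deep={'computers': {}},
         exclude=['birth_date'],
         exclude_relations={'computers': ['owner_id']})
# Passing include together with exclude raises ValueError.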
Example #44
 def __iter__(self):
     self._i = iter(list(object_mapper(self).attrs.items()))
     # product_id, <ColumnProperty>
     return self
Example #45
 def _getKey(self, ob):
     mapper = orm.object_mapper(ob)
     primary_key = mapper.primary_key_from_instance(ob)[0]
     return primary_key, unicode(ob.__class__.__name__)
Example #46
def to_dict(instance,
            deep=None,
            exclude=None,
            include=None,
            exclude_relations=None,
            include_relations=None,
            include_methods=None):
    """Returns a dictionary representing the fields of the specified `instance`
    of a SQLAlchemy model.

    The returned dictionary is suitable as an argument to
    :func:`flask.jsonify`; :class:`datetime.date` and :class:`uuid.UUID`
    objects are converted to string representations, so no special JSON encoder
    behavior is required.

    `deep` is a dictionary containing a mapping from a relation name (for a
    relation of `instance`) to either a list or a dictionary. This is a
    recursive structure which represents the `deep` argument when calling
    :func:`!_to_dict` on related instances. When an empty list is encountered,
    :func:`!_to_dict` returns a list of the string representations of the
    related instances.

    If either `include` or `exclude` is not ``None``, exactly one of them must
    be specified. If both are not ``None``, then this function will raise a
    :exc:`ValueError`. `exclude` must be a list of strings specifying the
    columns which will *not* be present in the returned dictionary
    representation of the object (in other words, it is a
    blacklist). Similarly, `include` specifies the only columns which will be
    present in the returned dictionary (in other words, it is a whitelist).

    .. note::

       If `include` is an iterable of length zero (like the empty tuple or the
       empty list), then the returned dictionary will be empty. If `include` is
       ``None``, then the returned dictionary will include all columns not
       excluded by `exclude`.

    `include_relations` is a dictionary mapping strings representing relation
    fields on the specified `instance` to a list of strings representing the
    names of fields on the related model which should be included in the
    returned dictionary; `exclude_relations` is similar.

    `include_methods` is a list of method names which will be called and
    their return values added to the returned dictionary.

    """
    if (exclude is not None or exclude_relations is not None) and \
            (include is not None or include_relations is not None):
        raise ValueError('Cannot specify both include and exclude.')
    # create a list of names of columns, including hybrid properties
    try:
        columns = [
            p.key for p in object_mapper(instance).iterate_properties
            if isinstance(p, ColumnProperty)
        ]
    except UnmappedInstanceError:
        return instance
    for parent in type(instance).mro():
        columns += [
            key for key, value in parent.__dict__.iteritems()
            if isinstance(value, hybrid_property)
        ]
    # filter the columns based on exclude and include values
    if exclude is not None:
        columns = (c for c in columns if c not in exclude)
    elif include is not None:
        columns = (c for c in columns if c in include)
    # create a dictionary mapping column name to value
    result = dict((col, getattr(instance, col)) for col in columns
                  if not (col.startswith('__') or col in COLUMN_BLACKLIST))
    # add any included methods
    if include_methods is not None:
        result.update(
            dict((method, getattr(instance, method)())
                 for method in include_methods if '.' not in method))
    # Check for objects in the dictionary that may not be serializable by
    # default. Specifically, convert datetime and date objects to ISO 8601
    # format, and convert UUID objects to hexadecimal strings.
    for key, value in result.items():
        # TODO We can get rid of this when issue #33 is resolved.
        if isinstance(value, datetime.date):
            result[key] = value.isoformat()
        elif isinstance(value, uuid.UUID):
            result[key] = str(value)
        elif is_mapped_class(type(value)):
            result[key] = to_dict(value)
    # recursively call _to_dict on each of the `deep` relations
    deep = deep or {}
    for relation, rdeep in deep.iteritems():
        # Get the related value so we can see if it is None, a list, a query
        # (as specified by a dynamic relationship loader), or an actual
        # instance of a model.
        relatedvalue = getattr(instance, relation)
        if relatedvalue is None:
            result[relation] = None
            continue
        # Determine the included and excluded fields for the related model.
        newexclude = None
        newinclude = None
        if exclude_relations is not None and relation in exclude_relations:
            newexclude = exclude_relations[relation]
        elif (include_relations is not None and relation in include_relations):
            newinclude = include_relations[relation]
        # Determine the included methods for the related model.
        newmethods = None
        if include_methods is not None:
            newmethods = [
                method.split('.', 1)[1] for method in include_methods
                if method.split('.', 1)[0] == relation
            ]
        if is_like_list(instance, relation):
            result[relation] = [
                to_dict(inst,
                        rdeep,
                        exclude=newexclude,
                        include=newinclude,
                        include_methods=newmethods) for inst in relatedvalue
            ]
            continue
        # If the related value is dynamically loaded, resolve the query to get
        # the single instance.
        if isinstance(relatedvalue, Query):
            relatedvalue = relatedvalue.one()
        result[relation] = to_dict(relatedvalue,
                                   rdeep,
                                   exclude=newexclude,
                                   include=newinclude,
                                   include_methods=newmethods)
    return result
Example #47
def to_dict(instance,
            options=collections.defaultdict(bool),
            include=None,
            exclude=None):
    """
        Translates sqlalchemy instance to dictionary

        Inspired by flask-restless.helpers.to_dict

        :param instance:
        :param options: Dictionary of flags
                          * execute_queries: Execute Query Objects
                          * execute_hybrids: Execute Hybrids
        :param include: Columns and Relations that should be included for an instance
        :param exclude: Columns and Relations that should not be included for an instance
    """
    if exclude is not None and include is not None:
        raise ValueError('Cannot specify both include and exclude.')

    # None
    if instance is None:
        return None

    # Int / Float / Str
    if isinstance(instance, __basetypes__):
        return instance

    # Date & Time
    if isinstance(instance, __datetypes__):
        return instance.isoformat()

    # Any Dictionary
    if isinstance(instance, dict) or hasattr(instance, 'items'):
        return {
            k: to_dict(v, options=options, **to_deep(include, exclude, k))
            for k, v in instance.items()
        }

    # Any List
    if isinstance(instance, list) or hasattr(instance, '__iter__'):
        return [
            to_dict(x, options=options, include=include, exclude=exclude)
            for x in instance
        ]

    # Additional classes:
    #  - decimal.Decimal: created by sqlalchemy.automap/reflect
    if isinstance(instance, __clsztypes__):
        return str(instance)

    # Include Columns given
    if isinstance(include, collections.Iterable):
        rtn = {}
        for column in include:
            rtn[column] = to_dict(getattr(instance, column),
                                  **to_deep(include, exclude, column))
        return rtn

    # Include all columns if it is a SQLAlchemy instance
    try:
        columns = ModelWrapper.get_columns(object_mapper(instance)).keys()
        relations = ModelWrapper.get_relations(object_mapper(instance)).keys()
        attributes = ModelWrapper.get_attributes(
            object_mapper(instance)).keys()
        proxies = [
            p.key for p in ModelWrapper.get_proxies(object_mapper(instance))
        ]
        hybrids = [
            p.key for p in ModelWrapper.get_hybrids(object_mapper(instance))
        ]
        attributes = itertools.chain(columns, relations, proxies, hybrids,
                                     attributes)
    except UnmappedInstanceError:
        raise DictConvertionError("Could not convert argument to plain dict")

    rtn = {}

    # Include AssociationProxy and Hybrids (may be list/dict/col)
    for column in attributes:

        if exclude is not None and column in exclude:
            continue
        if column in rtn:
            continue

        # Prevent unnec. db calls
        if include is False and column not in hybrids and column not in columns:
            continue

        if column not in instance.__dict__ and not options.get(
                'execute_queries', True):
            if column not in hybrids or not options.get(
                    'execute_hybrids', True):
                continue

        # Get Attribute
        node = getattr(instance, column)

        # Don't execute queries when stopping further recursion
        if include is False and isinstance(node, Query):
            continue
        # Otherwise query it
        elif isinstance(node, Query) and options['execute_queries']:
            node = node.all()

        # Convert it
        rtn[column] = to_dict(node, **to_deep(include, exclude, column))
    return rtn
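A sketch of how the function above might be called; the User model, session, and column names are hypothetical, and the exact recursion behaviour of `include`/`exclude` depends on the to_deep() helper defined elsewhere in that project:

user = session.query(User).first()
# Plain dump without triggering dynamic relationship queries or hybrids.
shallow = to_dict(user, options={'execute_queries': False,
                                 'execute_hybrids': False})
# Only the named columns of the instance.
partial = to_dict(user, include=['name', 'email'])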
Example #48
0
 def __repr__(self):
     return '%s-%s' % (
         object_mapper(self).class_.__name__, getattr(self, 'id', self.pk))
Example #49
0
    def execute(self, task, respond_to='remove_nodes_resp'):
        logger.debug("DeletionTask.execute(task=%s)" % task.uuid)
        task_uuid = task.uuid
        logger.debug("Nodes deletion task is running")
        nodes_to_delete = []
        nodes_to_delete_constant = []
        nodes_to_restore = []

        USE_FAKE = settings.FAKE_TASKS or settings.FAKE_TASKS_AMQP

        # no need to call naily if there are no nodes in cluster
        if respond_to == 'remove_cluster_resp' and \
                not list(task.cluster.nodes):
            rcvr = rpc.receiver.NailgunReceiver()
            rcvr.remove_cluster_resp(task_uuid=task_uuid,
                                     status='ready',
                                     progress=100)
            return

        for node in task.cluster.nodes:
            if node.pending_deletion:
                nodes_to_delete.append({
                    'id': node.id,
                    'uid': node.id,
                    'roles': node.roles
                })

                if USE_FAKE:
                    # only fake tasks
                    new_node = {}
                    keep_attrs = ('id', 'cluster_id', 'roles',
                                  'pending_deletion', 'pending_addition')
                    for prop in object_mapper(node).iterate_properties:
                        if isinstance(
                                prop,
                                ColumnProperty) and prop.key not in keep_attrs:
                            new_node[prop.key] = getattr(node, prop.key)
                    nodes_to_restore.append(new_node)
                    # /only fake tasks

        # this variable is used to iterate over it
        # and be able to delete node from nodes_to_delete safely
        nodes_to_delete_constant = list(nodes_to_delete)

        for node in nodes_to_delete_constant:
            node_db = db().query(Node).get(node['id'])

            slave_name = TaskHelper.make_slave_name(node['id'])
            logger.debug(
                "Removing node from database and pending it "
                "to clean its MBR: %s", slave_name)
            if not node_db.online or node_db.status == 'discover':
                logger.info(
                    "Node is offline or not deployed yet,"
                    " can't clean MBR: %s", slave_name)
                db().delete(node_db)
                db().commit()

                nodes_to_delete.remove(node)

        # only real tasks
        engine_nodes = []
        if not USE_FAKE:
            for node in nodes_to_delete_constant:
                slave_name = TaskHelper.make_slave_name(node['id'])
                logger.debug("Pending node to be removed from cobbler %s",
                             slave_name)
                engine_nodes.append(slave_name)
                try:
                    node_db = db().query(Node).get(node['id'])
                    if node_db and node_db.fqdn:
                        node_hostname = node_db.fqdn
                    else:
                        node_hostname = TaskHelper.make_slave_fqdn(node['id'])
                    logger.info("Removing node cert from puppet: %s",
                                node_hostname)
                    cmd = "puppet cert clean {0}".format(node_hostname)
                    proc = subprocess.Popen(shlex.split(cmd),
                                            shell=False,
                                            stdout=subprocess.PIPE,
                                            stderr=subprocess.PIPE)
                    p_stdout, p_stderr = proc.communicate()
                    logger.info("'{0}' executed, STDOUT: '{1}',"
                                " STDERR: '{2}'".format(
                                    cmd, p_stdout, p_stderr))
                except OSError:
                    logger.warning(
                        "'{0}' returned non-zero exit code".format(cmd))
                except Exception as e:
                    logger.warning("Exception occurred while trying to \
                            remove the system from Cobbler: '{0}'".format(
                        e.message))

        msg_delete = {
            'method': 'remove_nodes',
            'respond_to': respond_to,
            'args': {
                'task_uuid': task.uuid,
                'nodes': nodes_to_delete,
                'engine': {
                    'url': settings.COBBLER_URL,
                    'username': settings.COBBLER_USER,
                    'password': settings.COBBLER_PASSWORD,
                },
                'engine_nodes': engine_nodes
            }
        }
        # only fake tasks
        if USE_FAKE and nodes_to_restore:
            msg_delete['args']['nodes_to_restore'] = nodes_to_restore
        # /only fake tasks
        logger.debug("Calling rpc remove_nodes method")
        rpc.cast('naily', msg_delete)
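The fake-task branch above snapshots a node's plain column values into a dict; a standalone sketch of that technique (any mapped instance works, and the skip tuple is an optional addition here):

from sqlalchemy.orm import object_mapper, ColumnProperty

def snapshot_columns(obj, skip=()):
    """Copy the plain column values of a mapped instance into a dict."""
    return {
        prop.key: getattr(obj, prop.key)
        for prop in object_mapper(obj).iterate_properties
        if isinstance(prop, ColumnProperty) and prop.key not in skip
    }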
Example #50
0
 def __iter__(self):
     columns = dict(object_mapper(self).columns).keys()
     self._i = iter(columns)
     return self
Example #51
0
 def __iter__(self):
     self._i = iter(orm.object_mapper(self).columns)
     return self
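Examples #50 and #51 (and several later snippets) only wire up the iterator; a complete sketch of the usual mixin, where the next() method is an assumption about the omitted remainder and attribute names are assumed to match column names:

from sqlalchemy.orm import object_mapper

class IterableModelMixin(object):
    """Yield (column name, value) pairs when iterating a mapped instance."""

    def __iter__(self):
        self._i = iter(object_mapper(self).columns)
        return self

    def next(self):  # would be __next__ on Python 3
        column = next(self._i)
        return column.name, getattr(self, column.name)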
Example #52
0
             subject="Notice - RabbitMQ")
     notify_serialization_failure(None,
                                  body="Failed to find running RabbitMQ",
                                  subject="Notice - RabbitMQ")
     return
 wf_state = None
 try:
     wfc = IWorkflowController(obj)
     wf_state = wfc.state_controller.get_state()
 except InvalidStateError:
     #this is probably a draft document - skip queueing
     log.warning("Could not get workflow state for object %s. "
                 "State: %s", obj, wf_state)
     return
 unproxied = zope.security.proxy.removeSecurityProxy(obj)
 mapper = object_mapper(unproxied)
 primary_key = mapper.primary_key_from_instance(unproxied)
 #!+CAPI(mb, Aug-2012) capi lookup in as at r9707 fails for some keys
 #e.g. get_type_info(instance).workflow_key resolves while
 #get_type_info(same_workflow_key) raises KeyError
 message = {
     "obj_key": primary_key,
     "obj_type": unproxied.__class__.__name__
 }
 kwargs = dict(exchange=SERIALIZE_EXCHANGE,
               routing_key=SERIALIZE_ROUTING_KEY,
               body=simplejson.dumps(message),
               properties=pika.BasicProperties(content_type="text/plain",
                                               delivery_mode=2))
 txn = transaction.get()
 txn.addAfterCommitHook(bungeni.core.notifications.post_commit_publish, (),
                        kwargs)
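Examples #52, #54 and #57 all build their message key with mapper.primary_key_from_instance(); a minimal sketch of what that call returns (User and the session are hypothetical):

from sqlalchemy.orm import object_mapper

user = session.query(User).get(7)
mapper = object_mapper(user)
# Returns the primary-key values as a sequence, e.g. [7] here;
# composite keys yield one element per key column.
pk = mapper.primary_key_from_instance(user)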
Example #53
0
def createRevision(instance, deleted=False):
    """
    Inspects the instance for changes and bumps its previous row data to
    the audit table.
    """

    liveMapper = object_mapper(instance)
    auditMapper = instance.__audit_mapper__
    instanceState = attributes.instance_state(instance)

    values = dict()
    changed = False

    for lm, am in zip(liveMapper.iterate_to_root(),
                      auditMapper.iterate_to_root()):
        if not am.single:
            for auditColumn in am.local_table.c:
                if auditColumn.name == 'revision':
                    continue

                liveColumn = lm.local_table.c[auditColumn.key]

                # get the value of the attribute based on the MapperProperty
                # related to the mapped column.  this will allow usage of
                # MapperProperties that have a different keyname than that
                # of the mapped column.
                try:
                    liveProperty = \
                        liveMapper.get_property_by_column(liveColumn)
                except UnmappedColumnError:
                    # in the case of single table inheritance, there may be
                    # columns on the mapped table intended for the subclass
                    # only. the 'unmapped' status of the subclass column on
                    # the base class is a feature of the declarative module
                    # as of sqla 0.5.2.
                    continue

                # expired object attributes and also deferred cols might not
                # be in the dict.  force it to load no matter what by using
                # getattr().
                if liveProperty.key not in instanceState.dict:
                    getattr(instance, liveProperty.key)

                # (new value for live table / unchanged value / previous value)
                (new, unchanged, previous) = \
                    attributes.get_history(instance, liveProperty.key)

                if unchanged:
                    # Value was not modified
                    values[auditColumn.key] = unchanged[0]
                else:
                    try:
                        # Attempt to get the previous value
                        values[auditColumn.key] = previous[0]
                    except IndexError:
                        # If the value does not have any previous values
                        # assume it was NULL from a ``flush``, which appears
                        # to be the case most of the time. SA tends to just
                        # use an empty list for previously NULL values on
                        # ``flush`` (yet strangely enough uses a list
                        # containing ``None`` on ``commit``...)
                        # We DO NOT by any means want to use the new value
                        # otherwise it will look as if nothing changed
                        values[auditColumn.key] = None
                    finally:
                        changed = True

    if changed or deleted:
        # Commit previous values to audit table
        session = object_session(instance)
        values['revision'] = instance.revision
        session.add(auditMapper.class_(**values))
        instance.revision += 1
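A brief sketch of the attributes.get_history() call the audit code above relies on; Article and the session are hypothetical, and the attribute must be loaded for the previous value to appear:

from sqlalchemy.orm import attributes

article = session.query(Article).first()
article.title = u'new title'
# get_history() returns (added, unchanged, deleted): after the assignment
# the new value sits in `added` and the previous value in `deleted`.
added, unchanged, deleted = attributes.get_history(article, 'title')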
Example #54
0
 def _context(self):
     unwrapped = removeSecurityProxy(self.context)
     mapper = orm.object_mapper(unwrapped)
     primary_key = mapper.primary_key_from_instance(unwrapped)[0]
     return primary_key, unwrapped.__class__.__name__.lower()
Example #55
0
File: base.py Project: zjjott/html
 def __iter__(self):
     # TODO: handle the case where the SQL column name differs from the ORM attribute name
     self._i = iter(object_mapper(self).columns)
     return self
Example #56
0
 def __iter__(self):
     """Iterate over table columns"""
     self._i = iter(object_mapper(self).columns)
     return self
Example #57
0
 def __init__(self, context):
     self.context = context
     trusted = removeSecurityProxy(context)
     self.oid = orm.object_mapper(trusted).primary_key_from_instance(
         trusted)[0]
     self.object_type = context.__class__.__name__.lower()
Example #58
0
    def __iter__(self):
        self._mapper = object_mapper(self)
        self._col_iter = iter(self._mapper.columns)

        return self
Example #59
0
 def keyfunc(value):
     m = object_mapper(value)
     return tuple(
         [m._get_attr_by_column(value, c) for c in mapping_spec])
Example #60
0
 def keyfunc(value):
     m = object_mapper(value)
     return m._get_attr_by_column(value, mapping_spec)
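These keyfuncs mirror what SQLAlchemy's column_mapped_collection builds internally via Mapper._get_attr_by_column(); a sketch of the public way to get the same keyed collection (the User/Address models are hypothetical):

from sqlalchemy import Column, ForeignKey, Integer, String
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
from sqlalchemy.orm.collections import column_mapped_collection

Base = declarative_base()

class Address(Base):
    __tablename__ = 'address'
    id = Column(Integer, primary_key=True)
    user_id = Column(Integer, ForeignKey('user.id'))
    email_address = Column(String(255))

class User(Base):
    __tablename__ = 'user'
    id = Column(Integer, primary_key=True)
    # Dict-like collection keyed by Address.email_address; under the hood
    # this uses a keyfunc equivalent to the one in Example #60.
    addresses = relationship(
        Address,
        collection_class=column_mapped_collection(
            Address.__table__.c.email_address))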