Example No. 1
 def update_from_dict(self, values_dict, ignore_none=False):
     """Update a DB object instance from values in a dict by matching the dict keys to DB object attributes."""
     col_names = self.get_col_names()
     for key, value in values_dict.items():
         if (not ignore_none or value is not None) and key in col_names:
             set_attribute(self, key, value)
     return self
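A minimal, self-contained sketch of the pattern above. The User model, the column lookup standing in for get_col_names(), and the in-memory SQLite engine are illustrative assumptions, not part of the original project:

from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import Session, declarative_base
from sqlalchemy.orm.attributes import set_attribute

Base = declarative_base()

class User(Base):  # hypothetical model for illustration
    __tablename__ = 'users'
    id = Column(Integer, primary_key=True)
    name = Column(String(50))
    email = Column(String(120))

    def update_from_dict(self, values_dict, ignore_none=False):
        """Copy matching dict keys onto mapped columns via set_attribute."""
        col_names = {c.name for c in self.__table__.columns}
        for key, value in values_dict.items():
            if (not ignore_none or value is not None) and key in col_names:
                set_attribute(self, key, value)
        return self

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
with Session(engine) as session:
    user = User(name='old name', email='old@example.org')
    session.add(user)
    session.commit()
    # 'email' is None and ignore_none=True, so only 'name' is updated.
    user.update_from_dict({'name': 'new name', 'email': None}, ignore_none=True)
    session.commit()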
Example No. 2
    def update_dict(self, data):
        start_date = data.pop('start_date', None)
        end_date = data.pop('end_date', None)
        set_attribute(self, 'during', DateInterval([start_date, end_date]))

        if data.get('accounts') is not None:
            # Diff existing accounts against the incoming (type, value) pairs:
            # keep matches, drop stale rows, append the new ones.
            new_accounts = set([(a['type'], a['value'])
                                for a in data.pop('accounts', [])])
            for account in self.accounts:
                key = (account.type, account.value)
                if key in new_accounts:
                    new_accounts.remove(key)
                else:
                    self.accounts.remove(account)
            for new_account in new_accounts:
                type, value = new_account
                self.accounts.append(
                    GroupAccount(type=type,
                                 value=value,
                                 group_id=data.get('id')))

        for key, value in data.items():
            if key.startswith('_'):
                continue
            set_attribute(self, key, value)
Example No. 3
    def update_dict(self, data):
        if data.get('owns') is not None:
            owns = data.pop('owns', [])
            owned_person_ids = {
                o['person_id']
                for o in owns if o.get('person_id')
            }
            owned_group_ids = {
                o['group_id']
                for o in owns if o.get('group_id')
            }
            for owner in self.owns:
                if owner.person_id and owner.person_id in owned_person_ids:
                    owned_person_ids.remove(owner.person_id)
                elif owner.group_id and owner.group_id in owned_group_ids:
                    owned_group_ids.remove(owner.group_id)
                else:
                    self.owns.remove(owner)
            for person_id in owned_person_ids:
                self.owns.append(
                    Owner(person_id=person_id, user_id=data.get('id')))
            for group_id in owned_group_ids:
                self.owns.append(
                    Owner(group_id=group_id, user_id=data.get('id')))

        for key, value in data.items():
            set_attribute(self, key, value)
Example No. 4
    def post(self, id_item, relation):
        """Custom method for updating many-to-many relations"""
        item = self._get_item(id_item)
        rel = inspect(self.model).relationships.get(relation)

        if not rel:
            raise Exception('Model %s has no relation %s' %
                            (self.model, relation))

        if not rel.uselist:
            raise Exception(
                'Method post(id_item, relation) is only for many-to-many relations.'
            )

        if not isinstance(request.json, list):
            raise Exception('You must provide an array value in request.json')

        cls = rel.mapper.class_
        json = list(request.json)
        fields = rel.mapper.columns.keys()

        # Delete current items
        for r in getattr(item, relation):
            self.db_session.delete(r)

        items_to_set = [
            cls(**self._filter_with_table(d, fields)) for d in json
        ]
        for r in items_to_set:
            self.db_session.add(r)
        set_attribute(item, relation, items_to_set)
        self.db_session.commit()
        return self.get(id_item)
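The core of the method above, stripped of the Flask/request plumbing, is "delete the old related rows, then hand a fresh list to set_attribute". A rough standalone sketch of that pattern; the Parent and Child models and their columns are hypothetical:

from sqlalchemy import Column, ForeignKey, Integer, String, create_engine
from sqlalchemy.orm import Session, declarative_base, relationship
from sqlalchemy.orm.attributes import set_attribute

Base = declarative_base()

class Parent(Base):
    __tablename__ = 'parents'
    id = Column(Integer, primary_key=True)
    children = relationship('Child')

class Child(Base):
    __tablename__ = 'children'
    id = Column(Integer, primary_key=True)
    parent_id = Column(Integer, ForeignKey('parents.id'))
    name = Column(String(50))

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
with Session(engine) as session:
    parent = Parent(children=[Child(name='old')])
    session.add(parent)
    session.commit()

    # Delete the current rows, then replace the whole collection in one call.
    for child in parent.children:
        session.delete(child)
    new_children = [Child(name=n) for n in ('a', 'b')]
    session.add_all(new_children)
    set_attribute(parent, 'children', new_children)
    session.commit()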
Example No. 5
    def update_dict(self, data):

        start_date = data.pop('start_date', None)
        end_date = data.pop('end_date', None)
        set_attribute(self, 'during', DateInterval([start_date, end_date]))
        for key, value in data.items():
            set_attribute(self, key, value)
Example No. 6
 def update_dict(self, data):
     if 'accounts' in data:
         new_accounts = set([(a['type'], a['value'])
                             for a in data.pop('accounts', [])])
         for account in self.accounts:
             key = (account.type, account.value)
             if key in new_accounts:
                 new_accounts.remove(key)
             else:
                 self.accounts.remove(account)
         for new_account in new_accounts:
             type, value = new_account
             self.accounts.append(
                 PersonAccount(type=type,
                               value=value,
                               person_id=data.get('id')))
     if 'memberships' in data:
         # only update memberships if key is present
         new_memberships = set([(m['group_id'], m.get('start_date'),
                                 m.get('end_date'))
                                for m in data.pop('memberships', [])])
         for membership in self.memberships:
             membership_dict = membership.to_dict()
             key = (membership_dict['group_id'],
                    membership_dict.get('start_date'),
                    membership_dict.get('end_date'))
             if key in new_memberships:
                 new_memberships.remove(key)
             else:
                 self.memberships.remove(membership)
         for new_membership in new_memberships:
             group_id, start_date, end_date = new_membership
             self.memberships.append(
                 Membership.from_dict(
                     dict(group_id=group_id,
                          person_id=data.get('id'),
                          start_date=start_date,
                          end_date=end_date)))
     if 'positions' in data:
         # only update positions if key is present
         new_positions = dict([((m['group_id'], m.get('start_date'),
                                 m.get('end_date')), m)
                               for m in data.pop('positions', [])])
         for position in self.positions:
             position_dict = position.to_dict()
             key = (position_dict['group_id'],
                    position_dict.get('start_date'),
                    position_dict.get('end_date'))
             if key in new_positions:
                 del new_positions[key]
             else:
                 self.positions.remove(position)
         for new_position in new_positions.values():
             self.positions.append(Position.from_dict(new_position))
     for key, value in data.items():
         set_attribute(self, key, value)
Example No. 7
    def update(self, commit=True, **kwargs):
        """Update specific fields of a record."""
        for attr, value in kwargs.items():
            set_attribute(self, attr, value)
            if attr in self.__table__.columns:
                flag_modified(self, attr)

        if commit:
            return self.save()
        else:
            return self
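For context on the flag_modified call above: it marks a column attribute as changed even when SQLAlchemy would not detect a change on its own, which matters for values mutated in place. A small standalone sketch; the Document model and its JSON column are assumptions for illustration:

from sqlalchemy import JSON, Column, Integer, create_engine
from sqlalchemy.orm import Session, declarative_base
from sqlalchemy.orm.attributes import flag_modified, set_attribute

Base = declarative_base()

class Document(Base):
    __tablename__ = 'documents'
    id = Column(Integer, primary_key=True)
    meta = Column(JSON, default=dict)

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
with Session(engine) as session:
    doc = Document(meta={'views': 0})
    session.add(doc)
    session.commit()

    doc.meta['views'] += 1              # in-place mutation of a plain JSON column
    set_attribute(doc, 'meta', doc.meta)
    flag_modified(doc, 'meta')          # make sure the UPDATE includes the column
    session.commit()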
Example No. 8
    def update_sql_properties(self, instance, schema, properties):
        from sqlalchemy.orm.attributes import set_attribute

        for field in schema:
            # Skip link fields; only plain column properties are updated here.
            if field.get('custom', {}).get('islink'):
                continue
            key = field["name"]
            if key in properties:
                set_attribute(instance, key, properties[key])
Example No. 9
    def update_by_ma(self, schema, instance, commit=True):
        """根据marshmallow以及SQLa实例更新

        :param                  schema: Schema      Schema类或实例
        :param                  instance: object    Model对象
        :param                  commit: bool        是否commit

        此方法用以更新Sqla实例的字段,基于一个marshmallow类或实例,
        根据marshmallow类的load字段加载。由于需要一个临时变量的
        instance,对于需要同时处理复杂relationship的子关系需要增
        加指定active_history=True来跟踪变化以维持正确的加载。
        形如:
        >>> class Remote(Model):
                id = Column(Integer(), primary_key=True)
                name = Column(String(80))
        >>> class Local(Model):
                id = Column(Integer(), primary_key=True)
                remote_id = Column(Integer())
                remote = relationship("Remote", active_history=True,
                            backref=backref('local', active_history=True)
                            )

        在这里需要修改Remote中name以及关系时。
        """
        from sqlalchemy.orm.attributes import (
            get_attribute,
            del_attribute,
            set_attribute,
        )
        from marshmallow import Schema

        if not isinstance(schema, Schema):
            schema = schema()

        db.session.add(instance)

        loadable_fields = [
            k for k, v in schema.fields.items() if not v.dump_only
        ]

        with db.session.no_autoflush:
            for field in loadable_fields:
                set_attribute(self, field, get_attribute(instance, field))
                del_attribute(instance, field)

        db.session.expunge(instance)

        self.save(commit)
Example No. 10
    def __set__(self, obj, val):

        # TODO: Abstract JsonPointerException when setting a member with a missing nested parent dict

        # We need to deepcopy data, so that SQLAlchemy can detect changes. Otherwise nested changes would mutate the dict in-place and SQLAlchemy cannot perform comparison.
        data = copy.deepcopy(self.ensure_valid_data(obj))

        val = self.converter.serialize(val)

        if val is not None:

            # Do some basic data validation, don't let first class objects slip through
            if type(val) not in (str, float, bool, int, dict):
                raise BadJSONData("Cannot update field at {} as it has unsupported type {} for JSONB data".format(self.pointer, type(val)))

        jsonpointer.set_pointer(data, self.pointer, val)

        set_attribute(obj, self.data_field, data)
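A standalone sketch of the idea described in the comments above; the Event model and the set_json_member helper are hypothetical, and the jsonpointer package is required. Deep-copy the stored dict, patch it through a JSON pointer, and assign the fresh copy with set_attribute so ordinary change detection applies:

import copy

import jsonpointer
from sqlalchemy import JSON, Column, Integer, create_engine
from sqlalchemy.orm import Session, declarative_base
from sqlalchemy.orm.attributes import set_attribute

Base = declarative_base()

class Event(Base):
    __tablename__ = 'events'
    id = Column(Integer, primary_key=True)
    data = Column(JSON, default=dict)

def set_json_member(obj, pointer, val):
    data = copy.deepcopy(obj.data or {})   # fresh copy, so old and new values differ
    jsonpointer.set_pointer(data, pointer, val)
    set_attribute(obj, 'data', data)       # SQLAlchemy sees a brand-new dict

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
with Session(engine) as session:
    event = Event(data={'payload': {'count': 0}})
    session.add(event)
    session.commit()
    set_json_member(event, '/payload/count', 1)
    session.commit()    # the data column is written because a new dict was assigned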
Example No. 12
    def before_insert(self, mapper, connection, instance):
        fields = [get_attribute(instance, f) for f in self.generate_from]

        table = mapper.columns[self.slugfield].table
        column = table.c[self.slugfield]
        assert isinstance(column.type, (db.Unicode, db.String))
        max_length = column.type.length

        # Filter out fields with no value; we cannot join them and they are
        # not relevant for slug generation.
        fields = ifilter(None, fields)
        slug = self.separator.join(imap(gen_ascii_slug, fields))
        # strip the string if max_length is applied
        slug = slug[:max_length-4] if max_length is not None else slug

        set_attribute(instance, self.slugfield,
            find_next_increment(getattr(instance.__class__, self.slugfield),
                                slug, max_length))
        return orm.EXT_CONTINUE
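Examples 12 and 15 appear to use the legacy MapperExtension API (note the orm.EXT_CONTINUE return value). A rough sketch of the same idea on the modern event API; the Article model and the naive slug function are assumptions, not the original find_next_increment/gen_ascii_slug helpers:

from sqlalchemy import Column, Integer, String, create_engine, event
from sqlalchemy.orm import Session, declarative_base
from sqlalchemy.orm.attributes import set_attribute

Base = declarative_base()

class Article(Base):
    __tablename__ = 'articles'
    id = Column(Integer, primary_key=True)
    title = Column(String(80))
    slug = Column(String(80))

@event.listens_for(Article, 'before_insert')
def _generate_slug(mapper, connection, instance):
    max_length = mapper.columns['slug'].type.length
    slug = '-'.join((instance.title or '').lower().split())
    # Column-level changes made in before_insert are included in the INSERT.
    set_attribute(instance, 'slug', slug[:max_length])

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
with Session(engine) as session:
    session.add(Article(title='Hello World'))
    session.commit()   # slug == 'hello-world'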
Example No. 13
def update_obj(obj, attributes, params):
    """
    Updates an arbitrary object. Takes a list of attributes and a dictionary of update
    parameters. Checks that each key is in the list of approved attributes and then attempts
    to set it. If setting an attribute fails, an HTTP 400 Bad Request is raised.

    :param obj: object to update
    :param attributes: list of approved attributes
    :param params: dictionary of update parameters 

    """
    for key in params.keys():
        if key in attributes:
            try:
                set_attribute(obj, key, params[key])
            except Exception:
                abort(400)
    
    meta.Session.flush()
Example No. 15
    def before_insert(self, mapper, connection, instance):
        fields = [get_attribute(instance, f) for f in self.generate_from]

        table = mapper.columns[self.slugfield].table
        column = table.c[self.slugfield]
        assert isinstance(column.type, (db.Unicode, db.String))
        max_length = column.type.length

        # Filter out fields with no value; we cannot join them and they are
        # not relevant for slug generation.
        fields = ifilter(None, fields)
        slug = self.separator.join(imap(gen_ascii_slug, fields))
        # strip the string if max_length is applied
        slug = slug[:max_length - 4] if max_length is not None else slug

        set_attribute(
            instance, self.slugfield,
            find_next_increment(getattr(instance.__class__, self.slugfield),
                                slug, max_length))
        return orm.EXT_CONTINUE
Example No. 16
 def __setattr__(self, key, value):
     if is_instrumented(self, key):
         set_attribute(self, key, value)
     else:
         self._goofy_dict[key] = value
Example No. 18
 def __set_col_value(self, name, value):
     if name in self.get_col_names():
         set_attribute(self, name, value)
Example No. 19
 def update(self, **kwargs):
     for attr in kwargs:
         if attr in self._columns() and kwargs[attr] is not None:
             set_attribute(self, attr, kwargs[attr])
     db.session.commit()
     return self
Example No. 20
 def _setattr_from_instance(self, fields: List[str], instance: _M) -> None:
     with self.session().no_autoflush:
         for field in fields:
             set_attribute(self.instance(), field, get_attribute(instance, field))
             del_attribute(instance, field)
Example No. 21
def process_board(api, session, bid, force_initial_pass):
    try:
        board = api.board(bid)
    except NoAccess as na:
        print(na)
        return

    initial_pass = not session.query(func.count(Thread.tid)).join(Thread.board).filter(Board.bid == bid)[0][0] \
                   or force_initial_pass

    newest_complete_tid = None
    if initial_pass:
        print('Initial pass on this board.')
    else:
        newest_complete_thread = session.query(Thread).filter_by(
            is_complete=True,
            bid=bid).join(Thread.last_post).order_by(desc(Post.pid)).first()
        if newest_complete_thread:
            # This is an easy shortcut that mostly works because of Sammelthreads.
            newest_complete_tid = newest_complete_thread.tid
            print(
                'Update pass on this board. Fixpoint thread is TID %d (%s).' %
                (newest_complete_tid, newest_complete_thread.title))

    thread_set = set()

    with ElapsedProgressBar(length=int(
            board.find('./number-of-threads').attrib['value']),
                            show_pos=True,
                            label='Syncing threads') as bar:
        for thread in api.iter_board(bid,
                                     oldest_tid=newest_complete_tid,
                                     reverse=initial_pass):
            dbthread = thread_from_xml(session, thread)
            set_attribute(dbthread, 'tags', api.thread_tags(dbthread.tid))
            session.add(dbthread)
            thread_set.add(dbthread)
            bar.update(1)
        session.commit()

    with ElapsedProgressBar(thread_set,
                            show_pos=True,
                            label='Finding updated threads') as bar:
        for dbthread in bar:
            tnu = session.query(WorldeaterThreadsNeedingUpdate).get(
                dbthread.tid) or WorldeaterThreadsNeedingUpdate(
                    thread=dbthread)
            if dbthread.last_pid:
                thread = api.thread(dbthread.tid, pid=dbthread.last_post.pid)
                # Might advance dbthread.last_post to the last post on this page
                posts = thread.findall('./posts/post')
                merge_posts(session, dbthread, posts)
                pids = [int(post.attrib['id']) for post in posts]
                last_on_page = pids[-1] == dbthread.last_post.pid
                last_page = int(
                    thread.find('./number-of-pages').attrib['value']) == int(
                        thread.find('./posts').attrib['page'])

                if last_on_page and (last_page or len(posts) < 30):
                    # Up to date on this thread if the last post we have is the last post on its page
                    # and we are on the last page. This method seems to be accurate, unlike
                    # XML:number-of-replies, which is not generally correct. (IIRC there are multiple corner cases
                    # involving hidden and deleted posts; some threads have XML:nor=500, but the last page
                    # has offset=500 and count=2, for example).
                    #
                    # Note that XML:number-of-pages is computed in bB based on XML:number-of-replies,
                    # so if a lot of replies are missing it will be wrong as well. We catch most of these
                    # (~97 % in some theoretical sense) with the extra len(posts)<30 check, which will trigger
                    # if we are already on the last *real* page which is not full.
                    # If the stars align just right we'll always think a thread has some new posts and we will
                    # never be able to tell it doesn't.
                    if dbthread.can_be_complete:
                        dbthread.is_complete = True
                    continue

                index_in_page = pids.index(dbthread.last_post.pid)
                index_in_thread = int(
                    thread.find('./posts').attrib['offset']) + index_in_page
                num_replies = int(
                    thread.find('./number-of-replies').attrib['value'])
                # Due to XML:number-of-replies inaccuracy this might become negative
                estimated_number_of_posts = max(0,
                                                num_replies - index_in_thread)

                tnu.start_page = int(thread.find('./posts').attrib['page']) + 1
                tnu.est_number_of_posts = estimated_number_of_posts
            else:
                tnu.start_page = 0
                tnu.est_number_of_posts = dbthread.est_number_of_replies
            session.add(tnu)

    session.commit()
Example No. 22
 def __setitem__(self, key, value):
     set_attribute(self, key, value)
     flag_modified(self, key)
     db.session.add(self)
     db.session.commit()
Example No. 23
    def update_dict(self, data):

        for key, value in data.items():
            set_attribute(self, key, value)
Example No. 24
    def update_properties(instance: Model,
                          properties: Dict[str, Any]) -> None:  # type: ignore

        for field, value in properties.items():
            # Call to untyped function "set_attribute" in typed context
            set_attribute(instance, field, value)  # type: ignore
Example No. 25
    def update_dict(self, data):
        if self.id is None and data.get('id'):
            self.id = data.pop('id')

        start_date = data.pop('start_date', None)
        end_date = data.pop('end_date', None)
        issued = data['issued']
        set_attribute(self, 'during', DateInterval([start_date, end_date]))
        if start_date is None and end_date is None:
            set_attribute(self, 'during', DateInterval([issued, issued]))

        if 'identifiers' in data:
            new_values = set([(a['type'], a['value'])
                              for a in data.pop('identifiers', [])])
            for value in self.identifiers:
                key = (value.type, value.value)
                if key in new_values:
                    new_values.remove(key)
                else:
                    self.identifiers.remove(value)
            for new_value in new_values:
                type, value = new_value
                self.identifiers.append(
                    Identifier(type=type, value=value, work_id=data.get('id')))
        if 'measures' in data:
            new_values = set([(a['type'], a['value'])
                              for a in data.pop('measures', [])])
            for value in self.measures:
                key = (value.type, value.value)
                if key in new_values:
                    new_values.remove(key)
                else:
                    self.measures.remove(value)
            for new_value in new_values:
                type, value = new_value
                self.measures.append(
                    Measure(type=type, value=value, work_id=data.get('id')))

        if 'contributors' in data:
            existing_contributors = dict([(c.id, c)
                                          for c in self.contributors])
            new_contributors = []

            for contributor_data in data.pop('contributors', []):
                contributor_data['work_id'] = self.id
                affiliations_data = contributor_data.pop('affiliations', [])
                if contributor_data.get('id') in existing_contributors:
                    contributor = existing_contributors.pop(
                        contributor_data['id'])
                    contributor.update_dict(contributor_data)

                else:
                    contributor = Contributor.from_dict(contributor_data)

                existing_affiliations = dict([
                    (c.id, c) for c in contributor.affiliations
                ])
                new_affiliations = []
                for affiliation_data in affiliations_data:
                    affiliation_data['work_id'] = self.id
                    if affiliation_data.get('id') in existing_affiliations:
                        affiliation = existing_affiliations.pop(
                            affiliation_data['id'])
                        affiliation.update_dict(affiliation_data)
                    else:
                        affiliation = Affiliation.from_dict(affiliation_data)
                    new_affiliations.append(affiliation)
                contributor.affiliations[:] = new_affiliations

                new_contributors.append(contributor)
            self.contributors[:] = new_contributors

        if 'descriptions' in data:
            existing_descriptions = dict([(c.id, c)
                                          for c in self.descriptions])
            new_descriptions = []
            for description_data in data.pop('descriptions', []):
                description_data['work_id'] = self.id
                if description_data.get('id') in existing_descriptions:
                    description = existing_descriptions.pop(
                        description_data['id'])
                    description.update_dict(description_data)
                else:
                    description = Description.from_dict(description_data)
                new_descriptions.append(description)
            self.descriptions[:] = new_descriptions

        if 'relations' in data:
            existing_relations = dict([(c.id, c) for c in self.relations])
            new_relations = []
            for relation_data in data.pop('relations', []):
                relation_data['work_id'] = self.id
                if relation_data.get('id') in existing_relations:
                    relation = existing_relations.pop(relation_data['id'])
                    relation.update_dict(relation_data)
                else:
                    relation = Relation.from_dict(relation_data)
                new_relations.append(relation)
            self.relations[:] = new_relations

        for key, value in data.items():
            if key.startswith('_'):
                continue
            set_attribute(self, key, value)
Example No. 26
 def update_dict(self, data):
     for key, value in data.items():
         if key.startswith('_'):
             continue
         set_attribute(self, key, value)
Example No. 27
def process_board(api, session, bid, force_initial_pass):
    try:
        board = api.board(bid)
    except NoAccess as na:
        print(na)
        return

    initial_pass = not session.query(func.count(Thread.tid)).join(Thread.board).filter(Board.bid == bid)[0][0] \
                   or force_initial_pass

    newest_complete_tid = None
    threads_paginate = session.query(func.count(Thread.tid) > 30).filter(Thread.bid == bid).scalar()
    if initial_pass:
        print('Initial pass on this board.')
    elif threads_paginate:
        newest_complete_thread = session.query(Thread).filter_by(is_complete=True, bid=bid).join(
            Thread.last_post).order_by(desc(Post.pid)).first()
        if newest_complete_thread:
            # This is an easy shortcut that mostly works because of Sammelthreads.
            newest_complete_tid = newest_complete_thread.tid
            print('Update pass on this board. Fixpoint thread is TID %d (%s).' % (
            newest_complete_tid, newest_complete_thread.title))

    thread_set = set()

    with ElapsedProgressBar(length=int(board.find('./number-of-threads').attrib['value']),
                            show_pos=True, label='Syncing threads') as bar:
        for thread in api.iter_board(bid, oldest_tid=newest_complete_tid, reverse=initial_pass):
            dbthread = thread_from_xml(session, thread)
            set_attribute(dbthread, 'tags', api.thread_tags(dbthread.tid))
            session.add(dbthread)
            thread_set.add(dbthread)
            bar.update(1)
        session.commit()

    with ElapsedProgressBar(thread_set,
                            show_pos=True, label='Finding updated threads') as bar:
        for dbthread in bar:
            tnu = session.query(WorldeaterThreadsNeedingUpdate).get(dbthread.tid) or WorldeaterThreadsNeedingUpdate(thread=dbthread)
            if dbthread.last_pid:
                try:
                    thread = api.thread(dbthread.tid, pid=dbthread.last_post.pid)
                except InvalidThreadError as ite:
                    print("Thread", dbthread.tid, "has been unexisted, skipping.")
                    continue
                # Might advance dbthread.last_post to the last post on this page
                posts = thread.findall('./posts/post')
                merge_posts(session, dbthread, posts)
                pids = [int(post.attrib['id']) for post in posts]
                if not pids:
                    # broken thread / invisibilized last post
                    # example: TID#213929 last_post := PID#1246148592 results in empty page 50
                    # reset last post
                    dbthread.last_post = None
                    tnu.start_page = 0
                    tnu.est_number_of_posts = dbthread.est_number_of_replies
                    session.add(tnu)
                    continue
                last_on_page = pids[-1] == dbthread.last_post.pid
                last_page = int(thread.find('./number-of-pages').attrib['value']) == int(
                    thread.find('./posts').attrib['page'])

                if last_on_page and (last_page or len(posts) < 30):
                    # Up to date on this thread if the last post we have is the last post on its page
                    # and we are on the last page. This method seems to be accurate, unlike
                    # XML:number-of-replies, which is not generally correct. (IIRC there are multiple corner cases
                    # involving hidden and deleted posts; some threads have XML:nor=500, but the last page
                    # has offset=500 and count=2, for example).
                    #
                    # Note that XML:number-of-pages is computed in bB based on XML:number-of-replies,
                    # so if a lot of replies are missing it will be wrong as well. We catch most of these
                    # (~97 % in some theoretical sense) with the extra len(posts)<30 check, which will trigger
                    # if we are already on the last *real* page which is not full.
                    # If the stars align just right we'll always think a thread has some new posts and we will
                    # never be able to tell it doesn't.
                    if dbthread.can_be_complete:
                        dbthread.is_complete = True
                    continue

                try:
                    index_in_page = pids.index(dbthread.last_post.pid)
                except ValueError:
                    # TID#207876 PID#1243516772
                    # Current last page is [1243516598, 1243516600, 1243516606, 1243516611, 1243516623, 1243516628, 1243516633, 1243516679, 1243516686, 1243516695, 1243516712, 1243516713, 1243516717, 1243516726, 1243516727, 1243516733, 1243516738, 1243516749]
                    # Forum still knows PID#1243516772 exists (TID+PID navigation).
                    # Probably hidden.
                    print("Broken thread", dbthread.tid, "with seen but now gone PID", dbthread.last_post.pid)
                    index_in_page = 0

                index_in_thread = int(thread.find('./posts').attrib['offset']) + index_in_page
                num_replies = int(thread.find('./number-of-replies').attrib['value'])
                # Due to XML:number-of-replies inaccuracy this might become negative
                estimated_number_of_posts = max(0, num_replies - index_in_thread)

                tnu.start_page = int(thread.find('./posts').attrib['page']) + 1
                tnu.est_number_of_posts = estimated_number_of_posts
            else:
                tnu.start_page = 0
                tnu.est_number_of_posts = dbthread.est_number_of_replies
            session.add(tnu)

    session.commit()
Example No. 28
 def _setattr_from_instance(self, fields: List[str], instance: db.Model):
     with db.session.no_autoflush:
         for field in fields:
             set_attribute(self, field, get_attribute(instance, field))
             del_attribute(instance, field)