def add_keyword(bot, update, chat_data):
    user = User.from_telegram_object(update.effective_user)
    if check_suggestion_limit(bot, update, user):
        return
    kw = update.message.text
    bot_to_edit = chat_data.get('edit_bot')
    kw = helpers.format_keyword(kw)

    # Sanity checks
    if kw in settings.FORBIDDEN_KEYWORDS:
        update.message.reply_text('The keyword {} is forbidden.'.format(kw))
        return
    if len(kw) <= 1:
        update.message.reply_text('Keywords must be longer than 1 character.')
        return
    if len(kw) >= 20:
        update.message.reply_text('Keywords must not be longer than 20 characters.')
        return

    # Ignore duplicates
    try:
        Keyword.get((Keyword.name == kw) & (Keyword.entity == bot_to_edit))
        return
    except Keyword.DoesNotExist:
        pass

    Suggestion.add_or_update(user=user, action='add_keyword',
                             subject=bot_to_edit, value=kw)
    set_keywords(bot, update, chat_data, bot_to_edit)
    Statistic.of(update, 'added keyword {} to'.format(kw), bot_to_edit.username)

def matched_keyword(keyword_str, group_id):
    keyword = Keyword.objects(keyword=keyword_str).get()
    if group_id in keyword.solved_team:
        return
    team = Team.objects(group_id=group_id).get()

    # Story hints for specific keywords (kept disabled):
    # if keyword_str == "238504":
    #     bot.sendMessage(team.group_id, "「副市長是受小石信任之人,是心靈純潔之人」")
    # elif keyword_str == "15769":
    #     bot.sendMessage(team.group_id, "「市長是小石害怕之人,是已經受到心靈扭曲影響之人」")

    coin = config.KEYWORD_MATCH_REWARD * keywords[keyword_str]
    # The first team to solve a keyword earns double the reward.
    if len(keyword.solved_team) == 0:
        coin *= 2
    coupon = generate_coupon(coin, "解開謎題 獲得", "System")  # "solved a puzzle, earned"
    Keyword.objects(keyword=keyword_str).update_one(push__solved_team=group_id)
    Team.objects(group_id=group_id).update_one(inc__coin=coupon.coin)
    team.reload()
    coupon.own_team = team
    coupon.save()
    # "{description} {coin} {currency}\n{team} currently holds a total of {total} {currency}"
    bot.sendMessage(team.group_id,
                    "{} {} {currency_name}\n{} 目前總計擁有 {} {currency_name}"
                    .format(coupon.description, coupon.coin, team.name, team.coin,
                            currency_name=config.CURRENCY_NAME))
    app.logger.info("{}, {} solved keyword {} gain {} coin".format(
        str(datetime.now()), team.name, keyword_str, coupon.coin))

def get_keyword(update: Update, context: CallbackContext):
    chat_id = update.message.from_user.id
    gif = Gif.get(Gif.id == context.user_data['gif'])
    text = update.message.text
    if text == 'DONE':
        context.bot.send_message(
            chat_id,
            'Do you agree to show this gif to others?',
            reply_markup=ReplyKeyboardMarkup([['YES'], ['NO']]))
        return PUBLIC
    try:
        keyword = Keyword.get(Keyword.text == text.lower())
        if keyword in gif.keywords:
            context.bot.send_message(chat_id, 'You have already added this keyword!')
        else:
            gif.keywords.add(keyword)
            context.bot.send_message(chat_id, 'Keyword successfully added!')
    except Keyword.DoesNotExist:
        # The keyword is new: create it, then attach it to the gif.
        keyword = Keyword.insert({Keyword.text: text.lower()}).execute()
        gif.keywords.add(keyword)
        context.bot.send_message(chat_id, 'Keyword successfully added!')
    return

async def add_keywords(bot, response, to_check):
    if not isinstance(response, Response) or response.empty:
        return
    full_text = response.full_text.lower()

    # Search for a botbuilder pattern to see if this bot is a Manybot/Chatfuelbot/etc.
    if botbuilder_pattern.search(full_text):
        to_check.botbuilder = True

    # Search the /start and /help responses for keywords from the global list.
    to_add = []
    for name in Keyword.get_distinct_names(exclude_from_bot=to_check):
        if re.search(r'\b{}\b'.format(name), full_text, re.IGNORECASE):
            to_add.append(name)
    to_add = [x for x in to_add if x not in settings.FORBIDDEN_KEYWORDS]

    if to_add:
        Keyword.insert_many([dict(name=k, entity=to_check) for k in to_add]).execute()
        msg = 'New keyword{}: {} for {}.'.format(
            's' if len(to_add) > 1 else '',
            ', '.join(['#' + k for k in to_add]),
            to_check.str_no_md)
        bot.send_message(settings.BOTLIST_NOTIFICATIONS_ID, msg, timeout=40)
        log.info(msg)

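# A minimal, self-contained sketch of the word-boundary keyword matching used in
# add_keywords above. The sample text and keyword list are made up for the example,
# and re.escape is added here as a precaution for names containing regex
# metacharacters (the original interpolates the name directly).
import re

def match_keywords(full_text, known_names, forbidden=()):
    """Return the known keyword names that occur as whole words in full_text."""
    full_text = full_text.lower()
    hits = [
        name for name in known_names
        if re.search(r'\b{}\b'.format(re.escape(name)), full_text, re.IGNORECASE)
    ]
    return [name for name in hits if name not in forbidden]

if __name__ == '__main__':
    text = "This bot sends weather forecasts and currency rates."
    print(match_keywords(text, ['weather', 'currency', 'music']))
    # -> ['weather', 'currency']
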
def keywords_categorizer():
    keywords_tree = Keyword.get_all_hierarchically()
    return render_template(
        'keywords_categorizer.html',
        title="Keywords Categorizer",
        keywords_tree=keywords_tree,
        unassociated_keywords=Keyword.get_all(
            and_(~Keyword.categories.any(), Keyword.value != None)))

def add_keyword(request, room_title):
    room = get_object_or_404(Room, title=room_title)
    keyword_str = request.POST['keyword'].strip()
    keywords = room.keyword_set.filter(name=keyword_str).all()
    if len(keywords) == 0:
        keyword = Keyword(name=keyword_str, room=room)
        keyword.save()
    return HttpResponse('OK')

def list(request):
    events = []
    if request.GET and 'keyword' in request.GET:
        request_keyword = request.GET['keyword']
        keyword = Keyword(keyword=request_keyword)
        events = keyword.search()
    logging.debug("events is %s" % events)
    return render_to_response('atndapp/list.html', {'events': events})

def keyword_addition(paper_id, keywords):
    paper = Paper.objects.get(pk=paper_id)
    print("Extracting from..", paper)
    # Keep only the top 20 keywords.
    keywords = keywords[:20]
    print(keywords)
    count = 0
    for items in keywords:
        kw = Keyword(keyword=items[0], paper=paper)
        kw.save()
        count = count + 1
    print(count, "keywords stored")

def set_keywords():
    data = request.get_json(silent=True)
    for key, values in data['keywords'].items():
        keyword = Keyword.get(keyword_id=key)
        description = keyword.description
        for i in range(len(values)):
            keyword.value = values[i]
            db_session.commit()
            if i != len(values) - 1:
                # Creating a new keyword for multiple values
                keyword = set_attributes(Keyword(), description=description)
                db_session.add(keyword)
    return HttpResponse('Keywords set!')

def init_add_keywords():
    global session, engine
    KEYWORDS = []
    KEYWORDS.append({'keyword_description': "NULL", 'keyword_regex': "NULL"})

    # Import the CSV with the keyword list.
    with open('initial/keywords.csv') as csv_file:
        csv_reader = csv.reader(csv_file, delimiter=',')
        line_count = 0
        for row in csv_reader:
            if line_count != 0:  # skip the header row
                print(f'Adding keyword {row[0]} => {row[1]}')
                KEYWORDS.append({
                    'keyword_description': row[0],
                    'keyword_regex': row[1]
                })
            line_count += 1
    logging.info('Inserted {} keywords to database'.format(line_count))

    for keyword in KEYWORDS:
        logging.info('{}: Adding keyword {} to the database'.format(
            sys._getframe().f_code.co_name, keyword['keyword_description']))
        session.add(
            Keyword(keyword_description=keyword['keyword_description'],
                    keyword_regex=keyword['keyword_regex'],
                    keyword_tmodified=datetime.now(),
                    keyword_tcreate=datetime.now()))
    session.commit()

def insert_none_keyword(self, keyword):
    if self.is_keyword_need_upsert(keyword):
        self.upsert_keyword(
            Keyword(
                value=keyword,
                update=datetime.strptime(
                    datetime.today().strftime('%Y%m0309'), '%Y%m%d%H')
                + relativedelta(months=+1)))

def seeblanknuggetscreen():
    if not g.user:
        flash("You need to create an account to make new nuggets.", "danger")
        return redirect("/")

    form = CreateNuggetForm()

    if request.method == "POST":
        csrf_token = request.json.get("submission", None).get("csrf_token", None)
        f = {"csrf_token": csrf_token}
        request.form = f
        form.csrf_token.data = csrf_token
        if form.validate_on_submit():
            submitted_info = request.json.get("submission", None)
            nugget = Nugget(truth=submitted_info.get("truth"), user_id=g.user.id)
            db.session.add(nugget)
            db.session.commit()
            for input_keyword in submitted_info.get("keywords"):
                keyword = Keyword.create_with_fakeouts(input_keyword, nugget.id)
                # seems like an unnecessary db commit
                db.session.commit()
            for deck_id in submitted_info.get("Decks", None):
                deck = Deck.query.get(deck_id)
                deck.my_nuggets.append(nugget)
                db.session.commit()
            flash("You have created a new nugget")
            return redirect("/")

    return render_template('/nuggetviewcontrol.html', form=form, nugget=None,
                           decks=None, nugget_dict={})

def add_or_update(user, action, subject, value):
    from models import Statistic

    # value may be None
    already_exists = Suggestion.get_pending(action, subject, user, value)
    if already_exists:
        # Does the new suggestion reset the value?
        if action == 'remove_keyword':
            try:
                kw = Keyword.get(entity=subject, name=value)
                kw.delete_instance()
            except Keyword.DoesNotExist:
                pass
        elif action == 'add_keyword':
            return  # TODO: is this right?
        elif value == getattr(already_exists.subject, action):
            already_exists.delete_instance()
            return None

        already_exists.value = value
        already_exists.save()
        Statistic.of(user, 'made changes to their suggestion: ',
                     str(already_exists))
        return already_exists
    else:
        new_suggestion = Suggestion(user=user, action=action,
                                    date=datetime.date.today(),
                                    subject=subject, value=value)
        new_suggestion.save()
        Statistic.of(user, 'suggestion', new_suggestion._md_plaintext())
        return new_suggestion

def delete_keywords():
    ids = request.values['keywordIds'].split(',')
    keywords = Keyword.get_all(Keyword.keyword_id.in_(ids))
    for keyword in keywords:
        keyword.categories = []
        db_session.delete(keyword)
    db_session.commit()
    return HttpResponse('Keywords deleted.')

def link_keyword_to_category():
    keyword = Keyword.get(keyword_id=request.values['keywordId'])
    category_id = request.values['categoryId']
    keyword.categories = [Category.get(
        category_id=category_id)] if category_id != '-1' else []
    db_session.commit()
    return HttpResponse('Keyword {0} assigned to category {1}'.format(
        keyword.keyword_id, category_id))

def _get_keyword_id(self, word, type_):
    try:
        keyword = Keyword.select(word=word, type=type_).first()
    except Exception as e:
        print(f"Failed to search keyword {word} with error: {e}")
        return -1

    if keyword is None:
        try:
            keyword = Keyword(created=datetime.now(), word=word, type=type_)
            keyword.flush()
        except Exception as e:
            print(f"Failed to insert keyword {keyword} with error: {e}")
            return -1

    return keyword.id

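# The select-or-insert-and-return-id pattern in _get_keyword_id, restated as a
# self-contained sketch using peewee with an in-memory SQLite database. The ORM,
# the model definition, and the helper name are assumptions for illustration;
# the original function may be built on a different data layer.
from datetime import datetime
from peewee import SqliteDatabase, Model, CharField, DateTimeField

db = SqliteDatabase(':memory:')

class KeywordRecord(Model):
    word = CharField()
    type = CharField()
    created = DateTimeField()

    class Meta:
        database = db

def get_keyword_id(word, type_):
    """Return the id of the (word, type) keyword, inserting it if missing."""
    keyword = KeywordRecord.get_or_none(
        (KeywordRecord.word == word) & (KeywordRecord.type == type_))
    if keyword is None:
        keyword = KeywordRecord.create(created=datetime.now(), word=word, type=type_)
    return keyword.id

if __name__ == '__main__':
    db.connect()
    db.create_tables([KeywordRecord])
    print(get_keyword_id('python', 'language'))  # 1 (newly inserted)
    print(get_keyword_id('python', 'language'))  # 1 (reused)
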
def apply(self):
    try:
        if self.subject is None:
            self.delete_instance()
            return False
    except Bot.DoesNotExist:
        self.delete_instance()
        return False

    if self.action == 'category':
        from models import Category
        try:
            cat = Category.get(Category.id == self.value)
            self.subject.category = cat
        except Category.DoesNotExist:
            raise AttributeError("Category to change to does not exist.")
    elif self.action == 'name':
        self.subject.name = self.value
    elif self.action == 'username':
        self.subject.username = self.value
    elif self.action == 'description':
        self.subject.description = self.value
    elif self.action == 'extra':
        self.subject.extra = self.value
    elif self.action == 'country':
        if self._value == 'None' or self._value is None:
            self.subject.country = None
        else:
            from models import Country
            try:
                con = Country.get(id=self._value)
                self.subject.country = con
            except Country.DoesNotExist:
                raise AttributeError("Country to change to does not exist.")
    elif self.action == 'inlinequeries':
        self.subject.inlinequeries = bool(self.value)
    elif self.action == 'official':
        self.subject.official = bool(self.value)
    elif self.action == 'offline':
        self.subject.offline = bool(self.value)
    elif self.action == 'spam':
        self.subject.spam = bool(self.value)
    elif self.action == 'add_keyword':
        kw_obj = Keyword(name=self.value, entity=self.subject)
        kw_obj.save()
    elif self.action == 'remove_keyword':
        try:
            kw_obj = Keyword.get(name=self.value, entity=self.subject)
            kw_obj.delete_instance()
        except Keyword.DoesNotExist:
            raise AttributeError("Keyword to disable does not exist anymore.")

    self.subject.save()
    self.executed = True
    self.save()
    return True

def detail_text(self):
    from models import Keyword
    keywords = Keyword.select().where(Keyword.entity == self)
    txt = '{}'.format(self.__str__())
    txt += '\n_{}_'.format(util.escape_markdown(self.name)) if self.name else ''
    txt += '\n\n{}'.format(self.description) if self.description else ''
    txt += util.escape_markdown(
        '\n\nKeywords: {}'.format(', '.join([str(k) for k in keywords]))
        if keywords else '')
    return txt

def test_get_all_keywords_hierarchically(client):
    def assertion(keywords):
        assert len(keywords) == 3
        assert len(keywords[1]['children']) > 0
        assert keywords[2]['count'] > 0
        # With three top-level nodes, the last valid index is 2.
        assert type(keywords[2]['keywords']) is list

    t = TimeCalculator('keywords_hierarchically')
    assertion(Keyword.get_all_hierarchically())
    t.get_running_time()

def add(request):
    name = request.POST['name']
    url = request.POST['url']
    number = request.POST['number']
    keywords = request.POST['words']

    # If the website already exists, delete the old record rather than
    # updating it: the idea is that deleting and recreating is faster than
    # checking each field to see which ones changed.
    w1 = Website.objects.filter(url=url)
    if w1.exists():
        w1.delete()

    # Now we add the website to the database.
    w1 = Website(name=name, url=url, number=number)
    w1.save()

    # Remove blank spaces and split the keyword string into a list.
    keywords = stl(keywords)

    # Add the keywords that describe this Website. If the keyword already
    # exists, reuse it; otherwise create a new Keyword.
    for keyword in keywords:
        try:
            k1 = Keyword.objects.get(name=keyword)
            w1.words.add(k1)
        except Keyword.DoesNotExist:
            # Create the new Keyword and save it.
            k1 = Keyword(name=keyword)
            k1.save()
            # Add this word to the Website.
            w1.words.add(k1)

def rateTitle(title):
    kws = titleToKeywords_one(title)
    keywords = []
    for kw in kws:
        KeywordObject = Keyword.objects(key=strToKeyword(kw))
        if KeywordObject:
            keywords.append(KeywordObject[0])
    rating = 0
    for kw in keywords:
        if kw.volume > 10000:
            rating += kw.cpc / kw.volume
    return rating

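# A tiny worked example of the rating formula in rateTitle: only keywords whose
# search volume exceeds 10,000 contribute, each adding cpc/volume to the score.
# The keyword data below is invented for illustration.
def rate(keywords):
    return sum(kw['cpc'] / kw['volume'] for kw in keywords if kw['volume'] > 10000)

if __name__ == '__main__':
    sample = [
        {'key': 'python', 'cpc': 2.5, 'volume': 50000},   # contributes 2.5 / 50000
        {'key': 'tutorial', 'cpc': 1.0, 'volume': 8000},  # below the volume cutoff
    ]
    print(rate(sample))  # 5e-05
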
def create_keywords():
    values = request.values['keyword'].split(',')
    keywords = Keyword.get_all(Keyword.value.in_(values))
    if len(keywords) == 0:
        keywords = [add_to_db(Keyword(), value=v) for v in values]
        return HttpResponse(
            {
                'keywords': [{
                    'value': k.value,
                    'id': k.keyword_id
                } for k in keywords]
            }, 202)
    else:
        if len(keywords[0].categories) > 0:
            return HttpResponse(
                'Keyword "{0}" already exists in category "{1}"!'.format(
                    keywords[0].value, keywords[0].categories[0].name), 302)
        else:
            return HttpResponse(
                'Keyword "{0}" already exists!'.format(keywords[0].value), 302)

def many_to_many(session):
    # give Wendy some blog posts
    wendy = session.query(User)\
        .filter_by(name='wendy')\
        .one()
    post = BlogPost("Wendy's Blog Post", "This is a test", wendy)
    session.add(post)
    print("wendy's first post: {}".format(post))

    # create a few keywords
    post.keywords.append(Keyword('wendy'))
    post.keywords.append(Keyword('firstpost'))

    # query posts with the 'firstpost' keyword
    first_posts = session.query(BlogPost)\
        .filter(BlogPost.keywords.any(keyword='firstpost'))\
        .all()
    print('all the first posts: {}'.format(first_posts))

    # we can also see all of wendy's posts
    print("Wendy's posts: {}".format(wendy.posts.all()))

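# many_to_many() above assumes User/BlogPost/Keyword models in the style of the
# SQLAlchemy ORM tutorial. The sketch below is a trimmed, self-contained version
# of that many-to-many mapping (assuming SQLAlchemy 1.4+, in-memory SQLite); the
# author/User side is omitted and the column names are assumptions.
from sqlalchemy import create_engine, Column, Integer, String, Table, ForeignKey
from sqlalchemy.orm import declarative_base, relationship, Session

Base = declarative_base()

# Association table linking posts and keywords.
post_keywords = Table(
    'post_keywords', Base.metadata,
    Column('post_id', ForeignKey('posts.id'), primary_key=True),
    Column('keyword_id', ForeignKey('keywords.id'), primary_key=True))

class BlogPost(Base):
    __tablename__ = 'posts'
    id = Column(Integer, primary_key=True)
    headline = Column(String)
    keywords = relationship('Keyword', secondary=post_keywords, backref='posts')

class Keyword(Base):
    __tablename__ = 'keywords'
    id = Column(Integer, primary_key=True)
    keyword = Column(String, unique=True)

    def __init__(self, keyword):
        self.keyword = keyword

if __name__ == '__main__':
    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)
    with Session(engine) as session:
        post = BlogPost(headline="Wendy's Blog Post")
        post.keywords.append(Keyword('firstpost'))
        session.add(post)
        session.commit()
        found = session.query(BlogPost).filter(
            BlogPost.keywords.any(keyword='firstpost')).all()
        print(found[0].headline)  # Wendy's Blog Post
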
def get_keyword(self, name):
    """Return the keyword if it exists, and otherwise create it."""
    keyword = self.session.query(Keyword).filter(
        Keyword.name == name).first()
    if not keyword:
        keyword = Keyword(name)
        self.session.add(keyword)
        self.session.commit()
    return keyword

def add_keywords(update, context):
    try:
        keyword_sent_list = update.effective_message.text.split('\n')
    except ValueError:
        update.message.reply_text('ورودی اشتباه')  # "invalid input"
        return ADD_WORDS

    admin = Admin.get_by_username(update.effective_message.from_user.username)
    for keyword in keyword_sent_list:
        if keyword not in Admin.get_keywords(admin.username):
            Keyword(name=keyword, admin_id=admin.id).add()
    return ADD_WORDS

def parse(self, data):
    """Convert CSV topics into vertices and topics"""
    vertex = Vertex.get_or_create(
        name=data['institution'], graph_id=self.graph.id)
    for topic, keywords in data['topics']:
        topic = Topic.get_or_create(name=topic)
        tv = TopicVertex.get_or_create(
            topic_id=topic.id, vertex_id=vertex.id)
        for keyword in keywords:
            kw = Keyword.get_or_create(name=keyword)
            kt = KeywordTopic.get_or_create(
                keyword_id=kw.id, topic_id=topic.id)
    self.vertices.append(vertex)

def find_category(description):
    """Take the description and try to associate it with a category.

    It also adds the description's words to the keyword table, so that the
    admin can associate them with categories later.
    """
    if not description:
        return None

    Keyword.generate_from_description(description)

    categories = {}
    for word in description.split(' '):
        keywords = Keyword.get_all(Keyword.value.contains(word))
        for keyword in keywords:
            if len(keyword.categories) > 0:
                category_name = keyword.categories[0].name
                categories[category_name] = 1 + categories.get(category_name, 0)

    if len(categories) == 0:
        return None
    category_name = max(categories.items(), key=operator.itemgetter(1))[0]
    return Category.get(name=category_name)

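# A stripped-down, pure-Python sketch of the voting step in find_category: every
# keyword already linked to a category casts one vote for that category's name,
# and the most-voted name wins. The mapping and words below are invented.
import operator

def vote_for_category(words, keyword_to_category):
    votes = {}
    for word in words:
        category = keyword_to_category.get(word)
        if category:
            votes[category] = votes.get(category, 0) + 1
    if not votes:
        return None
    return max(votes.items(), key=operator.itemgetter(1))[0]

if __name__ == '__main__':
    mapping = {'pizza': 'Restaurants', 'kebab': 'Restaurants', 'fuel': 'Car'}
    print(vote_for_category(['pizza', 'kebab', 'fuel', 'receipt'], mapping))
    # -> Restaurants
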
def carga_keywords():
    # Open the keywords txt file and insert its contents into the database.
    contador = 0
    try:
        with open('keywords.txt') as file:
            for line in file:
                line = line.replace('\n', '').lower()
                if line != "":
                    keywords = Keyword(line)
                    keywords.save()
                    # Here we call the check, which returns the position.
                    contador += 1
                    key = Keyword(line, 12)
                    key.update()
    except FileNotFoundError:
        print("No se encuentra Fichero keywords.txt")  # "keywords.txt file not found"
    return Keyword.get_all()

def by_name(cls, name):
    from sqlalchemy import func
    category = cls.get(func.lower(cls.name) == name.lower())
    if category is None:
        from models import Keyword
        keyword = Keyword.get(func.lower(Keyword.value) == name.lower())
        if keyword is not None:
            # TODO: Apply AI categorizer from keywords intelligence here
            category = keyword.categories[0] if len(keyword.categories) > 0 else None
        else:
            cls.add_keyword(name)
    return category

def init_add_keywords():
    global session, SERVER_MODE, engine
    KEYWORDS = [
        {'keyword_description': 'Binance', 'keyword_regex': '(binance|bnb)'},
        {'keyword_description': 'Huobi', 'keyword_regex': '(huobi)'},
        {'keyword_description': 'Bittrex', 'keyword_regex': '(bittrex)'},
        {'keyword_description': 'Bitfinex', 'keyword_regex': '(bitfinex)'},
        {'keyword_description': 'Coinbase', 'keyword_regex': '(coinbase)'},
        {'keyword_description': 'Kraken', 'keyword_regex': '(kraken)'},
        {'keyword_description': 'Poloniex', 'keyword_regex': '(poloniex)'},
    ]
    for keyword in KEYWORDS:
        logging.info(
            f"{sys._getframe().f_code.co_name}: Adding keyword {keyword['keyword_description']} to the database"
        )
        session.add(
            Keyword(keyword_description=keyword['keyword_description'],
                    keyword_regex=keyword['keyword_regex'],
                    keyword_tmodified=datetime.now(),
                    keyword_tcreate=datetime.now()))
    session.commit()

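# A small, hypothetical illustration of how the seeded keyword_regex values above
# could later be applied to incoming text. The matching helper and the inline
# keyword table are assumptions; the original function only inserts the rows.
import re

SEED_KEYWORDS = {'Binance': '(binance|bnb)', 'Kraken': '(kraken)'}

def detect_exchanges(text):
    """Return the descriptions whose regex matches the text, case-insensitively."""
    return [name for name, pattern in SEED_KEYWORDS.items()
            if re.search(pattern, text, re.IGNORECASE)]

if __name__ == '__main__':
    print(detect_exchanges('BNB is listed on Binance and Kraken'))
    # -> ['Binance', 'Kraken']
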
def submit():
    word = request.form["keyword"]
    if not word:
        return "<div>Please input keyword!</div>"

    keywords = Keyword.objects.filter(word=word)
    if keywords:
        keyword = keywords[0]
    else:
        keyword = Keyword(word=word)
        domain = whois.query("%s.com" % str(word))
        keyword.website = {"exist": bool(domain)}
        for platform in PLATFORMS:
            setattr(keyword, platform, {"exist": fetch(platform, word)})
        keyword.save()

    platforms = dict((key, getattr(keyword, key)) for key in PLATFORMS)
    platforms["website"] = keyword.website
    return render_template("result.html", platforms=platforms)

def geo_add():
    form = SeedForm()
    if form.validate_on_submit():
        geo_query = GeoQuery(name=form.keyword.data)
        db.session.add(geo_query)
        seeds = json.loads(form.seeds.data)
        for blob_id in seeds.keys():
            count = 0
            for patch in seeds[blob_id]:
                keyword = Keyword(name=form.keyword.data + str(count),
                                  geoquery=geo_query)
                count += 1
                db.session.add(keyword)
                x = max(0, patch[0])
                y = max(0, patch[1])
                size = patch[2]
                print('%s (%d, %d, %d)' % (blob_id, x, y, size))
                blob = Blob.query.get(blob_id)
                patch = Patch(blob=blob, x=int(x), y=int(y), size=int(size))
                db.session.add(patch)
                seed = Example(value=True, patch=patch, keyword=keyword)
                db.session.add(seed)
                db.session.commit()
                # calculate patch feature, save feature file with just that patch
                manage.calculate_dataset_features(
                    config.BLOB_DIR, config.FEATURES_DIR,
                    os.path.basename(blob.location), [x, y, size, patch.id])
                # initialize classifier, change to select geo_query dataset
                manage.create_classifier(keyword_id=keyword.id, dataset_id=2,
                                         num_seeds=1)
    else:
        print('did not validate')
        print(form.keyword.errors)
        print(form.seeds.errors)
    return redirect(url_for('geo_top'))

def rateTitles(videoList):
    if DEBUG:
        print("rateTitles: Rating a list of video titles...")
    titleList = [i['snippet']['title'] for i in videoList]
    ratingDict = {}
    unknownKeywords = []
    keywords = []

    if DEBUG:
        print("rateTitles: Sending a title list to titleToKeywords_all...")
    kws = titleToKeywords_all(titleList)
    for kw in kws:
        KeywordObject = Keyword.objects(key=strToKeyword(kw))
        if not KeywordObject:
            unknownKeywords.append(kw)

    if DEBUG:
        print("rateTitles: Sending %i unknown keywords to dbInsertKeywords for no return..."
              % len(unknownKeywords))
    dbInsertKeywords(unknownKeywords, False)

    for title in titleList:
        ratingDict[title] = rateTitle(title)

    if DEBUG:
        print("rateTitles: Returning a rating dictionary...")
    return ratingDict

def parse_keyword(keyword, response, page, page_size):
    """Keyword parser"""
    resp_json = response.json()
    resp_keywords = resp_json['value']['data']
    if not resp_json['successed'] or len(resp_keywords) == 0:
        return None, None
    resp_total = resp_json['value']['total']
    next_page = None if page >= 500 else _get_next_page(1, page, page_size, resp_total)

    keywords = list()
    for item in resp_keywords:
        try:
            new_keyword = Keyword(
                value=item['keywords'],
                company_cnt=item['company_cnt'],
                showwin_cnt=item['showwin_cnt'],
                repeat_keyword=item.get('repeatKeyword', None),
                is_p4p_keyword=item.get('isP4pKeyword', None),
                update=datetime.strptime(item['yyyymm'] + '0309', '%Y%m%d%H')
                + relativedelta(months=+1),
                srh_pv={
                    'srh_pv_this_mon': item['srh_pv_this_mon'],
                    'srh_pv_last_1mon': item['srh_pv_last_1mon'],
                    'srh_pv_last_2mon': item['srh_pv_last_2mon'],
                    'srh_pv_last_3mon': item['srh_pv_last_3mon'],
                    'srh_pv_last_4mon': item['srh_pv_last_4mon'],
                    'srh_pv_last_5mon': item['srh_pv_last_5mon'],
                    'srh_pv_last_6mon': item['srh_pv_last_6mon'],
                    'srh_pv_last_7mon': item['srh_pv_last_7mon'],
                    'srh_pv_last_8mon': item['srh_pv_last_8mon'],
                    'srh_pv_last_9mon': item['srh_pv_last_9mon'],
                    'srh_pv_last_10mon': item['srh_pv_last_10mon'],
                    'srh_pv_last_11mon': item['srh_pv_last_11mon'],
                },
            )
        except KeyError:
            raise ParseError('数据解析错误 - %s: %s' % (type(item), item))  # "data parsing error"
        keywords.append(new_keyword)
    return next_page, keywords

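# The _get_next_page helper used by parse_keyword is not shown in the source, so
# the sketch below only illustrates the general "stop at page 500 or when the
# results run out" paging idea; the exact formula is an assumption, not the
# original implementation.
def next_page_number(page, page_size, total, hard_limit=500):
    """Return the next page to fetch, or None when paging should stop."""
    if page >= hard_limit:
        return None
    return page + 1 if page * page_size < total else None

if __name__ == '__main__':
    print(next_page_number(page=1, page_size=20, total=45))      # 2
    print(next_page_number(page=3, page_size=20, total=45))      # None (results exhausted)
    print(next_page_number(page=500, page_size=20, total=10**9)) # None (hard limit)
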
def menu(option):
    # Load the keywords when the program starts.
    keywords = Keyword.get_all()
    dominio = "google.com"

    # Show the user the menu of options to choose from.
    if option == "1":
        kw = input("Introduzca la palabra clave que desea agregar: ")
        keywords = agregar_keywords(kw)
        print("Se ha cargado la palabra clave...")
    elif option == "2":
        carga_keywords()
    elif option == "3":
        keywords = mostrar_keywords()
        if len(keywords) > 20:
            # If the list holds more than 20 keywords, paginate the output.
            cont = 0
            for i, j in enumerate(keywords, 1):
                if cont <= 19:
                    print(i, j)
                    cont += 1
                else:
                    stop = input("Presione cualquier tecla para continuar")
                    cont = 0
        else:
            # Otherwise print the whole list at once.
            for i, j in enumerate(keywords, 1):
                print(i, j)
    elif option == "4":
        kw = input("Introduzca la palabra clave que desea buscar: ")
        position = comprueba_keywords(kw)
        if position is None or position >= 100:
            print("La palabra clave {} para el dominio {} se encuentra en la posicion {}\n"
                  .format(kw, dominio, "+100"))
        else:
            print("La palabra clave {} para el dominio {} se encuentra en la posicion {}\n"
                  .format(kw, dominio, position))

def pipeline(abspath, session):
    # par() is parseXML_diclist(), imported from parse.py.
    d = par(abspath)

    # d["Document"] is a list holding a single dictionary, so d['Document'][0]
    # gives us the document dictionary directly.
    doc = d["Document"][0]
    # **doc passes the dictionary as keyword arguments, producing a Document
    # model object; it must be added to the session to be queryable later.
    doc_t = Document(**doc)
    session.add(doc_t)

    # Journal follows the same pattern; its documents column gets doc_t appended.
    jour = d['Journal'][0]
    jour_t = Journal(**jour)
    jour_t.documents.append(doc_t)
    session.add(jour_t)

    # A document can have several authors, so we loop. For each author we first
    # check whether the same author already exists in this session; if so, we
    # update their information instead of inserting a duplicate.
    for auth in d["Author"]:
        auth_t = session.query(Author).filter_by(full_name=auth["full_name"]).first()
        if auth_t is None:
            auth_t = Author(**auth)
        else:
            for k, v in auth.items():
                setattr(auth_t, k, v)
        auth_t.documents.append(doc_t)
        session.add(auth_t)

    for key in d["Keyword"]:
        # Some documents have no keywords; skipping empty entries avoids an error.
        if len(key) != 0:
            key_t = session.query(Keyword).filter_by(keyword=key["keyword"]).first()
            if key_t is None:
                key_t = Keyword(**key)
            else:
                for k, v in key.items():
                    setattr(key_t, k, v)
            key_t.documents.append(doc_t)
            session.add(key_t)

    for fld in d["Field"]:
        fld_t = session.query(Field).filter_by(
            name=fld["name"], value=fld["value"], type=fld["type"]).first()
        if fld_t is None:
            fld_t = Field(**fld)
        else:
            for k, v in fld.items():
                setattr(fld_t, k, v)
        fld_t.documents.append(doc_t)
        session.add(fld_t)

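# pipeline() repeats the same "query by natural key, create if missing, otherwise
# overwrite the fields, then link the document" steps for Author, Keyword and
# Field. Below is a compact, self-contained restatement of that get-or-update
# idea (assuming SQLAlchemy 1.4+, in-memory SQLite); the model and the helper
# name are assumptions made for the sketch, not part of the original code.
from sqlalchemy import create_engine, Column, Integer, String
from sqlalchemy.orm import declarative_base, Session

Base = declarative_base()

class KeywordRow(Base):
    __tablename__ = 'keyword'
    id = Column(Integer, primary_key=True)
    keyword = Column(String, unique=True)
    source = Column(String)

def get_or_update(session, model, lookup, **fields):
    """Fetch the row matching `lookup`, create it if absent, then apply `fields`."""
    obj = session.query(model).filter_by(**lookup).first()
    if obj is None:
        obj = model(**lookup)
    for attr, value in fields.items():
        setattr(obj, attr, value)
    session.add(obj)
    return obj

if __name__ == '__main__':
    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)
    with Session(engine) as session:
        get_or_update(session, KeywordRow, {'keyword': 'sqlalchemy'}, source='xml')
        get_or_update(session, KeywordRow, {'keyword': 'sqlalchemy'}, source='pdf')
        session.commit()
        print(session.query(KeywordRow).count())  # 1 -- updated, not duplicated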