Code Example #1
File: resource.py Project: sboily/xivo-webi-plugins
def post(self, id):
    with session_scope():
        meetme = meetme_dao.get(id)
        form = MeetmeForm(obj=meetme)
        if form.validate_on_submit():
            # Copy the validated form data back onto the model and persist it.
            form.populate_obj(meetme)
            meetme_dao.edit(meetme)
    return redirect(url_for('meetme.Meetme:index'))
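The round trip in this handler is the standard WTForms pattern: Form(obj=model) copies model attributes into the form fields, and form.populate_obj(model) writes the submitted values back onto those attributes. A minimal sketch of what MeetmeForm could look like (the field names here are illustrative assumptions, not taken from the project):

    from flask_wtf import FlaskForm
    from wtforms import IntegerField, StringField
    from wtforms.validators import InputRequired

    class MeetmeForm(FlaskForm):
        # Field names should match attribute names on the meetme model so
        # that obj=... loading and populate_obj() line up.
        name = StringField('Name', validators=[InputRequired()])
        confno = IntegerField('Conference number', validators=[InputRequired()])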
Code Example #2
def post(self):
    with session_scope():
        # The general SIP settings are loaded and edited as a single object.
        sip = generalsip_dao.list()
        form = FormGeneralSIP(obj=sip)
        if form.validate_on_submit():
            form.populate_obj(sip)
            generalsip_dao.edit(sip)
    return redirect(url_for("q_generalsip.GeneralSIP:get"))
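Both handlers wrap database access in a session_scope() context manager whose definition is not shown. A common SQLAlchemy rendition of that pattern (a sketch, assuming a project-level Session factory created with sessionmaker()) commits on success and rolls back on error:

    from contextlib import contextmanager

    @contextmanager
    def session_scope():
        # Commit if the block succeeds, roll back if it raises, always close.
        session = Session()  # assumed: Session = sessionmaker(bind=engine)
        try:
            yield session
            session.commit()
        except Exception:
            session.rollback()
            raise
        finally:
            session.close()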
Code Example #3
import sqlite3
from urllib.parse import unquote

from django.shortcuts import render

import DoubanCrawler  # project-local crawler package (assumed layout)
import models         # project-local sqlite helpers (assumed layout)


def index(request):
    conn = sqlite3.connect('douban.db')
    count = request.GET.get('count')
    if not count:
        count = '1'
    doubanUrl = request.GET.get('doubanurl')
    if doubanUrl is None:
        doubanUrl = 'http://'
    doubanUrl = unquote(doubanUrl)  # urllib2.unquote() exists only on Python 2
    print(doubanUrl)
    click = request.GET.get('click')
    if click == '1':
        # A click just marks that topic as read and re-renders the page.
        models.edit(conn, doubanUrl, 1)
        conn.close()
        return render(request, 'index.html')
    title = '豆瓣找房小助手'  # "Douban apartment-hunting helper"
    crawlerTool = DoubanCrawler.CrawlerTool.CrawlerTool()
    crawlerTool.initContents(int(count))  # string.atoi() was removed in Python 3
    topicTitles = crawlerTool.getTitles()
    topicUrls = crawlerTool.getUrls()
    for topicUrl in topicUrls:
        models.insertUrl(conn, topicUrl, 0)
    models.edit(conn, doubanUrl, 1)
    # 1 if the topic has already been read, else 0.
    isReadeds = [1 if models.isReaded(conn, topicUrl) else 0
                 for topicUrl in topicUrls]
    # Previously stored price per topic; -1 when none is cached yet.
    pricesFromDb = []
    for topicUrl in topicUrls:
        priceFromDb = models.getPrice(conn, topicUrl)
        if priceFromDb is not None:
            pricesFromDb.append(priceFromDb)
        else:
            pricesFromDb.append(-1)
    pricess = crawlerTool.getMoney(topicUrls, isReadeds, pricesFromDb)
    minPrices = []
    for prices in pricess:
        # Find the lowest price in each topic; 10000 acts as a sentinel
        # for "no usable price" and is stored as 0.
        temp = min([int(price) for price in prices] + [10000])
        minPrices.append(0 if temp == 10000 else temp)
    # Topics priced above 1751 used to be marked as read here; that is
    # disabled in favour of hiding them in the template, so every topic
    # now just gets its price cached in the database.
    for index in range(len(minPrices)):
        models.setPrice(conn, topicUrls[index], minPrices[index])

    conn.close()
    return render(
        request, 'index.html', {
            'title': title,
            'topicTitles': topicTitles,
            'topicUrls': topicUrls,
            'minPrices': minPrices,
            'doubanUrl': doubanUrl,
            'isReadeds': isReadeds
        })
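The view depends on a small models module (insertUrl, isReaded, getPrice, setPrice, edit) that is not shown. A plausible sqlite3-backed sketch, assuming a single topics(url TEXT PRIMARY KEY, readed INTEGER, price INTEGER) table (the schema is a guess for illustration, not taken from the project):

    import sqlite3

    def insertUrl(conn, url, readed):
        conn.execute('CREATE TABLE IF NOT EXISTS topics '
                     '(url TEXT PRIMARY KEY, readed INTEGER, price INTEGER)')
        # INSERT OR IGNORE keeps re-crawled topics from violating the key.
        conn.execute('INSERT OR IGNORE INTO topics (url, readed) VALUES (?, ?)',
                     (url, readed))
        conn.commit()

    def edit(conn, url, readed):
        conn.execute('UPDATE topics SET readed = ? WHERE url = ?', (readed, url))
        conn.commit()

    def isReaded(conn, url):
        row = conn.execute('SELECT readed FROM topics WHERE url = ?',
                           (url,)).fetchone()
        return bool(row and row[0])

    def getPrice(conn, url):
        row = conn.execute('SELECT price FROM topics WHERE url = ?',
                           (url,)).fetchone()
        return row[0] if row else None

    def setPrice(conn, url, price):
        conn.execute('UPDATE topics SET price = ? WHERE url = ?', (price, url))
        conn.commit()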