# Imports used by the views below; skip any already imported at the top of this module.
from flask import jsonify, redirect, render_template, request, url_for
import six
# DB, AddConfigForm, AddKeywordsFilterForm, flash_errors and get_day_time are
# project-local helpers; import them from wherever they are defined in this repo.


def archive():
    """Move a single scraper_craigslist row (looked up by id) into scraper_archive."""
    id = request.args.get("id", type=int)
    if id:
        db = DB()
        try:
            ret = six.next(db._select2dic("scraper_craigslist",
                                          where="id = ?", where_values=[id]))
            db._delete("scraper_craigslist", where="id = ?", where_values=[id])
            db.commit()
        except StopIteration:
            return jsonify({"code": 404, "message": "not found", "data": None})
        # Strip the primary key and row-status flags before copying into the archive table.
        del ret["id"]
        del ret["is_delete"]
        del ret["is_archive"]
        del ret["is_save"]
        db._insert("scraper_archive", **ret)
        db.commit()
        return jsonify({"code": 200, "message": "success", "data": None})
    return jsonify({"code": 500, "message": "missing id", "data": None})
def main():
    """Copy saved rows into scraper_archive, then purge rows older than today."""
    db = DB()
    for each in db._select2dic("scraper_craigslist", where="is_save = 1"):
        outid = each['outid']
        try:
            six.next(db._select2dic("scraper_archive",
                                    where="outid = ?", where_values=[outid]))
        except StopIteration:
            # Not archived yet: strip the primary key and status flags, then copy the row over.
            del each["id"]
            del each["is_delete"]
            del each["is_archive"]
            del each["is_save"]
            db._insert("scraper_archive", **each)
            db.commit()
    t = get_day_time()
    db._delete("scraper_craigslist", where="created <= ?", where_values=[t])
    db.commit()
def addconfig():
    form = AddConfigForm()
    form.active.data = "0"
    if form.validate_on_submit():
        # spider_ip = form.spider_ip.data
        url = form.url.data
        active = form.active.data
        source = form.source.data
        db = DB()
        try:
            six.next(db._select2dic("scraper_setting",
                                    where="url = ? and source = ?",
                                    where_values=[url, source]))
            form.url.errors.append("url exists")
            flash_errors(form)
            return render_template("addconfig.html", form=form)
        except StopIteration:
            db._insert("scraper_setting", **{"spider_ip": "", "url": url,
                                             "active": active, "source": source})
            db.commit()
            return redirect(url_for("configlist"))
    else:
        flash_errors(form)
        return render_template("addconfig.html", form=form)
def adddata():
    outid = request.form["outid"]
    url = request.form["url"]
    title = request.form["title"]
    source = request.form.get('source', 'craigslist')
    location = request.form["location"]
    thumbnail = request.form["thumbnail"]
    keyword = request.form["keyword"]
    created = request.form["created"]
    if outid and url and title and keyword and created and source:
        db = DB()
        try:
            six.next(db._select2dic("scraper_craigslist",
                                    where="outid = ? and source = ?",
                                    where_values=[outid, source]))
            return jsonify({"code": 303, "message": "data exists",
                            "data": {'url': url, 'outid': outid}})
        except StopIteration:
            db._insert("scraper_craigslist",
                       **{"outid": outid, "url": url, "title": title,
                          'location': location, 'thumbnail': thumbnail,
                          'keyword': keyword, 'created': created,
                          'is_delete': 0, 'is_archive': 0, 'is_save': 0,
                          'source': source})
            db.commit()
            return jsonify({"code": 200, "message": "success", "data": None})
    return jsonify({"code": 500, "message": "missing params", "data": None})
def keyword_filter():
    form = AddKeywordsFilterForm()
    db = DB()
    ret = None
    try:
        ret = six.next(db._select2dic("keyword_filter"))
    except StopIteration:
        # No keyword filter row stored yet.
        pass
    if form.validate_on_submit():
        keywords = form.keywords.data
        if ret:
            # Existing row: update it in place and re-render the form.
            db._update("keyword_filter", where="id = ?",
                       where_values=[ret['id']], keywords=keywords)
            db.commit()
            return render_template("keyword_filter.html", form=form)
        else:
            db._insert("keyword_filter", **{"keywords": keywords})
            db.commit()
            return redirect(url_for("keyword_filter"))
    else:
        flash_errors(form)
    # Pre-fill the form with the stored keywords, if any.
    if ret:
        form.keywords.data = ret['keywords']
    return render_template("keyword_filter.html", form=form)