def deleteconfig():
    """Delete the ``scraper_setting`` row named by the ``id`` query argument.

    Always redirects back to the ``configlist`` view, whether or not the id
    was supplied or the row existed.
    """
    # Named `config_id` to avoid shadowing the builtin `id`.
    config_id = request.args.get("id", type=int)
    if config_id:
        db = DB()
        try:
            # Existence check: the generator raises StopIteration when no
            # row matches. Keep the try body minimal so failures from the
            # delete/commit below are not silently swallowed.
            next(db._select2dic("scraper_setting",
                                where="id = ?",
                                where_values=[config_id]))
        except StopIteration:
            return redirect(url_for("configlist"))
        db._delete("scraper_setting", where="id = ?", where_values=[config_id])
        db.commit()
    return redirect(url_for("configlist"))
def delete_archive():
    """Delete the ``scraper_archive`` row named by the ``id`` query argument.

    Returns a JSON envelope:
      * 500 when the id is missing (or falsy),
      * 404 when no row with that id exists,
      * 200 after a successful delete.
    """
    # Named `archive_id` to avoid shadowing the builtin `id`.
    archive_id = request.args.get("id", type=int)
    if not archive_id:
        # Guard clause: original fell through to this response for any
        # falsy id (absent, non-int, or 0).
        return jsonify({"code": 500, "message": "missing id", "data": None})
    db = DB()
    try:
        # Existence check: StopIteration means no matching row.
        next(db._select2dic("scraper_archive",
                            where="id = ?",
                            where_values=[archive_id]))
    except StopIteration:
        return jsonify({"code": 404, "message": "not found", "data": None})
    db._delete("scraper_archive", where="id = ?", where_values=[archive_id])
    db.commit()
    return jsonify({"code": 200, "message": "success", "data": None})
def main():
    """Archive saved craigslist rows, then purge rows past the cutoff.

    For every ``scraper_craigslist`` row with ``is_save = 1`` that is not
    already present in ``scraper_archive`` (matched by ``outid``), copy it
    into the archive without the bookkeeping columns.  Finally delete all
    craigslist rows with ``created <= get_day_time()``.
    """
    db = DB()
    # Bookkeeping columns present on scraper_craigslist but not wanted in
    # the archive table.
    excluded = frozenset(("id", "is_delete", "is_archive", "is_save"))
    for row in db._select2dic("scraper_craigslist", where="is_save = 1"):
        outid = row["outid"]
        try:
            # Already archived?  StopIteration means "no" (not found).
            next(db._select2dic("scraper_archive",
                                where="outid = ?",
                                where_values=[outid]))
        except StopIteration:
            # Build a filtered copy instead of del-ing keys in place:
            # no KeyError if a bookkeeping column is absent, and the row
            # being iterated is left unmodified.
            record = {k: v for k, v in row.items() if k not in excluded}
            db._insert("scraper_archive", **record)
            db.commit()
    # Purge old rows; cutoff semantics come from get_day_time() — presumably
    # a start-of-day timestamp (TODO confirm against its definition).
    cutoff = get_day_time()
    db._delete("scraper_craigslist", where="created <= ?", where_values=[cutoff])
    db.commit()