def cleaner(self):
    """Purge rows whose `purge` marker points at a history run older than
    the configured diff window."""
    logger.info('cleaner run')
    # History ids beyond the newest DiffCount() entries are stale; anything
    # purged during those runs can be dropped for good.
    stale_history = History.select(History.id).order_by(
        History.id.desc()).offset(self.cfg.DiffCount())
    for model in (Item, IP, Domain, URL):
        model.delete().where(model.purge << stale_history).execute()
def item_post_handler():
    """Create a new item owned by the current user.

    Expects a JSON body with name, detail, type, price, sale_self,
    will_take_back and img ({'type': 'png'|'jpg'|'gif'|'null',
    'data': base64}).  Returns {'status': 'ok', 'id': <new id>} on success,
    401 when not registered, 400 on any malformed input.
    """
    if not current_user.has_registered:
        return jsonify({'status': 'unauthorized'}), 401
    try:
        req_data = request.get_json(force=True)
        name = html.escape(req_data['name'])
        detail = html.escape(req_data['detail'])
        img_type = req_data['img']['type']
        type_ = int(req_data['type'])
        sale_self = req_data['sale_self']
        will_take_back = req_data['will_take_back']
        price = decimal.Decimal(req_data['price'])
    except (KeyError, TypeError, ValueError, decimal.DecimalException):
        # Narrowed from a bare `except:` so programming errors are no longer
        # silently reported as bad requests.
        return jsonify({'status': 'badrequest'}), 400
    if name == '' or price < 0 or price > 100000000:
        return jsonify({'status': 'badrequest'}), 400
    if type_ >= len(TYPE_NAME) or type_ < 0:
        return jsonify({'status': 'badrequest'}), 400
    if img_type not in ['png', 'jpg', 'gif', 'null']:
        return jsonify({'status': 'badrequest'}), 400
    if img_type == 'null':
        id_ = Item.insert(name=name, price=price, type_=type_,
                          sale_self=sale_self, will_take_back=will_take_back,
                          user=current_user.id_, has_saled=False,
                          is_deleted=False, has_given_staff=False,
                          detail=detail, img_filename='').execute()
    else:
        try:
            # binascii.Error is a ValueError subclass; previously a bad
            # base64 payload (or missing 'data' key) crashed with a 500.
            img = base64.b64decode(req_data['img']['data'])
        except (KeyError, TypeError, ValueError):
            return jsonify({'status': 'badrequest'}), 400
        if len(img) > 102400:
            return jsonify({'status': 'badrequest'}), 400
        # Item.create() already INSERTs the row; the original issued an
        # extra redundant save() (UPDATE) immediately after.
        item = Item.create(name=name, price=price, type_=type_,
                           sale_self=sale_self,
                           will_take_back=will_take_back,
                           user=current_user.id_, has_saled=False,
                           is_deleted=False, has_given_staff=False,
                           detail=detail, img_filename='')
        item.img_filename = '%d.%s' % (item.id_, img_type)
        with open('imgs/' + item.img_filename, 'wb') as f:
            f.write(img)
        item.save()
        id_ = item.id_
    return jsonify({'status': 'ok', 'id': id_})
def cleaner(self):
    """Drop rows purged before the diff window, trim History to size, and
    delete IP rows that fall in private/reserved address ranges."""
    private_nets = [
        '0.%', '127.%', '192.168.%', '10.%',
        '172.16.%', '172.17.%', '172.18.%', '172.19.%', '172.20.%',
        '172.21.%', '172.22.%', '172.23.%', '172.24.%', '172.25.%',
        '172.26.%', '172.27.%', '172.28.%', '172.29.%', '172.30.%',
        '172.31.%',
    ]
    logger.info('Dump cleaner run')
    # History runs beyond the newest DiffCount() entries are stale.
    stale = History.select(History.id).order_by(
        History.id.desc()).offset(self.cfg.DiffCount())
    removed = Item.delete().where(Item.purge << stale).execute()
    logger.info('Item deleted: %d', removed)
    removed = IP.delete().where(IP.purge << stale).execute()
    logger.info('IP deleted: %d', removed)
    removed = Domain.delete().where(Domain.purge << stale).execute()
    logger.info('Domain deleted: %d', removed)
    removed = URL.delete().where(URL.purge << stale).execute()
    logger.info('URL deleted: %d', removed)
    # Keep only the newest HistoryCount() history records themselves.
    obsolete = History.select(History.id).order_by(
        History.id.desc()).offset(self.cfg.HistoryCount())
    removed = History.delete().where(History.id << obsolete).execute()
    logger.info('History deleted: %d', removed)
    # Addresses in private ranges should never appear in the dump; log any
    # that were stored so the upstream data problem is visible.
    for net in private_nets:
        bad = IP.delete().where(IP.ip % net).execute()
        if bad:
            logger.info('IP error LIKE %s, count %d', net, bad)
def create_new_item():
    """Create an Item from the JSON request body.

    Returns the created item as JSON with 201, or 400 when the body is
    missing/not JSON or the insert fails (implements the original TODO).
    """
    payload = request.json
    if payload is None:
        return jsonify({"error": "request body must be JSON"}), 400
    new_item = Item()
    new_item.comment = payload.get('comment')
    new_item.title = payload.get('title')
    new_item.date_added = datetime.now()
    try:
        session.add(new_item)
        session.commit()
    except Exception:
        # Leave the session usable for the next request.
        session.rollback()
        return jsonify({"error": "could not create item"}), 400
    result = {
        "id": new_item.id,
        "title": new_item.title,
        "comment": new_item.comment,
        "date_added": new_item.date_added,
    }
    return jsonify(result), 201
def item_handler(item_id):
    """GET/PUT/DELETE a single item owned by the current user.

    401 when unregistered or not the owner; 404 when the item does not
    exist or is deleted; 403 when the item was already sold/handed over.
    """
    if not current_user.has_registered:
        return jsonify({'status': 'unauthorized'}), 401
    try:
        item = Item.get((Item.id_ == item_id) & (Item.is_deleted == False))
    except DoesNotExist:
        return jsonify({'status': 'itemnotexist'}), 404
    if item.user.id_ != current_user.id_:
        return jsonify({'status': 'unauthorized'}), 401
    if request.method == 'GET':
        return jsonify({
            'status': 'ok',
            'type': item.type_,
            'name': item.name,
            'detail': item.detail,
            'price': format(item.price, 'f'),
            'has_saled': item.has_saled,
            'sale_self': item.sale_self,
            'will_take_back': item.will_take_back,
            'has_given_staff': item.has_given_staff
        })
    if item.has_saled or item.has_given_staff:
        return jsonify({'status': 'cannotmodify'}), 403
    if request.method == 'DELETE':
        item.is_deleted = True
        item.save()
        return jsonify({'status': 'ok'})
    elif request.method == 'PUT':
        try:
            req_data = request.get_json(force=True)
            name = html.escape(req_data['name'])
            detail = html.escape(req_data['detail'])
            img_type = req_data['img']['type']
            type_ = int(req_data['type'])
            sale_self = req_data['sale_self']
            will_take_back = req_data['will_take_back']
            price = decimal.Decimal(req_data['price'])
        except (KeyError, TypeError, ValueError, decimal.DecimalException):
            # Narrowed from a bare `except:`.
            return jsonify({'status': 'badrequest'}), 400
        if name == '' or price < 0 or price > 100000000:
            return jsonify({'status': 'badrequest'}), 400
        if type_ >= len(TYPE_NAME) or type_ < 0:
            return jsonify({'status': 'badrequest'}), 400
        if img_type not in ['png', 'jpg', 'gif', 'null']:
            return jsonify({'status': 'badrequest'}), 400
        item.name = name
        item.detail = detail
        if img_type == 'null':
            item.img_filename = ''
        else:
            try:
                # binascii.Error subclasses ValueError; bad base64 or a
                # missing 'data' key previously crashed with a 500.
                img = base64.b64decode(req_data['img']['data'])
            except (KeyError, TypeError, ValueError):
                return jsonify({'status': 'badrequest'}), 400
            if len(img) > 102400:
                # Same 100 KiB cap enforced at creation time; it was
                # missing on update, allowing unbounded uploads.
                return jsonify({'status': 'badrequest'}), 400
            item.img_filename = '%d.%s' % (item.id_, img_type)
            with open('imgs/' + item.img_filename, 'wb') as f:
                f.write(img)
        item.type_ = type_
        item.sale_self = sale_self
        item.will_take_back = will_take_back
        item.price = price
        item.save()
        return jsonify({'status': 'ok'})
def add_items(self):
    """Stage every configured item on the session (no commit here)."""
    for entry in self.items:
        record = Item(
            name=entry.name,
            description=entry.description,
            category=entry.category,
            user=entry.user,
            picture=entry.picture,
        )
        self.session.add(record)
def img_handler(item_id):
    """Serve the stored image for a live item; 404 when the item is
    missing, deleted, or has no image on file."""
    try:
        found = Item.get((Item.id_ == item_id) & (Item.is_deleted == False))
    except DoesNotExist:
        abort(404)
    if found.img_filename == '':
        abort(404)
    return send_from_directory('imgs', found.img_filename)
def add_item(db):
    """Interactively prompt for a name and price and insert a new item.

    Prints an error and returns early when the price is not numeric or
    the insert reports a failure.
    """
    print("Adding a new item")
    name = raw_input("Name: ")
    raw_price = raw_input("Price: ")
    try:
        price = float(raw_price)
    except ValueError:
        print("Price must be a real number\n")
        return
    err = Item(db).add(name, price)
    if err is not None:
        print(err)
        return
    print("Added item (%s, %.2f)\n" % (name, price))
def addNewItem(name, description, file, author, category_name):
    """Build an Item from form data and stage it on the session.

    The category name is resolved to an id and the uploaded file is
    stored first; the caller is responsible for committing.
    """
    newItem = Item(
        name=name,
        description=description,
        category_id=getCategoryName(category_name),
        user_id=author,
        picture=add_pic(file),
    )
    session.add(newItem)
    return newItem
def add_item():
    """Create an Item from query parameters and attach it to user 'jimmy'."""
    title = request.GET.get('title')
    price = request.GET.get('price')
    desc = request.GET.get('desc')
    # NOTE(review): lng/lat are read but never stored on the item — confirm
    # whether location support was intended here.
    lng = request.GET.get('lng')
    lat = request.GET.get('lat')
    new_item = Item(title=title, price=price, desc=desc)
    owner = session.query(User).filter_by(name='jimmy').first()
    owner.itemlist.append(new_item)
    session.commit()
    return "user %s added!" % title
def create_item(list_id, event_id):
    """Add an item to an event inside one of the user's public lists.

    Validates session, list membership, event existence, and required
    body fields before inserting; returns the serialized item with 201.
    """
    success, user = check_session()
    if not success:
        return user
    membership = user.public_lists.filter_by(public_list_id=list_id).first()
    if membership is None:
        return failure_response("list not found!")
    event = membership.public_list.events.filter_by(id=event_id).first()
    if event is None:
        return failure_response("event not found!")
    body = json.loads(request.data.decode())
    completed = body.get('completed')
    date = body.get('date')
    title = body.get('title')
    if any(field is None for field in (completed, date, title)):
        return failure_response("missing field(s)!")
    new_item = Item(completed=completed, date=date, title=title,
                    event_id=event_id)
    event.items.append(new_item)
    db.session.add(new_item)
    db.session.commit()
    return success_response(new_item.serialize(), 201)
def spend_budget(budget_id, category_name):
    """Record spending against a budget category.

    Rejects the spend when it would exceed the budget total; otherwise
    creates an Item, updates the running totals, and commits.
    """
    budget = Budget.query.filter_by(id=budget_id).first()
    if budget is None:
        return json.dumps({'success': False, 'error': 'Budget not found!'}), 404
    category = Category.query.filter_by(budget_id=budget_id,
                                        name=category_name).first()
    if category is None:
        return json.dumps({'success': False, 'error': 'Category not found!'}), 404
    budget_body = json.loads(request.data)
    spent = budget_body.get('spent', budget.spent)
    # BUG FIX: check the limit BEFORE mutating — the original incremented
    # budget.spent and category.total first and returned without a rollback
    # on the overbudget path, leaving dirty state on the session that a
    # later commit could silently persist.
    if budget.spent + spent > budget.total:
        return json.dumps({'success': False,
                           'error': 'You are spending overbudget!'})
    item = Item(
        name=budget_body.get('name'),
        total=spent,
        category_id=category.id,
        budget_id=budget.id,
    )
    budget.spent += spent
    category.total += spent
    category.items.append(item)
    db.session.add(item)
    db.session.commit()
    return json.dumps({'success': True, 'data': item.serialize()}), 200
def newItem(category_id):
    """Show the new-item form (GET) or create an item in the category (POST)."""
    category = session.query(Category).filter_by(id=category_id).one()
    if request.method != 'POST':
        return render_template('newItem.html', category_id=category_id,
                               category=category)
    form = request.form
    created = Item(name=form['name'],
                   description=form['description'],
                   price=form['price'],
                   image=form['image'],
                   category_id=category_id)
    session.add(created)
    flash('New item \"%s\" successfuly added' % created.name)
    session.commit()
    return redirect(url_for('showCategories', category_id=category_id))
def myitems_handler():
    """Render the current user's non-deleted items, serialized to JSON for
    the template; unregistered users are sent to /register."""
    if not current_user.has_registered:
        return redirect('/register', code=302)
    owned = Item.select().where((Item.user == current_user.id_) &
                                (Item.is_deleted == False))
    payload = [
        {
            'id': it.id_,
            'type': it.type_,
            'name': it.name,
            'detail': it.detail,
            'price': format(it.price, 'f'),
            'has_saled': it.has_saled,
            'sale_self': it.sale_self,
            'will_take_back': it.will_take_back,
            'has_given_staff': it.has_given_staff,
        }
        for it in owned
    ]
    return render_template('myitems.html',
                           items=json.dumps(payload, ensure_ascii=False),
                           active='myitems')
def add_item():
    """Create a located Item for the named user from query parameters.

    Returns {'status': 'error'} when the user is unknown, otherwise
    commits the item and returns {'status': 'ok'}.
    """
    params = request.GET
    title = params.get('title')
    price = params.get('price')
    desc = params.get('description')
    lat, lng = params.get('latlng').split(',')
    username = params.get('username')
    owner = dbsession.query(User).filter_by(name=username).first()
    if not owner:
        return {'status': 'error'}
    located = Item(title=title, price=price, desc=desc,
                   location='POINT(%s %s)' % (lat, lng))
    owner.itemlist.append(located)
    dbsession.commit()
    return {'status': 'ok'}
def items_handler():
    """List unsold, non-deleted items with paging plus optional keyword
    and type filters; 404 when the requested page is out of range."""
    try:
        # int(None) raises TypeError, covering the missing-parameter case.
        page = int(request.args.get('page'))
    except (TypeError, ValueError):
        page = 1
    keywords = request.args.get('keywords')
    type_ = request.args.get('type')
    condition = (Item.is_deleted == False) & (Item.has_saled == False)
    if keywords:
        for k in keywords.split(' '):
            condition = condition & (Item.name.contains(k) |
                                     Item.detail.contains(k))
    if type_ is not None and type_ != 'all':
        try:
            type_ = int(type_)
        except ValueError:
            # Narrowed from a bare `except:`; a non-numeric type is ignored.
            pass
        else:
            if type_ in (0, 1, 2, 3):
                condition = condition & (Item.type_ == type_)
    query = Item.select().where(condition)
    pages = int(math.ceil(query.count() / 10))
    if pages == 0:
        return render_template('items.html', items=[], page=0, pages=0,
                               active='items')
    if page > pages:
        abort(404)
    items = query.order_by(Item.id_.desc()).paginate(page, 10)
    return render_template('items.html', items=items, page=page, pages=pages,
                           active='items')
def read_item(
    shopping: ItemResp,
    shoppingListId: int,
    username: str = Depends(get_current_username),
):
    """Record a bought item and update its shopping list's timing stats.

    First purchase stamps startTime; when the bought count reaches
    numItems, completionTime and the elapsed differenceTime are stored.
    """
    anItem = Item(barcode=shopping.barcode, itemId=shopping.itemId,
                  shoppingListId=shoppingListId, user=username,
                  time=int(time.time()))
    session = Session()
    try:
        session.add(anItem)
        ls = session.query(ShoppingList).filter(
            ShoppingList.listId == anItem.shoppingListId).first()
        # NOTE(review): ls may be None for an unknown list id — that would
        # raise AttributeError here, as in the original; confirm intent.
        if len(ls.bought) == 1:
            ls.startTime = time.time()
        if len(ls.bought) == ls.numItems:
            ls.completionTime = time.time()
            ls.differenceTime = ls.completionTime - ls.startTime
        session.commit()
    finally:
        # BUG FIX: the original leaked the session whenever anything above
        # raised; always close it.
        session.close()
    return
def create_item(category_id=0):
    """Render the new-item form (GET) or persist a submitted item (POST).

    Unauthenticated users get a plain refusal message.
    """
    if not is_logged_in():
        return "You must be authenticated to create Items"
    if request.method == 'GET':
        return render_template('new-item.html',
                               categories=get_all_categories(),
                               is_logged_in=is_logged_in(),
                               logged_in_user=login_session,
                               category_id=category_id)
    form = request.form
    created = Item(name=form['name'],
                   description=form['description'],
                   category_id=form['category'],
                   user_id=login_session.get('id'))
    session.add(created)
    session.commit()
    return redirect('/', code=303)
def admin_checkout_handler():
    """Admin panel: list users (GET) or run a checkout/receive/newadmin
    action on an item or user (POST)."""
    if not current_user.is_admin:
        abort(401)
    if request.method == 'GET':
        users = User.select()
        return render_template('admin.html', active='admin', users=users)
    elif request.method == 'POST':
        req_data = request.get_json(force=True)
        try:
            action = req_data['action']
            id_ = int(req_data['id'])
            if action == 'newadmin':
                user = User.get(User.id_ == id_)
            else:
                item = Item.get((Item.id_ == id_) &
                                (Item.is_deleted == False))
        except DoesNotExist:
            return jsonify({'status': 'notfound'}), 404
        except (KeyError, TypeError, ValueError):
            # Narrowed from a bare `except:` that reported every internal
            # error as a 400 bad request.
            return jsonify({'status': 'badrequest'}), 400
        if action == 'checkout':
            if item.has_saled:
                return jsonify({'status': 'hassaled'}), 404
            item.has_saled = True
            item.save()
        elif action == 'receive':
            if item.sale_self:
                return jsonify({'status': 'saleself'}), 400
            if item.has_given_staff:
                return jsonify({'status': 'hasgiven'}), 404
            item.has_given_staff = True
            item.save()
        elif action == 'newadmin':
            user.is_admin = True
            user.save()
        else:
            return jsonify({'status': 'badrequest'}), 400
        return jsonify({'status': 'ok'})
def items():
    """Import slot-item master data into the Item table, one commit per row.

    The unused `count` accumulator from the original has been removed.
    """
    for item in data["api_mst_slotitem"]:
        i = Item(
            sortno=item["api_sortno"],
            api_id=item["api_id"],
            info=item["api_info"],
            usebull=item["api_usebull"],
            name=item["api_name"],
            rarity=item["api_rare"],
            broken=item["api_broken"],
            types=item["api_type"],
            # "api_taik" was deliberately left out in the original import.
            souk=item["api_souk"],
            houg=item["api_houg"],
            raig=item["api_raig"],
            soku=item["api_soku"],
            baku=item["api_baku"],
            tyku=item["api_tyku"],
            tais=item["api_tais"],
            atap=item["api_atap"],
            houm=item["api_houm"],
            raim=item["api_raim"],
            houk=item["api_houk"],
            raik=item["api_raik"],
            bakk=item["api_bakk"],
            saku=item["api_saku"],
            sakb=item["api_sakb"],
            luck=item["api_luck"],
            leng=item["api_leng"],
        )
        # Per-row commit preserved from the original ("mercy" comment).
        db.session.add(i)
        db.session.commit()
def detail_handler(item_id):
    """Render the detail page for a live (non-deleted) item, or 404."""
    try:
        found = Item.get((Item.id_ == item_id) & (Item.is_deleted == False))
    except DoesNotExist:
        abort(404)
    return render_template('detail.html', active='detail', item=found)
def __addItem(self, item, tables, locale="enGB"):
    """Wrap raw item data in an Item and register it by id.

    Constructor helper; not called elsewhere.
    """
    entry = Item(item, tables, locale)
    self.itemList[entry.id] = entry
def create_item(name, price, stock):
    """Persist a new Item and return its serialized representation."""
    item = Item(name=name, price=price, stock=stock)
    db.session.add(item)
    db.session.commit()
    return item.serialize()
# Seed data: four makeup categories and two lipstick items, each committed
# individually as created.
# NOTE(review): category1..category4 are module-level names — later seed
# code outside this chunk presumably references category2-4; do not remove.
category1 = Category(name="Lipstick")
session.add(category1)
session.commit()
category2 = Category(name="Mascara")
session.add(category2)
session.commit()
category3 = Category(name="Eyeshadow")
session.add(category3)
session.commit()
category4 = Category(name="Foundation")
session.add(category4)
session.commit()
# Prices are stored as display strings (e.g. "$33.00"); images are
# hotlinked from sephora.com.
Item1 = Item(
    name="Burbery Kisses Lipstick",
    description="Nude Beige No. 01 - Pale beige",
    price="$33.00",
    image="http://www.sephora.com/productimages/sku/s1740489-main-Lhero.jpg",
    category=category1)
session.add(Item1)
session.commit()
Item2 = Item(
    name="Yves Saint Laurent Rouge Pur Couture Star Clash Edition",
    description="52 Rouge Rose",
    price="$37.00",
    image="http://www.sephora.com/productimages/sku/s1863257-main-Lhero.jpg",
    category=category1)
session.add(Item2)
session.commit()
def parse_dump(self):
    """Synchronize the database with dump.xml (RKN registry dump).

    Phases:
      1. Record the dump's updateTime / updateTimeUrgently stamps.
      2. Mark records missing from the dump as purged (soft delete).
      3. Insert brand-new content entries with their URL/Domain/IP rows.
      4. For entries whose hash changed, update the Item fields and diff
         the URL/Domain/IP/subnet sets against the XML.

    Returns 0 when dump.xml is absent, 1 when changes were applied,
    2 when nothing changed.
    """
    if not os.path.exists(self.path_py + '/dump.xml'):
        logger.info('dump.xml not found: s%', self.path_py + '/dump.xml')
        return 0
    logger.info('dump.xml already exists.')
    tree_xml = ElementTree().parse(self.path_py + '/dump.xml')
    # Persist both dump timestamps as epoch seconds.
    dt = datetime.strptime(tree_xml.attrib['updateTime'][:19],
                           '%Y-%m-%dT%H:%M:%S')
    update_time = int(time.mktime(dt.timetuple()))
    Dump.update(value=update_time).where(
        Dump.param == 'lastDumpDate').execute()
    logger.info('Got updateTime: %s.', update_time)
    dt = datetime.strptime(tree_xml.attrib['updateTimeUrgently'][:19],
                           '%Y-%m-%dT%H:%M:%S')
    update_time_urgently = int(time.mktime(dt.timetuple()))
    Dump.update(value=update_time_urgently).where(
        Dump.param == 'lastDumpDateUrgently').execute()
    logger.info('Got updateTimeUrgently: %s.', update_time_urgently)
    # Compare the set of content ids in the XML against the live
    # (purge IS NULL) ids in the database.
    list_xml = tree_xml.findall(".//*[@id]")
    id_set_dump = set()
    id_set_db = set()
    for content_xml in list_xml:
        id_set_dump.add(int(content_xml.attrib['id']))
    select_content_id_db = Item.select(
        Item.content_id).where(Item.purge >> None)
    for content_db in select_content_id_db:
        id_set_db.add(content_db.content_id)
    common_id_set = id_set_dump.intersection(id_set_db)
    delete_id_set = id_set_db.difference(common_id_set)
    add_id_set = id_set_dump.difference(common_id_set)
    if len(delete_id_set) > 0:
        # Soft-delete: stamp purge with this run's code id rather than
        # removing rows (the cleaner drops them later).
        with self.transact.atomic():
            for del_item in delete_id_set:
                logger.info('Full delete Item, IP, Domain, URL id: %s.',
                            del_item)
                Item.update(purge=self.code_id).where(
                    Item.content_id == del_item,
                    Item.purge >> None).execute()
                Domain.update(purge=self.code_id).where(
                    Domain.content_id == del_item,
                    Domain.purge >> None).execute()
                URL.update(purge=self.code_id).where(
                    URL.content_id == del_item,
                    URL.purge >> None).execute()
                IP.update(purge=self.code_id).where(
                    IP.content_id == del_item,
                    IP.purge >> None).execute()
    if len(add_id_set) > 0:
        include_time = str()
        urgency_type = int()
        entry_type = int()
        block_type = str()
        hash_value = str()
        with self.transact.atomic():
            for new_item in add_id_set:
                logger.info('New Item, IP, Domain, URL id: %s.', new_item)
                new_item_xml = tree_xml.find(".//content[@id='" +
                                             str(new_item) + "']")
                # Element order matters: the <content> attributes are read
                # first, the Item row is created on <decision>, and the
                # child url/domain/ip rows reference that Item.
                for data_xml in new_item_xml.iter():
                    if data_xml.tag == 'content':
                        content_id = int(data_xml.attrib['id'])
                        try:
                            urgency_type = int(
                                data_xml.attrib['urgencyType'])
                        except KeyError:
                            urgency_type = 0
                        include_time = self.date_time_xml_to_db(
                            data_xml.attrib['includeTime'])
                        try:
                            block_type = data_xml.attrib['blockType']
                        except KeyError:
                            block_type = 'default'
                        entry_type = int(data_xml.attrib['entryType'])
                        hash_value = data_xml.attrib['hash']
                    if data_xml.tag == 'decision':
                        decision_date = data_xml.attrib['date']
                        decision_number = data_xml.attrib['number']
                        decision_org = data_xml.attrib['org']
                        item_new = Item(content_id=content_id,
                                        includeTime=include_time,
                                        urgencyType=urgency_type,
                                        entryType=entry_type,
                                        blockType=block_type,
                                        hashRecord=hash_value,
                                        decision_date=decision_date,
                                        decision_num=decision_number,
                                        decision_org=decision_org,
                                        add=self.code_id)
                        item_new.save()
                    if data_xml.tag == 'url':
                        # Percent-encode the path of non-ASCII URLs.
                        if not self.only_ascii(data_xml.text):
                            url_split = str(data_xml.text).split(':')
                            url = url_split[0] + ':' + urllib.parse.quote(
                                url_split[1])
                        else:
                            url = data_xml.text
                        URL.create(item=item_new.id, content_id=content_id,
                                   url=url, add=self.code_id)
                    if data_xml.tag == 'domain':
                        # IDNA-encode non-ASCII domain names.
                        if not self.only_ascii(data_xml.text):
                            domain = (str(
                                data_xml.text).encode('idna')).decode()
                        else:
                            domain = data_xml.text
                        Domain.create(item=item_new.id,
                                      content_id=content_id,
                                      domain=domain, add=self.code_id)
                    if data_xml.tag == 'ip':
                        ip = data_xml.text
                        IP.create(item=item_new.id, content_id=content_id,
                                  ip=ip, add=self.code_id)
                    if data_xml.tag == 'ipSubnet':
                        net = data_xml.text.split('/')
                        ip = net[0]
                        mask = net[1]
                        IP.create(item=item_new.id, content_id=content_id,
                                  ip=ip, mask=mask, add=self.code_id)
    # Phase 4: re-walk every entry; when the stored hash differs from the
    # XML hash, update the Item fields and diff the child sets.
    url_db_set = set()
    url_xml_set = set()
    ip_db_set = set()
    ip_xml_set = set()
    sub_ip_xml_set = set()
    sub_ip_db_set = set()
    domain_db_set = set()
    domain_xml_set = set()
    data_update = False
    with self.transact.atomic():
        for item_xml in list_xml:
            for data_xml in item_xml.iter():
                if data_xml.tag == 'content':
                    content_id = int(data_xml.attrib['id'])
                    hash_value = data_xml.attrib['hash']
                    item_db = Item.get(Item.content_id == content_id,
                                       Item.purge >> None)
                    if hash_value != item_db.hashRecord:
                        logger.info(
                            'Hashes not equal, update hash id: %s',
                            content_id)
                        try:
                            urgency_type = int(
                                data_xml.attrib['urgencyType'])
                        except KeyError:
                            urgency_type = 0
                        include_time = self.date_time_xml_to_db(
                            data_xml.attrib['includeTime'])
                        try:
                            block_type = data_xml.attrib['blockType']
                        except KeyError:
                            block_type = 'default'
                        entry_type = int(data_xml.attrib['entryType'])
                        item_db.hashRecord = hash_value
                        data_update = True
                    else:
                        # Hash unchanged: skip the rest of this entry.
                        data_update = False
                        break
                if data_xml.tag == 'decision':
                    decision_date = data_xml.attrib['date']
                    decision_number = data_xml.attrib['number']
                    decision_org = data_xml.attrib['org']
                    # Field-by-field update with logging of each change.
                    if str(item_db.includeTime) != include_time:
                        logger.info('content_id: %s.', content_id)
                        logger.info('XML includeTime: %s.', include_time)
                        logger.info('DB includeTime: %s.',
                                    item_db.includeTime)
                        item_db.includeTime = include_time
                    if item_db.urgencyType != urgency_type:
                        logger.info('content_id: %s.', content_id)
                        logger.info('XML urgencyType: %s.', urgency_type)
                        logger.info('DB urgencyType: %s.',
                                    item_db.urgencyType)
                        item_db.urgencyType = urgency_type
                    if item_db.blockType != block_type:
                        logger.info('content_id: %s.', content_id)
                        logger.info('XML blockType: %s.', block_type)
                        logger.info('DB blockType: %s.',
                                    item_db.blockType)
                        item_db.blockType = block_type
                    if item_db.entryType != entry_type:
                        logger.info('content_id: %s.', content_id)
                        logger.info('XML entryType: %s.', entry_type)
                        logger.info('DB entryType: %s.',
                                    item_db.entryType)
                        item_db.entryType = entry_type
                    if str(item_db.decision_date) != decision_date:
                        logger.info('content_id: %s.', content_id)
                        logger.info('XML date: %s.', decision_date)
                        logger.info('DB date: %s.',
                                    str(item_db.decision_date))
                        item_db.decision_date = decision_date
                    if item_db.decision_num != decision_number:
                        logger.info('content_id: %s.', content_id)
                        logger.info('XML number: %s.', decision_number)
                        logger.info('DB number: %s.',
                                    item_db.decision_num)
                        item_db.decision_num = decision_number
                    if item_db.decision_org != decision_org:
                        logger.info('content_id: %s.', content_id)
                        logger.info('XML org: %s.', decision_org)
                        logger.info('DB org: %s.', item_db.decision_org)
                        item_db.decision_org = decision_org
                if data_xml.tag == 'url':
                    if not self.only_ascii(data_xml.text):
                        url_split = str(data_xml.text).split(':')
                        url = url_split[0] + ':' + urllib.parse.quote(
                            url_split[1])
                    else:
                        url = data_xml.text
                    url_xml_set.add(url)
                if data_xml.tag == 'domain':
                    if not self.only_ascii(data_xml.text):
                        domain = (str(
                            data_xml.text).encode('idna')).decode()
                    else:
                        domain = data_xml.text
                    domain_xml_set.add(domain)
                if data_xml.tag == 'ip':
                    ip_xml_set.add(data_xml.text)
                if data_xml.tag == 'ipSubnet':
                    sub_ip_xml_set.add(data_xml.text)
            if data_update:
                # URLs: purge rows absent from the XML, add new ones.
                url_db = URL.select().where(URL.item == item_db.id,
                                            URL.purge >> None)
                for url_item in url_db:
                    url_db_set.add(url_item.url)
                if url_db_set != url_xml_set:
                    common_url_set = url_xml_set.intersection(url_db_set)
                    delete_url_set = url_db_set.difference(common_url_set)
                    add_url_set = url_xml_set.difference(common_url_set)
                    if len(delete_url_set) > 0:
                        logger.info('Delete id %s URL: %s', content_id,
                                    delete_url_set)
                        for delete_url in delete_url_set:
                            URL.update(purge=self.code_id).where(
                                URL.item == item_db.id,
                                URL.url == delete_url,
                                URL.purge >> None).execute()
                    if len(add_url_set) > 0:
                        logger.info('Add id %s URL: %s', content_id,
                                    add_url_set)
                        for add_url in add_url_set:
                            URL.create(item=item_db.id,
                                       content_id=item_db.content_id,
                                       url=add_url, add=self.code_id)
                url_db_set.clear()
                url_xml_set.clear()
                # Domains: same diff-and-sync.
                domain_db = Domain.select().where(
                    Domain.item == item_db.id, Domain.purge >> None)
                for domain_item in domain_db:
                    domain_db_set.add(domain_item.domain)
                if domain_db_set != domain_xml_set:
                    common_domain_set = domain_xml_set.intersection(
                        domain_db_set)
                    delete_domain_set = domain_db_set.difference(
                        common_domain_set)
                    add_domain_set = domain_xml_set.difference(
                        common_domain_set)
                    if len(delete_domain_set) > 0:
                        logger.info('Delete id %s Domain: %s', content_id,
                                    delete_domain_set)
                        for delete_domain in delete_domain_set:
                            Domain.update(purge=self.code_id).where(
                                Domain.item == item_db.id,
                                Domain.domain == delete_domain,
                                Domain.purge >> None).execute()
                    if len(add_domain_set) > 0:
                        logger.info('Add id %s Domain: %s', content_id,
                                    add_domain_set)
                        for add_domain in add_domain_set:
                            Domain.create(item=item_db.id,
                                          content_id=item_db.content_id,
                                          domain=add_domain,
                                          add=self.code_id)
                domain_db_set.clear()
                domain_xml_set.clear()
                # Single IPs (mask == 32): diff-and-sync.
                ip_db = IP.select().where(IP.item == item_db.id,
                                          IP.mask == 32,
                                          IP.purge >> None)
                for ip_item in ip_db:
                    ip_db_set.add(ip_item.ip)
                if ip_db_set != ip_xml_set:
                    common_ip_set = ip_xml_set.intersection(ip_db_set)
                    delete_ip_set = ip_db_set.difference(common_ip_set)
                    add_ip_set = ip_xml_set.difference(common_ip_set)
                    if len(delete_ip_set) > 0:
                        logger.info('Delete id %s ip: %s', content_id,
                                    delete_ip_set)
                        for delete_ip in delete_ip_set:
                            IP.update(purge=self.code_id).where(
                                IP.item == item_db.id,
                                IP.ip == delete_ip,
                                IP.mask == 32,
                                IP.purge >> None).execute()
                    if len(add_ip_set) > 0:
                        logger.info('Add id %s ip: %s', content_id,
                                    add_ip_set)
                        for add_ip in add_ip_set:
                            IP.create(item=item_db.id,
                                      content_id=item_db.content_id,
                                      ip=add_ip, add=self.code_id)
                ip_db_set.clear()
                ip_xml_set.clear()
                # Subnets (mask < 32), compared as "ip/mask" strings.
                sub_ip_db = IP.select().where(IP.item == item_db.id,
                                              IP.mask < 32,
                                              IP.purge >> None)
                for sub_ip_item in sub_ip_db:
                    sub_ip_db_set.add(
                        str(sub_ip_item.ip) + '/' + str(sub_ip_item.mask))
                if sub_ip_db_set != sub_ip_xml_set:
                    common_sub_ip_set = sub_ip_xml_set.intersection(
                        sub_ip_db_set)
                    delete_sub_ip_set = sub_ip_db_set.difference(
                        common_sub_ip_set)
                    add_sub_ip_set = sub_ip_xml_set.difference(
                        common_sub_ip_set)
                    if len(delete_sub_ip_set) > 0:
                        logger.info('Delete id %s subnet: %s', content_id,
                                    delete_sub_ip_set)
                        for delete_sub_ip in delete_sub_ip_set:
                            del_subnet = str(delete_sub_ip).split('/')
                            del_ip = del_subnet[0]
                            del_mask = del_subnet[1]
                            IP.update(purge=self.code_id).where(
                                IP.item == item_db.id,
                                IP.ip == del_ip,
                                IP.mask == del_mask,
                                IP.purge >> None).execute()
                    if len(add_sub_ip_set) > 0:
                        logger.info('Add id %s subnet: %s', content_id,
                                    add_sub_ip_set)
                        for add_sub_ip in add_sub_ip_set:
                            add_subnet = str(add_sub_ip).split('/')
                            add_ip = add_subnet[0]
                            add_mask = add_subnet[1]
                            IP.create(item=item_db.id,
                                      content_id=item_db.content_id,
                                      ip=add_ip, mask=add_mask,
                                      add=self.code_id)
                # Flush the accumulated field changes for this entry.
                item_db.save()
                sub_ip_db_set.clear()
                sub_ip_xml_set.clear()
    if self.check_diff():
        self.cleaner()
        return 1
    else:
        logger.info('no updates')
        return 2
def statistics_show(self, diff=0, stdout=False):
    """Build a human-readable summary of the latest dump diff.

    Lists added/deleted URLs, IPs (with /mask for subnets) and domains for
    the given diff window, followed by distinct live-record counts.
    Prints and returns False when `stdout` is true; otherwise returns the
    message string.
    """
    date_time = datetime.fromtimestamp(
        int(Dump.get(Dump.param == 'lastDumpDate').value)
    ).strftime('%Y-%m-%d %H:%M:%S')
    message = 'vigruzki.rkn.gov.ru update: ' + date_time + '\n'
    url_add_sql = self._url_diff_sql(diff, 'ignore', 1)
    message += '\nURLs added: \n\n'
    for url_add in url_add_sql:
        message += url_add.url + '\n'
    ip_add_sql = self._ip_diff_sql(diff, 'ignore', 1)
    message += '\nIPs added: \n\n'
    for ip_add in ip_add_sql:
        if ip_add.mask < 32:
            # BUG FIX: subnet lines were emitted without a trailing
            # newline, gluing them to the following entry.
            message += ip_add.ip + '/' + str(ip_add.mask) + '\n'
        else:
            message += ip_add.ip + '\n'
    domain_add_sql = self._domain_diff_sql(diff, 'ignore', 1)
    message += '\nDOMAINs added: \n\n'
    for domain_add in domain_add_sql:
        message += domain_add.domain + '\n'
    url_del_sql = self._url_diff_sql(diff, 'ignore', 0)
    message += '\nURLs deleted: \n\n'
    for url_del in url_del_sql:
        message += url_del.url + '\n'
    ip_del_sql = self._ip_diff_sql(diff, 'ignore', 0)
    message += '\nIPs deleted: \n\n'
    for ip_del in ip_del_sql:
        if ip_del.mask < 32:
            # BUG FIX: same missing newline as in the "added" section.
            message += ip_del.ip + '/' + str(ip_del.mask) + '\n'
        else:
            message += ip_del.ip + '\n'
    domain_del_sql = self._domain_diff_sql(diff, 'ignore', 0)
    message += '\nDOMAINs deleted: \n\n'
    for domain_del in domain_del_sql:
        message += domain_del.domain + '\n'
    # Counts of distinct live records: added before the rollback window
    # and either never purged or purged within it.
    rb_list = self.idx_list[:diff]
    domain_count = Domain.select(fn.Count(fn.Distinct(Domain.domain)))\
        .where(~(Domain.add << rb_list) &
               ((Domain.purge >> None) | (Domain.purge << rb_list))).scalar()
    url_count = URL.select(fn.Count(fn.Distinct(URL.url)))\
        .where(~(URL.add << rb_list) &
               ((URL.purge >> None) | (URL.purge << rb_list))).scalar()
    ip_count = IP.select(fn.Count(fn.Distinct(IP.ip)))\
        .where(~(IP.add << rb_list) &
               ((IP.purge >> None) | (IP.purge << rb_list))).scalar()
    id_count = Item.select(fn.Count(fn.Distinct(Item.content_id)))\
        .where(~(Item.add << rb_list) &
               ((Item.purge >> None) | (Item.purge << rb_list))).scalar()
    message += '\nURLs count: ' + str(url_count) + '\n'
    message += 'IPs count: ' + str(ip_count) + '\n'
    message += 'DOMAINs count: ' + str(domain_count) + '\n'
    message += 'Item count: ' + str(id_count) + '\n'
    if stdout:
        print(message)
        return False
    else:
        return message
def parse_dump(self):
    """Parse dump.xml and synchronise Item/IP/Domain/URL tables with it.

    Returns:
        0 -- dump.xml is missing;
        1 -- the sync produced changes (check_diff() was truthy);
        2 -- nothing changed.
    """
    if not os.path.exists(self.path_py + '/dump.xml'):
        # BUG FIX: placeholder was written as 's%' instead of '%s', so the
        # missing-file path was never interpolated into the log message.
        logger.info('dump.xml not found: %s', self.path_py + '/dump.xml')
        return 0
    logger.info('dump.xml already exists.')
    tree_xml = ElementTree().parse(self.path_py + '/dump.xml')

    # Persist both dump timestamps (regular and urgent) from the XML root.
    dt = datetime.strptime(tree_xml.attrib['updateTime'][:19], '%Y-%m-%dT%H:%M:%S')
    update_time = int(time.mktime(dt.timetuple()))
    Dump.update(value=update_time).where(Dump.param == 'lastDumpDate').execute()
    logger.info('Got updateTime: %s.', update_time)
    dt = datetime.strptime(tree_xml.attrib['updateTimeUrgently'][:19], '%Y-%m-%dT%H:%M:%S')
    update_time_urgently = int(time.mktime(dt.timetuple()))
    Dump.update(value=update_time_urgently).where(Dump.param == 'lastDumpDateUrgently').execute()
    logger.info('Got updateTimeUrgently: %s.', update_time_urgently)

    # Compare the set of content ids in the dump with the live ids in the DB.
    list_xml = tree_xml.findall(".//*[@id]")
    id_set_dump = set()
    id_set_db = set()
    for content_xml in list_xml:
        id_set_dump.add(int(content_xml.attrib['id']))
    select_content_id_db = Item.select(Item.content_id).where(Item.purge >> None)
    for content_db in select_content_id_db:
        id_set_db.add(content_db.content_id)
    common_id_set = id_set_dump.intersection(id_set_db)
    delete_id_set = id_set_db.difference(common_id_set)
    add_id_set = id_set_dump.difference(common_id_set)

    # 1) Mark records that disappeared from the dump as purged (soft delete).
    if len(delete_id_set) > 0:
        with self.transact.atomic():
            for del_item in delete_id_set:
                logger.info('Full delete Item, IP, Domain, URL id: %s.', del_item)
                Item.update(purge=self.code_id).where(Item.content_id == del_item,
                                                      Item.purge >> None).execute()
                Domain.update(purge=self.code_id).where(Domain.content_id == del_item,
                                                        Domain.purge >> None).execute()
                URL.update(purge=self.code_id).where(URL.content_id == del_item,
                                                     URL.purge >> None).execute()
                IP.update(purge=self.code_id).where(IP.content_id == del_item,
                                                    IP.purge >> None).execute()

    # 2) Insert records that are new in the dump.
    if len(add_id_set) > 0:
        include_time = str()
        urgency_type = int()
        entry_type = int()
        block_type = str()
        hash_value = str()
        with self.transact.atomic():
            for new_item in add_id_set:
                logger.info('New Item, IP, Domain, URL id: %s.', new_item)
                new_item_xml = tree_xml.find(".//content[@id='" + str(new_item) + "']")
                for data_xml in new_item_xml.iter():
                    if data_xml.tag == 'content':
                        content_id = int(data_xml.attrib['id'])
                        try:
                            urgency_type = int(data_xml.attrib['urgencyType'])
                        except KeyError:
                            urgency_type = 0
                        include_time = self.date_time_xml_to_db(data_xml.attrib['includeTime'])
                        try:
                            block_type = data_xml.attrib['blockType']
                        except KeyError:
                            block_type = 'default'
                        entry_type = int(data_xml.attrib['entryType'])
                        hash_value = data_xml.attrib['hash']
                    if data_xml.tag == 'decision':
                        decision_date = data_xml.attrib['date']
                        decision_number = data_xml.attrib['number']
                        decision_org = data_xml.attrib['org']
                        item_new = Item(content_id=content_id, includeTime=include_time,
                                        urgencyType=urgency_type, entryType=entry_type,
                                        blockType=block_type, hashRecord=hash_value,
                                        decision_date=decision_date,
                                        decision_num=decision_number,
                                        decision_org=decision_org, add=self.code_id)
                        item_new.save()
                    if data_xml.tag == 'url':
                        if not self.only_ascii(data_xml.text):
                            # BUG FIX: split only on the first ':' — splitting on
                            # every colon truncated URLs containing a port or a
                            # ':' in the path.
                            url_split = str(data_xml.text).split(':', 1)
                            url = url_split[0] + ':' + urllib.parse.quote(url_split[1])
                        else:
                            url = data_xml.text
                        URL.create(item=item_new.id, content_id=content_id, url=url,
                                   add=self.code_id)
                    if data_xml.tag == 'domain':
                        if not self.only_ascii(data_xml.text):
                            domain = (str(data_xml.text).encode('idna')).decode()
                        else:
                            domain = data_xml.text
                        Domain.create(item=item_new.id, content_id=content_id,
                                      domain=domain, add=self.code_id)
                    if data_xml.tag == 'ip':
                        ip = data_xml.text
                        IP.create(item=item_new.id, content_id=content_id, ip=ip,
                                  add=self.code_id)
                    if data_xml.tag == 'ipSubnet':
                        net = data_xml.text.split('/')
                        ip = net[0]
                        mask = net[1]
                        IP.create(item=item_new.id, content_id=content_id, ip=ip,
                                  mask=mask, add=self.code_id)

    # 3) Reconcile records whose <content> hash changed since the last run.
    url_db_set = set()
    url_xml_set = set()
    ip_db_set = set()
    ip_xml_set = set()
    sub_ip_xml_set = set()
    sub_ip_db_set = set()
    domain_db_set = set()
    domain_xml_set = set()
    data_update = False
    with self.transact.atomic():
        for item_xml in list_xml:
            for data_xml in item_xml.iter():
                if data_xml.tag == 'content':
                    content_id = int(data_xml.attrib['id'])
                    hash_value = data_xml.attrib['hash']
                    item_db = Item.get(Item.content_id == content_id, Item.purge >> None)
                    if hash_value != item_db.hashRecord:
                        logger.info('Hashes not equal, update hash id: %s', content_id)
                        try:
                            urgency_type = int(data_xml.attrib['urgencyType'])
                        except KeyError:
                            urgency_type = 0
                        include_time = self.date_time_xml_to_db(data_xml.attrib['includeTime'])
                        try:
                            block_type = data_xml.attrib['blockType']
                        except KeyError:
                            block_type = 'default'
                        entry_type = int(data_xml.attrib['entryType'])
                        item_db.hashRecord = hash_value
                        data_update = True
                    else:
                        # Unchanged record: skip the rest of this entry.
                        data_update = False
                        break
                if data_xml.tag == 'decision':
                    decision_date = data_xml.attrib['date']
                    decision_number = data_xml.attrib['number']
                    decision_org = data_xml.attrib['org']
                    if str(item_db.includeTime) != include_time:
                        logger.info('content_id: %s.', content_id)
                        logger.info('XML includeTime: %s.', include_time)
                        logger.info('DB includeTime: %s.', item_db.includeTime)
                        item_db.includeTime = include_time
                    if item_db.urgencyType != urgency_type:
                        logger.info('content_id: %s.', content_id)
                        logger.info('XML urgencyType: %s.', urgency_type)
                        logger.info('DB urgencyType: %s.', item_db.urgencyType)
                        item_db.urgencyType = urgency_type
                    if item_db.blockType != block_type:
                        logger.info('content_id: %s.', content_id)
                        logger.info('XML blockType: %s.', block_type)
                        logger.info('DB blockType: %s.', item_db.blockType)
                        item_db.blockType = block_type
                    if item_db.entryType != entry_type:
                        logger.info('content_id: %s.', content_id)
                        logger.info('XML entryType: %s.', entry_type)
                        logger.info('DB entryType: %s.', item_db.entryType)
                        item_db.entryType = entry_type
                    if str(item_db.decision_date) != decision_date:
                        logger.info('content_id: %s.', content_id)
                        logger.info('XML date: %s.', decision_date)
                        logger.info('DB date: %s.', str(item_db.decision_date))
                        item_db.decision_date = decision_date
                    if item_db.decision_num != decision_number:
                        logger.info('content_id: %s.', content_id)
                        logger.info('XML number: %s.', decision_number)
                        logger.info('DB number: %s.', item_db.decision_num)
                        item_db.decision_num = decision_number
                    if item_db.decision_org != decision_org:
                        logger.info('content_id: %s.', content_id)
                        logger.info('XML org: %s.', decision_org)
                        logger.info('DB org: %s.', item_db.decision_org)
                        item_db.decision_org = decision_org
                if data_xml.tag == 'url':
                    if not self.only_ascii(data_xml.text):
                        # BUG FIX: split(':', 1) — same truncation fix as above.
                        url_split = str(data_xml.text).split(':', 1)
                        url = url_split[0] + ':' + urllib.parse.quote(url_split[1])
                    else:
                        url = data_xml.text
                    url_xml_set.add(url)
                if data_xml.tag == 'domain':
                    if not self.only_ascii(data_xml.text):
                        domain = (str(data_xml.text).encode('idna')).decode()
                    else:
                        domain = data_xml.text
                    domain_xml_set.add(domain)
                if data_xml.tag == 'ip':
                    ip_xml_set.add(data_xml.text)
                if data_xml.tag == 'ipSubnet':
                    sub_ip_xml_set.add(data_xml.text)
            if data_update:
                # URL diff: purge rows missing from the dump, add new ones.
                url_db = URL.select().where(URL.item == item_db.id, URL.purge >> None)
                for url_item in url_db:
                    url_db_set.add(url_item.url)
                if url_db_set != url_xml_set:
                    common_url_set = url_xml_set.intersection(url_db_set)
                    delete_url_set = url_db_set.difference(common_url_set)
                    add_url_set = url_xml_set.difference(common_url_set)
                    if len(delete_url_set) > 0:
                        logger.info('Delete id %s URL: %s', content_id, delete_url_set)
                        for delete_url in delete_url_set:
                            URL.update(purge=self.code_id).where(URL.item == item_db.id,
                                                                 URL.url == delete_url,
                                                                 URL.purge >> None).execute()
                    if len(add_url_set) > 0:
                        logger.info('Add id %s URL: %s', content_id, add_url_set)
                        for add_url in add_url_set:
                            URL.create(item=item_db.id, content_id=item_db.content_id,
                                       url=add_url, add=self.code_id)
                url_db_set.clear()
                url_xml_set.clear()
                # Domain diff.
                domain_db = Domain.select().where(Domain.item == item_db.id,
                                                  Domain.purge >> None)
                for domain_item in domain_db:
                    domain_db_set.add(domain_item.domain)
                if domain_db_set != domain_xml_set:
                    common_domain_set = domain_xml_set.intersection(domain_db_set)
                    delete_domain_set = domain_db_set.difference(common_domain_set)
                    add_domain_set = domain_xml_set.difference(common_domain_set)
                    if len(delete_domain_set) > 0:
                        logger.info('Delete id %s Domain: %s', content_id, delete_domain_set)
                        for delete_domain in delete_domain_set:
                            Domain.update(purge=self.code_id).where(
                                Domain.item == item_db.id,
                                Domain.domain == delete_domain,
                                Domain.purge >> None).execute()
                    if len(add_domain_set) > 0:
                        logger.info('Add id %s Domain: %s', content_id, add_domain_set)
                        for add_domain in add_domain_set:
                            Domain.create(item=item_db.id, content_id=item_db.content_id,
                                          domain=add_domain, add=self.code_id)
                domain_db_set.clear()
                domain_xml_set.clear()
                # Host-IP diff (single addresses are stored with mask == 32).
                ip_db = IP.select().where(IP.item == item_db.id, IP.mask == 32,
                                          IP.purge >> None)
                for ip_item in ip_db:
                    ip_db_set.add(ip_item.ip)
                if ip_db_set != ip_xml_set:
                    common_ip_set = ip_xml_set.intersection(ip_db_set)
                    delete_ip_set = ip_db_set.difference(common_ip_set)
                    add_ip_set = ip_xml_set.difference(common_ip_set)
                    if len(delete_ip_set) > 0:
                        logger.info('Delete id %s ip: %s', content_id, delete_ip_set)
                        for delete_ip in delete_ip_set:
                            IP.update(purge=self.code_id).where(IP.item == item_db.id,
                                                                IP.ip == delete_ip,
                                                                IP.mask == 32,
                                                                IP.purge >> None).execute()
                    if len(add_ip_set) > 0:
                        logger.info('Add id %s ip: %s', content_id, add_ip_set)
                        for add_ip in add_ip_set:
                            IP.create(item=item_db.id, content_id=item_db.content_id,
                                      ip=add_ip, add=self.code_id)
                ip_db_set.clear()
                ip_xml_set.clear()
                # Subnet diff (mask < 32), compared as 'ip/mask' strings.
                sub_ip_db = IP.select().where(IP.item == item_db.id, IP.mask < 32,
                                              IP.purge >> None)
                for sub_ip_item in sub_ip_db:
                    sub_ip_db_set.add(str(sub_ip_item.ip) + '/' + str(sub_ip_item.mask))
                if sub_ip_db_set != sub_ip_xml_set:
                    common_sub_ip_set = sub_ip_xml_set.intersection(sub_ip_db_set)
                    delete_sub_ip_set = sub_ip_db_set.difference(common_sub_ip_set)
                    add_sub_ip_set = sub_ip_xml_set.difference(common_sub_ip_set)
                    if len(delete_sub_ip_set) > 0:
                        logger.info('Delete id %s subnet: %s', content_id, delete_sub_ip_set)
                        for delete_sub_ip in delete_sub_ip_set:
                            del_subnet = str(delete_sub_ip).split('/')
                            del_ip = del_subnet[0]
                            del_mask = del_subnet[1]
                            IP.update(purge=self.code_id).where(IP.item == item_db.id,
                                                                IP.ip == del_ip,
                                                                IP.mask == del_mask,
                                                                IP.purge >> None).execute()
                    if len(add_sub_ip_set) > 0:
                        logger.info('Add id %s subnet: %s', content_id, add_sub_ip_set)
                        for add_sub_ip in add_sub_ip_set:
                            add_subnet = str(add_sub_ip).split('/')
                            add_ip = add_subnet[0]
                            add_mask = add_subnet[1]
                            IP.create(item=item_db.id, content_id=item_db.content_id,
                                      ip=add_ip, mask=add_mask, add=self.code_id)
                # Persist any field changes accumulated on the Item row.
                item_db.save()
                sub_ip_db_set.clear()
                sub_ip_xml_set.clear()

    if self.check_diff():
        self.cleaner()
        return 1
    else:
        logger.info('no updates')
        self.cleaner()
        return 2
def list_item_detail(list_id, item_id):
    """Render the detail page for a single item.

    NOTE(review): `.first()` returns None when item_id matches nothing,
    which would raise AttributeError on the attribute accesses below —
    confirm the route guarantees a valid id, or add a 404 guard.
    NOTE(review): `list_id` is accepted but never checked against the
    item's own list — verify that is intentional.
    """
    found = Item.select().where(Item.id == item_id).first()
    return render_template("item.html",
                           list=found.list,
                           item=found,
                           comments=found.comments)