def search_item_coupons():
    """Search taoke coupons by keyword and return serialized results."""
    paged = get_param('paged', Struct.Int, default=1)
    perpage = get_param('perpage', Struct.Int, default=60)
    keyword = get_param('keyword', Struct.Attr, default=u'')
    categories = get_param('categories')

    paged = parse_int(paged, 1, 1)
    perpage = parse_int(perpage, 1, 1)

    # nothing to search for
    if not keyword:
        return []

    perpage = _safe_perpage(paged, perpage)
    if perpage <= 0:
        return []

    taoke = connect_taoke()
    try:
        coupons = taoke.list_coupons(search_key=keyword,
                                     categories=categories,
                                     paged=paged,
                                     perpage=perpage)
    except Exception as e:
        # best-effort: log the failure and fall back to an empty result set
        current_app.logger.error(StoreCouponError(e))
        coupons = []
    return [output_coupon(c) for c in coupons]
def list_promotion_items(promo_slug):
    """List taoke favorite items attached to the promotion `promo_slug`."""
    paged = parse_int(get_args('paged'), 1, 1)
    perpage = parse_int(get_args('perpage'), 60, 1)

    promo = current_app.mongodb.Promotion.find_one_by_slug(promo_slug)
    if not promo:
        raise StorePromoNotFound
    if not promo['favorite_id']:
        return []

    # use default store for now
    store = current_app.mongodb.Store.find_one_default()
    taoke = connect_taoke(store)
    try:
        items = taoke.list_favorite_items(favorite_id=promo['favorite_id'],
                                          paged=paged,
                                          perpage=perpage)
    except Exception as e:
        raise StorePromoItemsError(e)

    results_count = len(items)
    return attach_extend([output_promo_item(i) for i in items],
                         {'_more': results_count >= perpage})
def edit_entry_message(_id, idx):
    """Replace message `idx` of entry `_id`, optionally moving it to `pos`."""
    form = request.form
    title = form.get('title', u'')
    description = form.get('description', u'')
    picurl = form.get('picurl', u'')
    url = form.get('url', u'')
    pos = parse_int(form.get('pos', 0))
    idx = parse_int(idx)

    entry = g.files.get(_id)
    if not entry:
        raise Exception('Entry not found.')

    new_msg = {
        'title': title,
        'description': description,
        'picurl': picurl,
        'url': url
    }
    try:
        entry['messages'][idx] = new_msg
    except IndexError:
        raise Exception('Message index out of range.')

    if pos != idx:
        # relocate the edited message to its requested position
        entry['messages'].insert(pos, entry['messages'].pop(idx))
    entry.save()

    return redirect(url_for('.entry', _id=entry['_id']))
def search_by_files(keywords, content_type=None, offset=0, limit=0, use_tags=True):
    """Search `g.files` by keywords, narrowing by `content_type` if given.

    `keywords` may be a whitespace-separated string or a list of terms;
    any other type is treated as no terms. With no keywords all (type-
    filtered) files are returned unfiltered.

    Returns a tuple `(results_page, total_match_count)` where the page is
    `results[offset:offset + limit]`.

    Fix: the original assigned `results = []` in the keyword branch and
    then immediately overwrote it with `results = files` — the dead
    assignment is removed; behavior is unchanged.
    """
    if content_type:
        files = [f for f in g.files if f['content_type'] == content_type]
    else:
        files = g.files

    if not keywords:
        results = files
    else:
        # normalize keywords to a list of terms
        if isinstance(keywords, basestring):
            keywords = keywords.split()
        elif not isinstance(keywords, list):
            keywords = []

        def _search_match(keyword, f):
            # a file matches when it is active and indexes the keyword
            if keyword in f['_keywords'] and f['status']:
                return True
            return False

        # intersect: every keyword must match
        results = files
        for kw in keywords:
            results = [f for f in results if _search_match(kw, f)]

    limit = parse_int(limit, 1, True)
    offset = parse_int(offset, 0, 0)
    return results[offset:offset + limit], len(results)
def query_by_files(content_type=None, attrs=None, term=None, tag=None, offset=0, limit=1, sortby=None):
    """Query `g.files` and return `(files_page, total_count)`.

    When `sortby` yields an ordering, the page is taken from the sorted
    sequence and the matching files are re-ordered accordingly; otherwise
    the page is a plain slice of the query result.

    Fix: membership was tested with `f['_id'] in ids` against a list
    (O(n) per file); the already-built `order_dict` gives the same
    membership test in O(1).
    """
    # query
    files = _query(files=g.files,
                   content_type=content_type,
                   attrs=attrs,
                   term=term,
                   tag=tag)
    total_count = len(files)
    # sorting
    sorting = _sorting(files, parse_sortby(sortby))
    limit = parse_int(limit, 1, True)
    offset = parse_int(offset, 0, 0)
    if sorting:
        ids = [item['_id'] for item in sorting[offset:offset + limit]]
        order_dict = {_id: index for index, _id in enumerate(ids)}
        # keep only files on the requested page, then restore page order
        files = [f for f in files if f['_id'] in order_dict]
        files.sort(key=lambda x: order_dict[x['_id']])
    else:
        files = files[offset:offset + limit]
    return files, total_count
def list_promotions():
    """List activated promotions with pagination metadata attached."""
    paged = parse_int(get_args('paged'), 1, 1)
    perpage = parse_int(get_args('perpage'), 60, 1)

    promotions = current_app.mongodb.Promotion.find_activated()
    paginator = make_paginator(promotions, paged, perpage)
    extend = {
        '_more': paginator.has_next,
        '_count': paginator.count,
    }
    return attach_extend([output_promo(item) for item in promotions], extend)
def list_shortcuts():
    """List activated shortcuts with pagination metadata attached."""
    paged = parse_int(get_args('paged'), 1, 1)
    perpage = parse_int(get_args('perpage'), 60, 1)

    shortcuts = current_app.mongodb.Shortcut.find_activated()
    paginator = make_paginator(shortcuts, paged, perpage)
    extend = {
        '_more': paginator.has_next,
        '_count': paginator.count,
    }
    return attach_extend([output_shortcut(item) for item in shortcuts], extend)
def list_activities():
    """List activated activities with pagination metadata attached."""
    paged = parse_int(get_args('paged'), 1, 1)
    perpage = parse_int(get_args('perpage'), 60, 1)

    activitys = current_app.mongodb.Activity.find_activated()
    paginator = make_paginator(activitys, paged, perpage)
    extend = {
        '_more': paginator.has_next,
        '_count': paginator.count,
    }
    return attach_extend([output_activity(item) for item in activitys], extend)
def list_activity_items(activity_slug):
    """List commodities bound to the activity `activity_slug`."""
    paged = parse_int(get_args('paged'), 1, 1)
    perpage = parse_int(get_args('perpage'), 60, 1)
    timestamp = parse_int(get_args('timestamp'))

    items = current_app.mongodb.\
        Commodity.find_by_activity(activity_slug, timestamp)
    paginator = make_paginator(items, paged, perpage)
    extend = {
        '_more': paginator.has_next,
        '_count': paginator.count,
    }
    return attach_extend([output_activity_commodity(item) for item in items],
                         extend)
def update(shortcut_id):
    """Update a shortcut's fields from the submitted form."""
    src = request.form['src']
    path = request.form['path']
    priority = request.form['priority']
    status = request.form.get('status')

    shortcut = current_app.mongodb.Shortcut.find_one_by_id(shortcut_id)
    shortcut['src'] = src
    shortcut['path'] = path
    shortcut['priority'] = parse_int(priority)
    # a shortcut without a target path is forced inactive
    shortcut['status'] = parse_int(status) if path else 0
    shortcut.save()

    flash('Saved.')
    return redirect(url_for('.detail', shortcut_id=shortcut['_id']))
def add_entry():
    """Create a new entry from the submitted form and redirect to it."""
    fname = request.form['filename']
    rtype = request.form['type']
    keys = request.form['keywords']
    text = request.form.get('text', u'')
    status = request.form.get('status', 0)

    _id = process_slug(fname)
    if g.files.get(_id):
        raise Exception('Entry duplicated.')
    elif not _id:
        raise Exception('Entry ID is required.')

    entry = Entry({
        '_id': _id,
        'type': rtype,
        'keywords': _parse_input_keys(keys),
        'status': parse_int(status),
        'text': text,
        'messages': [],
    }, make_file_path(_id))
    entry.save()
    # register the new entry in the in-memory index
    g.files[_id] = entry

    return redirect(url_for('.entry', _id=entry['_id']))
def upload_media(file):
    """Upload `file` to the CDN and persist a Media record for it.

    When the target key already exists, the filename gets a uuid suffix
    to avoid clobbering the existing object. Returns the saved Media
    document. Upload errors propagate to the caller.

    Fix: the original wrapped the upload in
    `try: ... except Exception as e: raise e`, a no-op that (in py2)
    also discards the original traceback — the wrapper is removed.
    """
    Media = current_app.mongodb.Media
    filename = safe_filename(file.filename)
    key = u'{}/{}'.format('master', filename)
    media = Media.find_one_by_key(key)
    if media:
        # rename file if exists.
        fname, ext = os.path.splitext(filename)
        filename = u'{}-{}{}'.format(fname, uuid4_hex(), ext)
        key = u'{}/{}'.format('master', filename)

    mimetype = unicode(file.mimetype)
    size = parse_int(file.content_length)
    file_obj = {
        'filename': filename,
        'stream': file.stream
    }
    bucket = current_app.config.get('CDN_UPLOADS_BUCKET')
    # any upload failure propagates unchanged
    current_app.cdn.upload(bucket, key, file_obj, mimetype)

    media = Media()
    media['filename'] = filename
    media['key'] = key
    media['mimetype'] = mimetype
    media['size'] = size
    media.save()
    return media
def straw(raw_list, value, key='id', recursive_key='nodes', limit=600):
    """Return the first item in `raw_list` whose `key` equals `value`.

    Descends into `item[recursive_key]` up to two levels deep, scanning
    at most `limit` (capped at 600) items per level.

    some_page = straw(pages, some_id, key='id', recursive_key='nodes', limit=600)
    """
    if not isinstance(key, basestring):
        key = 'id'
    if not isinstance(recursive_key, basestring):
        recursive_key = None
    limit = min(parse_int(limit), 600)

    def _find(items, level=0):
        # stop on non-list input or when recursion is too deep
        if not isinstance(items, list) or level > 2:
            return None
        for item in items[:limit]:
            if item.get(key) == value:
                return item
            if recursive_key and item.get(recursive_key):
                node = _find(item[recursive_key], level + 1)
                if node:
                    return node
        return None

    return _find(raw_list, 0)
def index():
    """Render the media files listing page with pagination links."""
    paged = parse_int(request.args.get('paged'), 1, True)
    mediafiles = current_app.mongodb.Media.find_all(True)
    p = make_paginator(mediafiles, paged, 60)

    res_url = current_app.config.get('RES_URL')
    mediafiles = list(mediafiles)
    for media in mediafiles:
        # absolute CDN url for the template
        media['src'] = u'{}/{}'.format(res_url, media['key'])

    prev_url = url_for(request.endpoint, paged=p.previous_page)
    next_url = url_for(request.endpoint, paged=p.next_page)
    paginator = {
        'next': next_url if p.has_next else None,
        'prev': prev_url if p.has_previous and p.previous_page else None,
        'paged': p.current_page,
        'start': p.start_index,
        'end': p.end_index,
    }
    return render_template('mediafiles.html',
                           mediafiles=mediafiles,
                           p=paginator)
def add_entry_message(_id):
    """Append (or insert at `pos`) a message on entry `_id`; max 8 kept."""
    form = request.form
    title = form.get('title', u'')
    description = form.get('description', u'')
    picurl = form.get('picurl', u'')
    url = form.get('url', u'')
    pos = form.get('pos', None)
    if pos:
        pos = parse_int(pos)

    entry = g.files.get(_id)
    if not entry:
        raise Exception('Entry not found.')
    if len(entry['messages']) >= 8:
        raise Exception('Too many messages.')

    msg = {
        'title': title,
        'description': description,
        'picurl': picurl,
        'url': url
    }
    if isinstance(pos, int):
        entry['messages'].insert(pos, msg)
    else:
        # no position given: append at the end
        entry['messages'].append(msg)
    entry.save()

    return redirect(url_for('.entry', _id=entry['_id']))
def update(store_id):
    """Update a store's configuration from the submitted form.

    Reads mini-app credentials, taoke keys, display fields and flags from
    the form, then writes them onto the store document and saves it.
    """
    mini_app_id = request.form['mini_app_id']
    taoke_app_key = request.form['taoke_app_key']
    taoke_app_secret = request.form['taoke_app_secret']
    pid = request.form['pid']
    title = request.form['title']
    splash = request.form['splash']
    tpwd_msg = request.form['tpwd_msg']
    allow_tpwd = request.form.get('allow_tpwd')
    sort_type = request.form.get('sort_type')
    top_categories = request.form.getlist('top_categories')
    status = request.form.get('status')
    is_default = request.form.get('default') == 'DEFAULT'
    # ssl = request.form.get('ssl')
    # a store without a pid cannot stay active
    if not pid:
        status = 0
    # only one default store at a time; release the current one first
    if is_default:
        current_app.mongodb.Store.freed_default()
    if top_categories:
        # NOTE(review): joined with an empty separator, so category ids are
        # concatenated back to back — confirm downstream parsing expects this
        cat_ids = u''.join(top_categories)
    else:
        cat_ids = None
    store = current_app.mongodb.Store.find_one_by_id(store_id)
    store['mini_app_id'] = mini_app_id
    store['taoke_app_key'] = taoke_app_key
    store['taoke_app_secret'] = taoke_app_secret
    store['pid'] = pid
    store['title'] = title
    store['splash'] = splash
    store['allow_tpwd'] = bool(allow_tpwd)
    store['tpwd_msg'] = tpwd_msg
    # ssl is currently hard-disabled (see commented form read above)
    store['ssl'] = False
    store['sort_type'] = parse_int(sort_type)
    store['cat_ids'] = cat_ids
    store['status'] = parse_int(status)
    store['default'] = bool(is_default)
    store.save()
    flash('Saved.')
    return_url = url_for('.index')
    return redirect(return_url)
def remove(media_id):
    """Delete a media record and its CDN file, then return to the list."""
    paged = parse_int(request.args.get('paged'), 1, True)
    media = current_app.mongodb.Media.find_one_by_id(media_id)
    # remove the stored file first, then the database record
    del_mediafile(media['key'])
    media.delete()
    return redirect(url_for('.index', paged=paged))
def update(activity_id):
    """Update an activity's fields from the submitted form."""
    title = request.form['title']
    caption = request.form['caption']
    poster = request.form['poster']
    splash = request.form['splash']
    priority = request.form['priority']
    status = request.form.get('status')

    activity = current_app.mongodb.Activity.find_one_by_id(activity_id)
    activity['poster'] = poster
    activity['splash'] = splash
    activity['title'] = title
    activity['caption'] = caption
    activity['priority'] = parse_int(priority)
    activity['status'] = parse_int(status)
    activity.save()

    flash('Saved.')
    return redirect(url_for('.detail', activity_id=activity['_id']))
def filter_column_offset(data, pattern=None, column=4, row_columns=12):
    """Compute the grid-column offset that centers `data` in a row.

    `data` may be a list (its length is used) or a count. With a string
    `pattern`, the offset is formatted into it (a '{}' placeholder is
    appended if missing) and '' is returned for non-positive offsets;
    without a pattern the raw integer offset is returned.
    """
    row_columns = parse_int(row_columns, 12, 0)
    column = parse_int(column, 4, True)

    if isinstance(pattern, basestring):
        if '{}' not in pattern:
            pattern += '{}'
    else:
        pattern = None

    if isinstance(data, list):
        length = len(data)
    else:
        length = parse_int(data, 0, 0)

    # free columns split evenly on both sides
    offset = int((row_columns - length * column) / 2)

    if pattern is None:
        return offset
    return pattern.format(offset) if offset > 0 else ''
def _parse_csv_row(cols, release=ADACRelease.Y2018):
    """Parse one ADAC csv row into ``{"model", "kw", "prices"}``.

    The row is split at the last cell matching a km-class label: the left
    part carries model data (kw is extracted from it, or from the km cell
    itself for 2019 rows), the right part carries per-year prices that are
    zipped against the release's year header.

    Fix: the original called ``KM_CLASSES.reverse()``, mutating the shared
    module-level list in place so its order flipped on every call; a
    reversed copy is used instead.
    """
    if release == ADACRelease.Y2018:
        YEARS_HEADER = YEARS_HEADER_2018
        KM_CLASSES = KM_CLASSES_2018
    elif release == ADACRelease.Y2019:
        YEARS_HEADER = YEARS_HEADER_2019
        KM_CLASSES = KM_CLASSES_2019
    else:
        assert False, f"Unsupported release: {release}"

    # reversed COPY — never mutate the module-level list
    KM_CLASSES = list(reversed(KM_CLASSES))
    KM_CLASSES_REGEXP = "|".join(KM_CLASSES)

    # drop leading empty cells
    while len(cols) > 0:
        if cols[0]:
            break
        else:
            cols = cols[1:]

    model = cols[0]
    km_class = [c for c in cols if re.search(KM_CLASSES_REGEXP, c)][-1]
    km_index = cols.index(km_class)
    # split into two parts by km class
    # right side contains year prices
    # left side - data for model
    model_part = cols[:km_index]
    # taking last 4 parts
    if " " in km_class and release == ADACRelease.Y2019:
        kw, _, _ = re.split(r"\s+", km_class)
    else:
        match_hp_kw = re.findall(r"\d+", model_part[-1])
        if len(match_hp_kw) == 2:
            kw, _ = match_hp_kw
        else:
            kw = model_part[-2]

    price_part = cols[km_index:]
    if " " in price_part[0] and release == ADACRelease.Y2018:
        # 2018 rows may fuse the km class and first price into one cell
        price_part = price_part[0].split(" ") + price_part[1:]
    year_prices = price_part[1:]
    if not year_prices[0]:
        year_prices = year_prices[1:]
    if len(year_prices) > len(YEARS_HEADER) and release == ADACRelease.Y2018:
        # keep the first price plus the trailing run that lines up with the header
        year_prices = [year_prices[0]] + year_prices[-(len(YEARS_HEADER) - 1):]
    else:
        year_prices = year_prices[:len(YEARS_HEADER) + 1]

    year_dict = dict([(year, int(price))
                      for year, price in list(zip(YEARS_HEADER, year_prices))
                      if re.match(r"\d+", price)])
    return {"model": model, "kw": parse_int(kw), "prices": year_dict}
def list_commodities():
    """List live commodities for the current store, paginated."""
    paged = parse_int(get_args('paged'), 1, 1)
    perpage = parse_int(get_args('perpage'), 12, 1)
    timestamp = parse_int(get_args('timestamp'))
    categories = get_args('categories')
    is_newest = get_args('newest')

    store = g.store
    if is_newest and not categories:
        # 'newest' with no explicit categories falls back to the store's own
        categories = store['cat_ids']
    cids = _convert_categories(categories)

    items = current_app.mongodb.\
        Commodity.find_live(cids, timestamp, store['sort_type'])
    paginator = make_paginator(items, paged, perpage)
    extend = {
        '_more': paginator.has_next,
        '_count': paginator.count,
    }
    return attach_extend([output_commodity(item) for item in items], extend)
def update(promo_id):
    """Update a promotion's fields from the submitted form."""
    title = request.form['title']
    caption = request.form['caption']
    poster = request.form['poster']
    splash = request.form['splash']
    priority = request.form['priority']
    favorite_id = request.form.get('favorite_id')
    status = request.form.get('status')

    promotion = current_app.mongodb.Promotion.find_one_by_id(promo_id)
    promotion['poster'] = poster
    promotion['splash'] = splash
    promotion['title'] = title
    promotion['caption'] = caption
    promotion['favorite_id'] = parse_int(favorite_id)
    promotion['priority'] = parse_int(priority)
    # a promotion without a favorite list is forced inactive
    promotion['status'] = parse_int(status) if favorite_id else 0
    promotion.save()

    flash('Saved.')
    return redirect(url_for('.detail', promo_id=promotion['_id']))
def list_item_coupons():
    """List taoke coupons, optionally filtered by categories."""
    paged = parse_int(get_args('paged'), 1, 1)
    perpage = parse_int(get_args('perpage'), 60, 1)
    categories = get_args('categories')

    if categories:
        perpage = _safe_perpage(paged, perpage)
    else:
        # uncategorized listing is allowed to page much deeper
        perpage = _safe_perpage(paged, perpage, limit=10000)
    if perpage <= 0:
        return []

    taoke = connect_taoke()
    try:
        coupons = taoke.list_coupons(categories=categories,
                                     paged=paged,
                                     perpage=perpage)
    except Exception as e:
        # best-effort: log the failure and fall back to an empty result set
        current_app.logger.error(StoreCouponError(e))
        coupons = []
    return [output_coupon(c) for c in coupons]
def del_entry_message(_id, idx):
    """Delete message `idx` from entry `_id` and redirect back to it."""
    idx = parse_int(idx)
    entry = g.files.get(_id)
    if not entry:
        raise Exception('Entry not found.')
    try:
        entry['messages'].pop(idx)
    except IndexError:
        raise Exception('Message index out of range.')
    entry.save()
    return redirect(url_for('.entry', _id=entry['_id']))
def search_commodities():
    """Search commodities by keywords for the current store."""
    paged = get_param('paged', Struct.Int, default=1)
    perpage = get_param('perpage', Struct.Int, default=60)
    keywords = get_param('keywords', Struct.List, default=[])
    timestamp = parse_int(get_args('timestamp'))
    categories = get_args('categories')

    store = g.store
    cids = _convert_categories(categories)
    paged = parse_int(paged, 1, 1)
    perpage = parse_int(perpage, 1, 1)

    # nothing to search for
    if not keywords:
        return []

    items = current_app.mongodb.\
        Commodity.search(keywords, cids, timestamp, store['sort_type'])
    paginator = make_paginator(items, paged, perpage)
    extend = {
        '_more': paginator.has_next,
        '_count': paginator.count,
    }
    return attach_extend([output_commodity(item) for item in items], extend)
def update_entry(_id):
    """Update an entry's fields from the submitted form."""
    rtype = request.form['type']
    keys = request.form['keywords']
    text = request.form.get('text', u'')
    status = request.form.get('status', 0)

    entry = g.files.get(_id)
    if not entry:
        raise Exception('Entry not found.')

    entry['type'] = rtype
    entry['keywords'] = _parse_input_keys(keys, entry['_id'])
    entry['status'] = parse_int(status)
    entry['text'] = text
    entry.save()

    return redirect(url_for('.entry', _id=entry['_id']))
def index():
    """Render the commodities listing page with pagination links."""
    paged = parse_int(get_args('paged'), 1, True)
    last_filename = get_args('last')

    commodities = current_app.mongodb.Commodity.find_all()
    activities = current_app.mongodb.Activity.find_all()
    p = make_paginator(commodities, paged, 60)

    prev_url = url_for(request.endpoint, paged=p.previous_page)
    next_url = url_for(request.endpoint, paged=p.next_page)
    paginator = {
        'next': next_url if p.has_next else None,
        'prev': prev_url if p.has_previous and p.previous_page else None,
        'paged': p.current_page,
        'start': p.start_index,
        'end': p.end_index,
        'count': p.count,
    }
    if last_filename:
        # surface the most recently imported file name
        flash('Last file is: {}'.format(last_filename))
    return render_template('commodities.html',
                           commodities=commodities,
                           activities=activities,
                           p=paginator)
def upload():
    """Import commodities from an uploaded json or zip file.

    Each item is matched against existing commodities by item_id and
    either created or updated in place; items whose coupon expired more
    than 6 hours ago are skipped. Redirects back to the index with a
    flash summary of added/updated counts.
    """
    f = request.files['file']
    activity = request.form.get('activity', u'')
    ext = split_file_ext(f.filename)
    if ext == 'json':
        items_list = json.loads(f.stream.read())
    elif ext == 'zip':
        items_list = _unpack_items(f)
    else:
        raise Exception('must be json or zip')
    new_count = 0
    update_count = 0
    for item in items_list:
        item_id = unicode(item['item_id'])
        start_time = to_timestamp(item['coupon_start_time'])
        end_time = to_timestamp(item['coupon_end_time'])
        # skip items whose coupon expired more than 6 hours ago
        if now() >= (end_time + 3600 * 6):
            continue
        commodity = current_app.mongodb.\
            Commodity.find_one_by_itemid(item_id)
        if not commodity:
            commodity = current_app.mongodb.Commodity()
            commodity['item_id'] = item_id
            new_count += 1
        else:
            update_count += 1
        commodity['cid'] = unicode(item['cid'])
        commodity['shop_type'] = item['shop_type']
        commodity['shop_title'] = item['shop_title']
        commodity['title'] = item['title']
        commodity['src'] = item['pic_url']
        if item['volume']:
            # incase volume not provided (with 0).
            commodity['volume'] = item['volume']
        commodity['activity'] = process_slug(activity, False)
        # monetary values stored as integer cents
        commodity['price'] = parse_int(item['price'] * 100)
        commodity['income_rate'] = parse_int(item['income_rate'] * 100)
        commodity['commission'] = parse_int(item['commission'] * 100)
        if item['coupon_id']:
            # incase coupon_id is missing with activity.
            commodity['coupon_id'] = item['coupon_id']
            commodity['coupon_info'] = item['coupon_info']
        commodity['category'] = item['category']
        commodity['start_time'] = start_time
        commodity['end_time'] = end_time
        commodity['click_url'] = item['click_url']
        commodity['coupon_url'] = item['coupon_url']
        commodity['coupon_click_url'] = item['coupon_click_url']
        commodity['memo'] = item['memo']
        commodity.save()
    flash('{} Commodities added, {} updated.'.format(new_count, update_count))
    return_url = url_for('.index', last=f.filename)
    return redirect(return_url)
def _safe_paging(perpage, paged):
    """Normalize paging args, clamping perpage to the configured maximum.

    Returns a `(perpage, paged)` tuple.
    """
    max_perpage = current_app.config.get('MAXIMUM_QUERY', 60)
    perpage = parse_int(perpage, 12, True)
    paged = parse_int(paged, 1, True)
    if perpage > max_perpage:
        perpage = max_perpage
    return perpage, paged