def do_mangle():
    """Handle an uploaded CSV batch file and run it through the mangler.

    Reads the template id (t_id) and mapping id (m_id) from the form,
    validates the uploaded file, saves it to a temp path, and runs
    Mangler.create() over it. Always redirects back to the template
    detail page, whether or not the upload was usable.
    """
    t_id = request.form.get("t_id")
    m_id = request.form.get("m_id")
    previous_page = redirect("template_detail?id={}".format(t_id))
    if 'file' not in request.files:
        # print("No file parts from", request.remote_addr)
        return previous_page
    file = request.files['file']
    if file.filename == '':
        # print("Empty file from", request.remote_addr)
        return previous_page
    # Guard against filenames with no extension: the original
    # rsplit('.', 1)[1] raised IndexError when no '.' was present.
    if (not file or '.' not in file.filename
            or not file.filename.rsplit('.', 1)[1].lower().startswith("csv")):
        return previous_page
    batch_path = util.generate_temp_filepath()
    file.save(batch_path)
    template = Template(t_id)
    mangler = Mangler(template)
    mapping = ManglerMapping(m_id)
    # Touch last-used stamps on both the template and the mapping.
    template.date_last_used = util.today()
    mapping.date_last_used = util.today()
    outfiles = mangler.create(mapping, Databatch(batch_path))
    return previous_page
def get_naglist(cur):
    """Return (userid, email, last_post_date) for users due a nag today.

    A user qualifies when their reminder day matches today's weekday,
    they have an email, and they have never posted or last posted more
    than 6 days ago. last_post_date is a datetime.date or None.
    """
    reminder_day = util.today().weekday()
    cutoff = util.today().toordinal() - 6
    cur.execute('''SELECT users.userid, IFNULL(email, users.userid),
        MAX(postdate) AS lastpostdate
        FROM users LEFT OUTER JOIN posts ON posts.userid = users.userid
        WHERE reminderday = ? AND email IS NOT NULL
        GROUP BY users.userid
        HAVING lastpostdate IS NULL or lastpostdate < ?''',
        (reminder_day, cutoff))
    naglist = []
    for userid, email, lastpostdate in cur.fetchall():
        if lastpostdate is not None:
            when = datetime.date.fromordinal(lastpostdate)
        else:
            when = None
        naglist.append((userid, email, when))
    return naglist
def _execute(config, param, args):
    """Run the top-diff-spender triage query over the requested window.

    The window defaults to [start of today, now] and is clamped so the
    end never exceeds the current time in the configured timezone.
    """
    param.update(args)
    day_begin = util.day_start(util.today())
    start = int(util.get(args, 'start', day_begin))
    now = util.datetime2epic(util.today(param['timezone']))
    end = min(now, int(util.get(args, 'end', now)))
    return {
        'today': triage.query_topdiffspender(
            config,
            util.timestamp2datetime(start),
            util.timestamp2datetime(end),
            param,
        )
    }
def getweekly(cur): yesterday = util.today() - datetime.timedelta(1) lastweek = util.today() - datetime.timedelta(7) for userid, email, posts in model.iter_weekly(cur, lastweek, yesterday): if len(posts): print "Sending weekly update to %s <%s>" % (userid, email) subject = "Status Updates for %s through %s" % \ (lastweek.isoformat(), yesterday.isoformat()) yield getdigest(email, subject, posts)
def toDatetime(part1, part2=None):
    """Return a datetime parsed from the given date and time strings.

    part1 - date string, e.g. MM/DD/YYYY or "MM/DD/YYYY HH:MM AM"
    part2 - optional time string in the form "HH:MM AM" (space optional)

    If part1 is not a usable string, or no known format matches, the
    original part1 value is returned unchanged.
    """
    i = 0
    result = None
    # Fixed two broken strptime directives from the original table:
    # "%H:%M:S" -> "%H:%M:%S" and "%m/%d/y" -> "%m/%d/%y"; the broken
    # forms could never match any input.
    formats = ["%m/%d/%Y %I:%M%p", "%m/%d/%Y %H:%M:%S", "%Y-%m-%d %H:%M:%S",
               "%m/%d/%Y", "%m/%d/%y", "%m/%d", "%Y-%m-%d"]
    # check if reasonable data was given
    if part1 is not None and part1 != "" and isinstance(part1, str):
        # join the two parts if a second part was given
        if part2:
            part2 = part2.replace(" ", "").upper()
            timestamp = part1 + " " + part2
        else:
            timestamp = part1
        # keep trying all the different formats until one works
        while not result and i < len(formats):
            result = _parseTimestamp(timestamp, formats[i])
            i += 1
        if result is None:
            # nothing matched: return the raw input instead of crashing
            # on result.year (AttributeError) as the original did
            result = part1
        elif result.year == 1900:
            # special case - no year given, set to the current year
            # (datetime.replace takes year as its first positional arg)
            result = result.replace(util.today().year)
    else:
        # just return what was given
        result = part1
    return result
def preview(self, completed, planned, tags):
    """Render a preview of a status post without saving it."""
    assert cherrypy.request.method.upper() == 'POST'
    post_date = util.today().toordinal()
    post_time = util.now()
    fields = ('<preview>', post_date, post_time,
              completed.decode("utf-8"),
              planned.decode("utf-8"),
              tags.decode("utf-8"))
    return render('preview.xhtml', post=Post(fields))
def start_batch(key, bid):
    """Parse an uploaded blob of score lines and queue them by game date.

    Lines that fail validation are skipped (best-effort); dates older
    than the branch's newest date are clamped up to it, and dates beyond
    tomorrow are ignored. The blob is deleted once queued.
    """
    try:
        reader = BlobReader(BlobKey(key))
    except:
        return 'failed to find key: please re-upload.'
    newest_date = branch_newest_date(bid)
    by_date = {}
    for raw in reader:
        raw = raw.strip()
        try:
            game_date = valid(raw)
            if game_date < newest_date:
                game_date = newest_date
        except:
            # Unparseable line: skip it rather than abort the batch.
            continue
        if util.tomorrow(util.today()) < game_date:
            continue
        # Keep only the first 8 comma-separated fields of each line.
        by_date.setdefault(game_date, []).append(','.join(raw.split(',')[:8]))
    for key_date in sorted(by_date):
        QueueScore(
            bid=bid,
            game_date=key_date,
            body='\n'.join(reversed(by_date[key_date])),
        ).put()
    BlobInfo.get(key).delete()
    return 'upload succeeded!'
def get_feedposts():
    """Return Post objects for all posts from the last 15 days, newest first."""
    cur = get_cursor()
    cutoff = util.today().toordinal() - 15
    cur.execute('''SELECT userid, postdate, posttime, completed, planned, tags
        FROM posts
        WHERE postdate > ?
        ORDER BY postdate DESC, posttime DESC''', (cutoff,))
    return [Post(row) for row in cur.fetchall()]
def post(self, completed, planned, tags, isedit=False, **kwargs):
    """Create (or edit) the current user's status post and notify the team.

    completed/planned/tags - form fields; empty strings are stored as NULL.
    isedit - when true, overwrite the user's most recent post in place
             instead of inserting a new row.
    kwargs - extra form fields turned into a bug list via kwargs_to_buglist.
    Redirects to '/' when done.
    """
    loginid = cherrypy.request.loginid
    assert cherrypy.request.method.upper() == 'POST'
    cur = model.get_cursor()
    # Prefer the stored email; fall back to the login id itself.
    cur.execute(
        '''SELECT IFNULL(email, userid) FROM users WHERE userid = ?''',
        (loginid, ))
    email, = cur.fetchone()
    # Normalize empty form fields to NULL in the database.
    completed = completed or None
    planned = planned or None
    tags = tags or None
    today = util.today().toordinal()
    now = util.now()
    bugs = kwargs_to_buglist(kwargs)
    if isedit:
        # Replace the user's latest post (the row with MAX(postdate)).
        cur.execute(
            '''UPDATE posts SET completed = ?, planned = ?, tags = ?, posttime = ? WHERE userid = ? AND postdate = ( SELECT lastpostdate FROM ( SELECT MAX(postdate) AS lastpostdate FROM posts AS p2 WHERE p2.userid = ? ) AS maxq )''',
            (completed, planned, tags, now, loginid, loginid))
    else:
        cur.execute(
            '''INSERT INTO posts (userid, postdate, posttime, completed, planned, tags) VALUES (?, ?, ?, ?, ?, ?)''',
            (loginid, today, now, completed, planned, tags))
    for bug in bugs:
        model.save_bugstatus(cur, loginid, bug, today)
    allteam, sendnow = model.get_userteam_emails(loginid)
    # Python 2: decode byte strings to unicode before building the mail.
    if isinstance(completed, str):
        completed = completed.decode("utf-8")
    if isinstance(planned, str):
        planned = planned.decode("utf-8")
    if isinstance(tags, str):
        tags = tags.decode("utf-8")
    if len(sendnow):
        mail.sendpost(
            email, sendnow,
            model.create_post_with_bugs(
                (loginid, today, now, completed, planned, tags),
                None, bugs))
    raise cherrypy.HTTPRedirect(cherrypy.url('/'))
def get_feedposts():
    """Return posts (with bug info attached) from the last 15 days, newest first."""
    cur = get_cursor()
    cutoff = util.today().toordinal() - 15
    cur.execute('''SELECT userid, postdate, posttime, completed, planned, tags
        FROM posts
        WHERE postdate > ?
        ORDER BY postdate DESC, posttime DESC''', (cutoff,))
    return [create_post_with_bugs(row, cur) for row in cur.fetchall()]
def getdaily(cur): yesterday = util.today() - datetime.timedelta(1) for userid, email, posts in model.iter_daily(cur, yesterday): if len(posts): print "Sending daily update to %s <%s>" % (userid, email) yield getdigest(email, "Status Updates for %s" % yesterday.isoformat(), posts)
def execute(appconfig, query, args, fetcher):
    """Run the brick query for today plus a shifted comparison day.

    Returns {'today': ..., 'ystd': ..., 'lastwk': []}; the comparison
    run reuses the same window but offsets the query by the start/end
    gap in minutes.
    """
    result = {}
    date1 = int(util.get(args, 'start', util.day_start(util.today())))
    date2 = int(util.get(args, 'end', util.day_start(util.today())))
    args['start'] = date1
    args['end'] = date1 + 24 * 3600
    result['today'] = brick._execute(appconfig, query, args, fetcher)['today']
    # Shift the second run back by the start/end gap for comparison.
    query['offset_minutes'] = (date1 - date2) / 60
    result['ystd'] = brick._execute(appconfig, query, args, fetcher)['today']
    result['lastwk'] = []
    return result
def display(date, tab):
    """Displays the calendar page.

    date - optional date string; when given, the week containing it is
           shown, otherwise the current week is used.
    tab - index (stringly-typed; converted with int()) of the active tab.
    Returns the rendered page from build().
    """
    # Default 15-minute slots from 8:00 AM through 7:00 PM, always shown
    # even when empty.
    standardSlots = ["08:00AM", "08:15AM", "08:30AM", "08:45AM", "09:00AM",
                     "09:15AM", "09:30AM", "09:45AM", "10:00AM", "10:15AM",
                     "10:30AM", "10:45AM", "11:00AM", "11:15AM", "11:30AM",
                     "11:45AM", "12:00PM", "12:15PM", "12:30PM", "12:45PM",
                     "01:00PM", "01:15PM", "01:30PM", "01:45PM", "02:00PM",
                     "02:15PM", "02:30PM", "02:45PM", "03:00PM", "03:15PM",
                     "03:30PM", "03:45PM", "04:00PM", "04:15PM", "04:30PM",
                     "04:45PM", "05:00PM", "05:15PM", "05:30PM", "05:45PM",
                     "06:00PM", "06:15PM", "06:30PM", "06:45PM", "07:00PM"]
    schedule = []  # list of dicts of lists, days -> timeslot -> appointment list
    locations = None
    roles = None
    # One dict per weekday.
    for day in range(util.DAYS_IN_WEEK):
        schedule.append({})
    # the beginning of the week - the week is either given or the current week is used
    startDate = util.startOfWeek(util.toDatetime(date) if date else util.today())
    tab = int(tab)
    # build a list of maps of time to appointments with some default values
    # prepopulated so they are always displayed
    for day in schedule:
        for timeslot in standardSlots:
            day[timeslot] = []
    # get a database connection
    conn = services.getConnection()
    # get all the locations and roles
    locations = services.getLocations(conn)
    roles = sorted(services.getRoles(conn))
    # get all the appointments for the week and add them to the schedule
    for appointment in services.getAppointments(conn, startDate):
        time = appointment.getFTime()
        weekday = appointment.time.weekday()
        # if the appointment is not scheduled for one of the standard times
        # then add that time slot
        if time not in schedule[weekday]:
            schedule[weekday][time] = []
        schedule[weekday][time].append(appointment)
    conn.close()
    # format and return the page
    return build(locations, roles, schedule, startDate, tab)
def get_user_feedposts(userid):
    """Return one user's posts (with bug info) from the last 15 days."""
    cur = get_cursor()
    cutoff = util.today().toordinal() - 15
    cur.execute('''SELECT userid, postdate, posttime, completed, planned, tags
        FROM posts
        WHERE userid = ? AND postdate >= ?
        ORDER BY postdate DESC, posttime DESC''', (userid, cutoff))
    return [create_post_with_bugs(row, cur) for row in cur.fetchall()]
def index():
    """Render the CAT error-report monitor page.

    GET pre-fills the form with today's date and the current hour.
    POST queries error reports for one server (or all servers) at the
    selected date/hour and renders them filtered by an overload percent.
    """
    from app.servers.monitor.forms import MonitorForm
    form = MonitorForm()
    form.type.choices = [('all', 'All Server')] + sorted(
        [(ele.cat_name, ele.cat_name) for ele in CatServerNameMap.query.all()],
        key=lambda d: d[0].lower())
    servers, percent = [], 0.1
    if request.method == 'GET':
        form.date.data, form.hour.data = today(), datetime.datetime.now().hour
    else:
        # Renamed locals: the originals shadowed the builtin `type` and
        # the `time` module.
        server_type, date, hour = form.type.data, form.date.data, form.hour.data
        percent = form.percent.data or 0.1
        try:
            percent = float(percent)
        except (TypeError, ValueError):
            # Narrowed from a bare except: only conversion failures are
            # expected here.
            flash("Percent Must be float!")
            return render_template('index.html', servers=servers, form=form)
        # hour == -1 means "whole day"; single digits are zero-padded.
        if hour == -1:
            hour = ''
        elif hour < 10:
            hour = '0' + str(hour)
        time_key = date.replace('-', '') + str(hour)
        if server_type != 'all':
            error_report = get_cat_error_report(server_type, time_key) or []
            servers.append({
                "name": server_type,
                "report": error_report,
                "cat_link": CAT_LINK_PATTERN % (CAT_HOST, server_type, time_key)
            })
        else:
            for server_name in CatServerNameMap.query.all():
                error_report = get_cat_error_report(server_name.cat_name, time_key) or []
                servers.append({
                    "name": server_name.cat_name,
                    "report": error_report,
                    "cat_link": CAT_LINK_PATTERN % (CAT_HOST, server_name.cat_name, time_key)
                })
        form.only_overload.data = True
    return render_template('index.html', servers=format_report(servers, percent), form=form)
def sendtodaysmail(app):
    """Collect today's nag and daily digests (plus weekly ones on
    weekday 1) and send them all."""
    db = app.connectionpool().connectfn()
    cur = db.cursor()
    messages = list(getnags(cur))
    messages.extend(getdaily(cur))
    if util.today().weekday() == 1:
        messages.extend(getweekly(cur))
    sendmails(messages, app=app)
def get_project_late(projectname):
    """Return (userid, last_post_date) for project members overdue to post.

    A member is overdue when they have never posted or last posted more
    than 6 days ago; last_post_date is a datetime.date or None.
    """
    cur = get_cursor()
    cutoff = util.today().toordinal() - 6
    cur.execute('''SELECT userprojects.userid, MAX(postdate) AS lastpostdate
        FROM userprojects LEFT OUTER JOIN posts
        ON posts.userid = userprojects.userid
        WHERE projectname = ?
        GROUP BY userprojects.userid
        HAVING lastpostdate IS NULL OR lastpostdate < ?
        ORDER BY lastpostdate ASC''', (projectname, cutoff))
    overdue = []
    for userid, lastpostdate in cur.fetchall():
        if lastpostdate is not None:
            last = datetime.date.fromordinal(lastpostdate)
        else:
            last = None
        overdue.append((userid, last))
    return overdue
def new_manglermapping(template_id, name):
    """Insert a new, empty mangler mapping row and return its model object."""
    cur = bean.get_db().cursor()
    row = [name, template_id, util.today(), "", json.dumps({})]
    cur.execute(
        "INSERT INTO manglermapping "
        "(name, template_id, date_added, date_last_used, mappings_json) "
        "VALUES (?, ?, ?, ?, ?)",
        row)
    return ManglerMapping(cur.lastrowid)
def getLastK(code):
    """Fetch the last week of daily K-line data for a stock code.

    Returns a date-indexed DataFrame without the 'code' column, or None
    when the fetch/transform fails (the exception is printed).
    """
    end = util.today().strftime('%Y-%m-%d')
    start = util.weekAgo().strftime('%Y-%m-%d')
    try:
        frame = ts.get_k_data(code, start=start, end=end)
        frame.loc[:, 'date'] = pd.to_datetime(frame.loc[:, 'date'])
        frame.set_index('date', inplace=True)
        frame.drop('code', axis=1, inplace=True)
        return frame
    except Exception as e:
        print(e)
def get_recentposts():
    """Return each user's single most recent post from the last 15 days."""
    cur = get_cursor()
    cutoff = util.today().toordinal() - 15
    cur.execute('''SELECT userid, postdate, posttime, completed, planned, tags
        FROM posts
        WHERE postdate = (SELECT MAX(postdate) FROM posts AS p2
                          WHERE p2.userid = posts.userid)
          AND postdate > ?
        ORDER BY postdate DESC, posttime DESC''', (cutoff,))
    return [Post(row) for row in cur.fetchall()]
def enroll_patients(day=None):
    """Write patients enrolled on *day* to results.csv.

    day - enrollment date; defaults to today's date at call time.
    The original signature used day=today(), which evaluated once at
    import time, so a long-running process kept a stale date forever.
    """
    if day is None:
        day = today()
    fields = ['MRN', 'Name', 'Mobile', 'Email', 'CareTour', 'Location']
    rows = [
        patient.to_row()
        for patient in get_patients().values()
        if patient.enrolled(day)
    ]
    # `fh` instead of the original `file`, which shadowed a builtin.
    with open('results.csv', 'w') as fh:
        w = writer(fh)
        w.writerow(fields)
        w.writerows(rows)
def post(self, completed, planned, tags, isedit=False, **kwargs):
    """Create (or edit) the current user's status post and mail the team.

    completed/planned/tags - form fields; empty strings become NULL.
    isedit - when true, overwrite the user's most recent post in place
             instead of inserting a new row.
    kwargs - extra form fields turned into a bug list via kwargs_to_buglist.
    Redirects to '/' when done.
    """
    loginid = cherrypy.request.loginid
    assert cherrypy.request.method.upper() == 'POST'
    cur = model.get_cursor()
    # Prefer the stored email; fall back to the login id itself.
    cur.execute('''SELECT IFNULL(email, userid) FROM users WHERE userid = ?''',
                (loginid,))
    email, = cur.fetchone()
    # Normalize empty form fields to NULL in the database.
    completed = completed or None
    planned = planned or None
    tags = tags or None
    today = util.today().toordinal()
    now = util.now()
    bugs = kwargs_to_buglist(kwargs)
    if isedit:
        # Replace the user's latest post (the row with MAX(postdate)).
        cur.execute('''UPDATE posts SET completed = ?, planned = ?, tags = ?, posttime = ? WHERE userid = ? AND postdate = ( SELECT lastpostdate FROM ( SELECT MAX(postdate) AS lastpostdate FROM posts AS p2 WHERE p2.userid = ? ) AS maxq )''',
                    (completed, planned, tags, now, loginid, loginid))
    else:
        cur.execute('''INSERT INTO posts (userid, postdate, posttime, completed, planned, tags) VALUES (?, ?, ?, ?, ?, ?)''',
                    (loginid, today, now, completed, planned, tags))
    for bug in bugs:
        model.save_bugstatus(cur, loginid, bug, today)
    allteam, sendnow = model.get_userteam_emails(loginid)
    # Python 2: decode byte strings to unicode before building the mail.
    if isinstance(completed, str):
        completed = completed.decode("utf-8")
    if isinstance(planned, str):
        planned = planned.decode("utf-8")
    if isinstance(tags, str):
        tags = tags.decode("utf-8")
    if len(sendnow):
        mail.sendpost(email, sendnow,
                      model.create_post_with_bugs((loginid, today, now, completed, planned, tags), None, bugs))
    raise cherrypy.HTTPRedirect(cherrypy.url('/'))
def parseOne(self, one, block, housecode):
    """Extract one second-hand house detail page into a HouseItem2.

    one - the fetched detail page (xpath-able response)
    block - community name hint; used when non-empty, otherwise scraped
    housecode - the site's house code; combined with today's date string
                to form a per-crawl unique _id
    All fields are best-effort: any exception while extracting is
    logged and the partially-filled item is returned.
    """
    oneOut = items.HouseItem2()
    oneOut['src'] = self.src
    try:
        # /html/body/div[3]/div/div/div[1]/h1
        oneOut['title'] = util.ExtractString(one, '/html/body/div[3]/div/div/div[1]/h1/text()')
        # _id is the listing code + crawl date (translated from the
        # original Chinese comment).
        oneOut['_id'] = housecode  # util.ExtractString(one, '/html/body/div[5]/div[2]/div[6]/div[4]/span[2]')
        # houseID is the real listing code on its own.
        oneOut['houseID'] = oneOut['_id']
        oneOut['_id'] += '_' + util.todayString()
        oneOut['unitPrice'] = util.ExtractNumber(one, '/html/body/div[5]/div[2]/div[4]/div[1]/div[1]/span')
        oneOut['totalPrice'] = util.ExtractNumber(one, '/html/body/div[5]/div[2]/div[4]/span[1]')
        # Prefer the caller-provided community name; scrape it otherwise.
        if len(block):
            oneOut['community'] = block
        else:
            oneOut['community'] = util.ExtractString(one, '/html/body/div[5]/div[2]/div[6]/div[1]/a[1]/text()')
        oneOut['houseType'] = util.ExtractString(one, '/html/body/div[5]/div[2]/div[5]/div[1]/div[1]/text()')
        oneOut['square'] = util.ExtractNumber(one, '/html/body/div[5]/div[2]/div[5]/div[3]/div[1]')
        oneOut['level'] = util.ExtractString(one, '/html/body/div[5]/div[2]/div[5]/div[1]/div[2]/text()')
        oneOut['structure'] = util.ExtractString(one, '/html/body/div[7]/div[1]/div[1]/div/div/div[1]/div[2]/ul/li[1]/text()')
        oneOut['thb'] = util.ExtractString(one, '/html/body/div[7]/div[1]/div[1]/div/div/div[1]/div[2]/ul/li[10]/text()')
        oneOut['lx'] = util.ExtractString(one, '/html/body/div[7]/div[1]/div[1]/div/div/div[1]/div[2]/ul/li[6]/text()')
        oneOut['heating'] = util.ExtractString(one, '/html/body/div[7]/div[1]/div[1]/div/div/div[1]/div[2]/ul/li[11]/text()')
        oneOut['property'] = util.ExtractString(one, '/html/body/div[7]/div[1]/div[1]/div/div/div[1]/div[2]/ul/li[13]/text()')
        oneOut['attention'] = util.ExtractNumber(one, '//*[@id="favCount"]')
        oneOut['follow'] = util.ExtractNumber(one, '//*[@id="cartCount"]')
        oneOut['release'] = util.ExtractString(one, '/html/body/div[7]/div[1]/div[1]/div/div/div[2]/div[2]/ul/li[1]/span[2]/text()')
        oneOut['lastTrade'] = util.ExtractString(one,
            '/html/body/div[7]/div[1]/div[1]/div/div/div[2]/div[2]/ul/li[3]/span[2]/text()')
        oneOut['years'] = util.ExtractString(one, '/html/body/div[7]/div[1]/div[1]/div/div/div[2]/div[2]/ul/li[5]/span[2]/text()')
        oneOut['mortgage'] = util.ExtractString(one, '/html/body/div[7]/div[1]/div[1]/div/div/div[2]/div[2]/ul/li[7]/span[2]/text()').strip()
        # /html/body/div[7]/div[1]/div[1]/div/div/div[2]/div[2]/ul/li[6]/span[2]
        oneOut['ownership'] = util.ExtractString(one, '/html/body/div[7]/div[1]/div[1]/div/div/div[2]/div[2]/ul/li[2]/span[2]/text()')
        oneOut['use'] = util.ExtractString(one, '/html/body/div[7]/div[1]/div[1]/div/div/div[2]/div[2]/ul/li[4]/span[2]/text()')
        oneOut['propertyRight'] = util.ExtractString(one, '/html/body/div[7]/div[1]/div[1]/div/div/div[2]/div[2]/ul/li[6]/span[2]/text()')
        oneOut['book'] = util.ExtractString(one, '/html/body/div[7]/div[1]/div[1]/div/div/div[2]/div[2]/ul/li[8]/span[2]/text()')
        oneOut['crawlDate'] = util.today()
    except Exception as e:
        print(e)
        logging.warning("parseOne Exception %s"%(str(e)))
    return oneOut
def new_template(name, path, owner):
    """Insert a template row, grant its owner's group access, and return it."""
    cur = bean.get_db().cursor()
    cur.execute(
        "INSERT INTO template (name, path, date_added, date_last_used, owner) VALUES (?, ?, ?, ?, ?)",
        [name, path, util.today(), "", owner])
    created = Template(cur.lastrowid)
    grant_access(UserGroup(owner), created)
    return created
def parseBlock(self, response):
    """Scrape a community (block) summary page into a BlockItem.

    Field xpaths come from self.xpath; extraction is best-effort and a
    failure is printed, leaving the item partially filled. crawlDate is
    always set.
    """
    item = items.BlockItem()
    try:
        for field in ('name', 'block'):
            item[field] = util.ExtractString(response, self.xpath[field])
        for field in ('price', 'sellCounter', 'traded', 'lookCounter'):
            item[field] = util.ExtractNumber(response, self.xpath[field])
    except Exception as e:
        print(e)
    item['crawlDate'] = util.today()
    return item
def preview(self, completed, planned, tags, **kwargs):
    """Render a preview of a post (with bug references) without saving it."""
    assert cherrypy.request.method.upper() == 'POST'
    post_date = util.today().toordinal()
    post_time = util.now()
    bugs = kwargs_to_buglist(kwargs)
    # Python 2: decode byte strings to unicode before rendering.
    decoded = []
    for field in (completed, planned, tags):
        if isinstance(field, str):
            field = field.decode("utf-8")
        decoded.append(field)
    completed, planned, tags = decoded
    post = model.create_post_with_bugs(
        ('<preview>', post_date, post_time, completed, planned, tags),
        None, bugs)
    return render('preview.xhtml', post=post)
def preview(self, completed, planned, tags, **kwargs):
    """Build and render a throwaway preview post from the submitted fields."""
    assert cherrypy.request.method.upper() == 'POST'

    def _to_unicode(text):
        # Python 2: byte strings become unicode; None/unicode pass through.
        return text.decode("utf-8") if isinstance(text, str) else text

    row = ('<preview>', util.today().toordinal(), util.now(),
           _to_unicode(completed), _to_unicode(planned), _to_unicode(tags))
    post = model.create_post_with_bugs(row, None, kwargs_to_buglist(kwargs))
    return render('preview.xhtml', post=post)
def post(self, completed, planned, tags, isedit=False):
    """Create (or edit) the current user's status post and mail the team.

    completed/planned/tags - form fields; empty strings become NULL.
    isedit - when true, overwrite the user's most recent post in place
             instead of inserting a new row.
    Redirects to '/' when done.
    """
    loginid = cherrypy.request.loginid
    assert cherrypy.request.method.upper() == 'POST'
    cur = model.get_cursor()
    # Prefer the stored email; fall back to the login id itself.
    cur.execute('''SELECT IFNULL(email, userid) FROM users WHERE userid = ?''',
                (loginid,))
    email, = cur.fetchone()
    # Normalize empty form fields to NULL in the database.
    completed = completed or None
    planned = planned or None
    tags = tags or None
    today = util.today().toordinal()
    now = util.now()
    if isedit:
        # Replace the user's latest post (the row with MAX(postdate)).
        cur.execute('''UPDATE posts SET completed = ?, planned = ?, tags = ?, posttime = ? WHERE userid = ? AND postdate = ( SELECT lastpostdate FROM ( SELECT MAX(postdate) AS lastpostdate FROM posts AS p2 WHERE p2.userid = ? ) AS maxq )''',
                    (completed, planned, tags, now, loginid, loginid))
    else:
        cur.execute('''INSERT INTO posts (userid, postdate, posttime, completed, planned, tags) VALUES (?, ?, ?, ?, ?, ?)''',
                    (loginid, today, now, completed, planned, tags))
    allteam, sendnow = model.get_userteam_emails(loginid)
    if len(sendnow):
        # Python 2: `x and x.decode(...)` decodes only non-None fields.
        mail.sendpost(email, allteam, sendnow,
                      Post((loginid, today, now,
                            completed and completed.decode("utf-8"),
                            planned and planned.decode("utf-8"),
                            tags and tags.decode("utf-8"))))
    raise cherrypy.HTTPRedirect(cherrypy.url('/'))
def parseOne(self, one):
    """Extract one listing node from a search-results page into a HouseItem.

    one - a single result node (xpath-able selector). District, title
    and id come from the header links; prices, community, house type,
    square and level come from the body. Extraction is best-effort: a
    failure is logged and the partially-filled item is returned.
    """
    oneOut = items.HouseItem()
    oneOut['src'] = self.src
    oneOut['district'] = ''.join(one.xpath('./div[1]/p[3]/a[1]/text()').extract()).strip()
    oneOut['subDistrict'] = ''.join(one.xpath('./div[1]/p[3]/a[2]/text()').extract()).strip()
    oneOut['title'] = ''.join(one.xpath('./div[1]/h4/a/text()').extract()).strip()
    href = ''.join(one.xpath('./div[1]/h4/a/@href').extract()).strip()
    if len(href) > 0:
        id = '-1'
        try:
            # Listing id is the last path segment without its extension.
            id = href.split('/')[-1][:-5]
        except Exception as e:
            logging.warning("parseOne Exception %s" % (str(e)))
        oneOut['_id'] = id
    try:
        totalPrice = util.String2Number(''.join(one.xpath('./div[2]/p[1]/span/text()').extract()).strip())
        oneOut['totalPrice'] = totalPrice
        oneOut['unitPrice'] = util.String2Number(
            ''.join(one.xpath('./div[2]/p[2]/text()').extract()).strip())
        oneOut['community'] = ''.join(one.xpath('./div[1]/p[1]/a/text()').extract())
        oneOut['houseType'] = ''.join(one.xpath('./div[1]/p[1]/span[2]/text()').extract()).strip()
        # A bare '|' means the type actually lives one span further over.
        if oneOut['houseType'] == '|':
            oneOut['houseType'] = ''.join(one.xpath('./div[1]/p[1]/span[3]/text()').extract()).strip()
        oneOut['square'] = util.String2Number(''.join(one.xpath('./div[1]/p[1]/span[4]/text()').extract()).strip())
        # Fall back to the next span when the expected one didn't parse.
        if np.isnan(oneOut['square']):
            oneOut['square'] = util.String2Number(''.join(one.xpath('./div[1]/p[1]/span[5]/text()').extract()).strip())
        oneOut['level'] = ''.join(one.xpath('./div[1]/p[2]/span[1]/text()').extract()).strip()
        oneOut['crawlDate'] = util.today()
    except Exception as e:
        print(e)
        logging.warning("parseOne Exception %s"%(str(e)))
    return oneOut
def get_user_posts(userid):
    """Get the 10 most recent posts by this username, and get today's post
    if there is one today.

    @returns posts, thispost
    """
    cur = get_cursor()
    cur.execute('''SELECT userid, postdate, posttime, completed, planned, tags
        FROM posts WHERE userid = ?
        ORDER BY postdate DESC, posttime DESC LIMIT 10''', (userid,))
    posts = [create_post_with_bugs(row, cur) for row in cur.fetchall()]
    if not posts:
        # No history at all: show a single empty post.
        posts.append(Post(None))
        thispost = Post(None)
    elif posts[0].postdate == util.today():
        thispost = posts[0]
    else:
        thispost = Post(None)
    return posts, thispost
def index():
    """Render the CAT error-report monitor page.

    GET pre-fills the form with today's date and the current hour; POST
    queries error reports for one server (or all servers) at the chosen
    date/hour and renders them filtered by an overload percent.
    """
    from app.servers.monitor.forms import MonitorForm
    form = MonitorForm()
    form.type.choices = [('all', 'All Server')] + sorted([(ele.cat_name, ele.cat_name)
                                                          for ele in CatServerNameMap.query.all()],
                                                         key=lambda d: d[0].lower())
    servers, percent = [], 0.1
    if request.method == 'GET':
        form.date.data, form.hour.data = today(), datetime.datetime.now().hour
    else:
        # NOTE(review): `type` shadows the builtin and `time` shadows the
        # stdlib module; consider renaming.
        type, date, hour = form.type.data, form.date.data, form.hour.data
        percent = form.percent.data or 0.1
        try:
            percent = float(percent)
        except:
            flash("Percent Must be float!")
            return render_template('index.html', servers=servers, form=form)
        # hour == -1 means "whole day"; single digits are zero-padded.
        if hour == -1:
            hour = ''
        elif hour < 10:
            hour = '0' + str(hour)
        time = date.replace('-', '') + str(hour)
        if type != 'all':
            error_report = get_cat_error_report(type, time) or []
            servers.append(
                {"name": type, "report": error_report,
                 "cat_link": CAT_LINK_PATTERN % (CAT_HOST, type, time)})
        else:
            for server_name in CatServerNameMap.query.all():
                error_report = get_cat_error_report(server_name.cat_name, time) or []
                servers.append({"name": server_name.cat_name, "report": error_report,
                                "cat_link": CAT_LINK_PATTERN % (CAT_HOST, server_name.cat_name, time)})
        form.only_overload.data = True
    return render_template('index.html', servers=format_report(servers, percent), form=form)
def __init__(self, db_key, currency, date_start=datetime(2009, 1, 3),
             date_stop=None, tx=None):
    """Build the per-currency series used for transaction analysis.

    db_key - key identifying the backing database
    currency - series currency; 'USD' short-circuits the price lookup
    date_start - first date of the series window
    date_stop - last date; defaults to today's date at call time (the
        original default util.today() was evaluated once at import and
        went stale in long-running processes)
    tx - n-transactions series; fetched on demand when omitted (the
        original default performed a network fetch at module import time)
    """
    if date_stop is None:
        date_stop = util.today()
    if tx is None:
        tx = BlockchainCom.fetch_data("n-transactions")
    self.S = util.Series(date_start=date_start, date_stop=date_stop)
    self.db_key = db_key
    self.currency = currency
    self.db = db.Db(self.db_key)
    self.tx = self.S.prepare(tx)
    if currency == 'USD':
        # The USD price of USD is identically 1.0.
        self.usd = [(a[0], 1.0) for a in self.tx]
    else:
        self.usd = self.S.prepare(self.db.get_series('usd'))
    self.supply = self.S.prepare(self.db.get_series('supply'))
    # Pointwise market cap: supply * USD price.
    self.mcap = [(a[0], a[1] * self.usd[i][1])
                 for i, a in enumerate(self.supply)]
    # tx^2 scaled by market-cap growth relative to the first point.
    self.tx_sqr_m = [(a[0], (self.mcap[i][1] / self.mcap[0][1]) * a[1]**2)
                     for i, a in enumerate(self.tx)]
def parseOne(self, one, district, subDistrict):
    """Extract one listing node into a HouseItem for the given district.

    one - a single result node (xpath-able selector)
    district/subDistrict - location labels supplied by the caller
    houseType/square/level are split out of a single '|'-separated info
    string. Extraction is best-effort: a failure is logged and the
    partially-filled item is returned.
    """
    oneOut = items.HouseItem()
    oneOut['src'] = self.src
    oneOut['district'] = district
    oneOut['subDistrict'] = subDistrict
    oneOut['title'] = ''.join(one.xpath('./div/div[2]/div[1]/a/text()').extract()).strip()
    href = ''.join(one.xpath('./div/div[2]/div[1]/a/@href').extract()).strip()
    if len(href) > 0:
        id = '-1'
        try:
            # Listing id is the last path segment without its extension.
            id = href.split('/')[-1][:-5]
        except Exception as e:
            logging.warning("parseOne Exception %s" % (str(e)))
        oneOut['_id'] = id
    try:
        totalPrice = util.String2Number(''.join(one.xpath('./div/div[3]/h3/span/text()').extract()).strip())
        oneOut['totalPrice'] = totalPrice
        oneOut['unitPrice'] = util.String2Number(
            ''.join(one.xpath('./div/div[3]/p/text()').extract()).strip())
        oneOut['community'] = ''.join(one.xpath('./div/div[2]/div[2]/a/text()').extract())
        # Info string looks like "type | square | ... | level-a | level-b".
        tmp = ''.join(one.xpath('./div/div[2]/div[2]/text()').extract()).strip()
        tmp2 = tmp.split('|')
        if len(tmp2) > 0:
            oneOut['houseType'] = tmp2[0]
        if len(tmp2) > 1:
            oneOut['square'] = util.String2Number(tmp2[1])
        if len(tmp2) > 4:
            oneOut['level'] = tmp2[3] + '-' + tmp2[4]
        oneOut['crawlDate'] = util.today()
    except Exception as e:
        print(e)
        logging.warning("parseOne Exception %s"%(str(e)))
    return oneOut
def parseOne(self, one, district, subDistrict):
    """Extract one listing node into a HouseItem (newer page layout).

    one - a single result node (xpath-able selector)
    district/subDistrict - location labels supplied by the caller
    Prices live under div[6] or, on some pages, div[7]; house info and
    follow info are '|'- and '/'-separated strings respectively.
    Extraction is best-effort: a failure is logged and the partially
    filled item is returned. Example of a produced item:
    # {'_id': '',
    #  'area': '',
    #  'attention': '',
    #  'community': '',
    #  'crawlDate': datetime.datetime(2019, 8, 24, 0, 0),
    #  'district': '朝阳',
    #  'level': ')',
    #  'src': 'lj',
    #  'subDistrict': '通州北苑',
    #  'title': '',
    #  'totalPrice': nan,
    #  'unitPrice': nan}
    """
    oneOut = items.HouseItem()
    oneOut['src'] = self.src
    oneOut['district'] = district
    oneOut['subDistrict'] = subDistrict
    oneOut['title'] = ''.join(
        one.xpath('.//div[1]/div[1]/a/text()').extract()).strip()
    oneOut['_id'] = ''.join(
        one.xpath('.//div[1]/div[1]/a/@data-housecode').extract()).strip()
    try:
        unitPrice = util.String2Number(''.join(
            one.xpath(
                './/div[1]/div[6]/div[2]/span/text()').extract()).strip())
        if not np.isnan(unitPrice):
            oneOut['unitPrice'] = unitPrice
            oneOut['totalPrice'] = util.String2Number(''.join(
                one.xpath('.//div[1]/div[6]/div[1]/span/text()').extract()
            ).strip())
        else:
            # Alternate layout, e.g.
            # https://sh.lianjia.com/ershoufang/changning/pg96/
            oneOut['unitPrice'] = util.String2Number(''.join(
                one.xpath('.//div[1]/div[7]/div[2]/span/text()').extract()
            ).strip())
            oneOut['totalPrice'] = util.String2Number(''.join(
                one.xpath('.//div[1]/div[7]/div[1]/span/text()').extract()
            ).strip())
        oneOut['community'] = ''.join(
            one.xpath('.//div[1]/div[2]/div/a/text()').extract())
        # houseInfo: "... | type | square | ..."
        houseInfo = ''.join(
            one.xpath('.//div[1]/div[2]/div/text()').extract())
        houseInfo = houseInfo.split('|')
        if len(houseInfo) > 1:
            oneOut['houseType'] = houseInfo[1].strip()
        if len(houseInfo) > 2:
            oneOut['square'] = util.String2Number(houseInfo[2].strip())
        # '/html/body/div[4]/div[1]/ul/li[1]/div[1]/div[3]/div/a'
        oneOut['area'] = util.ExtractString(
            one, './/div[1]/div[3]/div/a/text()')
        # positionInfo: "level) structure" — split on the ')'.
        positionInfo = ''.join(
            one.xpath('.//div[1]/div[3]/div/text()').extract())
        positionInfo = positionInfo.split(')')
        if len(positionInfo) > 0:
            oneOut['level'] = positionInfo[0].strip() + ')'
        if len(positionInfo) > 1:
            oneOut['structure'] = positionInfo[1].strip()
        # followInfo: "attention / follow / release-date"
        followInfo = ''.join(
            one.xpath('.//div[1]/div[4]/text()').extract())
        followInfo = followInfo.split('/')
        if len(followInfo) > 0:
            oneOut['attention'] = followInfo[0].strip()
        if len(followInfo) > 1:
            oneOut['follow'] = followInfo[1].strip()
        if len(followInfo) > 2:
            oneOut['release'] = followInfo[2].strip()
        oneOut['crawlDate'] = util.today()
    except Exception as e:
        print(e)
        logging.warning("parseOne Exception %s" % (str(e)))
    return oneOut