def track_goodreads(cur):
    """Insert today's Goodreads stats unless a row for today already exists.

    Args:
        cur: sqlite3 cursor on the trackr database.
    """
    today = date_related.get_date()
    last_entry = date_related.get_date_of_last_entry(cur, "goodreads")
    if today == last_entry[0]:
        print("Goodreads was already updated today.")
        return
    insert_goodreads_data(cur, today)
    print("New data added to Goodreads.")
def track_codewars(cur):
    """Scrape Codewars stats and insert a row for today if none exists yet.

    Args:
        cur: sqlite3 cursor on the trackr database.
    """
    today = date_related.get_date()
    last_entry = date_related.get_date_of_last_entry(cur, "codewars")
    if today == last_entry[0]:
        print("Codewars was already updated today.")
        return
    stats = scrape_codewars()
    row = (
        today,
        stats["honor"],
        stats["ranks"]["overall"]["score"],
        stats["codeChallenges"]["totalCompleted"],
    )
    cur.execute(
        '''INSERT INTO codewars(Date, Honor, Points, Challenges)
           VALUES (?,?,?,?)''',
        row,
    )
    print("New data added to Codewars.")
def body_week_visuals(cur):
    """Show a stacked bar chart of body-work metrics for the previous 7 days.

    Pulls Stretching/Core/PullUps/Cardio values from the `body` table for each
    of the last seven dates and renders them as stacked bars (blocking show).

    Args:
        cur: sqlite3 cursor on the trackr database.
    """
    # Dates for the previous week, oldest first.
    week_dates = [get_date(offset) for offset in range(7, 0, -1)]

    stretch, core, pullup, cardio = [], [], [], []
    for day in week_dates:
        cur.execute(
            "SELECT Stretching, Core, PullUps, Cardio FROM body WHERE Date = ?",
            (day, ))
        # NOTE(review): assumes a row exists for every date — fetchone() would
        # return None otherwise; confirm the table is pre-populated daily.
        row = cur.fetchone()
        stretch.append(row[0])
        core.append(row[1])
        pullup.append(row[2])
        cardio.append(row[3])

    # Running bottoms for the stacked bars.
    core_plus_pullup = [c + p for c, p in zip(core, pullup)]
    core_pullup_stretch = [cp + s for cp, s in zip(core_plus_pullup, stretch)]

    plt.figure(4, figsize=(20, 10))
    bars_core = plt.bar(week_dates, core, color='m')
    bars_pullup = plt.bar(week_dates, pullup, bottom=core, color='b')
    bars_stretch = plt.bar(week_dates, stretch, bottom=core_plus_pullup,
                           color='g')
    bars_cardio = plt.bar(week_dates, cardio, bottom=core_pullup_stretch,
                          color='k')
    plt.subplot()
    plt.legend((bars_core[0], bars_pullup[0], bars_stretch[0], bars_cardio[0]),
               ("Core", "Pull ups", "Stretching", "Cardio"))
    plt.xlabel("Date")
    plt.ylabel("Minutes or Repetitions")
    plt.title("Body Work this Week")
    plt.show(block=True)
    plt.close()
def track_chess(cur):
    """Scrape chess.com ratings and insert a row for today if none exists yet.

    The Daily960 column is recorded as 0 (no source field is read for it —
    presumably unrated/unavailable for this account).

    Args:
        cur: sqlite3 cursor on the trackr database.
    """
    today = date_related.get_date()
    last_entry = date_related.get_date_of_last_entry(cur, "chess")
    if today == last_entry[0]:
        print("Chess was already updated today.")
        return
    ratings = scrape_chess()
    row = (
        today,
        ratings["chess_daily"]["last"]["rating"],
        0,  # Daily960
        ratings["chess_rapid"]["last"]["rating"],
        ratings["chess_bullet"]["last"]["rating"],
        ratings["chess_blitz"]["last"]["rating"],
    )
    cur.execute(
        '''INSERT INTO chess(Date, Daily, Daily960, Rapid, Bullet, Blitz)
           VALUES (?,?,?,?,?,?)''',
        row,
    )
    print("New data added to Chess.")
def track_duolingo(cur):
    """Scrape Duolingo points and insert a row for today if none exists yet.

    Args:
        cur: sqlite3 cursor on the trackr database.
    """
    today = date_related.get_date()
    last_entry = date_related.get_date_of_last_entry(cur, "duolingo")
    if today == last_entry[0]:
        print("Duolingo was already updated today.")
        return
    scrape_duolingo()
    # NOTE(review): `order` and `points` look like module-level globals
    # populated by scrape_duolingo() — confirm against the rest of the file.
    row = [today] + [points[language] for language in order]
    cur.execute(
        '''INSERT INTO duolingo(Date, Greek, Esperanto, Vietnamese, Italian,
            Welsh, Irish, Czech, Indonesian, Spanish, Chinese, Russian,
            Portuguese, Norwegian, Turkish, Romanian, Polish, Dutch, French,
            German, HighValyrian, Korean, Danish, Hungarian, Japanese, Hebrew,
            Swahili, Swedish, Ukrainian)
           VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)''',
        row[0:29],
    )
    print("New data added to Duolingo.")
def update_track_warmup(trackr_cur):
    """Count today's Warm-Up-Quiz calls per category and update `warmup`.

    Reads the external Warm-Up-Quiz SQLite database, counts today's "python"
    and "excel" calls (calls_id > 64), and writes the counts into the trackr
    `warmup` row for today. Committing the trackr connection is left to the
    caller — presumably done at the top level; confirm.

    Args:
        trackr_cur: sqlite3 cursor on the trackr database.
    """
    SRC = "/home/matt/Warm-Up-Quiz/warmUp.db"
    today = date_related.get_date()
    py_count = 0
    ex_count = 0

    warmup_conn = sqlite3.connect(SRC)
    try:
        warmup_cur = warmup_conn.cursor()
        warmup_cur.execute("SELECT tool, date FROM calls WHERE calls_id > 64")
        for tool, timestamp in warmup_cur.fetchall():
            # Trim the time-of-day portion of the datetime string before
            # comparing against today's date.
            if timestamp[0:10] != today:
                continue
            if tool == "python":
                py_count += 1
            elif tool == "excel":
                ex_count += 1
            else:
                print(
                    "Rogue datapoint! Doesn't belong to an established category."
                )
    finally:
        # FIX: the connection was previously never closed (resource leak).
        # The original also called commit() on this read-only connection,
        # which was a no-op and has been dropped.
        warmup_conn.close()

    trackr_cur.execute(
        "UPDATE warmup SET Python = ?, Excel = ? WHERE Date = ?",
        (py_count, ex_count, today))
    print("Completed update to warmup.")
def update_hackerrank(cur):
    """Scrape HackerRank hackos history with Selenium and update both tables.

    Logs in, walks the paginated hackos history, appends any new line items to
    `hackerRankItems`, and records today's totals in `hackerRank`.

    Args:
        cur: sqlite3 cursor on the trackr database.

    Returns:
        The cursor on an unrecoverable 404, otherwise None.
    """
    today = date_related.get_date()

    # Navigate through login with Selenium.
    url = "https://www.hackerrank.com/{}".format(personal.data["hrUsername"])
    browser = webdriver.Firefox()
    browser.get(url)
    browser.find_element_by_class_name("login").click()
    username = browser.find_element_by_name("login")
    username.send_keys(personal.data["hrUsername"])
    password = browser.find_element_by_name("password")
    password.send_keys(personal.data["hrPassword"])
    password.send_keys(Keys.RETURN)

    # Estimate number of pages in the HackerRank table (10 items per page).
    cur.execute("SELECT Count(*) FROM hackerRankItems")
    items = cur.fetchone()
    maxPage = items[0] // 10 + 2

    data = []
    hackos = 0  # FIX: previously unbound if the page-1 branch never ran.
    for page in range(1, maxPage):
        hackosUrl = "https://www.hackerrank.com/{}/hackos/page/{}".format(
            personal.data["hrUsername"], page)
        browser.get(hackosUrl)

        # The page often 404s; refresh up to 5 times until the header appears.
        h3 = None  # FIX: previously unbound (NameError) if every try raised.
        count = 0
        while count < 5:
            innerHTML = browser.execute_script(
                "return document.body.innerHTML")
            bsSoup = bs4.BeautifulSoup(innerHTML, "lxml")
            try:
                h3 = bsSoup.find("h3")
                if "Hackos" in h3.text:
                    break
            except Exception:  # FIX: narrowed from a bare except.
                count += 1
                browser.refresh()

        # Give up if it is still on the 404 page.
        if not h3:
            browser.quit()
            print("HackerRank - 404.")
            return cur

        # Extract the Total Hackos count from the first page only.
        if page == 1:
            hackosRegex = re.compile(r"Total Hackos: (\d*)")
            mo = hackosRegex.search(h3.text)
            hackos = int(mo.group(1))

        # Extract table data row by row.
        table = bsSoup.findAll("div", {"class": "hacko-transaction-list-view"})
        try:
            for row in table:
                data.append([col.text for col in row.findAll("p")])
        except Exception:  # FIX: narrowed from a bare except.
            pass

    # Renumber IDs oldest-first (scrape order is newest-first).
    data = data[::-1]
    for i, entry in enumerate(data):
        entry[0] = i + 1

    # Append any new line items to hackerRankItems.
    cur.execute("SELECT * FROM hackerRankItems ORDER BY SlNumber")
    all_entries = cur.fetchall()
    selectAdd = 0
    diff = len(data) - len(all_entries)
    if diff == 0:
        print("No new line items in HackerRank.")
    for i in range(diff):
        index = i + len(all_entries)
        sql = '''INSERT INTO hackerRankItems(SlNumber, Action, HackosEarned,
                DateAdded, Qualifier)
                VALUES (?,?,?,?,?)'''
        # Routine "logged in" / giveaway items do not qualify for SelectHackos.
        if ("logged in" in data[index][1]
                or "Hackos everyone" in data[index][1]):
            qualifier = "N"
        else:
            qualifier = "Y"
            # FIX: the scraped value is text; adding it to the int accumulator
            # raised TypeError before. Only qualifying items are counted —
            # TODO confirm against historic SelectHackos values.
            selectAdd += int(data[index][2])
        new_entry = [
            data[index][0], data[index][1], data[index][2], today, qualifier
        ]
        cur.execute(sql, new_entry[0:5])
        print("Added a line item for HackerRank.")

    # Record today's totals in the hackerRank daily table.
    cur.execute("SELECT * FROM hackerRank ORDER BY Date DESC LIMIT 1 ")
    last_entry = cur.fetchone()
    if today != last_entry[0]:
        sql = ''' INSERT INTO hackerRank(Date, Hackos, SelectHackos)
                  VALUES (?,?,?)'''
        new_entry = [today, hackos, last_entry[2] + selectAdd]
        cur.execute(sql, new_entry[0:3])
        print("New data added to HackerRank daily table.")
    else:
        print("HackerRank was already updated today.")
    browser.quit()
def set_dates(week_date):
    """Append the previous seven days' dates (oldest first) to *week_date*.

    Args:
        week_date: list to be extended in place.

    Returns:
        The same list, for chaining.
    """
    week_date.extend(get_date(offset) for offset in range(7, 0, -1))
    return week_date