def employee_tree(db: pymongo.MongoClient, employee_doc: dict, tree_depth: int):
    """Build a nested dict describing one employee and, recursively, their reports.

    Recurses through the ``Employees`` collection up to ``tree_depth`` levels.
    For a manager at depth 0 the reports are represented by a hypermedia link
    instead of being expanded; non-managers get ``employees = None``.
    """
    employee_id = employee_doc["employeeId"]
    company_id = employee_doc["companyId"]
    is_manager = bool(employee_doc["isManager"])

    # Hypermedia links are resolved up front, whether or not they end up used.
    link_to_manager = links.get(
        "company/{}/employee/{}/manager".format(company_id, employee_id))
    link_to_employees = links.get(
        "company/{}/employee/{}".format(company_id, employee_id))

    if not is_manager:
        employees = None
    elif tree_depth == 0:
        # Depth budget exhausted: expose reports as a link rather than expanding.
        employees = link_to_employees
    else:
        reports = db["Employees"].find(
            {"managerId": employee_id, "companyId": company_id})
        employees = [employee_tree(db, report, tree_depth - 1) for report in reports]

    return {
        "firstName": employee_doc["firstName"],
        "lastName": employee_doc["lastName"],
        "companyName": "TODO",
        "positionTitle": employee_doc["positionTitle"],
        "isManager": is_manager,
        "email": employee_doc["email"],
        "employeeId": employee_doc["employeeId"],
        "companyId": employee_doc["companyId"],
        "startDate": employee_doc["startDate"],
        "manager": link_to_manager,
        "managerId": employee_doc.get("managerId"),
        "employees": employees,
        "actions": {
            # TODO
        }
    }
def link(update, context):
    """Telegram ``/link`` command handler.

    Dispatches on the arguments: no args / ``list`` / ``show`` lists the
    user's links, ``add`` stores one, ``remove`` deletes one, ``clear`` wipes
    them, and a lone name fetches that link. Any ValueError (including our
    own "couldn't understand" fallback) is reported back to the chat.
    """
    user_id = update.message.chat.id
    args = context.args
    try:
        arg_count = len(args)
        if arg_count == 0 or (arg_count == 1 and args[0] in ["list", "show"]):
            message = links.show_list(user_id)
        elif args[0] == "add":
            name, url = parse_add_link(args[1:])
            message = links.add(user_id, name, url)
        elif arg_count == 2 and args[0] == "remove":
            message = links.remove(user_id, args[1])
        elif arg_count == 1 and args[0] == "clear":
            message = links.clear(user_id)
        elif arg_count == 1:
            # A single unrecognized token is treated as a link name to look up.
            message = links.get(user_id, args[0])
        else:
            raise ValueError("I couldn't understand you :(")
        context.bot.send_message(
            chat_id=update.effective_chat.id,
            text=message,
            parse_mode=telegram.ParseMode.MARKDOWN,
        )
    except ValueError as err:
        context.bot.send_message(chat_id=update.effective_chat.id, text=str(err))
def index():
    """Plot-index controller: redirect to the run action, then build the page body."""
    redirect(URL("default", "run"))
    # (label, href) pairs for every available plot, in display order.
    plot_specs = [
        ("[Barchart] Total score per over", links.get(1)),
        ("[Barchart] Runs made per over", links.get(2)),
        ("[Line] Total score per over", links.get(3)),
        ("[Line] Runs made per over", links.get(4)),
        ("[Barchart] Average runs per over", links.get(5)),
        ("[Line] Wickets per 5 over", links.get(6)),
        ("[Line] Successful chases", URL("default", "allwins")),
    ]
    plots = UL(*[LI(A(label, _href=href)) for label, href in plot_specs])
    title = H1("Plots")
    body = DIV(title, plots, _style="margin-left: 5em")
    return dict(body=body)
# Delete old links and comments if 'runall' in argv or 'cleanup' in argv: if _['delete_links_after'] > -1: cur.execute("delete from t3 where created < date_sub(now(), interval %s second)", (_['delete_links_after'],)) if _['delete_comments_after'] > -1: cur.execute("delete from t1 where created < date_sub(now(), interval %s second)", (_['delete_comments_after'],)) db.commit(); # Build/store locations to retrieve links if 'runall' in argv or 'locations' in argv: locations.build(_['crawl_subreddits'], _['crawl_urls']) # Crawls URLS from locations if 'runall' in argv or 'links' in argv: cur.execute("select id, url from crawl_locations where last_crawled < date_sub(now(), interval %s second)", (_['find_links_after'],)) for l in cur.fetchall(): links.get("%s?limit=%d" % (l[1], _['links_per_page'])) cur.execute("update crawl_locations set last_crawled = now() where id = %s", (l[0],)) db.commit() # Crawl eligible links if 'runall' in argv or 'comments' in argv: cur.execute("select id, permalink from t3 where last_crawled < date_sub(now(), interval %s second)", (_['recrawl_links_after'],)) for l in cur.fetchall(): for sort in _['comment_sort']: comments.get("http://www.reddit.com%s" % l[1], 't3_' + lib.base36encode(l[0]).lower(), '', "limit=%d&depth=%d&sort=%s" % (_['comment_limit_per_request'], _['comment_depth_per_request'], sort)) cur.execute("update t3 set last_crawled = now() where id = %s", (l[0],)) db.commit() sleep(_['sleep']) #Login and respond to links/comments if 'runall' in argv or 'respond' in argv: