def generate_users_infographic(self, hardwork_summary, ps_summary, vaults_summary, from_date, to_date):
    """Render the "Harvest.Finance users metrics" infographic image.

    Args:
        hardwork_summary: mapping with 'farm_buybacks', 'total_income' and
            'gas_saved' totals for the reporting period.
        ps_summary: currently unused; kept for interface compatibility.
        vaults_summary: currently unused; kept for interface compatibility
            (a "Deployed Strategies" row once read vaults_summary['quantity']).
        from_date: period start, rendered via format_date().
        to_date: period end, rendered via format_date().

    Returns:
        PIL.Image.Image: the composed infographic.
    """
    img = Image.open("assets/background.jpg")
    farmlogo = Image.open('assets/farm.png')
    draw = ImageDraw.Draw(img)

    header = ImageFont.truetype("assets/font/D-DIN-Bold.otf", 68)
    dates = ImageFont.truetype("assets/font/D-DIN.otf", 38)
    font = ImageFont.truetype("assets/font/D-DIN.otf", 50)
    metric = ImageFont.truetype("assets/font/D-DIN-Bold.otf", 50)

    # Title and date range.
    draw.text((240, 60), "Harvest.Finance users metrics", (249, 32, 105), font=header)
    draw.text((430, 130), format_date(from_date) + " - " + format_date(to_date), (255, 255, 255), font=dates)

    # Metric rows, spaced 75px apart, starting below the header block.
    text_height = 135

    text_height += 75
    draw.text((200, text_height), "$FARM buyback", (255, 255, 255), font=font)
    draw.text((800, text_height), f"{format_number(hardwork_summary['farm_buybacks'])}", (207, 20, 135), font=metric)
    # Paste the FARM logo next to the buyback amount; the third argument is
    # the alpha mask so the logo composites over the background.
    img.paste(farmlogo, (1000, text_height), farmlogo)

    text_height += 75
    draw.text((200, text_height), "Income for Farmers", (255, 255, 255), font=font)
    draw.text((800, text_height), f"{format_currency(hardwork_summary['total_income'])}", (207, 20, 135), font=metric)

    text_height += 75
    draw.text((200, text_height), "Saved in gas fees", (255, 255, 255), font=font)
    draw.text((800, text_height), f"{format_currency(hardwork_summary['gas_saved'])}", (207, 20, 135), font=metric)

    return img
def test_format_date():
    """format_date() renders valid timestamps and maps garbage to 'N/A'."""
    assert format_date('2019-05-21 06:44:15') == '21 May 2019'
    for bogus in ('N/A', 'None', 'Blah Blah'):
        assert format_date(bogus) == 'N/A'
def tab_tl_graph(ds, y, date_range):
    """Build a histogram of column *y* restricted to the selected date range.

    Args:
        ds: dataset identifier passed through to data_collector().
        y: name of the column to histogram.
        date_range: slider value converted to (t_min, t_max) by utils.format_date().

    Returns:
        plotly Figure with the histogram.
    """
    t_min, t_max = utils.format_date(date_range)
    df = data_collector(ds)
    # Keep only rows strictly inside the (t_min, t_max) window.
    dff = df[(df['Start_time_obj'] > t_min) & (df['Start_time_obj'] < t_max)]
    fig = px.histogram(dff, x=y)
    return fig
def save_rep(rep, master_load_id):
    """Persist one rep record via SAVE_D_REP and return its generated id.

    Returns the stored procedure's out-arg 'rep_id', or False when the
    procedure produced no result row.
    """
    sp_args = {
        'REP_ID': rep['$id'],
        'REP_userId': rep['userId'],
        'REP_homeSite': rep['homeSite'],
        'REP_firstName': rep['firstName'],
        'REP_lastName': rep['lastName'],
        'REP_displayName': rep['displayName'],
        'REP_ntDomainUser': rep['ntDomainUser'],
        'REP_extension': rep['extension'],
        'REP_outboundANI': rep['outboundANI'],
        'REP_id_LIST': rep['id'],
        'REP_customAttributes': _get_custom_attributes(rep['customAttributes']),
        'REP_dateAdded': format_date(parse_date(rep['dateAdded'])),
        'LOAD_HISTORY_PKID': master_load_id,
    }
    result_sets = execute_sp('UMA_TELECOM.SAVE_D_REP', sp_args, out_arg='rep_id')
    first_row = get_sp_first_result_set(result_sets)
    if not first_row:
        return False
    return first_row['rep_id']
def save_rep_role(rep_id, rep_role, master_load_id):
    """Persist one rep-role row via the SAVE_D_REP_ROLE procedure."""
    sp_args = {
        'D_REP_ID': rep_id,
        'REP_ROLE_ID': rep_role['$id'],
        'REP_ROLE_roleId': rep_role['roleId'],
        'REP_ROLE_name': rep_role['name'],
        'REP_ROLE_id_ALTERNATE': rep_role['id'],
        'REP_ROLE_dateAdded': format_date(parse_date(rep_role['dateAdded'])),
        'LOAD_HISTORY_PKID': master_load_id,
    }
    execute_sp('UMA_TELECOM.SAVE_D_REP_ROLE', sp_args)
def save_rep_skill(rep_id, rep_skill, master_load_id):
    """Persist one rep-skill row via the SAVE_D_REP_SKILL procedure."""
    sp_args = {
        'D_REP_ID': rep_id,
        'REP_SKILL_ID': rep_skill['$id'],
        'REP_SKILL_displayName': rep_skill['displayName'],
        'REP_SKILL_proficiency': rep_skill['proficiency'],
        'REP_SKILL_desireToUse': rep_skill['desireToUse'],
        'REP_SKILL_id_ALTERNATE': rep_skill['id'],
        'REP_SKILL_dateAdded': format_date(parse_date(rep_skill['dateAdded'])),
        'LOAD_HISTORY_PKID': master_load_id,
    }
    execute_sp('UMA_TELECOM.SAVE_D_REP_SKILL', sp_args)
def tab_tl_graph(ds, y, date_range):
    """Box plot of column *y* grouped by weekday, within the date range.

    Args:
        ds: dataset identifier passed through to data_collector().
        y: name of the column plotted on the y-axis.
        date_range: slider value converted to (t_min, t_max) by utils.format_date().

    Returns:
        plotly Figure with one box per day of the week.
    """
    t_min, t_max = utils.format_date(date_range)
    df = data_collector(ds)
    # Keep only rows strictly inside the (t_min, t_max) window.
    dff = df[(df['Start_time_obj'] > t_min) & (df['Start_time_obj'] < t_max)]
    fig = px.box(dff, x='weekday_num', y=y)
    # Relabel the numeric weekday ticks (1..7) with day names.
    fig.update_layout(
        xaxis=dict(
            title='Day of the Week',
            tickmode='array',
            tickvals=[1, 2, 3, 4, 5, 6, 7],
            ticktext=[
                'Sunday', 'Monday', 'Tuesday', 'Wednesday',
                'Thursday', 'Friday', 'Saturday'
            ],
        ),
    )
    return fig
def get_github_details(package_node) -> GitHubDetails:
    """Get fields associated with Github statistics of a package node.

    Graph node properties arrive as single-element lists; a local helper
    unwraps them with a per-field fallback instead of repeating
    ``package_node.get(key, [default])[0]`` for every field.

    Args:
        package_node: mapping of graph property name -> single-element list.

    Returns:
        GitHubDetails built from the extracted statistics.
    """
    def prop(key, default=-1):
        # Unwrap a single-valued graph property, falling back to *default*.
        return package_node.get(key, [default])[0]

    date = format_date(prop("gh_refreshed_on", "N/A"))
    github_details = {
        "dependent_projects": prop("libio_dependents_projects"),
        "dependent_repos": prop("libio_dependents_repos"),
        "total_releases": prop("libio_total_releases"),
        # NOTE(review): datetime.utcfromtimestamp is deprecated since Python
        # 3.12; kept here to preserve the existing output format.
        "latest_release_duration": str(datetime.datetime.utcfromtimestamp(
            prop("libio_latest_release", 1496302486.0))),
        "watchers": prop("gh_subscribers_count"),
        "issues": {
            "month": {
                "opened": prop("gh_issues_last_month_opened"),
                "closed": prop("gh_issues_last_month_closed")
            },
            "year": {
                "opened": prop("gh_issues_last_year_opened"),
                "closed": prop("gh_issues_last_year_closed")
            }},
        "pull_requests": {
            "month": {
                "opened": prop("gh_prs_last_month_opened"),
                "closed": prop("gh_prs_last_month_closed")
            },
            "year": {
                "opened": prop("gh_prs_last_year_opened"),
                "closed": prop("gh_prs_last_year_closed")
            }},
        "stargazers_count": prop("gh_stargazers"),
        "forks_count": prop("gh_forks"),
        "refreshed_on": date,
        "open_issues_count": prop("gh_open_issues_count"),
        "contributors": prop("gh_contributors_count"),
        "size": "N/A"
    }
    # "libio_usedby" entries are "name:stars" strings.
    used_by_list = []
    for epvs in package_node.get("libio_usedby", []):
        slc = epvs.split(':')
        used_by_list.append({'name': slc[0], 'stars': int(slc[1])})
    github_details['used_by'] = used_by_list
    return GitHubDetails(**github_details)
def save_rep_workgroup(rep_id, rep_workgroup, master_load_id):
    """Persist one rep-workgroup row via the SAVE_D_REP_WORKGROUP procedure."""
    sp_args = {
        'D_REP_ID': rep_id,
        'REP_WORKGROUP_ID': rep_workgroup['$id'],
        'REP_WORKGROUP_Name': rep_workgroup['name'],
        'REP_WORKGROUP_customAttributes': _get_custom_attributes(rep_workgroup['customAttributes']),
        'REP_WORKGROUP_id_ALTERNATE': rep_workgroup['id'],
        'REP_WORKGROUP_dateAdded': format_date(parse_date(rep_workgroup['dateAdded'])),
        'LOAD_HISTORY_PKID': master_load_id,
    }
    execute_sp('UMA_TELECOM.SAVE_D_REP_WORKGROUP', sp_args)
def start_crawl(file_path, keywords, start_time, end_time):
    """Crawl BBC search results for *keywords* and append matches to an xlsx file.

    Fetches paginated search results, keeps articles whose parsed date falls
    in [start_time, end_time], downloads and translates each article body,
    and flushes the collected set to *file_path* after every page.

    Args:
        file_path: output workbook path passed to utils.write_xlsx_apend().
        keywords: list of search terms, joined with '+' for the query string.
        start_time: inclusive lower bound, compared as int against the
            numeric date returned by utils.format_date().
        end_time: inclusive upper bound, same format as start_time.

    Side effects: increments the module-level TOTALS counter and writes to
    *file_path*; returns None.
    """
    keywords_str = "+".join(keywords)
    item_set = set()
    # Initial request only to discover the total result count.
    # NOTE(review): page=1 is fetched again by the loop below — confirm the
    # duplicate request is intended.
    url = f"https://www.bbc.co.uk/search?q={keywords_str}&page=1"
    r = requests.get(url=url, headers=get_header())
    html_content = r.text
    soup = BeautifulSoup(html_content, "html.parser")
    # The result payload is embedded as window.__INITIAL_DATA__ in the 4th
    # <script> tag; extract the JSON blob with a regex.
    match = re.search(">window.__INITIAL_DATA__=(.+);</script>",
                      str(soup.find_all("script")[3]))
    if match:
        data = json.loads(match[1])
        initial_results = jsonpath.jsonpath(data, "$..initialResults")[0]
        totals = initial_results["count"]
        # NOTE(review): range(1, totals // 10) drops the last (partial) page;
        # confirm whether that is intentional.
        for page in range(1, totals // 10):
            # Too many results; cap the crawl at 10 pages.
            if page == 10:
                break
            try:
                time.sleep(1)  # be polite between page requests
                url = f"https://www.bbc.co.uk/search?q={keywords_str}&page={page}"
                r = requests.get(url=url, headers=get_header())
                html_content = r.text
                soup = BeautifulSoup(html_content, "html.parser")
                match = re.search(">window.__INITIAL_DATA__=(.+);</script>",
                                  str(soup.find_all("script")[3]))
                if match:
                    data = json.loads(match[1])
                    initial_results = jsonpath.jsonpath(
                        data, "$..initialResults")[0]
                    for item in initial_results["items"]:
                        # Date strip text looks like e.g.:
                        # 17 April 2017
                        # 8 hours ago
                        # utils.format_date returns -1 when it cannot parse.
                        origin_date = utils.format_date(
                            item["metadataStripItems"][0]["text"])
                        if origin_date != -1 and int(
                                start_time) <= origin_date <= int(end_time):
                            article = entity.Article()
                            article.title = item["headline"]
                            article.title_cn = utils.translate_with_webdriver(
                                article.title)
                            article.url = item["url"]
                            article.date = str(origin_date)
                            try:
                                time.sleep(1)  # throttle article downloads too
                                # ns is presumably the newspaper library —
                                # download and parse the full article body.
                                art = ns.Article(item["url"], headers=get_header())
                                art.download()
                                art.parse()
                                article.text = art.text
                                # Fall back to a manual scrape when the parser
                                # produced an empty body.
                                if art.text.strip() == "":
                                    title, publish_date, content = utils.get_title_time_content(
                                        item["url"], header=get_header())
                                    article.text = content
                                article.text_cn = utils.translate_with_webdriver(
                                    article.text)
                            except Exception as exc:
                                # Skip articles that fail to download/parse.
                                continue
                            item_set.add(article)
            except Exception as exc:
                # A failed page fetch/parse skips to the next page; the
                # finally block below still flushes anything collected so far.
                continue
            finally:
                # Flush this page's articles to disk and reset the buffer so
                # a later failure cannot lose already-collected items.
                try:
                    global TOTALS
                    TOTALS += len(item_set)
                    utils.write_xlsx_apend(file_path, item_set)
                    item_set.clear()
                except:
                    # NOTE(review): bare except silently drops write errors —
                    # deliberate best-effort, but consider logging.
                    pass
def tab_IR_slider_container(date_range):
    """Return a human-readable description of the selected date range."""
    start, end = utils.format_date(date_range)
    return f'Date range is "{start}" -- "{end}"'
def tab_tl_graph(ds1, y1, z1, ds2, y2, z2, date_range, tog_tz):
    """Build a 2-row timeline figure: dataset 1 on top, dataset 2 below.

    Each row is a scatter of a chosen column over Start_time_obj, restricted
    to the selected date range. When a grouping column (z1/z2) is given, one
    trace per distinct group value is drawn; the string 'None' disables
    grouping. When the timezone toggle count is even, vertical dotted lines
    mark timezone changes found in dataset 1.

    Args:
        ds1, ds2: dataset identifiers passed to data_collector().
        y1, y2: column names plotted on the y-axis of rows 1 and 2.
        z1, z2: grouping column names, or the literal string 'None'.
        date_range: slider value converted to (t_min, t_max) by utils.format_date().
        tog_tz: toggle click count; even values enable the timezone markers.

    Returns:
        plotly Figure with two stacked subplots.
    """
    fig = make_subplots(rows=2, cols=1, vertical_spacing=0.05)
    t_min, t_max = utils.format_date(date_range)
    df1 = data_collector(ds1)
    df2 = data_collector(ds2)
    # Keep only rows strictly inside the (t_min, t_max) window.
    df1 = df1[((df1['Start_time_obj'] > t_min) & (df1['Start_time_obj'] < t_max))]
    df2 = df2[((df2['Start_time_obj'] > t_min) & (df2['Start_time_obj'] < t_max))]
    # Top plot, df1: single trace, or one trace per group value of z1.
    if z1 == 'None':
        fig.append_trace(go.Scattergl(x=df1['Start_time_obj'],
                                      y=df1[y1],
                                      mode='markers',
                                      opacity=1.,
                                      marker=dict(size=5),
                                      showlegend=False),
                         row=1, col=1)
    else:
        for i in df1[z1].unique():
            dff = df1[df1[z1] == i]
            fig.append_trace(go.Scattergl(
                x=dff['Start_time_obj'],
                y=dff[y1],
                mode='markers',
                opacity=1.,
                marker=dict(size=5),
                name=str(i),
            ), row=1, col=1)
    # Bottom plot, df2: same pattern with z2/y2.
    if z2 == 'None':
        fig.append_trace(go.Scattergl(x=df2['Start_time_obj'],
                                      y=df2[y2],
                                      mode='markers',
                                      opacity=1.,
                                      marker=dict(size=5),
                                      showlegend=False),
                         row=2, col=1)
    else:
        for i in df2[z2].unique():
            dff = df2[df2[z2] == i]
            fig.append_trace(go.Scattergl(
                x=dff['Start_time_obj'],
                y=dff[y2],
                mode='markers',
                opacity=1.,
                marker=dict(size=5),
                name=str(i),
            ), row=2, col=1)
    # Toggle Timezone changes: draw a dotted vertical line on the top plot at
    # each timezone change detected in df1 (only when df1 has 'time_offset').
    if tog_tz % 2 == 0:
        if 'time_offset' in df1.columns:
            tz_list = utils.make_tz_changers(df1, 'date', 'time_offset')
            for i, t in enumerate(tz_list):
                # Dates come back with '/' separators; plotly wants '-'.
                fig.add_shape(go.layout.Shape(
                    type='line',
                    x0=t.replace('/', '-'),
                    x1=t.replace('/', '-'),
                    y0=0,
                    y1=1,
                    line=dict(
                        color='LightSeaGreen',
                        width=2,
                        dash='dot',
                    ),
                ), row=1, col=1)
                # Stretch the line over the full subplot height in paper coords.
                fig.layout.shapes[i]['yref'] = 'paper'
    fig.update_yaxes(title_text=y1, row=1, col=1)
    fig.update_xaxes(title_text='Date', row=2, col=1)
    fig.update_yaxes(title_text=y2, row=2, col=1)
    fig.update_layout(
        height=650,
        margin=go.layout.Margin(b=20, t=10),
    )
    return fig
def extract_component_details(component):
    """Extract details from given component.

    Builds the per-component analysis summary (GitHub stats, code metrics,
    CVEs, license and version information) from the graph response. Graph
    properties arrive as single-element lists; local helpers unwrap them
    instead of repeating ``component.get(...).get(key, [default])[0]``.

    Args:
        component: graph response mapping with 'package', 'version' and
            optionally 'cves' / 'transitive' sections.

    Returns:
        dict summary for the component, or None for a transitive dependency
        that has no known CVEs (such entries are dropped from the report).
    """
    pkg = component.get("package", {})
    ver = component.get("version", {})

    def pkg_prop(key, default=-1):
        # Unwrap a single-valued package-level graph property.
        return pkg.get(key, [default])[0]

    def ver_prop(key, default=-1):
        # Unwrap a single-valued version-level graph property.
        return ver.get(key, [default])[0]

    date = format_date(pkg_prop("gh_refreshed_on", "N/A"))
    github_details = {
        "dependent_projects": pkg_prop("libio_dependents_projects"),
        "dependent_repos": pkg_prop("libio_dependents_repos"),
        "total_releases": pkg_prop("libio_total_releases"),
        "latest_release_duration": str(datetime.datetime.fromtimestamp(
            pkg_prop("libio_latest_release", 1496302486.0))),
        # NOTE(review): hard-coded placeholder carried over from the original
        # code — confirm whether a real first-release date is available.
        "first_release_date": "Apr 16, 2010",
        "issues": {
            "month": {
                "opened": pkg_prop("gh_issues_last_month_opened"),
                "closed": pkg_prop("gh_issues_last_month_closed")
            },
            "year": {
                "opened": pkg_prop("gh_issues_last_year_opened"),
                "closed": pkg_prop("gh_issues_last_year_closed")
            }},
        "pull_requests": {
            "month": {
                "opened": pkg_prop("gh_prs_last_month_opened"),
                "closed": pkg_prop("gh_prs_last_month_closed")
            },
            "year": {
                "opened": pkg_prop("gh_prs_last_year_opened"),
                "closed": pkg_prop("gh_prs_last_year_closed")
            }},
        "stargazers_count": pkg_prop("gh_stargazers"),
        "forks_count": pkg_prop("gh_forks"),
        "refreshed_on": date,
        "open_issues_count": pkg_prop("gh_open_issues_count"),
        "contributors": pkg_prop("gh_contributors_count"),
        "size": "N/A"
    }
    # "libio_usedby" entries are "name:stars" strings.
    used_by_list = []
    for epvs in pkg.get("libio_usedby", []):
        slc = epvs.split(':')
        used_by_list.append({'name': slc[0], 'stars': int(slc[1])})
    github_details['used_by'] = used_by_list

    code_metrics = {
        "code_lines": ver_prop("cm_loc"),
        "average_cyclomatic_complexity": ver_prop("cm_avg_cyclomatic_complexity"),
        "total_files": ver_prop("cm_num_files")
    }

    name = ver_prop("pname", "")
    version = ver_prop("version", "")
    ecosystem = ver_prop("pecosystem", "")

    cves = []
    recommended_latest_version = None
    if len(component.get("cves", [])) > 0:
        for cve in component.get("cves", []):
            cves.append({
                'CVE': cve.get('cve_id')[0],
                'CVSS': cve.get('cvss_v2', [''])[0]
            })
        # Prefer the graph's known-safe version; fall back to a live lookup.
        recommended_latest_version = pkg.get("latest_non_cve_version", "")
        if not recommended_latest_version:
            recommended_latest_version = get_recommended_version(ecosystem, name, version)

    licenses = ver.get("declared_licenses", [])
    latest_version = select_latest_version(
        version,
        pkg_prop("libio_latest_version", ""),
        pkg_prop("latest_version", ""),
        name
    )
    component_summary = {
        "ecosystem": ecosystem,
        "name": name,
        "version": version,
        "licenses": licenses,
        "security": cves,
        "osio_user_count": ver.get("osio_usage_count", 0),
        "latest_version": latest_version,
        "recommended_latest_version": recommended_latest_version,
        "github": github_details,
        "code_metrics": code_metrics
    }

    # Add transitive block for transitive deps; CVE-free transitive deps are
    # excluded from the report entirely.
    if component.get('transitive', {}):
        if not cves:
            return None
        component_summary['transitive'] = component.get('transitive')
    return component_summary