def get_all_audit_lighthouse_score():
    """Return the most recent Lighthouse score per audited URL.

    Also flags whether the Google API key is missing (the config stores the
    literal string "None" when unset) so the UI can warn the user.
    """
    try:
        model = LighthouseScore
        rows = (db.session.query(
                    model.id,
                    model.url,
                    model.accessibility,
                    model.pwa,
                    model.seo,
                    model.best_practices,
                    model.performance,
                    model.status_job,
                    model.task_id,
                    # Latest run per URL via the grouped max of begin_date.
                    func.max(model.begin_date).label('begin_date'))
                .group_by(model.url)
                .all())
        payload = {
            "results": [
                {
                    "id": row.id,
                    "url": row.url,
                    "accessibility": row.accessibility,
                    "pwa": row.pwa,
                    "seo": row.seo,
                    "best_practices": row.best_practices,
                    "performance": row.performance,
                    "status_job": row.status_job,
                    "task_id": row.task_id,
                    "begin_date": row.begin_date,
                }
                for row in rows
            ],
            "google_error": app.config['GOOGLE_API_KEY'] == "None",
        }
        return generate_answer(data=payload)
    except Exception as e:
        print(e)
        return generate_answer(success=False)
def get_post_rank():
    """Queue a SERP rank lookup (POST) and return the stored rank history.

    POST form fields: ``query`` (search term) and ``domain`` (site to rank).
    Always responds with the full Serp history, newest first.
    """
    try:
        error = None
        if request.method == "POST":
            query = request.form["query"]
            domain = request.form["domain"]
            # Normalize bare domains so matching against result URLs works.
            if not (domain.startswith('//') or domain.startswith('http://')
                    or domain.startswith('https://')):
                domain = '//' + domain
            SerpRank.delay(query, domain, "en", "com")
            # Brief pause so the freshly queued task appears in the listing.
            time.sleep(.300)
        result = Serp.query.order_by(Serp.begin_date.desc()).all()
        result_list = {"results": [], "error": error}
        for i in result:
            result_list["results"].append({
                "id": i.id,
                "domain": i.domain,
                "pos": i.pos,
                # BUG FIX: the "url" key previously carried i.pos.
                "url": i.url,
                "query": i.query_text,
                "time": i.begin_date,
                "status_job": i.status_job,
                "task_id": i.task_id
            })
        return generate_answer(data=result_list)
    except Exception as e:
        print(e)
        return generate_answer(success=False)
def get_extract_headers_by_id(id):
    """Return the stored header-extraction result for audit *id*."""
    try:
        audit = Audit.query.filter(Audit.id == id).first()
        payload = json.loads(audit.result)
        return generate_answer(data=payload)
    except Exception as e:
        print(e)
        return generate_answer(success=False)
def post_delete_extract_headers():
    """Delete the Audit row whose id arrives in the POST form."""
    try:
        audit_id = request.form['id']
        Audit.query.filter(Audit.id == audit_id).delete()
        db.session.commit()
        return generate_answer(success=True)
    except Exception as e:
        print(e)
        return generate_answer(success=False)
def post_delete_keywords():
    """Delete the Keywords row whose id arrives in the POST form."""
    try:
        keyword_id = request.form['id']
        Keywords.query.filter(Keywords.id == keyword_id).delete()
        db.session.commit()
        # BUG FIX: the success answer was built but never returned, so the
        # view returned None; now matches the sibling delete endpoints.
        return generate_answer(success=True)
    except Exception as e:
        print(e)
        return generate_answer(success=False)
def get_keywords_by_id(id):
    """Return the stored keyword results and original query for row *id*."""
    try:
        row = Keywords.query.filter(Keywords.id == id).first()
        payload = {
            "results": json.loads(row.results),
            "query": row.query_text,
        }
        return generate_answer(data=payload)
    except Exception as e:
        print(e)
        return generate_answer(success=False)
def post_delete_rank():
    """Delete the Serp rank row whose id arrives in the POST form."""
    try:
        rank_id = request.form["id"]
        Serp.query.filter(Serp.id == rank_id).delete()
        db.session.commit()
        return generate_answer(success=True)
    except Exception as e:
        print(e)
        return generate_answer(success=False)
def get_extract_links_website_by_id(id):
    """Return the stored link-extraction result for audit *id*."""
    try:
        audit = Audit.query.filter(Audit.id == id).first()
        payload = {"results": json.loads(audit.result)}
        return generate_answer(data=payload)
    except Exception as e:
        print(e)
        return generate_answer(success=False)
def get_extract_links_status_by_id(id):
    """Return the stored link results for audit *id* plus a flat copy for status display."""
    try:
        audit = Audit.query.filter(Audit.id == id).first()
        parsed = json.loads(audit.result)
        # Shallow iteration copy (keys when the payload is a dict).
        payload = {"results": parsed, "link_status": list(parsed)}
        return generate_answer(data=payload)
    except Exception as e:
        print(e)
        return generate_answer(success=False)
def post_audit_lighthouse_score():
    """Queue a Lighthouse audit for the posted URL; reply with the task id."""
    try:
        url = request.form['url']
        if not url:
            return generate_answer(success=False)
        task = LighthouseAudit.delay(url)
        return generate_answer(data={"id": task.id})
    except Exception as e:
        print(e)
        return generate_answer(success=False)
def get_keywords_status_by_task():
    """Report whether the keyword job for the posted task id has finished."""
    try:
        task_id = request.form['task']
        row = Keywords.query.filter(Keywords.task_id == task_id).first()
        finished = row is not None and row.status_job == "FINISHED"
        return generate_answer(success=finished)
    except Exception as e:
        print(e)
        return generate_answer(success=False)
def post_extract_headers():
    """Queue a header extraction for the posted URL unless one already exists.

    Responds success only when a new extraction task was actually queued.
    """
    try:
        url = request.form['url']
        existing = Audit.query.filter(Audit.url == url).filter(
            Audit.type_audit == "Headers").count()
        if url and existing == 0:
            Extractor.delay("Headers", url)
            # Give the worker a moment to register the task before replying.
            time.sleep(.300)
            return generate_answer(success=True)
        # BUG FIX: the empty-url / already-audited path produced no response
        # (implicit None); return an explicit failure instead.
        return generate_answer(success=False)
    except Exception as e:
        print(e)
        return generate_answer(success=False)
def get_audit_website_status_by_task():
    """Report whether the website audit for the posted task id has finished."""
    try:
        # Removed leftover debug print("hahahahaha").
        task_id = request.form['task']
        result = Audit.query.filter(Audit.task_id == task_id).first()
        if result and result.status_job == "FINISHED":
            return generate_answer(success=True)
        else:
            return generate_answer(success=False)
    except Exception as e:
        print(e)
        return generate_answer(success=False)
def get_audit_status_by_task():
    """Report whether the Lighthouse job for the posted task id has finished."""
    try:
        task_id = request.form['task']
        row = LighthouseScore.query.filter(
            LighthouseScore.task_id == task_id).first()
        finished = row is not None and row.status_job == "FINISHED"
        return generate_answer(success=finished)
    except Exception as e:
        print(e)
        return generate_answer(success=False)
def post_delete_lighthouse_score():
    """Delete all LighthouseScore rows sharing the URL of the posted row id."""
    try:
        score_id = request.form['id']
        # Query on the class itself, not a throwaway LighthouseScore()
        # instance as before.
        result = LighthouseScore.query.filter(
            LighthouseScore.id == score_id).first()
        if result is None:
            # BUG FIX: unknown ids previously raised AttributeError on
            # result.url (swallowed by the broad except); fail cleanly.
            return generate_answer(success=False)
        LighthouseScore.query.filter(
            LighthouseScore.url == result.url).delete()
        db.session.commit()
        return generate_answer(success=True)
    except Exception as e:
        print(e)
        return generate_answer(success=False)
def get_post_keywords():
    """Queue a keyword lookup (POST) and return all stored keyword jobs."""
    try:
        if request.method == "POST":
            KeywordsGet.delay(request.form["query"])
            # Let the queued task register before listing below.
            time.sleep(.300)
        payload = {
            "results": [
                {
                    "id": row.id,
                    "query": row.query_text,
                    "status_job": row.status_job,
                    "task_id": row.task_id,
                }
                for row in Keywords.query.all()
            ]
        }
        return generate_answer(data=payload)
    except Exception as e:
        print(e)
        return generate_answer(success=False)
def get_graphs_by_id(id):
    """Return the stored Bokeh-style graph (script + div) for row *id*."""
    try:
        graph = Graphs.query.filter(Graphs.id == id).first()
        payload = {
            "id": id,
            "script": graph.script,
            "div": graph.div,
            # Reduce the stored URL to its bare hostname for display.
            "domain": urllib.parse.urlparse(graph.urls).netloc,
            "template": "Flask",
            "time": graph.begin_date,
        }
        return generate_answer(data=payload)
    except Exception as e:
        print(e)
        return generate_answer(success=False)
def get_extract_links_status_all():
    """List every link-extraction audit with its raw result and job status."""
    try:
        payload = {
            "results": [
                {
                    "id": row.id,
                    "url": row.url,
                    "result": row.result,
                    "begin_date": row.begin_date,
                    "task_id": row.task_id,
                    "status_job": row.status_job,
                }
                for row in Audit.query.filter(Audit.type_audit == "Links").all()
            ]
        }
        return generate_answer(data=payload)
    except Exception as e:
        print(e)
        return generate_answer(success=False)
def get_audit_lighthouse_score_by_id(id):
    """Return the full Lighthouse history for the URL behind score *id*.

    Besides the raw rows (newest first), builds parallel per-metric series
    plus formatted timestamp labels for charting.
    """
    try:
        anchor = LighthouseScore.query.filter(LighthouseScore.id == id).first()
        history = LighthouseScore.query.filter(
            LighthouseScore.url == anchor.url).order_by(
            LighthouseScore.begin_date.desc()).all()
        labels, rows = [], []
        seo_list, accessibility_list, pwa_list = [], [], []
        best_list, performance_list = [], []
        for score in history:
            labels.append(score.begin_date.strftime("%m/%d/%Y, %H:%M:%S"))
            seo_list.append(score.seo)
            accessibility_list.append(score.accessibility)
            pwa_list.append(score.pwa)
            best_list.append(score.best_practices)
            performance_list.append(score.performance)
            rows.append({
                "id": score.id,
                "url": score.url,
                "accessibility": score.accessibility,
                "pwa": score.pwa,
                "seo": score.seo,
                "best_practices": score.best_practices,
                "performance": score.performance,
                "begin_date": score.begin_date
            })
        result_arr = {
            "results": rows,
            "url": anchor.url,
            "id": id,
            "table": {
                "labels": labels,
                "seo_list": seo_list,
                "accessibility_list": accessibility_list,
                "pwa_list": pwa_list,
                "best_list": best_list,
                "performance_list": performance_list,
            },
        }
        return generate_answer(data=result_arr)
    except Exception as e:
        print(e)
        return generate_answer(success=False)
def get_audit_lighthouse_score_all():
    """List every stored Lighthouse score row (all runs, all URLs)."""
    try:
        payload = {
            "results": [
                {
                    "id": row.id,
                    "url": row.url,
                    "accessibility": row.accessibility,
                    "pwa": row.pwa,
                    "seo": row.seo,
                    "best_practices": row.best_practices,
                    "performance": row.performance,
                    "begin_date": row.begin_date,
                }
                for row in LighthouseScore.query.all()
            ]
        }
        return generate_answer(data=payload)
    except Exception as e:
        print(e)
        return generate_answer(success=False)
def get_post_graphs():
    """Queue graph generation for a domain (POST) and return all stored graphs."""
    try:
        if request.method == "POST":
            domain = request.form["domain"]
            # Prepend a scheme when the client sent a bare domain.
            if not domain.startswith(("https://", "http://")):
                domain = "https://" + domain
            GraphsGenerate.delay(domain)
            # Let the queued task register before listing below.
            time.sleep(0.3)
        payload = {
            "results": [
                {
                    "id": row.id,
                    "urls": row.urls,
                    "status_job": row.status_job,
                    "task_id": row.task_id,
                    "begin_date": row.begin_date,
                }
                for row in Graphs.query.all()
            ]
        }
        return generate_answer(data=payload)
    except Exception as e:
        print(e)
        return generate_answer(success=False)