def company_research():
    """Start background research for a company.

    Expects ``company_name`` and ``api_key`` in the query string.  If a
    ``bulk`` parameter is present the bulk pipeline runs instead of the
    single-company one.  Returns immediately; work happens on the queue.
    """
    # Membership test on the mapping itself; `.keys()` built a needless list.
    if 'bulk' in request.args:
        q.enqueue(Companies()._bulk, request.args['company_name'],
                  request.args['api_key'])
    else:
        q.enqueue(Companies()._research, request.args['company_name'],
                  request.args['api_key'])
    return {'Research has started.': True}
def employee_webhook(self, company_name, company_list, qry="", limit=5, list_id="", _report=""): _user, _company = company_list['user'], company_list['company'] employees = Companies()._employees(company_name, qry) company = Companies()._get_info(company_name) _company_list = company_list['objectId'] for index, row in employees.iterrows(): data = row.to_dict() company['user'], company['company'] = _user, _company prospect = company prospect['name'], prospect['pos'] = row['name'], row['title'] prospect['city'] = row['locale'] prospect['linkedin_url'] = row['linkedin_url'] prospect['lists'] = [Parse()._pointer('ProspectList', list_id)] if type(company['industry']) is list: company['industry'] = company['industry'][0] prospect['company_profile'] = company_list['profile'] r = Prospecter().create('Prospect', company) print "prospect_create_result", r.json() if RQueue()._has_completed("{0}_{1}".format(_company_list, list_id)): data = {'done': arrow.now().timestamp} r = Prospecter().update("SignalReport/" + _report, data) print "employee_webhook_has_completed -->", r.json()
def __init__(self):
    """Initialise data sources, grids, and initial view state."""
    super().__init__()
    self.company = Companies()
    self.applications = Applications()
    self.communications = Communications()
    self.setup()
    # Three display rows each: 4 columns for applications, 3 for activities.
    self.recent_applications = [[None] * 4 for _ in range(3)]
    self.upcoming_activities = [[None] * 3 for _ in range(3)]
    self.load_recent_applications()
    self.load_upcoming_activities()
    self.company_details = []
def _company_bulk_upload():
    """Queue a bulk company upload for the requesting user."""
    # TODO - three fields, company_name, website, domain
    #      - really only need 1
    payload = request.args["data"]
    owner = request.args["user"]
    q.enqueue(Companies()._bulk_upload, payload, owner)
    return {"started": True}
def draw_on_image_version_1(self, database, x_image, y_image):
    """Render a doge-style word cloud of company names onto the image.

    Pulls per-company project counts and main colors from the database,
    builds one Texts object per company (font size scales with project
    count), draws the ASCII doge head, then lays out the text objects.
    """
    list_of_arguments = ["name", "number_of_projects", "avg_color"]
    counts = database.run_sql_command("""SELECT company_name, COUNT(*), NULL FROM project WHERE main_color is NOT NULL GROUP BY company_name""")
    colors = database.run_sql_command("""SELECT company_name, main_color FROM project WHERE main_color is NOT NULL""")
    # One row per company: [name, project_count, list_of_main_colors].
    list_from_sql = [list(row) for row in counts]
    for row in list_from_sql:
        row[2] = []
    # GROUP BY guarantees unique names, so an index dict replaces the
    # original O(n*m) nested matching loop.
    by_name = {row[0]: row for row in list_from_sql}
    for name, color in colors:
        if name in by_name:
            by_name[name][2].append(color)
    companies_object_list = Companies.get_companies_object_list(
        list_of_arguments, list_from_sql)
    list_doge = ["so", "much", "very", "many", "wow", "such", "good", "how"]
    list_text_objects = [
        Texts(list_doge[randint(0, 7)] + ' ' + element.name,
              self.current_file_path + "/fonts/Prototype.ttf",
              int(element.number_of_projects * 6),
              element.avg_color[0], element.avg_color[1], element.avg_color[2])
        for element in companies_object_list
    ]
    # Bug fix: the original leaked this file handle; close it promptly.
    with open("doge_head.txt", "r") as file_title:
        doge_head = file_title.read()
    text = Texts(doge_head,
                 self.current_file_path + "/fonts/Cousine-Regular.ttf",
                 14, 0, 0, 0)
    # Reserve the area the doge head occupies before placing company text.
    self.occupied_x.append(set(range(0, 220)))
    self.occupied_y.append(set(range(200, 500)))
    text.draw_text(0, 200, self)
    self.draw_list_text_objects(list_text_objects, x_image, y_image)
def secondary_research(): name = "Coast Inc" domain = "onecoast.com" #print "ARGS", requests.args print "DATA", request.data name, domain = request.data["company_name"], request.data["domain"] q.enqueue(Companies()._secondary_research, name, domain, timeout=600) return {'started':True}
def company_streaming_info():
    """Return company info from the DB cache, or fetch and queue persistence.

    On a cache miss the info is looked up live; "not found" yields an error
    dict, otherwise the record is queued for insertion and returned.
    """
    company = check_if_company_exists_in_db(request.args)
    if company != []:
        return company
    # Bug fix: `company_name` was used below without ever being bound,
    # raising NameError on every cache miss.
    company_name = request.args['company_name']
    company = Companies()._get_info(company_name)
    if str(company) == "not found":
        return {company_name: "Not Found."}
    else:
        q.enqueue(Parse()._add_company, company.ix[0].to_dict(), company_name)
        return company.ix[0].to_dict()
def _daily_news_source():
    """Queue a daily-news scrape for one named source of a company.

    ``source`` selects the scraper (blog/glassdoor/press/news/hiring/
    twitter/facebook/linkedin); unknown sources queue nothing.
    """
    api_key = "9a31a1defcdc87a618e12970435fd44741d7b88794f7396cbec486b8"
    name, domain = request.args["company_name"], request.args["domain"]
    x = 6000
    # Dispatch table replaces the original eight-branch if/elif chain that
    # bound each job to an unused local (j0..j8).  Factories defer class
    # construction so only the requested scraper is instantiated.
    jobs = {
        "blog": lambda: Companies()._company_blog,
        "glassdoor": lambda: GlassDoor()._reviews,
        "press": lambda: Companies()._press_releases,
        "news": lambda: Companies()._news,
        "hiring": lambda: Companies()._hiring,
        "twitter": lambda: Twitter()._daily_news,
        "facebook": lambda: Facebook()._daily_news,
        "linkedin": lambda: Linkedin()._daily_news,
    }
    source = request.args["source"]
    if source in jobs:
        q.enqueue(jobs[source](), domain, api_key, name, timeout=x)
    return {'started':True}
def _start(self):
    """Queue daily secondary research for every distinct contact domain."""
    conn = r.connect(**rethink_conn.conn())
    rows = list(r.table("user_contacts").run(conn))
    unique_contacts = pd.DataFrame(rows).drop_duplicates("domain")
    for _, contact in unique_contacts.iterrows():
        q.enqueue(Companies()._daily_secondary_research,
                  contact.company_name, contact.domain, timeout=60000)
def _company_research(): #TODO - check if api key is valid and increment request count #TODO - add name if name is present company_name = remove_accents(request.args['company_name']) #api_key = request.args['api_key'] api_key = "9a31a1defcdc87a618e12970435fd44741d7b88794f7396cbec486b8" qry = {'where':json.dumps({'company_name':company_name})} company = Parse().get('Company', qry).json()['results'] name = "" print company if company: q.enqueue(Webhook()._update_company_info, company[0], api_key, name) return company[0] else: q.enqueue(Companies()._research, company_name, api_key, name) return {'Research has started.': True}
# Trigger a fetch via the Companies service wrapper.
from companies import Companies

Companies.get()
def test_daily_research(): name, domain = request.args["company_name"], request.args["domain"] print name, domain q.enqueue(Companies()._daily_secondary_research, name, domain, timeout=6000) return {'started':True}
def domain_research():
    """Queue domain-based research for the given company name."""
    name, domain = request.args["name"], request.args["domain"]
    q.enqueue(Companies()._domain_research, domain, "", name)
    return {"test": "lol"}
class MainWindow(QObject):
    """Qt bridge object exposing job-application tracking data to the UI.

    Backed by a local SQLite database (jobs.db) with three tables:
    companies, applications, and communications.
    """

    def __init__(self):
        super().__init__()
        # Data-access helpers (project classes).
        self.company = Companies()
        self.applications = Applications()
        self.communications = Communications()
        self.setup()
        # 3 display rows each: 4 columns for applications, 3 for activities.
        self.recent_applications = [[None for i in range(4)] for j in range(3)]
        self.upcoming_activities = [[None for i in range(3)] for j in range(3)]
        self.load_recent_applications()
        self.load_upcoming_activities()
        self.company_details = []

    @Slot(int, int, result=str)
    def get_app_field(self, row, col):
        # UI accessor for one cell of the recent-applications grid.
        return self.recent_applications[row][col]

    @Slot(int, int, result=str)
    def get_act_field(self, row, col):
        # UI accessor for one cell of the upcoming-activities grid.
        return self.upcoming_activities[row][col]

    @Slot(str)
    def set_company_details(self, string):
        self.company_details.append(string)

    @Slot(str, result=str)
    def get_company_details(self):
        # NOTE(review): decorated Slot(str, ...) but takes no string argument —
        # confirm the decorator signature against the UI caller.
        return self.company_details.pop()

    @Slot(str, result=str)
    def welcome_text(self, name):
        return 'Welcome ' + name

    def load_recent_applications(self):
        # Fill grid rows as [date, company name, job title, stage].
        for i, (app_ID, co_ID, app_date, stage, job, desc) in enumerate(
                self.applications.get_recent_applications()):
            self.recent_applications[i][0] = self.iso_to_date(app_date)
            self.recent_applications[i][1] = self.company.get_from_id(co_ID)[1]
            self.recent_applications[i][2] = job
            self.recent_applications[i][3] = stage

    def load_upcoming_activities(self):
        # Fill grid rows as [date, interaction, notes].
        for i, (comm_id, app_id, comm_date, interaction, status, notes) in enumerate(
                self.communications.get_upcoming_communications()):
            self.upcoming_activities[i][0] = self.iso_to_date(comm_date)
            self.upcoming_activities[i][1] = interaction
            self.upcoming_activities[i][2] = notes

    def iso_to_date(self, iso_date: str) -> str:
        # NOTE(review): '%-d' (day without zero padding) is glibc-specific and
        # fails on Windows strftime — confirm the target platform.
        return date.fromisoformat(iso_date).strftime('%B %-d, %Y')

    def setup(self):
        """Create the jobs.db schema if it does not already exist."""
        # Define connection and cursor
        connection = sqlite3.connect('jobs.db')
        cursor = connection.cursor()
        # Create companies' table
        cursor.execute("""CREATE TABLE IF NOT EXISTS companies ( company_ID INTEGER PRIMARY KEY, company_name TEXT UNIQUE, sector TEXT, co_desc TEXT )""")
        connection.commit()
        cursor.execute("""CREATE TABLE IF NOT EXISTS 
applications ( application_ID INTEGER PRIMARY KEY, company_ID INTEGER, application_date TEXT, stage TEXT, job_title TEXT, job_desc TEXT, FOREIGN KEY(company_ID) REFERENCES companies(company_ID) )""")
        connection.commit()
        cursor.execute("""CREATE TABLE IF NOT EXISTS communications ( communication_ID INTEGER PRIMARY KEY, application_ID INTEGER, communication_date TEXT, interaction TEXT, status TEXT, notes TEXT, FOREIGN KEY(application_ID) REFERENCES applications(application_ID) )""")
        connection.commit()
        connection.close()


def display_all(code):
    """Print every row of one table; code 0/1/2 selects
    companies/applications/communications."""
    tables = {0: 'companies', 1: 'applications', 2: 'communications'}
    connection = sqlite3.connect('jobs.db')
    cursor = connection.cursor()
    results = cursor.execute(f'SELECT * FROM {tables[code]}')
    for entry in results:
        print(entry)
    cursor.close()
    connection.close()
# NOTE(review): legacy batch routine (Python 2).  From what is visible it:
#   1. bulk-loads CompanyProspect / Prospect / UserContact records and
#      ContactList rows from Parse,
#   2. copies each record's company domain / name up to the top level,
#   3. attaches per-user "all_*" ContactList pointers to each record's
#      "lists" field,
#   4. concatenates everything into one DataFrame, groups by domain, and
#      enqueues Companies()._daily_secondary_research per unique domain.
# Code left byte-identical: the block ends in an unterminated triple-quoted
# string (the final ''') whose closing lies outside this view, so any
# reformatting would have to guess at the missing remainder.
def _old_start(self): print "started" cp = Parse()._bulk_get("CompanyProspect") p = Parse()._bulk_get("Prospect") uc = Parse()._bulk_get("UserContact") cl = Parse().get("ContactList", {"limit": 1000}).json()["results"] print cl cl = pd.DataFrame(cl) print cl.head() cl["user_id"] = [i["objectId"] for i in cl.user] for count, i in enumerate(cp): if "company" in i.keys(): if "domain" in i["company"].keys(): cp[count]["domain"] = i["company"]["domain"] for count, i in enumerate(p): if "company" in i.keys(): if "domain" in i["company"].keys(): p[count]["domain"] = i["company"]["domain"] for count, i in enumerate(uc): if "company" in i.keys(): if "name" in i["company"].keys(): uc[count]["company_name"] = i["company"]["name"] else: uc[count]["company_name"] = "" else: uc[count]["company_name"] = "" # Adding Lists To Contacts / Prospects for count, i in enumerate(cp): if "user" not in i.keys(): continue user_id = i["user"]["objectId"] _cl = cl[(cl.user_id == user_id) & (cl.db_type == "all_company_prospect")] al = cl[(cl.user_id == user_id) & (cl.db_type == "all_feed_prospect")] _cl, al = _cl.to_dict('r'), al.to_dict('r') all_feed_id = al[0]["objectId"] if al else "" list_id = _cl[0]["objectId"] if _cl else "" if "lists" in i.keys(): cp[count]["lists"] = cp[count]["lists"] + [{ "objectId": list_id }, { "objectId": all_feed_id }] else: cp[count]["lists"] = [{ "objectId": list_id }, { "objectId": all_feed_id }] for count, i in enumerate(p): if "user" not in i.keys(): continue user_id = i["user"]["objectId"] _cl = cl[(cl.user_id == user_id) & (cl.db_type == "all_prospect")] al = cl[(cl.user_id == user_id) & (cl.db_type == "all_feed_prospect")] _cl, al = _cl.to_dict('r'), al.to_dict('r') all_feed_id = al[0]["objectId"] if al else "" list_id = _cl[0]["objectId"] if _cl else "" if "lists" in i.keys(): p[count]["lists"] = p[count]["lists"] + [{ "objectId": list_id }, { "objectId": all_feed_id }] else: p[count]["lists"] = [{ "objectId": list_id }, { "objectId": all_feed_id }] for 
count, i in enumerate(uc): if "user" not in i.keys(): continue db_type, user_id = i["db_type"], i["user"]["objectId"] _cl = cl[(cl.user_id == user_id) & (cl.db_type == db_type)] al = cl[(cl.user_id == user_id) & (cl.db_type == "all_feed_prospect")] _cl, al = _cl.to_dict('r'), al.to_dict('r') all_feed_id = al[0]["objectId"] if al else "" list_id = _cl[0]["objectId"] if _cl else "" if "lists" in i.keys(): uc[count]["lists"] = uc[count]["lists"] + [{ "objectId": list_id }, { "objectId": all_feed_id }] else: uc[count]["lists"] = [{ "objectId": list_id }, { "objectId": all_feed_id }] _p, _cp, _uc = pd.DataFrame(p), pd.DataFrame(cp), pd.DataFrame(uc) #print _p[_p.domain.isnull()].shape, _p.shape #print _cp[_cp.domain.isnull()].shape, _cp.shape # for user pointer add user_contact_list pointer print _p.shape, _cp.shape, _uc.shape i, j, tmp = 0, 0, pd.concat([_cp, _p, _uc]).reset_index() print tmp.domain.drop_duplicates().shape #return for a, b in tmp[["domain", "lists", "company_name", "user"]].groupby("domain"): if a == ".": continue i = i + 1 if b.lists.dropna().sum(): j = j + 1 lists = [ ii["objectId"] for ii in b.lists.dropna().sum() if "objectId" in ii.keys() ] lists = pd.Series(lists).unique().tolist() company_name, domain = b.company_name.tolist()[0], a #print lists, a, b.company_name.tolist()[0] ''' r = requests.post("https://clear-spark.herokuapp.com/v1/clearspark/daily_news", #r = requests.post("http://localhost:4000/v1/clearspark/daily_news", headers={'Content-type': 'application/json'}, data=json.dumps({"company_name":company_name,"domain":domain, "lists":lists,"source":"blog"})) print r.text ''' api_key = "9a31a1defcdc87a618e12970435fd44741d7b88794f7396cbec486b8" #if i > 2: break x = 600000 #job = q.enqueue(Companies()._news, domain, api_key, company_name, timeout=x) company_name = self.remove_non_ascii(company_name) domain = self.remove_non_ascii(domain) print j, company_name, domain #, lists, tmp.shape job = q.enqueue(Companies()._daily_secondary_research, 
company_name, domain, api_key, lists, timeout=60000) ''' job = q.enqueue(Companies()._recent_webpages_published, domain, api_key, company_name, timeout=60000) #time.sleep(0.5) #print lists job.meta["lists"] = lists job.meta["_lists"] = lists job.save() #RQueue()._meta(job, "lists", lists) ''' '''
# NOTE(review): console-menu entry script (Python 2) for a companies/models
# CRUD app.  It loads pickled state (falling back to fresh objects), then
# loops a numbered menu until the user picks 9.  The snippet is truncated
# mid-`try` (no matching `except` visible), so it is left untouched.
import ui from companies import Companies from models import Models companies = Companies() models = Models() companies = ui.load("companies.pkl") models = ui.load("models.pkl") if not companies: companies = Companies() if not models: models = Models() key = 0 while not key == 9: ui.clear() ui.print_menu() key = ui.getKey("Please, input the number you would like to choose: ") ui.clear() try: if key == 0: ui.showCompanies(companies) elif key == 1: ui.showModels(models) elif key == 2: if ui.addCompany(companies): print "Company successfully added" elif key == 3: if ui.addModel(companies, models): print "Model successfully added" elif key == 4: if ui.deleteCompany(companies, models): print "Company successfully deleted"
def research_report():
    """Queue generation of the requested research report."""
    report_id = request.args["report"]
    q.enqueue(Companies()._research_report, report_id)
    return {"test": "lol"}
def score_report():
    """Queue scoring of the requested report."""
    report_id = request.args["report"]
    q.enqueue(Companies()._score_report, report_id)
    return {"test": "lol"}
def _company_webhook():
    """Webhook: start secondary research for the posted company."""
    payload = request.data
    company_name = payload["company_name"]
    domain = payload["domain"]
    q.enqueue(Companies()._secondary_research, company_name, domain)
    return {'started':True}
def _daily_news():
    """Queue the daily secondary-research job for one company."""
    # NOTE(review): hard-coded service API key — presumably belongs in config.
    api_key = "9a31a1defcdc87a618e12970435fd44741d7b88794f7396cbec486b8"
    company = request.args["company_name"]
    domain = request.args["domain"]
    q.enqueue(Companies()._daily_secondary_research, company, domain, api_key)
    return {'started':True}
def _company_prospect_webhook():
    """Webhook: kick off research for the posted company name."""
    company_name = request.data["company_name"]
    q.enqueue(Companies()._research, company_name)
    return {'started':True}