def scrape():
    """Log in to LinkedIn and gather postings for every search/location pair.

    Runs a regional pass over each (search term, city) combination first,
    then a worldwide pass per search term. Returns 'Done' on completion.
    """
    scraper = LinkedinScraper()
    scraper.login(os.environ['LINKEDIN_USER'], os.environ['LINKEDIN_PW'])
    watchdog = Watchdog(Gatherer(scraper))

    searches = (
        'data intern',
        'machine learning intern',
        'software intern',
        'python intern',
    )
    cities = (
        'Vancouver',
        'New York',
        'San Francisco Bay Area',
        'Toronto',
    )

    # All regional searches first...
    for term in searches:
        for city in cities:
            print('gathering for', term, city)
            watchdog.monitor_gather(term, city)

    # ...then one worldwide sweep per search term.
    for term in searches:
        print('gathering for', term, 'Worldwide')
        watchdog.monitor_gather(term, 'Worldwide')

    return 'Done'
def __init__(self, preferences: Preferences, cache: Cache, **kwargs) -> None:
    """Set up the main window: build the news store and gatherer, wire
    signals, apply styling, and switch to the initial news view.

    Args:
        preferences: User preference store consulted by views.
        cache: Cache handed to the window (stored; used elsewhere).
        **kwargs: Forwarded verbatim to Gtk.Window.__init__.
    """
    Gtk.Window.__init__(self, **kwargs)
    self.preferences = preferences
    self.cache = cache
    self.news_store = NewsStore()
    # NOTE(review): StubGatherer appears to be a development stand-in for the
    # real Gatherer — kept commented for easy swapping; confirm before removing.
    # self.gatherer = StubGatherer(self.news_store)
    self.gatherer = Gatherer(self.news_store)
    # Order matters here: signals are connected before appearance/CSS setup,
    # and the first view is switched in only after styling is ready.
    self.connect_signals()
    self.prepare_appearance()
    self.css_provider = self.create_css()
    self.current_view = None
    self.switch_view(self.news_store, self.preferences)
def sysinfo(interval=5, host='localhost', port=35367):
    """Poll local system info and render it on a remote character display.

    Args:
        interval: Seconds to sleep between refresh passes.
        host: DisplayClient server hostname.
        port: DisplayClient server port.

    Runs until KeyboardInterrupt. A SIGUSR1 signal toggles the display
    backlight via the registered handler.
    """
    g = Gatherer()
    d = DisplayClient(host, port)

    def backlight_switch(signum, frame):
        d.backlight()

    # Register the handler once, before the loop — the original re-registered
    # it on every iteration, which was redundant work.
    signal(SIGUSR1, backlight_switch)

    date = strftime("Date: %a %b %d, %Y ")
    # Static header/labels are drawn once; only the dynamic fields are
    # refreshed inside the loop.
    d.write(1, 1, string="%s %s %s %s"
            % (g.hostname, g.system, g.system_release, g.machine))
    d.write(1, 26, string=date)
    d.write(1, 15, string="Uptime: ")
    d.write(1, 37, string="Time: ")
    d.write(1, 48, string="Load: ")
    try:
        while True:
            g.refresh()
            # Redraw the date only when it actually changes (day rollover);
            # compute it once per pass instead of twice.
            new_date = strftime("Date: %a %b %d, %Y ")
            if date != new_date:
                date = new_date
                d.write(1, 26, string=date)
            # Uptime text sits between 'up' and 'user' in g.uptime; the
            # trailing slice drops the comma left by the split.
            d.write(
                51, 15,
                string="%s " % ' '.join(
                    g.uptime.split('up')[1].split('user')[0].split()[:-1]
                )[:-1]
            )
            d.write(35, 37, string=strftime("%H:%M"))
            d.write(35, 48, string=g.uptime.split('average:')[-1].strip())
            sleep(interval)
    except KeyboardInterrupt:
        pass
    finally:
        # Close the display on any exit path — the original only closed it
        # on KeyboardInterrupt and leaked the connection on other errors.
        d.close()
from pycallgraph import PyCallGraph
from pycallgraph.output import GraphvizOutput
from gatherer import Gatherer

# NOTE(review): tickrate is not referenced anywhere in this snippet —
# presumably consumed by another module; confirm before removing.
tickrate = 60

# Trace every call made while constructing one Gatherer and running a single
# update(), rendering the resulting call graph through Graphviz output.
with PyCallGraph(output=GraphvizOutput()):
    aaron = Gatherer(startingpos=[380, 280])
    aaron.update()
def monitor_gather(self, keywords, location, start=0, max_retries=0):
    """Drive gather_jobs, optionally restarting from the progress checkpoint.

    The retry scaffolding was previously commented out, leaving a pointless
    ``while True: ...; break``. It is restored here behind an opt-in flag so
    default behavior is unchanged.

    Args:
        keywords: Search keywords forwarded to gather_jobs.
        location: Location string forwarded to gather_jobs.
        start: Result offset to begin gathering from.
        max_retries: 0 (default) lets any exception propagate, matching the
            previous behavior; n > 0 retries up to n times, resuming from
            the offset checkpointed in the 'progress' file; a negative
            value retries forever.
    """
    failures = 0
    while True:
        try:
            self.gatherer.gather_jobs(keywords, location, start,
                                      progress_file='progress')
            return
        except Exception as exc:
            failures += 1
            if 0 <= max_retries < failures:
                raise
            print('ERROR!')
            print(exc)
            print('********************************')
            # gather_jobs checkpoints its offset into 'progress'; resume
            # from there instead of restarting at the old offset.
            with open('progress') as fh:
                start = int(fh.read())


if __name__ == "__main__":
    import os
    from gatherer import Gatherer
    from linkedinScraper import LinkedinScraper

    scraper = LinkedinScraper()
    # Login left disabled, as in the original script:
    # scraper.login(os.environ['LINKEDIN_USER'], os.environ['LINKEDIN_PW'])
    gatherer = Gatherer(scraper)
    watchdog = Watchdog(gatherer)
    keywords = 'software intern'
    location = 'Worldwide'
    watchdog.monitor_gather(keywords, location)