def task_satellites():
    """Refresh the cached satellite orbital elements every few days."""
    # Re-run only once the three-day timeout has lapsed since the last success.
    refresh_interval = datetime.timedelta(days=3)
    return dict(
        actions=['python -m astro.satellites --retrieve'],
        uptodate=[timeout(refresh_interval)],
        clean=True,
        file_dep=['visual.txt'],
    )
def task_download_data():
    """Downloads all raw data files from the Met Office website"""
    # One sub-task per data set; each re-downloads only after four weeks.
    refresh_after = datetime.timedelta(weeks=4)
    for kind in data_sets:
        url, destination = get_data_file_parameters(kind)
        subtask = {
            'name': kind,
            'actions': ['wget -O %(targets)s {0}'.format(url)],
            'targets': [destination],
            'uptodate': [timeout(refresh_after)],
        }
        yield subtask
def task_download_data():
    """Downloads all raw data files from the Met Office website"""
    for dataset in data_sets:
        # Resolve the remote URL and the local target path for this set.
        source_url, target_path = get_data_file_parameters(dataset)
        yield {
            'actions': ['wget -O %(targets)s {0}'.format(source_url)],
            'targets': [target_path],
            'name': dataset,
            # Considered up to date for four weeks after a successful run.
            'uptodate': [timeout(datetime.timedelta(weeks=4))],
        }
def test_int(self, monkeypatch):
    """An integer timeout limit is interpreted as a number of seconds."""
    def freeze_clock(now):
        # Pin tools' notion of "now" to a fixed epoch value.
        monkeypatch.setattr(tools.time_module, 'time', lambda: now)

    freeze_clock(100)
    check = tools.timeout(5)
    a_task = task.Task("TaskX", None, uptodate=[check])
    # No recorded success yet -> task is not up to date.
    assert check(a_task, a_task.values) == False
    a_task.save_extra_values()
    assert a_task.values['success-time'] == 100
    # 3 seconds later: still inside the 5-second window.
    freeze_clock(103)
    assert check(a_task, a_task.values) == True
    # 6 seconds later: the window has expired.
    freeze_clock(106)
    assert check(a_task, a_task.values) == False
def test_timedelta_big(self, monkeypatch):
    """A multi-day timedelta limit is honoured across its whole span."""
    def freeze_clock(now):
        # Pin tools' notion of "now" to a fixed epoch value.
        monkeypatch.setattr(tools.time_module, 'time', lambda: now)

    freeze_clock(10)
    window = datetime.timedelta(days=2, minutes=5)
    check = tools.timeout(window)
    a_task = task.Task("TaskX", None, uptodate=[check])
    # First evaluation: nothing recorded yet, so not up to date.
    assert check(a_task, a_task.values) == False
    a_task.save_extra_values()
    assert a_task.values['success-time'] == 10
    # 30 hours later: still within the ~48h05m window.
    freeze_clock(3600 * 30)
    assert check(a_task, a_task.values) == True
    # 49 hours later: the window has expired.
    freeze_clock(3600 * 49)
    assert check(a_task, a_task.values) == False
def test_timedelta(self, monkeypatch):
    """A short timedelta limit flips the up-to-date status when it lapses."""
    def freeze_clock(now):
        # Pin tools' notion of "now" to a fixed epoch value.
        monkeypatch.setattr(tools.time_module, 'time', lambda: now)

    freeze_clock(10)
    window = datetime.timedelta(minutes=2)
    check = tools.timeout(window)
    a_task = task.Task("TaskX", None, uptodate=[check])
    # First evaluation: nothing recorded yet, so not up to date.
    assert check(a_task, a_task.values) == False
    # Executing the task records the current (frozen) time as success-time.
    a_task.execute()
    assert a_task.values['success-time'] == 10
    # 90 seconds later: inside the two-minute window.
    freeze_clock(100)
    assert check(a_task, a_task.values) == True
    # 190 seconds later: window expired.
    freeze_clock(200)
    assert check(a_task, a_task.values) == False
def gen_tasks(self):
    """Tweak theming by generating a LESS definition file.

    Yields a single doit task that picks a random background image, derives
    a base colour from its thumbnail, and writes both a LESS define file and
    a JSON data file describing the image.
    """
    template = self.site.config.get("LESS_THEME_TEMPLATE", "")
    if not template:
        # No template configured: emit a no-op task so doit still sees
        # this basename, then stop generating.
        print("No less theme template found... exiting.")
        yield {"basename": self.name, "actions": []}
        return
    # How long a previously generated theme stays "fresh" (default: 1 day).
    timeout_time = self.site.config.get("THEME_TIMEOUT", datetime.timedelta(days=1))
    kw = {"cache_folder": self.site.config["CACHE_FOLDER"],
          "themes": self.site.THEMES,
          "template": template}
    # Build targets and write CSS files
    base_path = utils.get_theme_path(self.site.THEMES[0])
    dst_dir = os.path.join(base_path, self.sources_folder)
    target = os.path.join(dst_dir, "define.less")
    json_target = os.path.join(self.site.config["OUTPUT_FOLDER"], "assets",
                               "js", "background_image_data.json")

    def write_theme_define():
        """Write the theme file and json data file."""
        try:
            image_data = get_random_image()
            bg_url = image_data["url"].strip()
            thumbnail = image_data["thumbnail_url"].strip()
            base_color = color_from_url(thumbnail)
        except Exception as e:
            # Fetching/processing the image is best-effort; on failure,
            # report and bail out without touching the existing files.
            # (Fixed: these were Python 2 print statements, a SyntaxError
            # under Python 3.)
            print("Failed to change image.")
            print(e)
            # NOTE(review): returning a dict from a doit python-action
            # records it as task values — presumably intended as a benign
            # "skip" result; confirm against the doit action contract.
            return {"basename": self.name, "actions": []}
        with codecs.open(target, "w", "utf-8") as f:
            f.write(template % (base_color, bg_url))
        with codecs.open(json_target, "w", "utf-8") as f:
            json.dump(image_data, f, indent=2)

    yield {
        "basename": self.name,
        "name": target,
        "targets": [target, json_target],
        "actions": [(write_theme_define, [])],
        # Regenerate when the timeout lapses, the target is missing, or the
        # relevant configuration changed.
        "uptodate": [timeout(timeout_time), os.path.exists(target),
                     utils.config_changed(kw)],
        "clean": True,
        "verbosity": 2,
    }
def task_update_feeds(self):
    """Download feed contents, add entries to the database.

    Yields one fetch sub-task per stored ``Feed``; if there are no feeds,
    yields a single empty placeholder task so the basename still exists.
    """
    def update_feed(feed):
        # Fetch the feed conditionally (etag / last-modified) and sync its
        # entries into the Entry table. Best-effort: network failures are
        # swallowed and the feed is simply skipped this round.
        modified = feed.last_modified.timetuple()
        etag = feed.etag
        try:
            parsed = feedparser.parse(
                feed.url,
                etag=etag,
                modified=modified
            )
            feed.last_status = str(parsed.status)
        except Exception:  # was a bare `except:`, which also caught
                           # KeyboardInterrupt/SystemExit
            # Probably a timeout
            # TODO: log failure
            return
        if parsed.feed.get('title'):
            LOGGER.notice(parsed.feed.title)
        else:
            LOGGER.notice(feed.url)
        feed.etag = parsed.get('etag', 'foo')
        # Fall back to the epoch when the feed carries no parsable date.
        modified = tuple(parsed.get('date_parsed', (1970, 1, 1)))[:6]
        LOGGER.notice("==========>", modified)
        modified = datetime.datetime(*modified)
        feed.last_modified = modified
        feed.save()
        # No point in adding items from missing feeds
        if parsed.status > 400:
            # TODO log failure
            return
        for entry_data in parsed.entries:
            LOGGER.notice("=========================================")
            date = entry_data.get('published_parsed', None)
            if date is None:
                date = entry_data.get('updated_parsed', None)
            if date is None:
                # One undated entry aborts processing of this whole feed.
                LOGGER.error("Can't parse date from:\n", entry_data)
                return False
            LOGGER.notice("DATE:===>", date)
            date = datetime.datetime(*(date[:6]))
            title = "%s: %s" % (feed.name,
                                entry_data.get('title', 'Sin título'))
            # Prefer full content, then description, then summary.
            content = entry_data.get('content', None)
            if content:
                content = content[0].value
            if not content:
                content = entry_data.get('description', None)
            if not content:
                content = entry_data.get('summary', 'Sin contenido')
            guid = str(entry_data.get('guid', entry_data.link))
            link = entry_data.link
            LOGGER.notice(repr([date, title]))
            e = list(Entry.select().where(Entry.guid == guid))
            LOGGER.notice(
                repr(dict(
                    date=date,
                    title=title,
                    content=content,
                    guid=guid,
                    feed=feed,
                    link=link,
                ))
            )
            if not e:
                # New entry: insert it.
                entry = Entry.create(
                    date=date,
                    title=title,
                    content=content,
                    guid=guid,
                    feed=feed,
                    link=link,
                )
            else:
                # Existing entry (matched by guid): refresh its fields.
                entry = e[0]
                entry.date = date
                entry.title = title
                entry.content = content
                entry.link = link
                entry.save()

    flag = False
    for feed in Feed.select():
        flag = True
        task = {
            'basename': self.name + "_fetch_feed",
            'name': str(feed.url),
            'actions': [(update_feed, (feed, ))],
            'uptodate': [timeout(datetime.timedelta(minutes=
                self.site.config.get('PLANETOID_REFRESH', 60)))],
        }
        yield task
    if not flag:
        # No feeds configured: still register the basename with a no-op.
        yield {
            'basename': self.name + "_fetch_feed",
            'name': '',
            'actions': [],
        }
def task_update_feeds(self):
    """Download feed contents, add entries to the database."""
    def update_feed(feed):
        # Conditionally fetch one feed (etag / last-modified) and sync its
        # entries into the Entry table. Network failures are swallowed and
        # the feed is simply skipped for this round.
        modified = feed.last_modified.timetuple()
        etag = feed.etag
        try:
            parsed = feedparser.parse(feed.url, etag=etag, modified=modified)
            feed.last_status = str(parsed.status)
        except Exception:
            # Probably a timeout
            # TODO: log failure
            return
        if parsed.feed.get('title'):
            LOGGER.info(parsed.feed.title)
        else:
            LOGGER.info(feed.url)
        feed.etag = parsed.get('etag', 'foo')
        # Fall back to the epoch when the feed carries no parsable date.
        modified = tuple(parsed.get('date_parsed', (1970, 1, 1)))[:6]
        LOGGER.info("==========> %s", modified)
        modified = datetime.datetime(*modified)
        feed.last_modified = modified
        feed.save()
        # No point in adding items from missinfg feeds
        if parsed.status > 400:
            # TODO log failure
            return
        for entry_data in parsed.entries:
            LOGGER.info("=========================================")
            date = entry_data.get('published_parsed', None)
            if date is None:
                date = entry_data.get('updated_parsed', None)
            if date is None:
                # NOTE: one undated entry aborts the rest of this feed.
                LOGGER.error("Can't parse date from: %s", entry_data)
                return False
            LOGGER.info("DATE:===> %s", date)
            date = datetime.datetime(*(date[:6]))
            title = "%s: %s" % (feed.name,
                                entry_data.get('title', 'Sin título'))
            # Prefer full content, then description, then summary.
            content = entry_data.get('content', None)
            if content:
                content = content[0].value
            if not content:
                content = entry_data.get('description', None)
            if not content:
                content = entry_data.get('summary', 'Sin contenido')
            guid = str(entry_data.get('guid', entry_data.link))
            link = entry_data.link
            LOGGER.info(repr([date, title]))
            # Entries are de-duplicated by guid.
            e = list(Entry.select().where(Entry.guid == guid))
            LOGGER.info(
                repr(
                    dict(
                        date=date,
                        title=title,
                        content=content,
                        guid=guid,
                        feed=feed,
                        link=link,
                    )))
            if not e:
                # New entry: insert it.
                entry = Entry.create(
                    date=date,
                    title=title,
                    content=content,
                    guid=guid,
                    feed=feed,
                    link=link,
                )
            else:
                # Existing entry: refresh its fields and persist.
                entry = e[0]
                entry.date = date
                entry.title = title
                entry.content = content
                entry.link = link
                entry.save()

    flag = False
    for feed in Feed.select():
        flag = True
        # One fetch sub-task per stored feed, refreshed every
        # PLANETOID_REFRESH minutes (default 60).
        task = {
            'basename': self.name + "_fetch_feed",
            'name': str(feed.url),
            'actions': [(update_feed, (feed, ))],
            'uptodate': [
                timeout(
                    datetime.timedelta(minutes=self.site.config.get(
                        'PLANETOID_REFRESH', 60)))
            ],
        }
        yield task
    if not flag:
        # No feeds configured: still register the basename with a no-op.
        yield {
            'basename': self.name + "_fetch_feed",
            'name': '',
            'actions': [],
        }
def task_update_feeds(self):
    """Download feed contents, add entries to the database.

    Yields one fetch sub-task per stored ``Feed``.
    """
    def update_feed(feed):
        # Conditionally fetch one feed (etag / last-modified) and sync its
        # entries into the Entry table. Best-effort: network failures are
        # swallowed and the feed is simply skipped this round.
        modified = feed.last_modified.timetuple()
        etag = feed.etag
        try:
            parsed = feedparser.parse(feed.url, etag=etag, modified=modified)
            feed.last_status = str(parsed.status)
        except Exception:  # was a bare `except:`, which also caught
                           # KeyboardInterrupt/SystemExit
            # Probably a timeout
            # TODO: log failure
            return
        if parsed.feed.get("title"):
            print(parsed.feed.title)
        else:
            print(feed.url)
        feed.etag = parsed.get("etag", "foo")
        # Fall back to the epoch when the feed carries no parsable date.
        modified = tuple(parsed.get("date_parsed", (1970, 1, 1)))[:6]
        print("==========>", modified)
        modified = datetime.datetime(*modified)
        feed.last_modified = modified
        feed.save()
        # No point in adding items from missing feeds
        if parsed.status > 400:
            # TODO log failure
            return
        for entry_data in parsed.entries:
            print("=========================================")
            date = entry_data.get("published_parsed", None)
            if date is None:
                date = entry_data.get("updated_parsed", None)
            if date is None:
                # One undated entry aborts processing of this whole feed.
                print("Can't parse date from:")
                print(entry_data)
                return False
            print("DATE:===>", date)
            date = datetime.datetime(*(date[:6]))
            title = "%s: %s" % (feed.name,
                                entry_data.get("title", "Sin título"))
            # Prefer full content, then description, then summary.
            content = entry_data.get("content", None)
            if content:
                content = content[0].value
            if not content:
                content = entry_data.get("description", None)
            if not content:
                content = entry_data.get("summary", "Sin contenido")
            guid = str(entry_data.get("guid", entry_data.link))
            link = entry_data.link
            print(repr([date, title]))
            # Entries are de-duplicated by guid.
            e = list(Entry.select().where(Entry.guid == guid))
            print(repr(dict(date=date, title=title, content=content,
                            guid=guid, feed=feed, link=link)))
            if not e:
                # New entry: insert it.
                entry = Entry.create(date=date, title=title, content=content,
                                     guid=guid, feed=feed, link=link)
            else:
                # Existing entry: refresh its fields and persist.
                entry = e[0]
                entry.date = date
                entry.title = title
                entry.content = content
                entry.link = link
                entry.save()

    for feed in Feed.select():
        # One fetch sub-task per stored feed, refreshed every
        # PLANETOID_REFRESH minutes (default 60).
        task = {
            "basename": self.name,
            "name": str(feed.url),
            "actions": [(update_feed, (feed,))],
            "uptodate":
                [timeout(datetime.timedelta(
                    minutes=self.site.config.get("PLANETOID_REFRESH", 60)))],
        }
        yield task
def task_expire():
    """Demo task that re-runs only after a five-minute timeout lapses."""
    five_minutes = datetime.timedelta(minutes=5)
    return {
        'verbosity': 2,
        'uptodate': [timeout(five_minutes)],
        'actions': ['echo test expire; date'],
    }