def test_get_next_passes_viirs(self, exists):
    """Check that get_next_passes finds the two expected VIIRS passes.

    *exists* is the mocked ``os.path.exists`` injected by a ``@patch``
    decorator (not visible in this chunk), so the bogus TLE filename
    below does not abort the computation.
    """
    # BUGFIX: the original set ``exists.return_code``, a typo that only
    # creates an unused attribute on the MagicMock; ``return_value`` is
    # what controls what the patched function returns.
    exists.return_value = True
    with patch('pyorbital.orbital.Orbital') as mymock:
        # Route the mocked Orbital's pass computation to the prepared
        # fixture orbital so deterministic TLE data is used.
        instance = mymock.return_value
        instance.get_next_passes = self.orb.get_next_passes
        allpasses = get_next_passes(self.satellites, self.utctime,
                                    4, (16, 58, 0),
                                    tle_file='nonexisting')
    self.assertEqual(len(allpasses), 2)
    # Expected rise/fall times of the two overpasses.
    rt1 = datetime(2018, 11, 28, 10, 53, 42, 79483)
    ft1 = datetime(2018, 11, 28, 11, 9, 6, 916787)
    rt2 = datetime(2018, 11, 28, 12, 34, 44, 667963)
    ft2 = datetime(2018, 11, 28, 12, 49, 25, 134067)
    # Membership checks keep the test independent of the (set) ordering
    # of the returned passes.
    rise_times = [p.risetime for p in allpasses]
    fall_times = [p.falltime for p in allpasses]
    assert rt1 in rise_times
    assert rt2 in rise_times
    assert ft1 in fall_times
    assert ft2 in fall_times
    assert all(p.instrument == 'viirs' for p in allpasses)
def main():
    """Demo entry point: plot the next day's passes for two satellites."""
    from trollsched.satpass import get_next_passes
    from datetime import datetime

    platforms = ["noaa 19", "suomi npp"]
    observer = (16, 58, 0)
    upcoming = get_next_passes(platforms, datetime.now(), 24, observer)
    for overpass in upcoming:
        save_fig(overpass, directory="/tmp/plots/")
def test_get_next_passes_with_aquadumps(self, dumps_from_ftp, exists):
    """Check pass retrieval for Aqua when dump passes are considered.

    *dumps_from_ftp* and *exists* are mocks injected by ``@patch``
    decorators (not visible in this chunk).
    """
    dumps_from_ftp.return_value = self.dumpdata
    # BUGFIX: was ``exists.return_code`` -- a typo creating an unused
    # attribute; ``return_value`` is what the patched os.path.exists
    # actually returns.
    exists.return_value = True
    with patch('pyorbital.orbital.Orbital') as mymock:
        # Use the prepared Aqua fixture orbital for deterministic passes.
        instance = mymock.return_value
        instance.get_next_passes = self.aqua_orb.get_next_passes
        allpasses = get_next_passes(self.aquas, self.utctime, 6,
                                    (16, 58, 0), tle_file='nonexisting',
                                    aqua_terra_dumps=True)
    self.assertEqual(len(allpasses), 3)
    # Expected rise/fall times of the three overpasses.
    expected_risetimes = [datetime(2018, 11, 28, 11, 12, 8, 728455),
                          datetime(2018, 11, 28, 12, 50, 46, 574975),
                          datetime(2018, 11, 28, 14, 33, 33, 973194)]
    expected_falltimes = [datetime(2018, 11, 28, 11, 26, 8, 250021),
                          datetime(2018, 11, 28, 13, 3, 53, 262440),
                          datetime(2018, 11, 28, 14, 40, 10, 761405)]
    for mypass in allpasses:
        # Each pass must match one of the expected times to within
        # sub-second rounding; min() replaces the original manual
        # search loop seeded with a huge timedelta.
        dtmin = min(abs(mypass.risetime - risetime)
                    for risetime in expected_risetimes)
        self.assertAlmostEqual(dtmin.seconds, 0)
        dtmin = min(abs(mypass.falltime - falltime)
                    for falltime in expected_falltimes)
        self.assertAlmostEqual(dtmin.seconds, 0)
        self.assertEqual(mypass.instrument, 'modis')
def test_get_next_passes_viirs(self, exists):
    """Check that get_next_passes finds the two expected VIIRS passes.

    *exists* is the mocked ``os.path.exists`` injected by a ``@patch``
    decorator (not visible in this chunk).
    """
    # BUGFIX: was ``exists.return_code`` -- a typo; ``return_value`` is
    # the MagicMock attribute that controls the mocked return.
    exists.return_value = True
    with patch('pyorbital.orbital.Orbital') as mymock:
        instance = mymock.return_value
        instance.get_next_passes = self.orb.get_next_passes
        allpasses = get_next_passes(self.satellites, self.utctime,
                                    4, (16, 58, 0),
                                    tle_file='nonexisting')
    self.assertEqual(len(allpasses), 2)
    # BUGFIX: get_next_passes returns a set, so ``allpasses.pop()``
    # yielded the passes in arbitrary order, and ``.seconds`` of a
    # negative timedelta is close to 86400 rather than 0 -- both could
    # make the test flaky.  Sort chronologically and compare with abs().
    n20pass1, n20pass2 = sorted(allpasses, key=lambda p: p.risetime)
    rt1 = datetime(2018, 11, 28, 10, 53, 42, 79483)
    ft1 = datetime(2018, 11, 28, 11, 9, 6, 916787)
    rt2 = datetime(2018, 11, 28, 12, 34, 44, 667963)
    ft2 = datetime(2018, 11, 28, 12, 49, 25, 134067)
    self.assertAlmostEqual(abs(n20pass1.risetime - rt1).seconds, 0)
    self.assertAlmostEqual(abs(n20pass1.falltime - ft1).seconds, 0)
    self.assertAlmostEqual(abs(n20pass2.risetime - rt2).seconds, 0)
    self.assertAlmostEqual(abs(n20pass2.falltime - ft2).seconds, 0)
    self.assertEqual(n20pass2.instrument, 'viirs')
+ " to new risetime " + str(dump_pass.falltime)) overpass.risetime = dump_pass.falltime overpass.boundary = SwathBoundary(overpass) elif (dump_pass.uptime >= overpass.uptime and dump_pass.risetime < overpass.falltime): logger.debug("adjusting " + str(overpass) + " to new falltime " + str(dump_pass.risetime)) overpass.falltime = dump_pass.risetime overpass.boundary = SwathBoundary(overpass) if overpass.falltime <= overpass.risetime: add = False logger.debug("skipping " + str(overpass)) if add and overpass.seconds() > MIN_PASS * 60: passes["aqua"].append(overpass) else: passes[sat] = [Pass(sat, rtime, ftime, satorb, uptime, instrument) for rtime, ftime, uptime in passlist if ftime - rtime > timedelta(minutes=MIN_PASS)] return set(reduce(operator.concat, passes.values())) if __name__ == '__main__': from trollsched.satpass import get_next_passes passes = get_next_passes( ["noaa 19", "suomi npp"], datetime.now(), 24, (16, 58, 0)) for p in passes: p.save_fig(directory="/tmp/plots/")
def single_station(self, sched, start_time, tle_file):
    """Calculate passes, graph, and schedule for one station.

    ``sched`` carries the scheduler-wide options and filename patterns,
    ``start_time`` the beginning of the scheduling window and
    ``tle_file`` the two-line-element file used for pass prediction.
    Returns the tuple ``(graph, allpasses)``.
    """
    logger.debug("station: %s coords: %s area: %s scores: %s",
                 self.id, self.coords, self.area.area_id, self.satellites)
    opts = sched.opts
    pattern = sched.patterns
    # Values substituted into the configured output filename patterns.
    pattern_args = {
        "station": self.id,
        "output_dir": opts.output_dir,
        "date": start_time.strftime("%Y%m%d"),
        "time": start_time.strftime("%H%M%S")
    }
    if opts.xml:
        pattern_args['mode'] = "request"
    elif opts.report:
        pattern_args['mode'] = "report"
    logger.info("Computing next satellite passes")
    # NOTE(review): the conditional parses as
    # ``(sched.dump_url or True) if opts.no_aqua_terra_dump else None``,
    # i.e. dump passes are fetched when the "no dump" option is truthy --
    # confirm the option's polarity against the argument parser (not
    # visible in this chunk).
    allpasses = get_next_passes(self.satellites, start_time,
                                sched.forward,
                                self.coords, tle_file,
                                aqua_terra_dumps=(sched.dump_url or True
                                                  if opts.no_aqua_terra_dump
                                                  else None),
                                min_pass=self.min_pass,
                                local_horizon=self.local_horizon)
    logger.info("Computation of next overpasses done")
    logger.debug(str(sorted(allpasses, key=lambda x: x.risetime)))
    area_boundary = AreaDefBoundary(self.area, frequency=500)
    self.area.poly = area_boundary.contour_poly
    if opts.plot:
        logger.info("Saving plots to %s",
                    build_filename("dir_plots", pattern, pattern_args))
        from threading import Thread
        # Save the pass plots in the background; joined further down
        # before the xml files are generated.
        image_saver = Thread(target=save_passes,
                             args=(allpasses,
                                   self.area.poly,
                                   build_filename("dir_plots", pattern,
                                                  pattern_args),
                                   sched.plot_parameters,
                                   sched.plot_title))
        image_saver.start()
    if opts.avoid is not None:
        avoid_list = get_passes_from_xml_file(opts.avoid)
    else:
        avoid_list = None
    logger.info("computing best schedule for area %s" % self.area.area_id)
    schedule, (graph, labels) = get_best_sched(allpasses, self.area,
                                               timedelta(seconds=opts.delay),
                                               avoid_list)
    logger.debug(pformat(schedule))
    # Flag the passes selected by the scheduler for recording.
    for opass in schedule:
        opass.rec = True
    logger.info("generating file")
    if opts.scisys:
        generate_sch_file(build_filename("file_sci", pattern, pattern_args),
                          allpasses, self.coords)
    if opts.meos:
        generate_meos_file(build_filename("file_meos", pattern, pattern_args),
                           allpasses, self.coords,
                           start_time + timedelta(hours=sched.start),
                           True)  # Ie report mode
    if opts.plot:
        logger.info("Waiting for images to be saved...")
        image_saver.join()
        logger.info("Done!")
    if opts.metno_xml:
        generate_metno_xml_file(build_filename("file_metno_xml", pattern,
                                               pattern_args),
                                allpasses, self.coords,
                                start_time + timedelta(hours=sched.start),
                                start_time + timedelta(hours=sched.forward),
                                self.id, sched.center_id, True)
    if opts.xml or opts.report:
        url = urlparse(opts.output_url or opts.output_dir)
        if opts.xml or opts.report:
            """Allways create xml-file in request-mode"""
            pattern_args['mode'] = "request"
            xmlfile = generate_xml_file(allpasses,
                                        start_time + timedelta(
                                            hours=sched.start),
                                        start_time + timedelta(
                                            hours=sched.forward),
                                        build_filename("file_xml", pattern,
                                                       pattern_args),
                                        self.id, sched.center_id, False)
            logger.info("Generated " + str(xmlfile))
            send_file(url, xmlfile)
        if opts.report:
            """'If report-mode was set"""
            pattern_args['mode'] = "report"
            xmlfile = generate_xml_file(allpasses,
                                        start_time + timedelta(
                                            hours=sched.start),
                                        start_time + timedelta(
                                            hours=sched.forward),
                                        build_filename("file_xml", pattern,
                                                       pattern_args),
                                        self.id, sched.center_id, True)
            logger.info("Generated " + str(xmlfile))
    if opts.graph or opts.comb:
        graph.save(build_filename("file_graph", pattern, pattern_args))
        graph.export(
            labels=[str(label) for label in labels],
            filename=build_filename("file_graph", pattern,
                                    pattern_args) + ".gv")
    if opts.comb:
        import pickle
        # Persist all computed passes for later combined-schedule runs.
        ph = open(
            os.path.join(build_filename("dir_output", pattern, pattern_args),
                         "allpasses.%s.pkl" % self.id), "wb")
        pickle.dump(allpasses, ph)
        ph.close()
    return graph, allpasses
def run():
    """Run the schedule command.

    Parses the command-line options, computes the upcoming satellite
    passes, derives the best schedule for the configured area and writes
    the requested outputs (scisys schedule file, xml request/report,
    pass plots, schedule graph).
    """
    import argparse
    global logger

    parser = argparse.ArgumentParser()
    parser.add_argument("--lon", help="Longitude, degrees east", type=float)
    parser.add_argument("--lat", help="Latitude, degrees north", type=float)
    parser.add_argument("--alt", help="Altitude, km", type=float)
    parser.add_argument("-l", "--log",
                        help="File to log to (defaults to stdout)",
                        default=None)
    parser.add_argument("-m", "--mail", nargs="*",
                        help="mail address(es) to send error messages to.",
                        default=None)
    parser.add_argument("-v", "--verbose", help="print debug messages too",
                        action="store_true")
    parser.add_argument("-g", "--graph",
                        help="save graph info to this directory",
                        default=None)
    parser.add_argument("-t", "--tle", help="tle file to use", default=None)
    parser.add_argument("-f", "--forward", type=float,
                        help="time ahead to compute the schedule")
    parser.add_argument("-s", "--start-time", type=parse_datetime,
                        help="start time of the schedule to compute")
    parser.add_argument("-d", "--delay", default=60, type=float,
                        help="delay (in seconds) needed between two "
                             "consecutive passes (60 seconds by default)")
    parser.add_argument("-c", "--config", help="configuration file to use",
                        default=None)
    parser.add_argument("-o", "--output-dir",
                        help="where to put generated plots",
                        default=None)
    parser.add_argument("-a", "--avoid",
                        help="xml request file with passes to avoid")
    group = parser.add_argument_group(title="output")
    group.add_argument("-x", "--xml",
                       help="generate an xml request file and"
                            " put it in this directory. Could be a url",
                       default=None)
    group.add_argument("-r", "--report",
                       help="generate an xml report file and"
                            " put it in this directory. Could be a url",
                       default=None)
    group.add_argument("--scisys", default=None,
                       help="path to the schedule file")
    opts = parser.parse_args()

    if opts.config:
        coords, scores, station, area, forward, start = read_config(
            opts.config)
    # NOTE(review): truthiness rejects a legitimate 0.0 coordinate; an
    # explicit ``is None`` check would be safer.  Kept as-is to preserve
    # the current CLI behaviour.
    if (not opts.config) and (not (opts.lon or opts.lat or opts.alt)):
        parser.error("Coordinates must be provided in the absence of "
                     "configuration file.")
    if not (opts.xml or opts.scisys or opts.report):
        parser.error("No output specified, use '--scisys' or '-x/--xml'")

    # Logging setup: rotate into a file when requested, else stdout.
    if opts.log:
        previous = os.path.exists(opts.log)
        handler = logging.handlers.RotatingFileHandler(opts.log,
                                                       backupCount=7)
        if previous:
            handler.doRollover()
    else:
        handler = logging.StreamHandler()
    handler.setFormatter(logging.Formatter("[%(levelname)s: %(asctime)s :"
                                           " %(name)s] %(message)s",
                                           '%Y-%m-%d %H:%M:%S'))
    if opts.verbose:
        loglevel = logging.DEBUG
    else:
        loglevel = logging.INFO
    handler.setLevel(loglevel)
    logging.getLogger('').setLevel(loglevel)
    logging.getLogger('').addHandler(handler)
    if opts.mail:
        # Warnings and errors are additionally mailed out.
        mhandler = logging.handlers.SMTPHandler("localhost",
                                                "*****@*****.**",
                                                opts.mail,
                                                "Scheduler")
        mhandler.setLevel(logging.WARNING)
        logging.getLogger('').addHandler(mhandler)
    logger = logging.getLogger("trollsched")

    # Explicit coordinates on the command line override the config file.
    if opts.lon and opts.lat and opts.alt:
        coords = (opts.lon, opts.lat, opts.alt)

    # test line
    # python schedule.py -v 16.148649 58.581844 0.052765 -f 216 -s
    # 20140118140000 -t tle_20140120.txt -x . --scisys myched.txt

    satellites = scores.keys()
    logger.info("Computing next satellite passes")
    tle_file = opts.tle
    if opts.forward:
        forward = opts.forward
    if opts.start_time:
        start_time = opts.start_time
    else:
        start_time = datetime.utcnow()
    allpasses = get_next_passes(satellites, start_time, forward, coords,
                                tle_file)
    logger.info("Computation of next overpasses done")
    logger.debug(str(sorted(allpasses, key=lambda x: x.risetime)))

    area_boundary = AreaDefBoundary(area, frequency=500)
    area.poly = area_boundary.contour_poly

    image_saver = None
    if opts.output_dir is not None:
        logger.info("Saving plots to %s", opts.output_dir)
        from threading import Thread
        # BUGFIX: the plots used to be saved twice -- once synchronously
        # right here (blocking the main thread) and once more in the
        # background thread; the redundant synchronous call is removed.
        image_saver = Thread(target=save_passes,
                             args=(allpasses, area.poly, opts.output_dir))
        image_saver.start()

    if opts.avoid is not None:
        avoid_list = get_passes_from_xml_file(opts.avoid)
    else:
        avoid_list = None

    logger.info("computing best schedule for area euron1")
    schedule, (graph, labels) = get_best_sched(allpasses, area, scores,
                                               timedelta(seconds=opts.delay),
                                               avoid_list)
    logger.debug(pformat(schedule))
    # Flag the passes selected by the scheduler for recording.
    for opass in schedule:
        opass.rec = True
    logger.info("generating file")

    if opts.scisys:
        generate_sch_file(opts.scisys, allpasses, coords)

    if opts.xml or opts.report:
        url = urlparse.urlparse(opts.xml or opts.report)
        if url.scheme not in ["file", ""]:
            # Remote destination: write locally first, upload below.
            directory = "/tmp"
        else:
            directory = url.path
        # BUGFIX: guard the join -- ``image_saver`` is only created when
        # an output directory was given; previously ``--report`` without
        # ``-o`` raised NameError here.
        if opts.report and image_saver is not None:
            logger.info("Waiting for images to be saved...")
            image_saver.join()
            logger.info("Done!")
        xmlfile = generate_xml_file(allpasses,
                                    start_time + timedelta(hours=start),
                                    start_time + timedelta(hours=forward),
                                    directory, station, opts.report)
        logger.info("Generated " + str(xmlfile))
        pathname, filename = os.path.split(xmlfile)
        del pathname
        if url.scheme in ["file", ""]:
            pass
        elif url.scheme == "ftp":
            # Upload the generated xml file to the requested ftp server.
            import ftplib
            session = ftplib.FTP(url.hostname, url.username, url.password)
            with open(xmlfile, "rb") as xfile:
                session.storbinary('STOR ' + str(filename), xfile)
            session.quit()
        else:
            logger.error("Cannot save to " + str(url.scheme)
                         + ", but file is there" + str(xmlfile))

    if opts.graph is not None:
        now = datetime.now()
        graph.save("graph" + now.isoformat())
        graph.export(labels=[str(label) for label in labels],
                     filename=os.path.join(opts.graph,
                                           "sched" + now.isoformat() + ".gv"))
def single_station(self, sched, start_time, tle_file):
    """Calculate passes, graph, and schedule for one station.

    ``sched`` carries the scheduler-wide options and filename patterns,
    ``start_time`` the beginning of the scheduling window and
    ``tle_file`` the two-line-element file used for pass prediction.
    Returns the tuple ``(graph, allpasses)``.
    """
    logger.debug("station: %s coords: %s area: %s scores: %s",
                 self.id, self.coords, self.area.area_id, self.satellites)
    opts = sched.opts
    pattern = sched.patterns
    # Values substituted into the configured output filename patterns.
    pattern_args = {
        "station": self.id,
        "output_dir": opts.output_dir,
        "date": start_time.strftime("%Y%m%d"),
        "time": start_time.strftime("%H%M%S")
    }
    if opts.xml:
        pattern_args['mode'] = "request"
    elif opts.report:
        pattern_args['mode'] = "report"
    logger.info("Computing next satellite passes")
    # NOTE(review): the conditional parses as
    # ``(sched.dump_url or True) if opts.no_aqua_terra_dump else None``,
    # i.e. dump passes are fetched when the "no dump" option is truthy --
    # confirm the option's polarity against the argument parser (not
    # visible in this chunk).
    allpasses = get_next_passes(self.satellites, start_time,
                                sched.forward,
                                self.coords, tle_file,
                                aqua_terra_dumps=(sched.dump_url or True
                                                  if opts.no_aqua_terra_dump
                                                  else None)
                                )
    logger.info("Computation of next overpasses done")
    logger.debug(str(sorted(allpasses, key=lambda x: x.risetime)))
    area_boundary = AreaDefBoundary(self.area, frequency=500)
    self.area.poly = area_boundary.contour_poly
    if opts.plot:
        logger.info("Saving plots to %s", build_filename(
            "dir_plots", pattern, pattern_args))
        from threading import Thread
        # Save the pass plots in the background; joined further down
        # before the xml files are generated.
        image_saver = Thread(
            target=save_passes,
            args=(allpasses,
                  self.area.poly,
                  build_filename(
                      "dir_plots", pattern, pattern_args)
                  )
        )
        image_saver.start()
    if opts.avoid is not None:
        avoid_list = get_passes_from_xml_file(opts.avoid)
    else:
        avoid_list = None
    logger.info("computing best schedule for area %s" % self.area.area_id)
    schedule, (graph, labels) = get_best_sched(allpasses, self.area,
                                               timedelta(seconds=opts.delay),
                                               avoid_list)
    logger.debug(pformat(schedule))
    # Flag the passes selected by the scheduler for recording.
    for opass in schedule:
        opass.rec = True
    logger.info("generating file")
    if opts.scisys:
        generate_sch_file(build_filename("file_sci", pattern, pattern_args),
                          allpasses, self.coords)
    if opts.xml or opts.report:
        url = urlparse(opts.output_url or opts.output_dir)
        # NOTE(review): ``directory`` is computed here but never used
        # below (the xml filename comes from build_filename) -- looks
        # like a leftover from an older code path; verify before removal.
        if url.scheme not in ["file", ""]:
            directory = "/tmp"
        else:
            directory = url.path
        if opts.plot:
            logger.info("Waiting for images to be saved...")
            image_saver.join()
            logger.info("Done!")
        if opts.xml or opts.report:
            """Allways create xml-file in request-mode"""
            pattern_args['mode'] = "request"
            xmlfile = generate_xml_file(allpasses,
                                        start_time + timedelta(
                                            hours=sched.start),
                                        start_time + timedelta(
                                            hours=sched.forward),
                                        build_filename(
                                            "file_xml", pattern,
                                            pattern_args),
                                        self.id,
                                        sched.center_id,
                                        False
                                        )
            logger.info("Generated " + str(xmlfile))
            send_file(url, xmlfile)
        if opts.report:
            """'If report-mode was set"""
            pattern_args['mode'] = "report"
            xmlfile = generate_xml_file(allpasses,
                                        start_time + timedelta(
                                            hours=sched.start),
                                        start_time + timedelta(
                                            hours=sched.forward),
                                        build_filename(
                                            "file_xml", pattern,
                                            pattern_args),
                                        self.id,
                                        sched.center_id,
                                        True
                                        )
            logger.info("Generated " + str(xmlfile))
    if opts.graph or opts.comb:
        graph.save(build_filename("file_graph", pattern, pattern_args))
        graph.export(
            labels=[str(label) for label in labels],
            filename=build_filename("file_graph", pattern,
                                    pattern_args) + ".gv"
        )
    if opts.comb:
        import pickle
        # Persist all computed passes for later combined-schedule runs.
        ph = open(os.path.join(build_filename("dir_output", pattern,
                                              pattern_args),
                               "allpasses.%s.pkl" % self.id), "wb")
        pickle.dump(allpasses, ph)
        ph.close()
    return graph, allpasses