def ingest(start_date, end_date, product=_PRODUCT, outdir='', overwrite=False):
    """ Ingest all granules between two dates """
    d1 = parse(start_date)
    d2 = parse(end_date)
    dates = [d1 + datetime.timedelta(n) for n in range((d2 - d1).days + 1)]
    for day in [d.date() for d in dates]:
        start = datetime.datetime.now()
        index_fname = str(day) + '_scenes.txt'
        if exists(os.path.join('s3://%s' % bucket, os.path.join(product, index_fname))) and not overwrite:
            logger.info("Scenes for %s already processed" % day)
            continue
        logger.info('Processing date %s' % day)
        granules = query(day, day, product=product)
        metadata = []
        try:
            for gran in granules:
                metadata.append(ingest_granule(gran, outdir=outdir))
        except RuntimeError as e:
            logger.error('Error processing %s: %s' % (day, str(e)))
            # skip this entire date for now
            continue
        # upload index file
        if len(granules) > 0:
            fname = make_scene_list(metadata, fout=index_fname)
            push_to_s3(fname, bucket, prefix=product)
        logger.info('Completed processing %s: %s' % (day, datetime.datetime.now() - start))
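# A minimal usage sketch, not part of the library itself: it assumes the
# module-level `bucket` and AWS credentials are configured as above, and the
# date strings and output directory below are hypothetical.
if __name__ == '__main__':
    # ingest two days of granules and push the per-day scene lists to S3
    ingest('2017-01-01', '2017-01-02', outdir='/tmp/granules')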
@classmethod
def setUpClass(cls):
    """ Set up the class once by issuing a query and downloading a granule """
    # unittest invokes setUpClass on the class itself, so it must be a
    # classmethod taking `cls` rather than `self`
    cls.q = query(parse(cls.date1).date(), parse(cls.date1).date())
    cls.fnames = download_granule(cls.q[0])
def test_query_2days(self):
    """ Query CMR for two days """
    q = query(self.date1, self.date2)
    self.assertEqual(len(q), 598)
def _test_query_30days(self):
    """ Query CMR for 30 days (leading underscore keeps unittest from collecting this test by default) """
    q = query(self.date1, self.date3)
    self.assertEqual(len(q), 9272)
@classmethod
def setUpClass(cls):
    """ Set up the class once by issuing a query """
    cls.q = query(cls.date1, cls.date1)