def test_dataproduct_save_util(self):
    """Saving a PDS XML DataProduct attaches it to a Block or a Body and stores the file.

    Removed dead commented-out mock/download scaffolding and replaced
    ``assertTrue(x == 1)`` with ``assertEqual`` for informative failure messages.
    """
    xml = os.path.abspath(os.path.join('photometrics', 'tests', 'example_scamp.xml'))
    file_name = 'test.xml'
    # Copy the example file into default storage so save_dataproduct can find it
    save_to_default(xml, os.path.dirname(xml))
    # Test with a block
    save_dataproduct(obj=self.test_block, filepath=os.path.basename(xml), filetype=DataProduct.PDS_XML, filename=file_name)
    new_blocks = DataProduct.content.block().filter(object_id=self.test_block.id)
    self.assertEqual(new_blocks.count(), 1)
    self.assertEqual(new_blocks[0].content_object, self.test_block)
    self.assertTrue(new_blocks[0].product.storage.exists(new_blocks[0].product.name))
    # Test with a body
    save_dataproduct(obj=self.body, filepath=os.path.basename(xml), filetype=DataProduct.PDS_XML, filename=file_name)
    new_body = DataProduct.content.body().filter(object_id=self.body.id)
    self.assertEqual(new_body.count(), 1)
    self.assertEqual(new_body[0].content_object, self.body)
    self.assertTrue(new_body[0].product.storage.exists(new_body[0].product.name))
def test_dataproduct_save_ALCDEF(self):
    """An ALCDEF text DataProduct is stored, readable, and refreshed on overwrite."""
    file_name = 'test_ALCDEF.txt'

    def _verify_alcdef(expected_text):
        # Fetch the (single) DataProduct for file_name, check its linkage,
        # storage presence and first line, and return its creation timestamp.
        dp = DataProduct.objects.get(product__contains=file_name)
        self.assertEqual(dp.content_object, self.test_sblock)
        self.assertEqual(dp.product.name, os.path.join('products', file_name))
        self.assertTrue(dp.product.storage.exists(dp.product.name))
        test_file = dp.product.open(mode='r')
        lines = test_file.readlines()
        dp.product.close()
        self.assertEqual(lines[0], expected_text)
        return dp.created

    file_content = "some text here"
    save_dataproduct(obj=self.test_sblock, filepath=None, filetype=DataProduct.ALCDEF_TXT,
                     filename=file_name, content=file_content)
    first_time_stamp = _verify_alcdef(file_content)

    # Overwrite the file: contents and creation timestamp must both change
    new_content = "some other text here"
    save_dataproduct(obj=self.test_sblock, filepath=None, filetype=DataProduct.ALCDEF_TXT,
                     filename=file_name, content=new_content)
    second_time_stamp = _verify_alcdef(new_content)
    self.assertNotEqual(second_time_stamp, first_time_stamp)
def test_dataproduct_save_gif(self):
    """A frame-movie GIF built from FITS frames is stored as a FRAME_GIF DataProduct."""
    # Build a short movie from five copies of the test frame
    fits_frame = os.path.abspath(os.path.join('photometrics', 'tests', 'banzai_test_frame.fits'))
    frame_stack = [fits_frame] * 5
    movie_file = make_gif(frame_stack, sort=False, init_fr=100, out_path=settings.MEDIA_ROOT,
                          center=3, progress=False)
    save_dataproduct(obj=self.test_block, filepath=movie_file, filetype=DataProduct.FRAME_GIF)
    # The stored product must exist in storage under the movie's basename
    dp = DataProduct.objects.get(product__contains=os.path.basename(movie_file))
    self.assertTrue(dp.product.storage.exists(dp.product.name))
def test_reverse_lookup(self):
    """An ALCDEF on a SuperBlock is reachable from SuperBlock and via Block->SuperBlock,
    but not directly from Block."""
    file_name = 'test_ALCDEF.txt'
    file_content = "some text here"
    save_dataproduct(obj=self.test_sblock, filepath=None, filetype=DataProduct.ALCDEF_TXT,
                     filename=file_name, content=file_content)
    alcdef = DataProduct.ALCDEF_TXT
    via_sblock = SuperBlock.objects.filter(dataproduct__filetype=alcdef)
    via_block = Block.objects.filter(dataproduct__filetype=alcdef)
    via_block_sblock = Block.objects.filter(superblock__dataproduct__filetype=alcdef)
    self.assertEqual(len(via_sblock), 1)
    self.assertEqual(len(via_block), 0)
    self.assertEqual(len(via_block_sblock), 1)
def test_dataproduct_save_util_from_dataroot(self):
    """Saving a PDS XML DataProduct from a file located under DATA_ROOT works for a Block.

    Removed dead commented-out mock scaffolding and replaced
    ``assertTrue(x == 1)`` with ``assertEqual`` for informative failure messages.
    """
    out_path = settings.DATA_ROOT
    xml_test = os.path.abspath(os.path.join('photometrics', 'tests', 'example_scamp.xml'))
    xml = os.path.join(out_path, os.path.basename(xml_test))
    file_name = 'test.xml'
    # Place the example file under DATA_ROOT, where save_dataproduct will look
    shutil.copyfile(xml_test, xml)
    # Test with a block
    save_dataproduct(obj=self.test_block, filepath=xml, filetype=DataProduct.PDS_XML, filename=file_name)
    new_blocks = DataProduct.content.block().filter(object_id=self.test_block.id)
    self.assertEqual(new_blocks.count(), 1)
    self.assertEqual(new_blocks[0].content_object, self.test_block)
    self.assertTrue(new_blocks[0].product.storage.exists(new_blocks[0].product.name))
def test_can_view_comet_lightcurve(self):
    """Open the ALCDEF lightcurve page for a comet and locate the plot widgets."""
    # Attach an ALCDEF file to the comet body, then load its lightcurve page
    save_dataproduct(self.comet, self.lcname, DataProduct.ALCDEF_TXT)
    self.login()
    lc_url = reverse('lc_plot', args=[self.comet.id])
    self.browser.get(self.live_server_url + lc_url)
    self.assertIn('Lightcurve for object: C/2017 K2', self.browser.title)
    # XPaths into the Bokeh-generated DOM; find_element raises if the node is absent,
    # so these lookups double as presence assertions.
    canvas_xpath = "/html/body[@class='page']/div[@id='page-wrapper']/div[@id='page']/div[@id='main']/div[@name='lc_plot']/div[@class='bk']/div[@class='bk'][2]/div[@class='bk'][1]/div[@class='bk']/div[@class='bk bk-canvas-events']"
    phase_tab_xpath = "/html/body[@class='page']/div[@id='page-wrapper']/div[@id='page']/div[@id='main']/div[@name='lc_plot']/div[@class='bk']/div[@class='bk bk-tabs-header bk-above']/div[@class='bk bk-headers-wrapper']/div[@class='bk bk-headers']/div[@class='bk bk-tab']"
    period_box_xpath = "/html/body[@class='page']/div[@id='page-wrapper']/div[@id='page']/div[@id='main']/div[@name='lc_plot']/div[@class='bk']/div[@class='bk'][2]/div[@class='bk'][2]/div[@class='bk'][2]/div[@class='bk']/div[@class='bk'][1]/div[@class='bk'][1]/div[@class='bk']/div[@class='bk bk-input-group']/div[@class='bk bk-spin-wrapper']/input[@class='bk bk-input']"
    canvas = self.browser.find_element_by_xpath(canvas_xpath)
    phase_tab = self.browser.find_element_by_xpath(phase_tab_xpath)
    phase_tab.click()
    period_box = self.browser.find_element_by_xpath(period_box_xpath)
    return
def test_dataproduct_delete(self):
    """Deleting a DataProduct queryset removes the backing file; a new save recreates it."""
    file_name = 'test_SB_png.txt'

    def _png_products():
        # All PNG_ASTRO DataProducts reachable from the test body
        return DataProduct.content.fullbody(bodyid=self.body.id).filter(filetype=DataProduct.PNG_ASTRO)

    # Build file/DP
    save_dataproduct(obj=self.test_sblock, filepath=None, filetype=DataProduct.PNG_ASTRO,
                     filename=file_name, content="some text here")
    dp_qset = _png_products()
    pathname = os.path.join(settings.MEDIA_ROOT, dp_qset[0].product.name)
    self.assertTrue(os.path.isfile(pathname))
    # Deleting the queryset must remove both the DB rows and the file on disk
    dp_qset.delete()
    self.assertFalse(os.path.isfile(pathname))
    self.assertFalse(_png_products().exists())
    # A fresh save with the same name overwrites the old file location
    save_dataproduct(obj=self.test_sblock, filepath=None, filetype=DataProduct.PNG_ASTRO,
                     filename=file_name, content="some new text here")
    dp_qset = _png_products()
    self.assertIn(file_name, dp_qset[0].product.name)
def handle(self, *args, **options):
    """Scan observed Blocks for existing GIFs, extracted spectra and ALCDEF files
    on disk, and create the corresponding DataProducts.

    Selection: a single Block by --reqnum, or all observed Blocks since --start year.
    """
    if options['reqnum'] is not None:
        blocks = Block.objects.filter(request_number=options['reqnum'])
    else:
        start_year = datetime.strptime(options['start'], '%Y')
        blocks = Block.objects.filter(num_observed__gte=1).filter(block_start__gte=start_year)

    num_gifs = 0
    num_alcdefs = 0
    num_spectra = 0
    seen_tracking_numbers = []
    for bl in blocks:
        # Collect every plausible on-disk spelling of the object name
        obj_list = [sanitize_object_name(bl.current_name()),
                    bl.current_name().replace(' ', '_'),
                    bl.current_name().replace(" ", "")]
        if bl.body:
            obj_list.append(sanitize_object_name(bl.body.old_name()))
            other_designations = Designations.objects.filter(body=bl.body)
            for desig in other_designations:
                obj_list.append(sanitize_object_name(desig.value))
        if bl.calibsource:
            obj_list.append(sanitize_object_name(bl.calibsource.name))
        obj_list = list(set(obj_list))

        req = bl.request_number
        tn = bl.superblock.tracking_number
        # Data may be filed under the observation date or the previous day
        d1 = bl.when_observed
        d2 = bl.when_observed - timedelta(days=1)
        date_obs_options = [d1.strftime('%Y%m%d'), d2.strftime('%Y%m%d')]

        # find gifs
        for date_obs in date_obs_options:
            for obj in obj_list:
                try:
                    path = os.path.join(date_obs, obj + '_' + req)
                except TypeError:
                    continue
                base_dir = date_obs
                if bl.obstype in [Block.OPT_IMAGING, Block.OPT_IMAGING_CALIB]:
                    movie_file = "{}_{}_framemovie.gif".format(obj, req)
                elif bl.obstype in [Block.OPT_SPECTRA, Block.OPT_SPECTRA_CALIB]:
                    movie_file = "{}_{}_guidemovie.gif".format(obj, req)
                    base_dir = os.path.join(path, "Guide_frames")
                else:
                    movie_file = None
                if movie_file:
                    gif_path = search(base_dir, movie_file, latest=True)
                    if gif_path:
                        num_gifs += 1
                        self.stdout.write(
                            f"Found GIF for {bl.current_name()} in Block {bl.id} (Reqnum:{req}). \n"
                            f"===> Creating DataProduct for {gif_path}.")
                        save_dataproduct(obj=bl, filepath=gif_path,
                                         filetype=DataProduct.FRAME_GIF,
                                         force=options['overwrite'])
                # find Spectra
                spec_list = search(path, matchpattern='.*_2df_ex.fits', latest=False)
                if spec_list:
                    for spec in spec_list:
                        num_spectra += 1
                        spec_path = os.path.join(path, spec)
                        self.stdout.write(
                            f"Found Spectrum for {bl.current_name()} in Block {bl.id} (Reqnum:{req}). \n"
                            f"===> Creating DataProduct for {spec_path}.")
                        save_dataproduct(obj=bl, filepath=spec_path,
                                         filetype=DataProduct.FITS_SPECTRA,
                                         force=options['overwrite'])

        # find ALCDEFs (once per SuperBlock tracking number)
        if tn not in seen_tracking_numbers:
            seen_tracking_numbers.append(tn)
            for obj in obj_list:
                try:
                    reduction_dir = os.path.join('Reduction', obj)
                except TypeError:
                    continue
                alcdef_base = f'.*.{tn}_ALCDEF.txt'
                alcdef_list = search(reduction_dir, alcdef_base, latest=False)
                if alcdef_list:
                    num_alcdefs += len(alcdef_list)
                    for alcfile in alcdef_list:
                        alc_path = os.path.join(reduction_dir, alcfile)
                        self.stdout.write(
                            f"Found ALCDEF for {bl.current_name()} in SuperBlock {bl.superblock.id} (TrackingNum:{tn}). \n"
                            f"===> Creating DataProduct for {alc_path}.")
                        save_dataproduct(obj=bl.superblock, filepath=alc_path,
                                         filetype=DataProduct.ALCDEF_TXT,
                                         force=options['overwrite'])

    self.stdout.write(f"{num_gifs} Gifs, {num_spectra} Spectra, and {num_alcdefs} ALCDEF Data products Created")
def handle(self, *args, **options):
    """Run DAMIT lightcurve inversion tooling for a body and store the results as DataProducts.

    Modes (mutually exclusive):
      * --period_scan: run a period scan and save the periodogram.
      * --lc_model:    generate a model lightcurve over a date range from a
                       previously-saved shape model + parameter file.
      * default:       run convexinv/conjgradinv/minkowski/standardtri and save
                       the lightcurve fits, parameters and shape model.

    Fixes: files are now opened via ``with`` context managers so they are
    closed even if a helper raises; pointless ``f""`` literals replaced by ``""``.
    """
    body_name = options['body']
    body_name = body_name.replace('_', ' ')
    object_list = []
    # Numeric designations are matched exactly first
    if body_name.isdigit():
        object_list = Body.objects.filter(Q(designations__value=body_name) | Q(name=body_name))
    if not object_list and not (body_name.isdigit() and int(body_name) < 2100):
        object_list = Body.objects.filter(Q(designations__value__iexact=body_name) |
                                          Q(provisional_name=body_name) |
                                          Q(provisional_packed=body_name) |
                                          Q(name=body_name))
    try:
        body = object_list[0]
    except IndexError:
        print(f"Couldn't find {body_name}")
        return
    if options['ext_alcdef'] is not None:
        save_dataproduct(obj=body, filepath=options['ext_alcdef'], filetype=DataProduct.ALCDEF_TXT)
    alcdef_files = DataProduct.content.fullbody(bodyid=body.id).filter(filetype=DataProduct.ALCDEF_TXT)
    meta_list = []
    lc_list = []
    for alcdef in alcdef_files:
        meta_list, lc_list = import_alcdef(alcdef, meta_list, lc_list)
    body_elements = model_to_dict(body)
    obj_name = sanitize_object_name(body.current_name())
    if options['filters']:
        filt_list = options['filters'].upper()
    else:
        filt_list = list(set([meta['FILTER'] for meta in meta_list]))
    # Create lightcurve input file
    path = os.path.join(options['path'], 'Reduction', obj_name)
    lcs_input_filename = os.path.join(path, obj_name + '_input.lcs')
    with open(lcs_input_filename, 'w') as lcs_input_file:
        mag_means, lc_ltt_list = self.create_lcs_input(lcs_input_file, meta_list, lc_list,
                                                       body_elements, filt_list)
    pmin, pmax, period = self.get_period_range(body, options)
    # Find the highest existing DamitDocs_<NNN>_... directory index
    dir_num = 0
    dirs = [item for item in os.listdir(path) if 'DamitDocs' in item]
    if dirs:
        for d in dirs:
            d_num = int(d.split('_')[1])
            if dir_num < d_num:
                dir_num = d_num
    if options['period_scan']:
        # Create period_scan input file
        psinput_filename = self.import_or_create_psinput(path, obj_name, pmin, pmax)
        # Run Period Scan
        psoutput_filename = os.path.join(path, f'{obj_name}_{pmin}T{pmax}_period_scan.out')
        ps_retcode_or_cmdline = run_damit_periodscan(lcs_input_filename, psinput_filename,
                                                     psoutput_filename)
        save_dataproduct(obj=body, filepath=psoutput_filename, filetype=DataProduct.PERIODOGRAM_RAW)
    elif options['lc_model']:
        # Date range either as 'YYYYMMDD-YYYYMMDD' string or a (start, end) pair
        if isinstance(options['lc_model'], str):
            try:
                start_stop_dates = options['lc_model'].split('-')
                start_date = datetime.strptime(start_stop_dates[0], '%Y%m%d')
                end_date = datetime.strptime(start_stop_dates[1], '%Y%m%d')
            except ValueError:
                raise CommandError(usage)
        else:
            start_date = options['lc_model'][0]
            end_date = options['lc_model'][1]
        dir_name = os.path.join(path, f"DamitDocs_{str(dir_num + 1).zfill(3)}_{period}_model_{options['lc_model']}")
        if not os.path.exists(dir_name):
            os.makedirs(dir_name)
        epoch_input_filename = os.path.join(dir_name, obj_name + '_epoch.lcs')
        with open(epoch_input_filename, 'w') as epoch_input_file:
            jpl_mean_mag, model_ltt_list = self.create_epoch_input(epoch_input_file, period,
                                                                   start_date, end_date, body_elements)
        # Create Model lc for given epochs: need the most recent shape model and
        # parameter file saved for this period.
        period_tag = f'_{period}_'
        shape_models = DataProduct.content.fullbody(bodyid=body.id).filter(
            filetype=DataProduct.MODEL_SHAPE, product__icontains=period_tag).order_by('-created')
        model_params = DataProduct.content.fullbody(bodyid=body.id).filter(
            filetype=DataProduct.MODEL_LC_PARAM, product__icontains=period_tag).order_by('-created')
        if not model_params or not shape_models:
            raise CommandError("Both convinv_par.out and model.shape files required for lc_model.")
        shape_model = shape_models[0]
        model_param = model_params[0]
        shape_model_filename = os.path.join(dir_name, os.path.basename(shape_model.product.name))
        convinv_outpar_filename = os.path.join(dir_name, os.path.basename(model_param.product.name))
        # Save files locally
        with open(shape_model_filename, 'wb') as shape_file:
            sm = shape_model.product.open()
            shape_file.write(sm.read())
        with open(convinv_outpar_filename, 'wb') as param_file:
            mp = model_param.product.open()
            param_file.write(mp.read())
        lcgen_outlcs_filename = os.path.join(dir_name, obj_name + f'_{options["lc_model"]}_lcgen_lcs.out')
        lcgen_lc_final_filename = os.path.join(dir_name, obj_name + f'_{options["lc_model"]}_lcgen_lcs.final')
        lcgenerat_retcode_or_cmdline = run_damit('lcgenerator', epoch_input_filename,
                                                 f" {convinv_outpar_filename} {shape_model_filename} {lcgen_outlcs_filename}")
        self.zip_lc_model(epoch_input_filename, lcgen_outlcs_filename, lcgen_lc_final_filename,
                          jpl_mean_mag, model_ltt_list)
        save_dataproduct(obj=body, filepath=lcgen_lc_final_filename, filetype=DataProduct.MODEL_LC_RAW)
    else:
        # Create convinv input file
        dir_name = os.path.join(path, f"DamitDocs_{str(dir_num + 1).zfill(3)}_{period}_{len(meta_list)}")
        if not os.path.exists(dir_name):
            os.makedirs(dir_name)
        convinv_input_filename, conjinv_input_filename = self.import_or_create_cinv_input(dir_name, obj_name, period)
        basename = os.path.join(dir_name, f'{obj_name}_{period}')
        convinv_outpar_filename = basename + '_convinv_par.out'
        convinv_outlcs_filename = basename + '_convinv_lcs.out'
        convinv_lc_final_filename = basename + '_convinv_lcs.final'
        conjinv_outareas_filename = basename + '_conjinv_areas.out'
        conjinv_outlcs_filename = basename + '_conjinv_lcs.out'
        conjinv_lc_final_filename = basename + '_conjinv_lcs.final'
        mink_faces_filename = basename + '_model.shape'
        shape_model_filename = basename + '_trifaces.shape'
        # Invert LC and calculate orientation/rotation parameters
        convexinv_retcode_or_cmdline = run_damit('convexinv', lcs_input_filename,
                                                 f"-s -p {convinv_outpar_filename} {convinv_input_filename} {convinv_outlcs_filename}")
        self.zip_lc_model(lcs_input_filename, convinv_outlcs_filename, convinv_lc_final_filename,
                          mag_means, lc_ltt_list)
        # Refine output faces
        conjgdinv_retcode_or_cmdline = run_damit('conjgradinv', lcs_input_filename,
                                                 f"-s -o {conjinv_outareas_filename} {conjinv_input_filename} {convinv_outpar_filename} {conjinv_outlcs_filename}")
        self.zip_lc_model(lcs_input_filename, conjinv_outlcs_filename, conjinv_lc_final_filename,
                          mag_means, lc_ltt_list)
        # Calculate polygon faces for shape
        with open(mink_faces_filename, 'w+') as mink_face_file:
            minkowski_retcode_or_cmdline = run_damit('minkowski', conjinv_outareas_filename,
                                                     "", write_out=mink_face_file)
        # Convert faces into triangles
        with open(shape_model_filename, 'w+') as shape_model_file:
            stanrdtri_retcode_or_cmdline = run_damit('standardtri', mink_faces_filename,
                                                     "", write_out=shape_model_file)
        # Create Data Products
        save_dataproduct(obj=body, filepath=convinv_lc_final_filename, filetype=DataProduct.MODEL_LC_RAW)
        save_dataproduct(obj=body, filepath=conjinv_lc_final_filename, filetype=DataProduct.MODEL_LC_RAW)
        save_dataproduct(obj=body, filepath=convinv_outpar_filename, filetype=DataProduct.MODEL_LC_PARAM)
        save_dataproduct(obj=body, filepath=shape_model_filename, filetype=DataProduct.MODEL_SHAPE)
    return
def test_dataproduct_save_robust(self):
    """A 'robust' (force=True) DataProduct save resists plain overwrites but
    yields to another forced save."""
    file_name = 'test_SB_CSV.txt'

    def _csv_products():
        # (full-body queryset, superblock queryset) of CSV DataProducts
        qset = DataProduct.content.fullbody(bodyid=self.body.id).filter(filetype=DataProduct.CSV)
        sb_qset = DataProduct.content.sblock().filter(object_id=self.test_sblock.id,
                                                      filetype=DataProduct.CSV)
        return qset, sb_qset

    def _first_line(dp):
        # First line of the stored product, as bytes
        return dp.product.file.readlines()[0]

    # Add Superblock linked CSV
    file_content = "some text here"
    save_dataproduct(obj=self.test_sblock, filepath=None, filetype=DataProduct.CSV,
                     filename=file_name, content=file_content)
    dp_qset, dp_sb = _csv_products()
    dp_bod = DataProduct.content.body().filter(object_id=self.body.id, filetype=DataProduct.CSV)
    dp_blk = DataProduct.content.block().filter(object_id=self.test_block.id, filetype=DataProduct.CSV)
    self.assertEqual(len(dp_qset), 1)
    self.assertEqual(len(dp_sb), 1)
    self.assertEqual(len(dp_bod), 0)
    self.assertEqual(len(dp_blk), 0)
    self.assertEqual(_first_line(dp_sb[0]), file_content.encode('utf-8'))
    timestamp = dp_sb[0].created

    # overwrite with normal dataproduct
    file_content2 = "some other text here"
    save_dataproduct(obj=self.test_sblock, filepath=None, filetype=DataProduct.CSV,
                     filename=file_name, content=file_content2)
    dp_qset, dp_sb = _csv_products()
    self.assertEqual(len(dp_qset), 1)
    self.assertEqual(len(dp_sb), 1)
    self.assertEqual(_first_line(dp_sb[0]), file_content2.encode('utf-8'))
    timestamp2 = dp_sb[0].created
    self.assertNotEqual(timestamp2, timestamp)

    # overwrite with robust dataproduct
    file_content3 = "even other text here"
    save_dataproduct(obj=self.test_sblock, filepath=None, filetype=DataProduct.CSV,
                     filename=file_name, content=file_content3, force=True)
    dp_qset, dp_sb = _csv_products()
    self.assertEqual(len(dp_qset), 1)
    self.assertEqual(len(dp_sb), 1)
    self.assertEqual(_first_line(dp_sb[0]), file_content3.encode('utf-8'))
    self.assertFalse(dp_sb[0].update)
    timestamp3 = dp_sb[0].created
    self.assertNotEqual(timestamp2, timestamp3)

    # Fail to overwrite robust dataproduct
    file_content4 = "woogaoooooowoo"
    save_dataproduct(obj=self.test_sblock, filepath=None, filetype=DataProduct.CSV,
                     filename=file_name, content=file_content4)
    dp_qset, dp_sb = _csv_products()
    self.assertEqual(len(dp_qset), 1)
    self.assertEqual(len(dp_sb), 1)
    self.assertEqual(_first_line(dp_sb[0]), file_content3.encode('utf-8'))
    self.assertFalse(dp_sb[0].update)
    timestamp4 = dp_sb[0].created
    self.assertEqual(timestamp4, timestamp3)

    # succeed to overwrite robust dataproduct
    file_content5 = "final text"
    save_dataproduct(obj=self.test_sblock, filepath=None, filetype=DataProduct.CSV,
                     filename=file_name, content=file_content5, force=True)
    dp_qset, dp_sb = _csv_products()
    self.assertEqual(len(dp_qset), 1)
    self.assertEqual(len(dp_sb), 1)
    self.assertEqual(_first_line(dp_sb[0]), file_content5.encode('utf-8'))
    self.assertFalse(dp_sb[0].update)
    timestamp5 = dp_sb[0].created
    self.assertNotEqual(timestamp4, timestamp5)
def test_dataproduct_retrieve_ALCDEF(self):
    """ALCDEF DataProducts attached at each level (SuperBlock, Block, Body) are
    retrievable via the matching manager route; unrelated bodies don't leak in."""
    # Create gif
    file_mock = mock.MagicMock(spec=File)
    file_mock.name = 'test.gif'
    gif_dp = DataProduct.objects.create(product=file_mock, filetype=DataProduct.FRAME_GIF,
                                        content_object=self.test_block)

    def _alcdef_counts():
        # Counts of ALCDEF DataProducts per retrieval route:
        # (fullbody, superblock, body, block)
        qset = DataProduct.content.fullbody(bodyid=self.body.id).filter(filetype=DataProduct.ALCDEF_TXT)
        sb = DataProduct.content.sblock().filter(object_id=self.test_sblock.id,
                                                 filetype=DataProduct.ALCDEF_TXT)
        bod = DataProduct.content.body().filter(object_id=self.body.id,
                                                filetype=DataProduct.ALCDEF_TXT)
        blk = DataProduct.content.block().filter(object_id=self.test_block.id,
                                                 filetype=DataProduct.ALCDEF_TXT)
        return len(qset), len(sb), len(bod), len(blk)

    # Add Superblock linked ALCDEF
    save_dataproduct(obj=self.test_sblock, filepath=None, filetype=DataProduct.ALCDEF_TXT,
                     filename='test_SB_ALCDEF.txt', content="some text here")
    self.assertEqual(_alcdef_counts(), (1, 1, 0, 0))

    # Add Block linked ALCDEF
    save_dataproduct(obj=self.test_block, filepath=None, filetype=DataProduct.ALCDEF_TXT,
                     filename='test_Bloc_ALCDEF.txt', content="some other text here")
    self.assertEqual(_alcdef_counts(), (2, 1, 0, 1))

    # Add Body linked ALCDEF
    save_dataproduct(obj=self.test_sblock.body, filepath=None, filetype=DataProduct.ALCDEF_TXT,
                     filename='test_Bod_ALCDEF.txt', content="even other text here")
    self.assertEqual(_alcdef_counts(), (3, 1, 1, 1))

    # Add unrelated body linked ALCDEF: counts for the original body must not change
    save_dataproduct(obj=self.body2, filepath=None, filetype=DataProduct.ALCDEF_TXT,
                     filename='test_newBod_ALCDEF.txt', content="woogaoooooowoo")
    self.assertEqual(_alcdef_counts(), (3, 1, 1, 1))
def handle(self, *args, **options):
    """Download archive frames for active proposals on a given night, unpack
    FLOYDS tarballs, build guide movies and register them as DataProducts.

    Fix: ``out_path`` was only assigned inside the archive-token branch but is
    referenced by the temp-dir cleanup at the end, raising NameError when
    ARCHIVE_TOKEN is unset; it is now assigned up front.
    """
    usage = "Incorrect usage. Usage: %s [YYYYMMDD] [proposal code]" % (argv[1])
    if isinstance(options['date'], str):
        try:
            obs_date = datetime.strptime(options['date'], '%Y%m%d')
            # Shift to 17:00 so the "night of" semantics match site operations
            obs_date += timedelta(seconds=17 * 3600)
        except ValueError:
            raise CommandError(usage)
    else:
        obs_date = options['date']
    proposals = determine_active_proposals(options['proposal'])
    if len(proposals) == 0:
        raise CommandError("No valid proposals found")
    verbose = True
    if options['verbosity'] < 1:
        verbose = False
    # Hoisted: needed by the cleanup below even when no token is configured
    out_path = options['datadir']
    archive_token = settings.ARCHIVE_TOKEN
    if archive_token is not None:
        auth_headers = archive_login()
        start_date, end_date = determine_archive_start_end(obs_date)
        end_date = end_date + timedelta(days=options['numdays'])
        for proposal in proposals:
            self.stdout.write("Looking for frames between %s->%s from %s" % (start_date, end_date, proposal))
            obstypes = ['EXPOSE', 'ARC', 'LAMPFLAT', 'SPECTRUM']
            if (proposal == 'LCOEngineering' and options['dlengimaging'] is False) or options['spectraonly'] is True:
                # Not interested in imaging frames
                obstypes = ['ARC', 'LAMPFLAT', 'SPECTRUM']
            all_frames = {}
            for obstype in obstypes:
                if obstype == 'EXPOSE':
                    redlevel = ['91', ]
                else:
                    # '' seems to be needed to get the tarball of FLOYDS products
                    redlevel = ['0', '']
                frames = get_frame_data(start_date, end_date, auth_headers, obstype,
                                        proposal, red_lvls=redlevel)
                for red_lvl in frames.keys():
                    if red_lvl in all_frames:
                        all_frames[red_lvl] = all_frames[red_lvl] + frames[red_lvl]
                    else:
                        all_frames[red_lvl] = frames[red_lvl]
                if 'CATALOG' in obstype or obstype == '':
                    catalogs = get_catalog_data(frames, auth_headers)
                    for red_lvl in frames.keys():
                        if red_lvl in all_frames:
                            all_frames[red_lvl] = all_frames[red_lvl] + catalogs[red_lvl]
                        else:
                            all_frames[red_lvl] = catalogs[red_lvl]
            for red_lvl in all_frames.keys():
                self.stdout.write("Found %d frames for reduction level: %s" % (len(all_frames[red_lvl]), red_lvl))
            dl_frames = download_files(all_frames, out_path, verbose)
            self.stdout.write("Downloaded %d frames" % (len(dl_frames)))
            # unpack tarballs and make movie.
            for frame in all_frames.get('', []):
                if "tar.gz" in frame['filename']:
                    tar_path = make_data_dir(out_path, frame)
                    obj = sanitize_object_name(frame['OBJECT'])
                    req_num = str(frame['REQNUM'])
                    movie_file = make_movie(frame['DATE_OBS'], obj, req_num, tar_path, out_path,
                                            frame['PROPID'], tarfile=frame['filename'])
                    blocks = Block.objects.filter(request_number=req_num)
                    for block in blocks:
                        if block.current_name() == frame['OBJECT']:
                            save_dataproduct(obj=block, filepath=movie_file,
                                             filetype=DataProduct.GUIDER_GIF)
                            filenames = glob(os.path.join(tar_path, obj + '_' + req_num, '*_2df_ex.fits'))
                            if filenames:
                                for filename in filenames:
                                    save_dataproduct(obj=block, filepath=filename,
                                                     filetype=DataProduct.FITS_SPECTRA)
                            break
    else:
        self.stdout.write("No token defined (set ARCHIVE_TOKEN environment variable)")
    # Check if we're using a temp dir and then delete it
    if gettempdir() in out_path:
        shutil.rmtree(out_path)
def test_can_view_lightcurve(self):
    """Open the ALCDEF lightcurve page, exercise the period form (invalid and
    valid entries), and follow the data link to the raw ALCDEF text."""
    # Attach an ALCDEF file to the body, then load its lightcurve page
    save_dataproduct(self.body2, self.lcname, DataProduct.ALCDEF_TXT)
    lc_url = reverse('lc_plot', args=[self.body2.id])
    self.browser.get(self.live_server_url + lc_url)
    self.assertIn('Lightcurve for object: 433', self.browser.title)
    # XPaths into the Bokeh-generated DOM; find_element raises if the node is absent
    canvas_xpath = "/html/body[@class='page']/div[@id='page-wrapper']/div[@id='page']/div[@id='main']/div[@name='lc_plot']/div[@class='bk']/div[@class='bk'][2]/div[@class='bk'][1]/div[@class='bk']/div[@class='bk bk-canvas-events']"
    phase_tab_xpath = "/html/body[@class='page']/div[@id='page-wrapper']/div[@id='page']/div[@id='main']/div[@name='lc_plot']/div[@class='bk']/div[@class='bk bk-tabs-header bk-above']/div[@class='bk bk-headers-wrapper']/div[@class='bk bk-headers']/div[@class='bk bk-tab']"
    period_box_xpath = "/html/body[@class='page']/div[@id='page-wrapper']/div[@id='page']/div[@id='main']/div[@name='lc_plot']/div[@class='bk']/div[@class='bk'][2]/div[@class='bk'][2]/div[@class='bk'][2]/div[@class='bk']/div[@class='bk'][1]/div[@class='bk'][1]/div[@class='bk']/div[@class='bk bk-input-group']/div[@class='bk bk-spin-wrapper']/input[@class='bk bk-input']"
    period_label_xpath = "/html/body[@class='page']/div[@id='page-wrapper']/div[@id='page']/div[@id='main']/div[@name='lc_plot']/div[@class='bk']/div[@class='bk'][2]/div[@class='bk'][2]/div[@class='bk'][2]/div[@class='bk']/div[@class='bk'][1]/div[@class='bk'][1]/div[@class='bk']/div[@class='bk bk-input-group']/label[@class='bk']"
    canvas = self.browser.find_element_by_xpath(canvas_xpath)
    phase_tab = self.browser.find_element_by_xpath(phase_tab_xpath)
    phase_tab.click()
    period_box = self.browser.find_element_by_xpath(period_box_xpath)
    period_text = self.browser.find_element_by_xpath(period_label_xpath).text
    self.assertIn('Period (Default: 5.27h)', period_text)

    # check for period table
    self.check_for_header_in_table('id_periods', 'Period [hours] Source Notes Date')
    test_lines = ['1 5.27 ±0.0015 (2-) None testy test note Dec. 10, 2021']
    for test_line in test_lines:
        self.check_for_row_in_table('id_periods', test_line)

    # The add-period form must be hidden for anonymous users
    try:
        arrow_link = self.browser.find_element_by_id("arrow")
        raise Exception("Should be logged in for this form")
    except NoSuchElementException:
        pass

    # Login
    self.login()
    self.browser.get(self.live_server_url + lc_url)
    self.assertIn('Lightcurve for object: 433', self.browser.title)
    # find form
    arrow_link = self.browser.find_element_by_id("arrow")
    arrow_link.click()

    # Submit a negative period: expect a validation error and an unchanged table
    MockDateTime.change_date(2021, 12, 12)
    period_box = self.get_item_input_box("id_period")
    period_box.send_keys('-4.35')
    add_button = self.browser.find_element_by_id("add_new_period-btn")
    with self.wait_for_page_load(timeout=10):
        add_button.click()
    arrow_link = self.browser.find_element_by_id("arrow")
    arrow_link.click()
    error_msg = self.browser.find_element_by_class_name('errorlist').text
    self.assertIn("Please enter a positive number for Period", error_msg)
    test_lines = ['1 5.27 ±0.0015 (2-) None testy test note Dec. 10, 2021']
    for test_line in test_lines:
        self.check_for_row_in_table('id_periods', test_line)

    # Submit a valid preferred period of Unique quality
    period_box = self.get_item_input_box("id_period")
    period_box.clear()
    period_box.send_keys('4.35')
    preferred_box = self.browser.find_element_by_id("id_preferred")
    preferred_box.click()
    quality_choices = Select(self.browser.find_element_by_id('id_quality'))
    quality_choices.select_by_visible_text("Unique (3-)")
    add_button = self.browser.find_element_by_id("add_new_period-btn")
    with self.wait_for_page_load(timeout=10):
        add_button.click()

    # check for updated period table
    self.check_for_header_in_table('id_periods', 'Period [hours] Source Notes Date')
    test_lines = [
        '1 4.35 (3-) NEOX Dec. 12, 2021',
        '2 5.27 ±0.0015 (2-) None testy test note Dec. 10, 2021'
    ]
    for test_line in test_lines:
        self.check_for_row_in_table('id_periods', test_line)

    # Follow the data link: it opens a new window showing the raw ALCDEF text
    pw = self.browser.current_window_handle
    data_link = self.browser.find_element_by_link_text("2019-01-12")
    self.assertEqual(len(self.browser.window_handles), 1)
    data_link.click()
    WebDriverWait(self.browser, timeout=10).until(EC.number_of_windows_to_be(2))
    all_windows = self.browser.window_handles
    for window in all_windows:
        if window != pw:
            self.browser.switch_to.window(window)
            WebDriverWait(self.browser, timeout=10).until(EC.url_contains('txt'))
    self.assertIn('txt', self.browser.current_url)
    alcdef_text = self.browser.find_element_by_xpath("/html/body/pre").text
    self.assertIn('OBJECTNUMBER=433', alcdef_text)
    return
def setUp(self):
    """Build the fixture set for the summary-page functional tests.

    Creates (in dependency order): a staff superuser, an extra Body,
    spectroscopy SuperBlocks/Blocks/Frames (single and multi-frame),
    their FITS-spectra DataProducts, an empty Block, an ALCDEF
    DataProduct and a preferred rotation period on ``self.body``.
    """
    # Setup Basics
    super(SummaryPageTest, self).setUp()
    # Point MEDIA_ROOT at the per-test temp dir so DataProduct files land there
    settings.MEDIA_ROOT = self.test_dir
    spectradir = os.path.abspath(os.path.join('photometrics', 'tests', 'test_spectra'))
    # Copy files into temp media root
    spec_path = 'target_2df_ex.fits'
    save_to_default(os.path.join(spectradir, spec_path), spectradir)
    analog_path = 'analog_2df_ex.fits'
    save_to_default(os.path.join(spectradir, analog_path), spectradir)
    analog2_path = 'test_2df_ex.fits'
    save_to_default(os.path.join(spectradir, analog2_path), spectradir)
    # Create a superuser to test login
    self.username = '******'
    self.password = '******'
    self.email = '*****@*****.**'
    self.bart = User.objects.create_user(username=self.username, password=self.password, email=self.email)
    self.bart.first_name = 'Bart'
    self.bart.last_name = 'Simpson'
    self.bart.is_active = 1
    self.bart.is_staff = 1
    self.bart.is_superuser = 1
    self.bart.save()
    # insert extra body
    params = {
        'name': 'q382918r',
        'abs_mag': 21.0,
        'slope': 0.15,
        'epochofel': '2015-03-19 00:00:00',
        'meananom': 325.2636,
        'argofperih': 85.19251,
        'longascnode': 147.81325,
        'orbinc': 8.34739,
        'eccentricity': 0.1896865,
        'meandist': 1.2176312,
        'source_type': 'N',
        'elements_type': 'MPC_MINOR_PLANET',
        'active': True,
        'origin': 'N',
        'ingest': '2015-05-11 17:20:00',
        'score': 85,
        'discovery_date': '2015-05-10 12:00:00',
        'update_time': '2015-05-18 05:00:00',
        'num_obs': 35,
        'arc_length': 42.0,
        'not_seen': 2.22,
        'updated': False
    }
    self.body2, created = Body.objects.get_or_create(pk=3, **params)
    # build individual target blocks
    sblock_params = {
        'cadence': False,
        'body': self.body,
        'proposal': self.test_proposal,
        'block_start': '2015-04-20 13:00:00',
        'block_end': '2015-04-24 03:00:00',
        'tracking_number': '4242',
        'active': True
    }
    self.test_sblock = SuperBlock.objects.create(pk=3, **sblock_params)
    block_params = {
        'telclass': '2m0',
        'site': 'coj',
        'body': self.body,
        'superblock': self.test_sblock,
        'obstype': Block.OPT_SPECTRA,
        'block_start': '2019-07-27 13:00:00',
        'block_end': '2019-07-28 03:00:00',
        'request_number': '1878696',
        'num_exposures': 1,
        'exp_length': 1800.0,
        'active': True,
        'when_observed': datetime(2019, 7, 27, 16, 42, 51)
    }
    self.test_block = Block.objects.create(pk=3, **block_params)
    # Attach the target spectrum to the spectroscopy Block
    save_dataproduct(self.test_block, spec_path, DataProduct.FITS_SPECTRA)
    analog_block_params = {
        'telclass': '2m0',
        'site': 'coj',
        'body': self.body,
        'calibsource': self.calib,
        'superblock': self.test_sblock,
        'obstype': Block.OPT_SPECTRA_CALIB,
        'block_start': '2019-07-27 13:00:00',
        'block_end': '2019-07-28 03:00:00',
        'request_number': '1878697',
        'num_exposures': 1,
        'exp_length': 1800.0,
        'active': True,
        'when_observed': datetime(2019, 7, 27, 18, 42, 51)
    }
    self.analog_block = Block.objects.create(pk=7, **analog_block_params)
    save_dataproduct(self.analog_block, analog_path, DataProduct.FITS_SPECTRA)
    fparams = {
        'sitecode': 'E10',
        'filename': 'sp233/a265962.sp233.txt',
        'exptime': 1800.0,
        'midpoint': '2015-04-21 00:00:00',
        'frametype': Frame.SPECTRUM_FRAMETYPE,
        'block': self.test_block,
        'frameid': 1,
    }
    self.spec_frame = Frame.objects.create(**fparams)
    afparams = {
        'sitecode': 'E10',
        'filename': 'sp233/a265962.sp233.txt',
        'exptime': 1800.0,
        'midpoint': '2015-04-21 00:00:00',
        'frametype': Frame.SPECTRUM_FRAMETYPE,
        'block': self.analog_block,
        'frameid': 7,
    }
    self.analogspec_frame = Frame.objects.create(**afparams)
    sblock2_params = {
        'cadence': False,
        'body': self.body,
        'proposal': self.test_proposal,
        'block_start': '2015-04-20 13:00:00',
        'block_end': '2015-04-22 03:00:00',
        'tracking_number': '4243',
        'active': False
    }
    self.test_sblock2 = SuperBlock.objects.create(pk=4, **sblock2_params)
    block2_params = {
        'telclass': '2m0',
        'site': 'ogg',
        'body': self.body,
        'superblock': self.test_sblock2,
        'obstype': Block.OPT_IMAGING,
        'block_start': '2015-04-22 13:00:00',
        'block_end': '2015-04-24 03:00:00',
        'request_number': '54321',
        'num_exposures': 1,
        'exp_length': 1800.0,
        'active': False,
        'when_observed': datetime(2015, 7, 27, 16, 42, 51)
    }
    self.test_block2 = Block.objects.create(pk=4, **block2_params)
    save_dataproduct(self.test_block2, spec_path, DataProduct.FITS_SPECTRA, filename='test2_2df_ex.fits')
    analog_block2_params = {
        'telclass': '2m0',
        'site': 'coj',
        'calibsource': self.calib,
        'superblock': self.test_sblock2,
        'obstype': Block.OPT_SPECTRA_CALIB,
        'block_start': '2019-07-27 13:00:00',
        'block_end': '2019-07-28 03:00:00',
        'request_number': '54321',
        'num_exposures': 1,
        'exp_length': 1800.0,
        'active': True,
        'when_observed': datetime(2019, 7, 27, 18, 42, 51)
    }
    self.analog_block2 = Block.objects.create(pk=10, **analog_block2_params)
    save_dataproduct(self.analog_block2, analog2_path, DataProduct.FITS_SPECTRA)
    # Build multi-frame Blocks
    msblock_params = {
        'cadence': False,
        'body': self.body,
        'proposal': self.test_proposal,
        'block_start': '2018-01-01 00:00:00',
        'block_end': '2018-01-01 03:00:00',
        'tracking_number': '4244',
        'active': True
    }
    self.test_msblock = SuperBlock.objects.create(pk=5, **msblock_params)
    mblock1_params = {
        'telclass': '2m0',
        'site': 'coj',
        'body': self.body,
        'superblock': self.test_msblock,
        'obstype': Block.OPT_SPECTRA,
        'block_start': '2018-01-01 00:00:00',
        'block_end': '2018-01-01 02:00:00',
        'request_number': '54322',
        'num_exposures': 2,
        'num_observed': 1,
        'exp_length': 1800.0,
        'active': True,
        'when_observed': datetime(2019, 7, 27, 16, 42, 51)
    }
    self.test_mblock1 = Block.objects.create(pk=5, **mblock1_params)
    # Two spectra on the same Block to exercise multi-frame display
    save_dataproduct(self.test_mblock1, spec_path, DataProduct.FITS_SPECTRA, filename='test3_2df_ex.fits')
    save_dataproduct(self.test_mblock1, spec_path, DataProduct.FITS_SPECTRA, filename='test3.2_2df_ex.fits')
    mfparams1 = {
        'sitecode': 'F65',
        'filename': 'sp233/a265962.sp233.txt',
        'exptime': 1800.0,
        'midpoint': '2018-01-01 01:00:00',
        'frametype': Frame.SPECTRUM_FRAMETYPE,
        'block': self.test_mblock1,
        'frameid': 10,
    }
    self.mspec_frame1 = Frame.objects.create(**mfparams1)
    mblock2_params = {
        'telclass': '2m0',
        'site': 'ogg',
        'body': self.body,
        'superblock': self.test_msblock,
        'obstype': Block.OPT_SPECTRA,
        'block_start': '2018-01-01 01:00:00',
        'block_end': '2018-01-01 03:00:00',
        'request_number': '54323',
        'num_exposures': 1,
        'num_observed': 1,
        'exp_length': 1800.0,
        'active': True,
        'when_observed': datetime(2019, 7, 27, 16, 42, 51)
    }
    self.test_mblock2 = Block.objects.create(pk=6, **mblock2_params)
    save_dataproduct(self.test_mblock2, spec_path, DataProduct.FITS_SPECTRA, filename='test4_2df_ex.fits')
    mfparams2 = {
        'sitecode': 'F65',
        'filename': 'sp233/a265962.sp233.txt',
        'exptime': 1800.0,
        'midpoint': '2018-01-01 02:00:00',
        'frametype': Frame.SPECTRUM_FRAMETYPE,
        'block': self.test_mblock2,
        'frameid': 11,
    }
    self.mspec_frame2 = Frame.objects.create(**mfparams2)
    # make empty block
    # NOTE(review): these are aliases, not copies — mutating
    # block_params_empty/frame_params_empty below also mutates
    # mblock2_params/mfparams2. Harmless here only because the
    # originals are not reused afterwards; confirm before reordering.
    sblock_params_empty = msblock_params
    self.test_sblock_empty = SuperBlock.objects.create(pk=6, **sblock_params_empty)
    block_params_empty = mblock2_params
    block_params_empty['superblock'] = self.test_sblock_empty
    block_params_empty['when_observed'] = datetime(2019, 9, 27, 16, 42, 51)
    self.test_block_empty = Block.objects.create(**block_params_empty)
    frame_params_empty = mfparams2
    frame_params_empty['block'] = self.test_block_empty
    self.spec_frame_empty = Frame.objects.create(**frame_params_empty)
    # Add ALCDEF Data Products
    lcname = '433_738215_ALCDEF.txt'
    lcpath = os.path.abspath(os.path.join('photometrics', 'tests'))
    save_to_default(os.path.join(lcpath, lcname), lcpath)
    save_dataproduct(self.test_sblock, lcname, DataProduct.ALCDEF_TXT)
    # Add period
    period_dict = {
        'value': 12,
        'error': .3,
        'parameter_type': 'P',
        'units': 'h',
        'preferred': True,
        'reference': 'NEOX',
        'quality': 5,
        'notes': "testy test tested"
    }
    self.body.save_physical_parameters(period_dict)
def setUp(self):
    """Build the fixture set for the spectra-plot functional tests.

    Mirrors SummaryPageTest.setUp but creates a non-staff user, omits
    the extra Body/ALCDEF/period fixtures, and finally grants the user
    permissions on the NEO proposal.
    """
    super(SpectraplotTest, self).setUp()
    # Point MEDIA_ROOT at the per-test temp dir so DataProduct files land there
    settings.MEDIA_ROOT = self.test_dir
    spectradir = os.path.abspath(os.path.join('photometrics', 'tests', 'test_spectra'))
    # Copy test spectra into the temp media root
    spec_path = 'target_2df_ex.fits'
    save_to_default(os.path.join(spectradir, spec_path), spectradir)
    analog_path = 'analog_2df_ex.fits'
    save_to_default(os.path.join(spectradir, analog_path), spectradir)
    analog2_path = 'test_2df_ex.fits'
    save_to_default(os.path.join(spectradir, analog2_path), spectradir)
    # Plain (non-staff) user for login
    self.username = '******'
    self.password = '******'
    self.email = '*****@*****.**'
    self.bart = User.objects.create_user(username=self.username, password=self.password, email=self.email)
    self.bart.first_name = 'Bart'
    self.bart.last_name = 'Simpson'
    self.bart.is_active = 1
    self.bart.save()
    # build individual target blocks
    sblock_params = {
        'cadence': False,
        'body': self.body,
        'proposal': self.test_proposal,
        'block_start': '2015-04-20 13:00:00',
        'block_end': '2015-04-24 03:00:00',
        'tracking_number': '4242',
        'active': True
    }
    self.test_sblock = SuperBlock.objects.create(pk=3, **sblock_params)
    block_params = {
        'telclass': '2m0',
        'site': 'coj',
        'body': self.body,
        'superblock': self.test_sblock,
        'obstype': Block.OPT_SPECTRA,
        'block_start': '2019-07-27 13:00:00',
        'block_end': '2019-07-28 03:00:00',
        'request_number': '1878696',
        'num_exposures': 1,
        'exp_length': 1800.0,
        'active': True,
        'when_observed': datetime(2019, 7, 27, 16, 42, 51)
    }
    self.test_block = Block.objects.create(pk=3, **block_params)
    save_dataproduct(self.test_block, spec_path, DataProduct.FITS_SPECTRA)
    analog_block_params = {
        'telclass': '2m0',
        'site': 'coj',
        'body': self.body,
        'calibsource': self.calib,
        'superblock': self.test_sblock,
        'obstype': Block.OPT_SPECTRA_CALIB,
        'block_start': '2019-07-27 13:00:00',
        'block_end': '2019-07-28 03:00:00',
        'request_number': '1878697',
        'num_exposures': 1,
        'exp_length': 1800.0,
        'active': True,
        'when_observed': datetime(2019, 7, 27, 18, 42, 51)
    }
    self.analog_block = Block.objects.create(pk=7, **analog_block_params)
    save_dataproduct(self.analog_block, analog_path, DataProduct.FITS_SPECTRA)
    fparams = {
        'sitecode': 'E10',
        'filename': 'sp233/a265962.sp233.txt',
        'exptime': 1800.0,
        'midpoint': '2015-04-21 00:00:00',
        'frametype': Frame.SPECTRUM_FRAMETYPE,
        'block': self.test_block,
        'frameid': 1,
    }
    self.spec_frame = Frame.objects.create(**fparams)
    afparams = {
        'sitecode': 'E10',
        'filename': 'sp233/a265962.sp233.txt',
        'exptime': 1800.0,
        'midpoint': '2015-04-21 00:00:00',
        'frametype': Frame.SPECTRUM_FRAMETYPE,
        'block': self.analog_block,
        'frameid': 7,
    }
    self.analogspec_frame = Frame.objects.create(**afparams)
    sblock2_params = {
        'cadence': False,
        'body': self.body,
        'proposal': self.test_proposal,
        'block_start': '2015-04-20 13:00:00',
        'block_end': '2015-04-22 03:00:00',
        'tracking_number': '4243',
        'active': False
    }
    self.test_sblock2 = SuperBlock.objects.create(pk=4, **sblock2_params)
    block2_params = {
        'telclass': '2m0',
        'site': 'ogg',
        'body': self.body,
        'superblock': self.test_sblock2,
        'obstype': Block.OPT_IMAGING,
        'block_start': '2015-04-22 13:00:00',
        'block_end': '2015-04-24 03:00:00',
        'request_number': '54321',
        'num_exposures': 1,
        'exp_length': 1800.0,
        'active': False,
        'when_observed': datetime(2015, 7, 27, 16, 42, 51)
    }
    self.test_block2 = Block.objects.create(pk=4, **block2_params)
    save_dataproduct(self.test_block2, spec_path, DataProduct.FITS_SPECTRA, filename='test2_2df_ex.fits')
    analog_block2_params = {
        'telclass': '2m0',
        'site': 'coj',
        'calibsource': self.calib,
        'superblock': self.test_sblock2,
        'obstype': Block.OPT_SPECTRA_CALIB,
        'block_start': '2019-07-27 13:00:00',
        'block_end': '2019-07-28 03:00:00',
        'request_number': '54321',
        'num_exposures': 1,
        'exp_length': 1800.0,
        'active': True,
        'when_observed': datetime(2019, 7, 27, 18, 42, 51)
    }
    self.analog_block2 = Block.objects.create(pk=10, **analog_block2_params)
    save_dataproduct(self.analog_block2, analog2_path, DataProduct.FITS_SPECTRA)
    # Build multi-frame Blocks
    msblock_params = {
        'cadence': False,
        'body': self.body,
        'proposal': self.test_proposal,
        'block_start': '2018-01-01 00:00:00',
        'block_end': '2018-01-01 03:00:00',
        'tracking_number': '4244',
        'active': True
    }
    self.test_msblock = SuperBlock.objects.create(pk=5, **msblock_params)
    mblock1_params = {
        'telclass': '2m0',
        'site': 'coj',
        'body': self.body,
        'superblock': self.test_msblock,
        'obstype': Block.OPT_SPECTRA,
        'block_start': '2018-01-01 00:00:00',
        'block_end': '2018-01-01 02:00:00',
        'request_number': '54322',
        'num_exposures': 2,
        'num_observed': 1,
        'exp_length': 1800.0,
        'active': True,
        'when_observed': datetime(2019, 7, 27, 16, 42, 51)
    }
    self.test_mblock1 = Block.objects.create(pk=5, **mblock1_params)
    # Two spectra on the same Block to exercise multi-frame display
    save_dataproduct(self.test_mblock1, spec_path, DataProduct.FITS_SPECTRA, filename='test3_2df_ex.fits')
    save_dataproduct(self.test_mblock1, spec_path, DataProduct.FITS_SPECTRA, filename='test3.2_2df_ex.fits')
    mfparams1 = {
        'sitecode': 'F65',
        'filename': 'sp233/a265962.sp233.txt',
        'exptime': 1800.0,
        'midpoint': '2018-01-01 01:00:00',
        'frametype': Frame.SPECTRUM_FRAMETYPE,
        'block': self.test_mblock1,
        'frameid': 10,
    }
    self.mspec_frame1 = Frame.objects.create(**mfparams1)
    mblock2_params = {
        'telclass': '2m0',
        'site': 'ogg',
        'body': self.body,
        'superblock': self.test_msblock,
        'obstype': Block.OPT_SPECTRA,
        'block_start': '2018-01-01 01:00:00',
        'block_end': '2018-01-01 03:00:00',
        'request_number': '54323',
        'num_exposures': 1,
        'num_observed': 1,
        'exp_length': 1800.0,
        'active': True,
        'when_observed': datetime(2019, 7, 27, 16, 42, 51)
    }
    self.test_mblock2 = Block.objects.create(pk=6, **mblock2_params)
    save_dataproduct(self.test_mblock2, spec_path, DataProduct.FITS_SPECTRA, filename='test4_2df_ex.fits')
    mfparams2 = {
        'sitecode': 'F65',
        'filename': 'sp233/a265962.sp233.txt',
        'exptime': 1800.0,
        'midpoint': '2018-01-01 02:00:00',
        'frametype': Frame.SPECTRUM_FRAMETYPE,
        'block': self.test_mblock2,
        'frameid': 11,
    }
    self.mspec_frame2 = Frame.objects.create(**mfparams2)
    # make empty block
    # NOTE(review): aliases, not copies — the mutations below also alter
    # mblock2_params/mfparams2. Harmless only because those dicts are not
    # reused afterwards; confirm before reordering.
    sblock_params_empty = msblock_params
    self.test_sblock_empty = SuperBlock.objects.create(pk=6, **sblock_params_empty)
    block_params_empty = mblock2_params
    block_params_empty['superblock'] = self.test_sblock_empty
    block_params_empty['when_observed'] = datetime(2019, 9, 27, 16, 42, 51)
    self.test_block_empty = Block.objects.create(**block_params_empty)
    frame_params_empty = mfparams2
    frame_params_empty['block'] = self.test_block_empty
    self.spec_frame_empty = Frame.objects.create(**frame_params_empty)
    # Grant the test user access to the NEO proposal so protected views work
    update_proposal_permissions(self.bart, [{ 'code': self.neo_proposal.code }])
def handle(self, *args, **options):
    """Extract a lightcurve for the SuperBlock named by options['supblock'].

    Matches predicted ephemeris positions against catalogued sources in
    each Block's reduced frames, accumulates magnitudes/times, writes
    ALCDEF/MPC1992/ADES-PSV/lightcurve text files and plots, optionally
    builds a GIF per Block, and records DataProducts for the outputs.
    """
    # Suppress incorrect FITSFixedWarnings
    warnings.simplefilter('ignore', FITSFixedWarning)
    self.stdout.write("==== Light curve building %s ====" % (datetime.now().strftime('%Y-%m-%d %H:%M')))
    try:
        start_super_block = SuperBlock.objects.get(tracking_number=options['supblock'])
    except SuperBlock.DoesNotExist:
        # NOTE(review): tracking numbers are stored as strings elsewhere in
        # this codebase ('4242'); '%d' would raise TypeError on a str —
        # confirm the type of options['supblock'] (likely should be '%s').
        self.stdout.write("Cannot find SuperBlock with Tracking Number %d" % options['supblock'])
        exit(-1)
    start_blocks = Block.objects.filter(superblock=start_super_block.id)
    start_block = start_blocks[0]
    if options['single'] is True:
        super_blocks = [start_super_block, ]
    else:
        # All SuperBlocks for the same body within `timespan` days before this one
        super_blocks = SuperBlock.objects.filter(body=start_super_block.body, block_start__gte=start_super_block.block_start-timedelta(days=options['timespan']))
    obs_date = None
    if options['date']:
        if isinstance(options['date'], str):
            try:
                obs_date = datetime.strptime(options['date'], '%Y%m%d')
            except ValueError:
                raise CommandError(usage)
        else:
            obs_date = options['date']
    # Initialize lists
    times = []          # frame midpoints with a matched source
    alltimes = []       # midpoints of every frame (matched or not)
    mags = []
    mag_errs = []
    zps = []
    zp_errs = []
    mpc_lines = []      # MPC1992 80-column astrometry lines
    psv_lines = []      # ADES pipe-separated astrometry lines
    total_frame_count = 0
    mpc_site = []
    fwhm = []
    air_mass = []
    output_file_list = []   # "filename,relative_dir" entries for lc_file_list.txt
    # build directory path / set permissions
    obj_name = sanitize_object_name(start_super_block.body.current_name())
    datadir = os.path.join(options['datadir'], obj_name)
    out_path = settings.DATA_ROOT
    data_path = ''
    rw_permissions = stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IWGRP | stat.S_IROTH
    if not os.path.exists(datadir) and not settings.USE_S3:
        try:
            os.makedirs(datadir)
            # Set directory permissions correctly for shared directories
            # Sets to (r)ead,(w)rite,e(x)ecute for owner & group, r-x for others
            os.chmod(datadir, stat.S_IRWXU | stat.S_IRWXG | stat.S_IROTH | stat.S_IXOTH)
        except:
            # NOTE(review): bare except also swallows KeyboardInterrupt/SystemExit;
            # consider narrowing to OSError.
            msg = "Error creating output path %s" % datadir
            raise CommandError(msg)
    sb_day = start_super_block.block_start.strftime("%Y%m%d")
    # Turn telescope class into a diameter for theoretical FWHM curve
    tel_classes = start_super_block.get_telclass()
    if len(tel_classes.split(",")) > 1:
        self.stdout.write("Multiple telescope sizes found; theoretical FWHM curve will be wrong")
        tel_class = tel_classes.split(",")[0]
    else:
        tel_class = tel_classes
    try:
        # e.g. '2m0' -> '2.0' -> 2.0 metres
        tel_diameter = float(tel_class.replace('m', '.'))
        tel_diameter *= u.m
    except ValueError:
        self.stdout.write("Error determining telescope diameter, assuming 0.4m")
        tel_diameter = 0.4*u.m
    # Set offsets, convert from Arcsec to Radians
    ra_offset = radians(options['ra_offset'] / 3600)
    dec_offset = radians(options['dec_offset'] / 3600)
    for super_block in super_blocks:
        # Create, name, open ALCDEF file.
        if obs_date:
            alcdef_date = options['date']
        else:
            alcdef_date = super_block.block_start.strftime("%Y%m%d")
        base_name = '{}_{}_{}_{}_'.format(obj_name, super_block.get_sites().replace(',', ''), alcdef_date, super_block.tracking_number)
        alcdef_filename = base_name + 'ALCDEF.txt'
        # NOTE(review): str.lstrip(out_path) strips any leading characters in
        # the set out_path, not the prefix string — probably intended
        # os.path.relpath(datadir, out_path). Same pattern recurs below.
        output_file_list.append('{},{}'.format(alcdef_filename, datadir.lstrip(out_path)))
        alcdef_txt = ''
        block_list = Block.objects.filter(superblock=super_block.id)
        if obs_date:
            # Restrict to Blocks observed on the requested night (obs_date, obs_date+2d)
            block_list = block_list.filter(when_observed__lt=obs_date+timedelta(days=2)).filter(when_observed__gt=obs_date)
        self.stdout.write("Analyzing SuperblockBlock# %s for %s" % (super_block.tracking_number, super_block.body.current_name()))
        for block in block_list:
            block_mags = []
            block_mag_errs = []
            block_times = []
            outmag = "NONE"
            self.stdout.write("Analyzing Block# %d" % block.id)
            obs_site = block.site
            # Get all Useful frames from each block
            frames_red = Frame.objects.filter(block=block.id, frametype__in=[Frame.BANZAI_RED_FRAMETYPE]).order_by('filter', 'midpoint')
            frames_ql = Frame.objects.filter(block=block.id, frametype__in=[Frame.BANZAI_QL_FRAMETYPE]).order_by('filter', 'midpoint')
            # Prefer fully-reduced frames; fall back to quicklook if more of those exist
            if len(frames_red) >= len(frames_ql):
                frames_all_zp = frames_red
            else:
                frames_all_zp = frames_ql
            frames = frames_all_zp.filter(zeropoint__isnull=False)
            self.stdout.write("Found %d frames (of %d total) for Block# %d with good ZPs" % (frames.count(), frames_all_zp.count(), block.id))
            self.stdout.write("Searching within %.1f arcseconds and +/-%.2f delta magnitudes" % (options['boxwidth'], options['deltamag']))
            total_frame_count += frames.count()
            frame_data = []
            if frames_all_zp.count() != 0:
                elements = model_to_dict(block.body)
                filter_list = []
                for frame in frames_all_zp:
                    # get predicted position and magnitude of target during time of each frame
                    emp_line = compute_ephem(frame.midpoint, elements, frame.sitecode)
                    ra = S.sla_dranrm(emp_line['ra'] + ra_offset)
                    dec = copysign(S.sla_drange(emp_line['dec'] + dec_offset), emp_line['dec'] + dec_offset)
                    mag_estimate = emp_line['mag']
                    (ra_string, dec_string) = radec2strings(ra, dec, ' ')
                    # Find list of frame sources within search region of predicted coordinates
                    sources = search_box(frame, ra, dec, options['boxwidth'])
                    midpoint_string = frame.midpoint.strftime('%Y-%m-%d %H:%M:%S')
                    self.stdout.write("%s %s %s V=%.1f %s (%d) %s" % (midpoint_string, ra_string, dec_string, mag_estimate, frame.sitecode, len(sources), frame.filename))
                    best_source = None
                    # Find source most likely to be target (Could Use Some Work)
                    if len(sources) != 0 and frame.zeropoint is not None:
                        if len(sources) == 1:
                            best_source = sources[0]
                        elif len(sources) > 1:
                            # If more than 1 source, pick closest within deltamag
                            min_sep = options['boxwidth'] * options['boxwidth']
                            for source in sources:
                                sep = S.sla_dsep(ra, dec, radians(source.obs_ra), radians(source.obs_dec))
                                sep = degrees(sep) * 3600.0
                                src_ra_string, src_dec_string = radec2strings(radians(source.obs_ra), radians(source.obs_dec))
                                # Compare against the previous accepted mag when we have one,
                                # otherwise against the ephemeris estimate
                                if len(block_mags) > 0:
                                    delta_mag = abs(block_mags[-1] - source.obs_mag)
                                else:
                                    delta_mag = abs(mag_estimate - source.obs_mag)
                                self.stdout.write("%s %s %s %s %.1f %.1f-%.1f %.1f" % ( ra_string, dec_string, src_ra_string, src_dec_string, sep, mag_estimate, source.obs_mag, delta_mag))
                                if sep < min_sep and delta_mag <= options['deltamag']:
                                    min_sep = sep
                                    best_source = source
                    # Save target source and add to output files.
                    if best_source and best_source.obs_mag > 0.0 and abs(mag_estimate - best_source.obs_mag) <= 3 * options['deltamag']:
                        block_times.append(frame.midpoint)
                        mpc_line, psv_line = self.make_source_measurement(block.body, frame, best_source, persist=options['persist'])
                        mpc_lines.append(mpc_line)
                        psv_lines.append(psv_line)
                        block_mags.append(best_source.obs_mag)
                        block_mag_errs.append(best_source.err_obs_mag)
                        filter_list.append(frame.ALCDEF_filter_format())
                    # We append these even if we don't have a matching source or zeropoint
                    # so we can plot conditions for all frames
                    zps.append(frame.zeropoint)
                    zp_errs.append(frame.zeropoint_err)
                    frame_data.append({'ra': ra, 'dec': dec, 'mag': mag_estimate, 'bw': options['boxwidth'], 'dm': options['deltamag'], 'best_source': best_source})
                    alltimes.append(frame.midpoint)
                    fwhm.append(frame.fwhm)
                    azimuth, altitude = moon_alt_az(frame.midpoint, ra, dec, *get_sitepos(frame.sitecode)[1:])
                    zenith_distance = radians(90) - altitude
                    air_mass.append(S.sla_airmas(zenith_distance))
                    obs_site = frame.sitecode
                    catalog = frame.photometric_catalog
                    if catalog == 'GAIA-DR2':
                        outmag = 'GG'
                    elif catalog == 'UCAC4':
                        outmag = 'SR'
                    if obs_site not in mpc_site:
                        mpc_site.append(obs_site)
                if len(block_times) > 1:
                    # Emit one ALCDEF section per filter used in this Block
                    filter_set = list(set(filter_list))
                    for filt in filter_set:
                        mag_set = [m for m, f in zip(block_mags, filter_list) if f == filt]
                        time_set = [t for t, f in zip(block_times, filter_list) if f == filt]
                        error_set = [e for e, f in zip(block_mag_errs, filter_list) if f == filt]
                        alcdef_txt += self.output_alcdef(block, obs_site, time_set, mag_set, error_set, filt, outmag)
                mags += block_mags
                mag_errs += block_mag_errs
                times += block_times
                # Create gif of fits files used for LC extraction
                data_path = make_data_dir(out_path, model_to_dict(frames_all_zp[0]))
                frames_list = [os.path.join(data_path, f.filename) for f in frames_all_zp]
                if not options['nogif']:
                    movie_file = make_gif(frames_list, sort=False, init_fr=100, center=3, out_path=data_path, plot_source=True, target_data=frame_data, show_reticle=True, progress=True)
                    if "WARNING" not in movie_file:
                        # Add write permissions to movie file
                        try:
                            os.chmod(movie_file, rw_permissions)
                        except PermissionError:
                            pass
                        # Create DataProduct
                        save_dataproduct(obj=block, filepath=movie_file, filetype=DataProduct.FRAME_GIF, force=options['overwrite'])
                        output_file_list.append('{},{}'.format(movie_file, data_path.lstrip(out_path)))
                        self.stdout.write("New gif created: {}".format(movie_file))
                    else:
                        # make_gif returned a warning string instead of a path
                        self.stdout.write(movie_file)
        # Store the accumulated ALCDEF text for this SuperBlock as a DataProduct
        save_dataproduct(obj=super_block, filepath=None, filetype=DataProduct.ALCDEF_TXT, filename=alcdef_filename, content=alcdef_txt, force=options['overwrite'])
    self.stdout.write("Found matches in %d of %d frames" % (len(times), total_frame_count))
    if not settings.USE_S3:
        # Write light curve data out in similar format to Make_lc.csh
        i = 0
        lightcurve_file = open(os.path.join(datadir, base_name + 'lightcurve_data.txt'), 'w')
        mpc_file = open(os.path.join(datadir, base_name + 'mpc_positions.txt'), 'w')
        psv_file = open(os.path.join(datadir, base_name + 'ades_positions.psv'), 'w')
        output_file_list.append('{},{}'.format(os.path.join(datadir, base_name + 'lightcurve_data.txt'), datadir.lstrip(out_path)))
        output_file_list.append('{},{}'.format(os.path.join(datadir, base_name + 'mpc_positions.txt'), datadir.lstrip(out_path)))
        output_file_list.append('{},{}'.format(os.path.join(datadir, base_name + 'ades_positions.psv'), datadir.lstrip(out_path)))
        # Calculate integer part of JD for first frame and use this as a
        # constant in case of wrapover to the next day
        if len(times) > 0 and len(mags) > 0:
            mjd_offset = int(datetime2mjd_utc(times[0]))
            for time in times:
                time_jd = datetime2mjd_utc(time)
                time_jd_truncated = time_jd - mjd_offset
                if i == 0:
                    # Header only before the first data row
                    lightcurve_file.write('#Object: %s\n' % start_super_block.body.current_name())
                    lightcurve_file.write("#MJD-%.1f Mag. Mag. error\n" % mjd_offset)
                lightcurve_file.write("%7.5lf %6.3lf %5.3lf\n" % (time_jd_truncated, mags[i], mag_errs[i]))
                i += 1
            lightcurve_file.close()
            try:
                os.chmod(os.path.join(datadir, base_name + 'lightcurve_data.txt'), rw_permissions)
            except PermissionError:
                pass
            # Write out MPC1992 80 column file
            for mpc_line in mpc_lines:
                mpc_file.write(mpc_line + '\n')
            mpc_file.close()
            try:
                os.chmod(os.path.join(datadir, base_name + 'mpc_positions.txt'), rw_permissions)
            except PermissionError:
                pass
            # Write out ADES Pipe Separated Value file
            for psv_line in psv_lines:
                psv_file.write(psv_line + '\n')
            psv_file.close()
            try:
                os.chmod(os.path.join(datadir, base_name + 'ades_positions.psv'), rw_permissions)
            except PermissionError:
                pass
            # Create Default Plot Title
            if options['title'] is None:
                sites = ', '.join(mpc_site)
                try:
                    # for single dates and short site lists, put everything on single line.
                    if options['timespan'] < 1 and len(sites) <= 13:
                        plot_title = '%s from %s (%s) on %s' % (start_super_block.body.current_name(), start_block.site.upper(), sites, start_super_block.block_end.strftime("%Y-%m-%d"))
                        subtitle = ''
                    # for lc covering multiple nights, reformat title
                    elif options['timespan'] < 1:
                        plot_title = '%s from %s to %s' % (start_block.body.current_name(), (start_super_block.block_end - timedelta(days=options['timespan'])).strftime("%Y-%m-%d"), start_super_block.block_end.strftime("%Y-%m-%d"))
                        subtitle = 'Sites: ' + sites
                    # for single night LC using many sites, put sites on 2nd line.
                    else:
                        plot_title = '%s from %s on %s' % (start_super_block.body.current_name(), start_block.site.upper(), start_super_block.block_end.strftime("%Y-%m-%d"))
                        subtitle = 'Sites: ' + sites
                except TypeError:
                    plot_title = 'LC for %s' % (start_super_block.body.current_name())
                    subtitle = ''
            else:
                plot_title = options['title']
                subtitle = ''
            # Make plots
            if not settings.USE_S3:
                self.plot_timeseries(times, alltimes, mags, mag_errs, zps, zp_errs, fwhm, air_mass, title=plot_title, sub_title=subtitle, datadir=datadir, filename=base_name, diameter=tel_diameter)
                output_file_list.append('{},{}'.format(os.path.join(datadir, base_name + 'lightcurve_cond.png'), datadir.lstrip(out_path)))
                output_file_list.append('{},{}'.format(os.path.join(datadir, base_name + 'lightcurve.png'), datadir.lstrip(out_path)))
                try:
                    os.chmod(os.path.join(datadir, base_name + 'lightcurve_cond.png'), rw_permissions)
                except PermissionError:
                    pass
                try:
                    os.chmod(os.path.join(datadir, base_name + 'lightcurve.png'), rw_permissions)
                except PermissionError:
                    pass
        else:
            self.stdout.write("No sources matched.")
    # data_path is only non-empty if at least one Block had frames
    if data_path:
        with open(os.path.join(data_path, base_name + 'lc_file_list.txt'), 'w') as outfut_file_file:
            outfut_file_file.write('# == Files created by Lightcurve Extraction for {} on {} ==\n'.format(obj_name, sb_day))
            for output_file in output_file_list:
                outfut_file_file.write(output_file)
                outfut_file_file.write('\n')
        self.stdout.write(f"New lc file list created: {os.path.join(data_path, base_name + 'lc_file_list.txt')}")
        try:
            os.chmod(os.path.join(data_path, base_name + 'lc_file_list.txt'), rw_permissions)
        except PermissionError:
            pass