def test_execute_step(self):
    """Step1 then Step2 mutate the sample's name; re-running Step1 is a no-op."""
    with db.session_scope() as session:
        initial = SampleModel(name=None)
        session.add(initial)
        session.commit()
        sample_id = initial.id

    def check(expected_count, expected_name):
        # Assert total row count and the tracked sample's name after a run.
        with db.session_scope() as session:
            self.assertEqual(session.query(SampleModel).count(), expected_count)
            tracked = session.query(SampleModel).get(sample_id)
            self.assertEqual(tracked.name, expected_name)

    run.execute_step(Step1, sync=True)
    check(1, "Step1")

    run.execute_step(Step2, sync=True)
    check(2, "Step2")

    # Step1 must not re-process the already-handled sample.
    run.execute_step(Step1, sync=True)
    check(2, "Step2")
def test_execute_alert(self):
    """Alert1 fires once for a matching sample and is not re-triggered on re-run.

    Fix: ``assertEquals`` is a deprecated alias of ``assertEqual`` and was
    removed in Python 3.12.
    """
    alert_to = Alert1.alert_to[0]
    sample_id = None
    with db.session_scope() as session:
        sample = SampleModel(name="catch_alert")
        session.add(sample)
        session.commit()
        sample_id = sample.id

    run.execute_alert(Alert1, sync=True)
    # The endpoint received exactly the sample's name.
    self.assertEqual(alert_to.fp.getvalue(), "catch_alert")
    with db.session_scope() as session:
        query = session.query(Alerted)
        self.assertEqual(query.count(), 1)
        alerted = query.first()
        sample = session.query(SampleModel).get(sample_id)
        self.assertEqual(alerted.model, sample)
        self.assertEqual(alerted.alert, Alert1)

    # A second run must not create a duplicate Alerted record.
    run.execute_alert(Alert1, sync=True)
    with db.session_scope() as session:
        query = session.query(Alerted)
        self.assertEqual(query.count(), 1)
        alerted = query.first()
        sample = session.query(SampleModel).get(sample_id)
        self.assertEqual(alerted.model, sample)
        self.assertEqual(alerted.alert, Alert1)
def handle(self):
    """Seed the database with the Macon Ridge observatory, its CCD and the O1 campaign."""
    site = models.Observatory()
    site.name = "Macon Ridge"
    site.latitude = -1.1
    site.longitude = -1.1
    site.description = "Macon Ridge Toritos site"

    ccd = models.CCD()
    ccd.name = ""
    ccd.brand = "Apogee"
    ccd.model = ""
    ccd.description = ""
    ccd.xpixsize = 4096
    ccd.ypixsize = 4096

    o1_campaign = models.Campaign()
    o1_campaign.name = "LIGO O1"
    o1_campaign.description = "EM Follow-up Counterpart for the O1 LIGO Science Run."
    o1_campaign.observatory = site
    o1_campaign.ccd = ccd

    with db.session_scope() as session:
        for record in (site, ccd, o1_campaign):
            session.add(record)
def test_execute_loader(self):
    """The loader persists one sample; yielding a non-model value raises TypeError."""
    run.execute_loader(TestLoader, sync=True)
    with db.session_scope() as session:
        self.assertEqual(session.query(SampleModel).count(), 1)
    patched = mock.patch(
        "tests.steps.TestLoader.generate", return_value=[None])
    with patched:
        with self.assertRaises(TypeError):
            run.execute_loader(TestLoader, sync=True)
def tearDown(self):
    """Best-effort cleanup: delete every row of every known model after a test.

    Fix: the original used a bare ``except:``, which also swallows
    ``SystemExit`` and ``KeyboardInterrupt``; narrowed to ``Exception``.
    """
    self.teardown()
    try:
        from corral import util, db
        with db.session_scope() as session:
            for model in util.collect_subclasses(db.Model):
                session.query(model).delete()
    except Exception:
        # Cleanup is deliberately best-effort; failures here must not
        # mask the test's own outcome.
        pass
def test_manual_register_filter_registered(self):
    """Alert1.filter_registered must receive the query built from model + conditions.

    Fix: ``assertEquals`` is a deprecated alias of ``assertEqual`` and was
    removed in Python 3.12.
    """
    with db.session_scope() as session:
        sample = SampleModel(name="catch_alert")
        session.add(sample)
        session.commit()
    # Without a filter_registered implementation the alert cannot run.
    with self.assertRaises(NotImplementedError):
        run.execute_alert(Alert1, sync=True)
    with mock.patch("tests.alerts.Alert1.filter_registered") as flt_reg:
        run.execute_alert(Alert1, sync=True)
    with db.session_scope() as session:
        # Compare the SQL of the query passed to the mock against the one
        # built manually from the alert's model and conditions.
        actual = flt_reg.call_args[0][0].statement.compile(db.engine)
        expected = session.query(
            Alert1.model
        ).filter(*Alert1.conditions).statement.compile(db.engine)
        self.assertEqual(str(actual), str(expected))
        self.assertEqual(actual.params, expected.params)
def handle(self):
    """Create the raw/preprocessed/stacked states (plus error states) and persist them."""
    stages = (
        ("raw", "raw"),
        ("preprocessed", "preprocessed"),
        ("stacked", "stacks"),
    )
    states = []
    for order, (errorlabel, folder) in enumerate(stages):
        state, error_state = self.makeStateAndErrorState(
            errorlabel, folder, order)
        states.extend((state, error_state))
    with db.session_scope() as session:
        for state in states:
            session.add(state)
def handle(self, tiles, output, append, procs, sync):
    """Dump the given tiles into an HDF5 file, optionally appending or async."""
    mode = "r+" if append else "w"
    if mode == "w" and os.path.exists(output):
        raise OSError("File '{}' already exists".format(output))
    with db.session_scope() as session:
        workers = tiles2hdf5.dump(session, tiles, output, mode, procs, sync)
        if not sync:
            # Wait for every worker, then propagate any non-zero exit code.
            for worker in workers:
                worker.join()
            status = sum(worker.exitcode for worker in workers)
            if status:
                sys.exit(status)
def test_email_endpoint(self, smtp):
    """The Email endpoint sends from the configured user to the given recipients.

    Fix: ``assertEquals`` is a deprecated alias of ``assertEqual`` and was
    removed in Python 3.12.
    """
    with db.session_scope() as session:
        sample = SampleModel(name="catch_alert")
        session.add(sample)
        session.commit()
    to = ["*****@*****.**"]
    alert_to = ep.Email(to)
    with mock.patch("tests.alerts.Alert1.alert_to", [alert_to]):
        run.execute_alert(Alert1, sync=True)
    # sendmail(from_addr, to_addrs, ...): check only the first two arguments.
    actual = alert_to.server.sendmail.call_args[0][:2]
    expected = (conf.settings.EMAIL["user"], to)
    self.assertEqual(actual, expected)
def main(): with db.session_scope() as ses: query = ses.query(LightCurves) rows = [] for tile in ses.query(Tile.name, Tile.size, Tile.ogle3_tagged_number).filter_by( ready=True).order_by(Tile.name).all(): rows.append({k: getattr(tile, k) for k in tile.keys()}) df = pd.DataFrame(rows)[["name", "size", "ogle3_tagged_number"]] resume = df.sum().to_dict() resume["name"] = "Total" df = df.append(resume, ignore_index=True) df.columns = ["Nombre", u"Tamaño", "Variables"] print df.to_latex(index=False)
def test_manual_register_register(self):
    """A patched Alert1.register receives exactly the matching sample.

    Fix: ``assertEquals`` is a deprecated alias of ``assertEqual`` and was
    removed in Python 3.12.
    """
    sample_id = None
    with db.session_scope() as session:
        sample = SampleModel(name="catch_alert")
        session.add(sample)
        session.commit()
        sample_id = sample.id
    # Without a register implementation the alert cannot run.
    with self.assertRaises(NotImplementedError):
        run.execute_alert(Alert1, sync=True)
    called_with = []

    def reg(self, obj):
        # Record what register() was called with, detached from the session.
        called_with.append({"id": obj.id, "name": obj.name})

    with mock.patch("tests.alerts.Alert1.register", reg):
        run.execute_alert(Alert1, sync=True)
    self.assertEqual(len(called_with), 1)
    with db.session_scope() as session:
        actual = called_with[0]
        sample = session.query(SampleModel).get(sample_id)
        self.assertEqual(actual["id"], sample.id)
        self.assertEqual(actual["name"], sample.name)
def handle(self, filter):
    """Tabulate pawprint-x-tile records, optionally restricted to (un)synced ones."""
    table = Texttable(max_width=0)
    table.set_deco(Texttable.BORDER | Texttable.HEADER | Texttable.VLINES)
    table.header(("Tile", "Pawprint", "Status"))
    with db.session_scope() as session:
        query = session.query(PawprintXTile)
        if filter == "synced":
            query = query.filter(PawprintXTile.status == "sync")
        elif filter == "unsynced":
            query = query.filter(PawprintXTile.status != "sync")
        for record in query:
            table.add_row(
                [record.tile.name, record.pawprint.name, record.status])
    print(table.draw())
def class_setup(cls):
    """Collect all matching model ids and split them into one chunk per worker."""
    def split_even(ids, parts):
        # Divide `ids` into `parts` roughly equal slices, largest first.
        step = len(ids) / float(parts)
        chunks, cursor = [], 0.0
        while cursor < len(ids):
            chunks.append(ids[int(cursor):int(cursor + step)])
            cursor += step
        return sorted(chunks, reverse=True)

    with db.session_scope() as session:
        all_ids = tuple(
            row[0]
            for row in session.query(cls.model.id).filter(*cls.conditions))
    # Drop pooled connections so forked workers open fresh ones.
    db.engine.dispose()
    cls.chunks = split_even(all_ids, cls.get_procno())
def handle(self, status):
    """Tabulate pawprints (name/status/mjd/size/readed), optionally filtered by status.

    Fix: ``map(table.add_row, query)`` is lazy on Python 3, so no row was
    ever added; iterate explicitly so the side effect always happens.
    """
    table = Texttable(max_width=0)
    table.set_deco(Texttable.BORDER | Texttable.HEADER | Texttable.VLINES)
    table.header(("Pawprint", "Status", "MJD", "Size", "Readed"))
    cnt = 0
    with db.session_scope() as session:
        query = session.query(
            Pawprint.name, Pawprint.status, Pawprint.mjd,
            Pawprint.data_size, Pawprint.data_readed)
        if status:
            query = query.filter(Pawprint.status.in_(status))
        for row in query:
            table.add_row(row)
        cnt = query.count()
    print(table.draw())
    print("Count: {}".format(cnt))
def handle(self, status):
    """Tabulate tiles (name/status/VS tags/size/ready), optionally filtered by status.

    Fix: ``map(table.add_row, query)`` is lazy on Python 3, so no row was
    ever added; iterate explicitly so the side effect always happens.
    """
    log2critcal()
    table = Texttable(max_width=0)
    table.set_deco(Texttable.BORDER | Texttable.HEADER | Texttable.VLINES)
    table.header(("Tile", "Status", "VS Tags", "Size", "Ready"))
    cnt = 0
    with db.session_scope() as session:
        query = session.query(Tile.name, Tile.status,
                              Tile.ogle3_tagged_number, Tile.size, Tile.ready)
        if status:
            query = query.filter(Tile.status.in_(status))
        for row in query:
            table.add_row(row)
        cnt = query.count()
    print(table.draw())
    print("Count: {}".format(cnt))
def main(): with db.session_scope() as ses: query = ses.query(Tile).filter(Tile.name.in_(tile_names)) for tile in query: if tile.name in cache: print "Skiping {}".format(lc.tile.name) continue lc = tile.lcurves feats = pd.DataFrame(lc.features) feats = remove_bad_color(feats) # here we remove the bad colors gc.collect() obs = pd.DataFrame(lc.observations) obs = obs[obs.bm_src_id.isin(feats.id)] gc.collect() to_proc = tqdm.tqdm(get_old_feats(feats), desc=lc.tile.name) with joblib.Parallel(n_jobs=CPUS) as P: new_feats = P( joblib.delayed(extract) ( # the exract make the source sigmaclip sid=sid, obs=obs[obs.bm_src_id == sid], old_feats=old_feats) for sid, old_feats in to_proc) new_feats = pd.DataFrame(new_feats)[[ f for f in feats.columns if f not in COLUMNS_TO_REMOVE ]] new_feats = new_feats[new_feats.cnt >= 30] # herw we remove the to short lc new_feats = reorder(new_feats) new_feats = to_recarray(new_feats) # ~ lc.tile.ready = False # ~ lc.features = new_feats # ~ lc.tile.ready = True gc.collect() cache[tile.name] = new_feats
def test_execute_alert_default_render(self):
    """The default render_alert output is exactly what the endpoint receives.

    Fix: ``assertEquals`` is a deprecated alias of ``assertEqual`` and was
    removed in Python 3.12.
    """
    alert_to = Alert1.alert_to[0]
    with db.session_scope() as session:
        sample = SampleModel(name="catch_alert")
        session.add(sample)
        session.commit()
    expected = []

    def render(self, *args, **kwargs):
        # Capture whatever the stock implementation produces.
        rendered = super(Alert1, self).render_alert(*args, **kwargs)
        expected.append(rendered)
        return expected[-1]

    with mock.patch("tests.alerts.Alert1.render_alert", render):
        run.execute_alert(Alert1, sync=True)
    self.assertEqual(alert_to.fp.getvalue(), expected[-1])
def main(): with db.session_scope() as ses: query = ses.query(LightCurves) rows = [] for tile in ses.query(Tile).filter_by(ready=True).order_by( Tile.name).all(): sources = tile.load_npy_file() sources = pd.DataFrame(sources) rr = len(sources[sources.vs_type.str.startswith("RRLyr-")]) rows.append({"name": tile.name, "size": tile.size, "rr": rr}) df = pd.DataFrame(rows)[["name", "size", "rr"]] resume = df.sum().to_dict() resume["name"] = "Total" resume = df.mean().to_dict() resume["name"] = "Promedio" df = df.append(resume, ignore_index=True) df.columns = ["Nombre", u"Tamaño", "RR-Lyrae"] print df.to_latex(index=False)
def main(): result = [] with db.session_scope() as ses: query = ses.query(LightCurves).join(Tile) for lc in query: print lc features = pd.DataFrame(lc.features) features = features[features.Mean > 12] features = features[features.Mean < 16.5] sample = features.sample(1000) result.append(sample) print "Merging" result = pd.concat(result, ignore_index=True) print "Saving to {}".format("sample_all.pkl") result.to_pickle("sample_all.pkl.bz2", compression="bz2")
def handle(self, status):
    """Tabulate pawprint-stack-x-tile matches, optionally filtered by status."""
    log2critcal()
    table = Texttable(max_width=0)
    table.set_deco(Texttable.BORDER | Texttable.HEADER | Texttable.VLINES)
    table.header(("Tile", "Pawprint", "Matched N.", "Status"))
    with db.session_scope() as session:
        query = session.query(PawprintStackXTile)
        if status:
            query = query.filter(PawprintStackXTile.status.in_(status))
        for match in query:
            row = [
                match.tile.name,
                match.pawprint_stack.name,
                match.matched_number,
                match.status,
            ]
            table.add_row(row)
        cnt = query.count()
    print(table.draw())
    print("Count: {}".format(cnt))
def handle(self, ready, status):
    """Tabulate tiles (name/status/size/readed/ready), filtered by flag and status.

    Fix: ``map(table.add_row, query)`` is lazy on Python 3, so no row was
    ever added; iterate explicitly so the side effect always happens.
    """
    table = Texttable(max_width=0)
    table.set_deco(Texttable.BORDER | Texttable.HEADER | Texttable.VLINES)
    table.header(("Tile", "Status", "Size", "Readed", "Ready"))
    cnt = 0
    with db.session_scope() as session:
        query = session.query(
            Tile.name, Tile.status, Tile.data_size,
            Tile.data_readed, Tile.ready)
        if ready is not None:
            ready = self._bool(ready)
            query = query.filter(Tile.ready == ready)
        if status:
            query = query.filter(Tile.status.in_(status))
        for row in query:
            table.add_row(row)
        cnt = query.count()
    print(table.draw())
    print("Count: {}".format(cnt))
def handle(self, cql_query, print_ast, print_sql):
    """Compile and run a CQL query: print the AST or SQL, dump a download, or tabulate rows.

    Fixes: ``sqla_query.count()`` was executed twice (two round-trips for the
    same number); the ``enumerate`` index in the row loop was unused.
    """
    cql_query_object = cql.from_string(cql_query, cql.NAMESPACES)
    ast = cql_query_object.compile()
    if print_ast:
        pprint(ast)
        print("")
        return
    with db.session_scope() as session:
        result = cql.eval(ast, session)
        if print_sql:
            print(
                "DOWNLOAD '{}':\n\t{}".format(result.fmt(), result.query)
                if isinstance(result, cql.Writer) else result[0])
            print("")
            return

        # DOWNLOAD
        if isinstance(result, cql.Writer):
            result.dump(sys.stdout)
            return

        # NORMAL QUERY
        sqla_query, column_names = result
        table = Texttable(max_width=0)
        table.set_deco(
            Texttable.BORDER | Texttable.HEADER | Texttable.VLINES)
        # Evaluate COUNT once and reuse it for the row-limit check.
        count = sqla_query.count()
        if count > self.MAX_ROWS:
            sqla_query = sqla_query.limit(self.MAX_ROWS)
            count = self.MAX_ROWS
        table.header(column_names)
        for row in sqla_query:
            table.add_row(row)
        print(table.draw())
        print("ROWCOUNT = {}".format(count))
def main(): with db.session_scope() as ses: query = ses.query(LightCurves) rows = [] for lc in query.all(): print lc tile = lc.tile feats = pd.DataFrame(lc.features) if "vs_catalog" in feats.columns: print "skiping" continue srcs = pd.DataFrame(tile.load_npy_file()[["id", "vs_catalog"]]) srcs = srcs[srcs.id.isin(feats.id)] new_feats = pd.merge(srcs, feats, on="id", how="inner") new_feats = to_recarray(new_feats) lc.tile.ready = False lc.features = new_feats lc.tile.ready = True
def main(): with db.session_scope() as ses: query = ses.query(LightCurves).join(Tile).filter(Tile.name.in_(tiles)) for lc in query: print lc feats = lc.features if "AmplitudeJH" in feats.dtype.names: print " skip!" continue columns = [ ("AmplitudeJH", feats["AmplitudeJ"] - feats["AmplitudeH"]), ("AmplitudeJK", feats["AmplitudeJ"] - feats["Amplitude"]) ] feats = add_columns(feats, columns, append=True) lc.tile.ready = False lc.features = feats lc.tile.ready = True ses.commit() print " Done"
def tearDown(self):
    """Delete every row of every registered model once the test finishes."""
    self.teardown()
    from corral import db, util
    all_models = util.collect_subclasses(db.Model)
    with db.session_scope() as session:
        for model in all_models:
            session.query(model).delete()
def handle(self, tnames, output, cone_search, no_cls_size, no_saturated,
           no_faint, include_vs, memory_check, vs_type):
    """Export a (filtered, sampled) feature dataset for the given tiles.

    The output format is chosen by the file extension of ``output``
    (.csv / .pkl / .bz2). Order of operations per tile: read features,
    drop saturated/faint sources, optional cone search, collect variable
    stars, then sample the unclassified sources.
    """
    # Refuse to run with less than 32 GB RAM unless the check is disabled --
    # the concatenated feature frames are large.
    min_memory, mem = int(32e+9), virtual_memory()
    if memory_check and mem.total < min_memory:
        min_memory_gb = min_memory / 1e+9
        total_gb = mem.total / 1e+9
        msg = "You need at least {}GB of memory. Found {}GB"
        raise MemoryError(msg.format(min_memory_gb, total_gb))
    # 'O2O' (one-to-one) sampling and vs_type filtering only make sense when
    # variable stars are actually included.
    if no_cls_size == "O2O" and not include_vs:
        self.parser.error(
            "You can't set the parameter '--ucls-size/-u' to 'O2O' "
            "if you set the flag '--no-variable-stars/-nvs'")
    if vs_type and not include_vs:
        self.parser.error(
            "You can't set the parameter '--variable-stars-type/-vst' "
            "if you set the flag '--no-variable-stars/-nvs'")
    import pandas as pd
    result = []
    with db.session_scope() as session:
        query = session.query(LightCurves).join(Tile).filter(
            Tile.name.in_(tnames))
        for lc in query:
            print "Reading features of tile {}...".format(lc.tile.name)
            features = pd.DataFrame(lc.features)
            print "Sources {}".format(len(features))
            if no_saturated:
                print "No saturated <-"
                features = features[features.Mean > 12]
            if no_faint:
                print "No Faint <-"
                features = features[features.Mean < 16.5]
            if cone_search:
                ra, dec, radius = cone_search
                print "ConeSearch({}, {}, {}) <-".format(ra, dec, radius)
                features = self.cone_search(features=features, ra_c=ra,
                                            dec_c=dec, sr_c=radius)
            if include_vs:
                print "Retrieving '{}' VS <-".format(vs_type or all)
                # Variable stars are the rows with a non-empty vs_type tag.
                vss = features[features.vs_type != ""]
                if vs_type:
                    vss = vss[vss.vs_type.str.contains(vs_type)]
                if len(vss):
                    result.append(vss)
            print "Sampling Unk Src <-"
            # Unclassified sources; how many to keep depends on no_cls_size:
            # "ALL" keeps everything, "O2O" matches the VS count, otherwise
            # no_cls_size is taken as an absolute sample size.
            unk = features[features.vs_type == ""]
            if no_cls_size == "ALL":
                sample_size = len(unk)
            elif no_cls_size == "O2O":
                sample_size = len(vss)
                if sample_size == 0:
                    continue
            else:
                sample_size = no_cls_size
            unk = unk.sample(sample_size)
            result.append(unk)
    print "Merging"
    result = pd.concat(result, ignore_index=True)
    print("Total Size {}".format(len(result)))
    print "Saving to {}".format(output)
    # Dispatch on the output extension.
    ext = os.path.splitext(output)[-1]
    if ext == ".csv":
        result.to_csv(output, index=False)
    elif ext == ".pkl":
        result.to_pickle(output)
    elif ext == ".bz2":
        result.to_pickle(output, compression="bz2")
    else:
        msg = "unknow type {}".format(output)
        raise ValueError(msg)
def handle(self, tnames, status):
    """Force the given status onto every named tile and report each change."""
    log2critcal()
    with db.session_scope() as session:
        selected = session.query(Tile).filter(Tile.name.in_(tnames))
        for tile in selected:
            tile.status = status
            print("[SUCCESS] Tile '{}' -> {}".format(tile.name, status))
def main():
    """Open a database session and delegate all work to run()."""
    with db.session_scope() as session:
        run(session)
#!/usr/bin/env python # -*- coding: utf-8 -*- from corral import db from carpyncho.models import PawprintXTile tile_names = "b278 b202 b261 b264 b263 b262, b220".split() with db.session_scope() as session: end = False for tile_name in tile_names: query = session.query(PawprintXTile).filter( PawprintXTile.tile.has(name=tile_name) & (PawprintXTile.status == "raw") & (PawprintXTile.status != "pending") ) for pxt in query: pxt.status = "pending" print "Pending: {}".format(pxt) end = True if end: break
def main():
    """Open a database session and hand it to process()."""
    with db.session_scope() as session:
        process(session)