def setup_permissions():
    """Create default ACLs/Roles needed by application.

    If a given ACL or Role already exists, it will be skipped.
    """
    # create_or_get is idempotent: existing ACL rows are returned, not duplicated.
    all_acls = [ACL.create_or_get(a) for a in all_acl_ids]
    DBSession().add_all(all_acls)
    DBSession().commit()
    # Attach the configured ACL set to each role (role_acls maps role id -> ACL ids).
    for r, acl_ids in role_acls.items():
        role = Role.create_or_get(r)
        # Re-fetch by primary key so the role references persisted ACL instances.
        role.acls = [ACL.query.get(a) for a in acl_ids]
        DBSession().add(role)
    DBSession().commit()
def teardown(spectrum):
    """Delete a Spectrum, then tear down its reducers, observers, and instrument."""
    if is_already_deleted(spectrum, Spectrum):
        return
    # Snapshot related rows before the delete detaches the instance.
    owning_instrument = spectrum.instrument
    related_users = list(spectrum.reducers) + list(spectrum.observers)
    for related_user in related_users:
        UserFactory.teardown(related_user.id)
    DBSession().delete(spectrum)
    DBSession().commit()
    InstrumentFactory.teardown(owning_instrument)
def add_super_user(username):
    """Initializes a super user with full permissions.

    Parameters
    ----------
    username : str
        Username (also used as the Google OAuth2 identifier) of the user
        to create/promote.
    """
    setup_permissions()  # make sure permissions already exist
    super_user = User.query.filter(User.username == username).first()
    if super_user is None:
        super_user = User(username=username)
        # Register the social-auth record for the new user; the returned
        # object was previously bound to an unused local, so it is dropped.
        TornadoStorage.user.create_social_auth(
            super_user, super_user.username, 'google-oauth2')
    admin_role = Role.query.get('Super admin')
    if admin_role not in super_user.roles:
        super_user.roles.append(admin_role)
    DBSession().add(super_user)
    DBSession().commit()
def teardown(request):
    """Delete a FollowupRequest and tear down its dependent fixtures."""
    if is_already_deleted(request, FollowupRequest):
        return
    # Capture related ids/objects *before* deleting: after delete+commit the
    # instance is expired, so relationship access (request.last_modified_by)
    # can fail. The original read last_modified_by after the commit; the
    # sibling ClassicalAssignment teardown captures it first — do the same.
    requester = request.requester.id
    last_modified_by = request.last_modified_by.id
    allocation = request.allocation
    obj = request.obj
    DBSession().delete(request)
    DBSession().commit()
    UserFactory.teardown(last_modified_by)
    UserFactory.teardown(requester)
    AllocationFactory.teardown(allocation)
    ObjFactory.teardown(obj)
def test_javascript_sexagesimal_conversion(public_source, driver, user):
    # Verify that the frontend renders decimal-degree coordinates as
    # sexagesimal strings; the expected values were precomputed for
    # these specific ra/dec pairs.
    public_source.ra = 342.0708127
    public_source.dec = 56.1130711
    DBSession().commit()
    driver.get(f"/become_user/{user.id}")
    driver.get(f"/source/{public_source.id}")
    driver.wait_for_xpath('//*[contains(., "22:48:17.00")]')
    driver.wait_for_xpath('//*[contains(., "+56:06:47.06")]')
    # Change the coordinates and confirm the display updates after a refresh.
    public_source.ra = 75.6377796
    public_source.dec = 15.606709
    DBSession().commit()
    driver.refresh()
    driver.wait_for_xpath('//*[contains(., "05:02:33.07")]')
    driver.wait_for_xpath('//*[contains(., "+15:36:24.15")]')
def teardown(classification):
    """Delete a Classification plus its author, object, and taxonomy fixtures."""
    if is_already_deleted(classification, Classification):
        return
    # Record foreign keys before the delete expires the instance.
    author_id = classification.author.id
    target_obj = classification.obj
    taxonomy_id = classification.taxonomy.id
    DBSession().delete(classification)
    DBSession().commit()
    UserFactory.teardown(author_id)
    ObjFactory.teardown(target_obj)
    TaxonomyFactory.teardown(taxonomy_id)
def group_admin_user(public_group, public_stream):
    """Yield a user holding the "Group admin" role within ``public_group``."""
    admin_role = models.Role.query.get("Group admin")
    user = UserFactory(
        groups=[public_group],
        roles=[admin_role],
        streams=[public_stream],
    )
    user_id = user.id
    # Promote the membership row for this group to admin.
    membership = (
        DBSession()
        .query(GroupUser)
        .filter(
            GroupUser.group_id == public_group.id,
            GroupUser.user_id == user.id,
        )
        .first()
    )
    membership.admin = True
    DBSession().commit()
    yield user
    UserFactory.teardown(user_id)
def test_observing_run_skycam_component(driver, super_admin_user, public_source,
                                        red_transients_run, super_admin_token):
    """Exercise the observing-run page's skycam widget through three states:
    a valid skycam link shows the image, a broken link falls back to the
    static placeholder, and a null link hides the conditions section.
    """
    driver.get(f"/become_user/{super_admin_user.id}")
    status, data = post_assignment(
        public_source,
        red_transients_run,
        priority="3",
        comment="Observe please",
        token=super_admin_token,
    )
    assert status == 200
    assert data["status"] == "success"
    driver.get(f"/run/{red_transients_run.id}")
    # 20 second timeout to give the backend time to perform ephemeris calcs
    driver.wait_for_xpath('//*[text()="Current Conditions"]', timeout=20)
    driver.wait_for_xpath(
        f'//img[contains(@src, "{red_transients_run.instrument.telescope.skycam_link}")]'
    )
    # Break the skycam link: the page should fall back to the static image.
    red_transients_run.instrument.telescope.skycam_link = (
        'http://this.is.a.bad.link.web.biz')
    DBSession().add(red_transients_run.instrument.telescope)
    DBSession().commit()
    driver.get(f"/run/{red_transients_run.id}")
    driver.wait_for_xpath(
        f'//b[contains(text(), "{red_transients_run.instrument.name}")]')
    driver.wait_for_xpath('//*[text()="Current Conditions"]')
    fallback_url = "static/images/static.jpg"
    driver.wait_for_xpath(f'//img[contains(@src, "{fallback_url}")]')
    # Remove the link entirely: the conditions section should disappear.
    red_transients_run.instrument.telescope.skycam_link = None
    DBSession().add(red_transients_run.instrument.telescope)
    DBSession().commit()
    driver.get(f"/run/{red_transients_run.id}")
    # 20 second timeout to give the backend time to perform ephemeris calcs
    driver.wait_for_xpath(
        f'//b[contains(text(), "{red_transients_run.instrument.name}")]',
        timeout=20)
    driver.wait_for_xpath_to_disappear('//*[text()="Current Conditions"]')
    driver.wait_for_xpath_to_disappear(
        f'//img[contains(@src, "{red_transients_run.instrument.telescope.skycam_link}")]'
    )
def groups(obj, create, extracted, **kwargs):
    """factory_boy post-generation hook: attach groups to ``obj``.

    ``extracted`` holds the list passed as ``groups=[...]`` at factory call
    time; the configured sitewide public group is always appended as well.
    """
    if not create:
        # Build strategy: nothing is persisted, so skip database work.
        return
    if extracted:
        for group in extracted:
            obj.groups.append(group)
            DBSession().add(obj)
            DBSession().commit()
    # always add the sitewide group
    sitewide_group = (DBSession().query(Group).filter(
        Group.name == cfg['misc']['public_group_name']).first())
    obj.groups.append(sitewide_group)
    DBSession().commit()
def teardown(obj):
    """Delete an Obj along with its comment authors, spectra, and instruments."""
    if is_already_deleted(obj, Obj):
        return
    # Snapshot relationships before the delete detaches the instance.
    related_instruments = obj.instruments
    author_ids = [comment.author.id for comment in obj.comments]
    for author_id in author_ids:
        UserFactory.teardown(author_id)
    attached_spectra = (
        DBSession().query(Spectrum).filter(Spectrum.obj_id == obj.id).all()
    )
    for spec in attached_spectra:
        SpectrumFactory.teardown(spec)
    DBSession().delete(obj)
    DBSession().commit()
    for instr in related_instruments:
        InstrumentFactory.teardown(instr)
def teardown(assignment):
    """Delete a ClassicalAssignment and tear down its related fixtures."""
    if is_already_deleted(assignment, ClassicalAssignment):
        return
    # Capture ids/objects before deletion expires the instance.
    requester_id = assignment.requester.id
    observing_run = assignment.run
    target = assignment.obj
    modifier_id = assignment.last_modified_by.id
    DBSession().delete(assignment)
    DBSession().commit()
    ObservingRunFactory.teardown(observing_run)
    ObjFactory.teardown(target)
    UserFactory.teardown(modifier_id)
    UserFactory.teardown(requester_id)
def test_top_source_prefs(driver, user, public_group, upload_data_token):
    """Check that the top-sources widget respects the selected time window."""
    # Add an old source and give it an old view
    obj_id = str(uuid.uuid4())
    status, data = api(
        'POST',
        'sources',
        data={
            'id': obj_id,
            'ra': 50.4,
            'dec': 22.33,
            'redshift': 2.1,
            "altdata": {
                "simbad": {
                    "class": "RRLyr"
                }
            },
            'transient': False,
            'ra_dis': 2.3,
            'group_ids': [public_group.id],
        },
        token=upload_data_token,
    )
    assert status == 200
    assert data['data']['id'] == obj_id
    # Backdate the view so it falls outside the default 7-day window but
    # inside the 30-day window.
    twenty_days_ago = datetime.datetime.now() - datetime.timedelta(days=20)
    sv = SourceView(
        obj_id=obj_id,
        username_or_token_id=upload_data_token,
        is_token=True,
        created_at=twenty_days_ago,
    )
    DBSession().add(sv)
    DBSession().commit()
    driver.get(f'/become_user/{user.id}')
    driver.get('/')
    # Wait for just top source widget to show up
    last_30_days_button = "//button[contains(@data-testid, 'topSources_30days')]"
    driver.wait_for_xpath(last_30_days_button)
    # Test that source doesn't show up in last 7 days of views
    source_view_xpath = f"//div[@data-testid='topSourceItem_{obj_id}']"
    driver.wait_for_xpath_to_disappear(source_view_xpath)
    # Test that source view appears after changing prefs
    driver.click_xpath(last_30_days_button)
    driver.wait_for_xpath(source_view_xpath)
def create_token(group_id, permissions=None, created_by_id=None, description=None):
    """Create and commit a Token attached to a group.

    Parameters
    ----------
    group_id : int
        Id of the Group the token grants access to.
    permissions : list of str, optional
        ACL ids to attach to the token (default: no ACLs).
    created_by_id : int, optional
        Id of the User who owns the token, if any.
    description : str, optional
        Human-readable description stored on the token.

    Returns
    -------
    The id of the newly committed Token.
    """
    # Bug fix: the original used a mutable default (``permissions=[]``),
    # which is shared across calls; use a None sentinel instead.
    if permissions is None:
        permissions = []
    group = Group.query.get(group_id)
    t = Token(acl_ids=permissions, created_by_id=created_by_id,
              description=description)
    t.groups.append(group)
    if created_by_id:
        u = User.query.get(created_by_id)
        u.tokens.append(t)
        DBSession().add(u)
    DBSession().add(t)
    DBSession().commit()
    return t.id
def public_source_groupphotometry(public_source_photometry_point):
    """Return the GroupPhotometry row joining the point to its first group."""
    stmt = sa.select(GroupPhotometry).filter(
        GroupPhotometry.group_id == public_source_photometry_point.groups[0].id,
        GroupPhotometry.photometr_id == public_source_photometry_point.id,
    )
    return DBSession().execute(stmt).scalars().first()
def public_source_followup_request(public_group_sedm_allocation, public_source, user):
    """Create and commit a far-future IFU FollowupRequest for ``public_source``."""
    request_payload = {
        'priority': "5",
        'start_date': '3020-09-01',
        'end_date': '3022-09-01',
        'observation_type': 'IFU',
    }
    followup = FollowupRequest(
        obj=public_source,
        allocation=public_group_sedm_allocation,
        payload=request_payload,
        requester_id=user.id,
    )
    DBSession().add(followup)
    DBSession().commit()
    return followup
def public_streamuser(public_stream, user):
    """Return the StreamUser row linking ``user`` to ``public_stream``."""
    link_query = DBSession().query(StreamUser).filter(
        StreamUser.user_id == user.id,
        StreamUser.stream_id == public_stream.id,
    )
    return link_query.first()
def is_already_deleted(instance, table): """ Helper function to check if a given ORM instance has already been deleted previously, either by earlier teardown functions or by a test itself through the API. """ # If the instance is marked detached, that means it was deleted earlier in the # current transaction. if instance in DBSession() and inspect(instance).detached: return True if instance not in DBSession(): return DBSession().query(table).filter(table.id == instance.id).first() is None # If the instance is in the session and has not been detached (deleted + committed) # then it still requires some teardown actions. return False
def public_source_followup_request_target_group(
        public_source_followup_request):
    """Return the FollowupRequestTargetGroup row for the request's first target group."""
    request_id = public_source_followup_request.id
    first_group_id = public_source_followup_request.target_groups[0].id
    return (
        DBSession()
        .query(FollowupRequestTargetGroup)
        .filter(
            FollowupRequestTargetGroup.followuprequest_id == request_id,
            FollowupRequestTargetGroup.group_id == first_group_id,
        )
        .first()
    )
def public_thumbnail(public_source):
    """Return the most recently created Thumbnail for ``public_source``."""
    newest_first = (
        DBSession()
        .query(Thumbnail)
        .filter(Thumbnail.obj_id == public_source.id)
        .order_by(Thumbnail.id.desc())
    )
    return newest_first.first()
def add_user(username, roles=None, auth=False):
    """Get or create a User, ensuring it carries the given roles.

    Parameters
    ----------
    username : str
        Username of the user to fetch or create.
    roles : list of str, optional
        Role ids to attach to the user (default: none).
    auth : bool
        When True, also create a Google OAuth2 social-auth record for a
        newly created user.

    Returns
    -------
    The freshly re-queried User instance.
    """
    # Bug fix: the original used a mutable default (``roles=[]``), which is
    # shared across calls; use a None sentinel instead.
    if roles is None:
        roles = []
    user = User.query.filter(User.username == username).first()
    if user is None:
        user = User(username=username)
        if auth:
            # Return value intentionally discarded (original bound it to an
            # unused local); only the side effect matters.
            TornadoStorage.user.create_social_auth(
                user, user.username, 'google-oauth2')
    for rolename in roles:
        role = Role.query.get(rolename)
        if role not in user.roles:
            user.roles.append(role)
    DBSession().add(user)
    DBSession().commit()
    return User.query.filter(User.username == username).first()
def source_notification_user(public_group):
    """Create a committed user with email and SMS notifications enabled."""
    unique = str(uuid.uuid4())
    email = f"{unique}@cesium.ml.org"
    notification_prefs = {
        "allowEmailNotifications": True,
        "allowSMSNotifications": True
    }
    user = User(
        username=email,
        contact_email=email,
        contact_phone="+12345678910",
        groups=[public_group],
        roles=[models.Role.query.get("Full user")],
        preferences=notification_prefs,
    )
    DBSession().add(user)
    DBSession().commit()
    return user
def groups(obj, create, passed_groups, *args, **kwargs):
    """factory_boy post-generation hook: populate ``obj`` with photometry,
    thumbnails, a comment, and a spectrum, all visible to ``passed_groups``.

    Cycles two instruments against three ZTF filters for 10 iterations,
    adding a detection point and a ``flux=99`` point per iteration.
    """
    if not passed_groups:
        passed_groups = []
    instruments = [InstrumentFactory(), InstrumentFactory()]
    filters = ['ztfg', 'ztfr', 'ztfi']
    # NOTE(review): the loop variable shadows the builtin ``filter``.
    for instrument, filter in islice(
            zip(cycle(instruments), cycle(filters)), 10):
        # Reseed so factory-generated random values differ per iteration.
        np.random.seed()
        phot1 = PhotometryFactory(
            obj_id=obj.id,
            instrument=instrument,
            filter=filter,
            groups=passed_groups,
            origin=uuid.uuid4(),
        )
        DBSession().add(phot1)
        DBSession().add(
            PhotometryFactory(
                obj_id=obj.id,
                flux=99.0,
                fluxerr=99.0,
                instrument=instrument,
                filter=filter,
                groups=passed_groups,
                origin=uuid.uuid4(),
            ))
    DBSession().add(ThumbnailFactory(obj_id=obj.id, type="new"))
    DBSession().add(ThumbnailFactory(obj_id=obj.id, type="ps1"))
    DBSession().add(CommentFactory(obj_id=obj.id, groups=passed_groups))
    DBSession().add(
        SpectrumFactory(obj_id=obj.id, instrument=instruments[0]))
    DBSession().commit()
def public_source_followup_request_target_group(
        public_source_followup_request):
    """Return the FollowupRequestTargetGroup row for the request's first target group."""
    stmt = sa.select(FollowupRequestTargetGroup).filter(
        FollowupRequestTargetGroup.followuprequest_id
        == public_source_followup_request.id,
        FollowupRequestTargetGroup.group_id
        == public_source_followup_request.target_groups[0].id,
    )
    return DBSession().execute(stmt).scalars().first()
def public_source_groupspectrum(public_source_spectrum):
    """Return the GroupSpectrum row joining the spectrum to its first group."""
    first_group_id = public_source_spectrum.groups[0].id
    link_query = DBSession().query(GroupSpectrum).filter(
        GroupSpectrum.group_id == first_group_id,
        GroupSpectrum.spectr_id == public_source_spectrum.id,
    )
    return link_query.first()
def public_groupstream(public_group):
    """Return the GroupStream row linking ``public_group`` to its first stream."""
    link_query = DBSession().query(GroupStream).filter(
        GroupStream.group_id == public_group.id,
        GroupStream.stream_id == public_group.streams[0].id,
    )
    return link_query.first()
def shift_user(public_group, public_stream):
    """Yield a view-only user granted the "Manage shifts" ACL."""
    view_only_role = (
        DBSession()
        .execute(sa.select(models.Role).filter(models.Role.id == "View only"))
        .scalars()
        .first()
    )
    manage_shifts_acl = (
        DBSession()
        .execute(sa.select(models.ACL).filter(models.ACL.id == "Manage shifts"))
        .scalars()
        .first()
    )
    user = UserFactory(
        groups=[public_group],
        roles=[view_only_role],
        streams=[public_stream],
        acls=[manage_shifts_acl],
    )
    user_id = user.id
    yield user
    UserFactory.teardown(user_id)
def group_admin_user(public_group, public_stream):
    """Yield a user holding the "Group admin" role within ``public_group``."""
    admin_role = (
        DBSession()
        .execute(sa.select(models.Role).filter(models.Role.id == "Group admin"))
        .scalars()
        .first()
    )
    user = UserFactory(
        groups=[public_group],
        roles=[admin_role],
        streams=[public_stream],
    )
    user_id = user.id
    # Promote the membership row for this group to admin.
    membership = (
        DBSession()
        .execute(
            sa.select(GroupUser).filter(
                GroupUser.group_id == public_group.id,
                GroupUser.user_id == user.id,
            )
        )
        .scalars()
        .first()
    )
    membership.admin = True
    DBSession().commit()
    yield user
    UserFactory.teardown(user_id)
def add_phot_spec(source, create, value, *args, **kwargs):
    """factory_boy post-generation hook: attach photometry and a spectrum.

    Cycles two instruments against three filters for 10 iterations, adding
    a normal point and a sentinel-valued point each time, then a single
    spectrum on the first instrument, and commits everything.
    """
    instruments = [InstrumentFactory(), InstrumentFactory()]
    filters = ['g', 'rpr', 'ipr']
    # NOTE(review): the loop variable shadows the builtin ``filter``.
    for instrument, filter in islice(
            zip(cycle(instruments), cycle(filters)), 10):
        DBSession().add(
            PhotometryFactory(source_id=source.id,
                              instrument=instrument,
                              filter=filter))
        # Sentinel values (mag=99, lim_mag=30) — presumably a non-detection
        # point; verify against the photometry model's conventions.
        DBSession().add(
            PhotometryFactory(source_id=source.id,
                              mag=99.,
                              e_mag=99.,
                              lim_mag=30.,
                              instrument=instrument,
                              filter=filter))
    DBSession().add(
        SpectrumFactory(source_id=source.id, instrument=instruments[0]))
    DBSession().commit()
def public_source_groupphotometry(public_source_photometry_point):
    """Return the GroupPhotometry row joining the point to its first group."""
    point = public_source_photometry_point
    return (
        DBSession()
        .query(GroupPhotometry)
        .filter(
            GroupPhotometry.group_id == point.groups[0].id,
            GroupPhotometry.photometr_id == point.id,
        )
        .first()
    )
def import_table(ptf_table, skyportal_table, columns=None, column_map=None,
                 condition=None, dedupe=None, sql_statement=None):
    """Copy rows from a PTF database table into a SkyPortal table.

    Parameters
    ----------
    ptf_table : str
        Source table name (read via the ``pengine`` connection) when
        ``sql_statement`` is not given.
    skyportal_table : str
        Destination table name. Must be a trusted identifier — it is
        interpolated directly into SQL below.
    columns : list of str, optional
        Subset/order of columns to keep; all columns when None.
    column_map : dict, optional
        Renames applied to the source columns (default: no renames).
    condition : callable, optional
        Row predicate applied via ``DataFrame.apply(..., axis=1)``.
    dedupe : list of str, optional
        Columns on which to drop duplicate rows (default: none).
    sql_statement : str, optional
        Full SQL query to read instead of ``ptf_table``.
    """
    # Bug fix: the original used mutable defaults (``column_map={}``,
    # ``dedupe=[]``), which are shared across calls.
    if column_map is None:
        column_map = {}
    if dedupe is None:
        dedupe = []
    df = pd.read_sql(sql_statement if sql_statement is not None else ptf_table,
                     pengine, columns=columns)
    # Bug fix: the unconditional ``df = df[columns]`` raised when ``columns``
    # was left at its documented default of None.
    if columns is not None:
        df = df[columns]
    df.rename(columns=column_map, inplace=True)
    if condition:
        df = df[df.apply(condition, axis=1)]
    if 'created_at' not in df:
        df['created_at'] = datetime.now()
    for col in dedupe:
        df.drop_duplicates(subset=[col], inplace=True)
    df.to_sql(skyportal_table, DBSession().bind, index=False,
              if_exists='append')
    # SECURITY NOTE: the table name is interpolated into raw SQL (identifiers
    # cannot be bound parameters) — callers must pass trusted names only.
    try:
        max_id = DBSession().execute(
            f"SELECT MAX(id) FROM {skyportal_table};").first()[0]
        DBSession().execute(
            f"ALTER SEQUENCE {skyportal_table}_id_seq RESTART WITH {max_id + 1};")
    except Exception as e:
        # Deliberate best-effort: tables without an id sequence are skipped.
        print("Ignored exception:", e)