def make_app(cfg, baselayer_handlers, baselayer_settings):
    """Create and return a `tornado.web.Application` object with specified
    handlers and settings.

    Parameters
    ----------
    cfg : Config
        Loaded configuration.  Can be specified with '--config'
        (multiple uses allowed).
    baselayer_handlers : list
        Tornado handlers needed for baselayer to function.
    baselayer_settings : dict
        Settings needed for baselayer to function.

    """
    if cfg['cookie_secret'] == 'abc01234':
        print('!' * 80)
        print('  Your server is insecure. Please update the secret string ')
        print('  in the configuration file!')
        print('!' * 80)

    handlers = baselayer_handlers + [
        # (r'/some_url(/.*)?', MyTornadoHandler),
        (r'/example_compute', ExampleComputationHandler)
    ]

    settings = baselayer_settings
    settings.update({})  # Specify any additional Tornado settings here

    app = tornado.web.Application(handlers, **settings)
    models.init_db(**cfg['database'])
    model_util.create_tables()
    app.cfg = cfg

    return app
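# A minimal launch sketch for an app factory like the one above; in a real
# deployment baselayer's app server drives this for you. `ports.app` is an
# assumed config key, and `baselayer_handlers`/`baselayer_settings` come from
# baselayer's own loader rather than being defined here.
if __name__ == '__main__':
    import tornado.ioloop
    from baselayer.app.env import load_env

    env, cfg = load_env()
    app = make_app(cfg, baselayer_handlers, baselayer_settings)
    app.listen(cfg['ports.app'])  # assumed config key
    tornado.ioloop.IOLoop.current().start()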
def insert_test_data():
    with status("Dropping all tables"):
        drop_tables()

    with status("Creating tables"):
        create_tables()
        for model in models.Base.metadata.tables:
            print(' -', model)

    USERNAME = '******'
    with status(f"Creating dummy user: {USERNAME}... "):
        u = models.User(username=USERNAME)
        models.DBSession().add(u)
        models.DBSession().commit()

    for i in range(3):
        with status("Inserting dummy project"):
            p = models.Project(name=f'test project {i}', users=[u])
            models.DBSession().add(p)
            models.DBSession().commit()
        print(f"\n{textwrap.indent(str(p), ' ')}\n")

    with status("Assert that user has 3 projects"):
        assert len(u.projects) == 3

    with status("Inserting dummy dataset and time series... "):
        files = [models.DatasetFile(uri=f'/dir/ts{i}.npz') for i in range(3)]
        d = models.Dataset(name='test dataset', project=p, files=files)
        models.DBSession().add_all(files + [d])
        models.DBSession().commit()

    with status("Inserting dummy featureset... "):
        f = models.Featureset(project=p, name='test featureset',
                              file_uri='/dir/fset.npz',
                              features_list=['amplitude'])
        models.DBSession().add(f)
        models.DBSession().commit()

    with status("Inserting dummy model... "):
        m = models.Model(project=p, featureset=f, name='test model',
                         params={'n_estimators': 10}, type='RFC',
                         file_uri='/tmp/blah.pkl')
        models.DBSession().add(m)
        models.DBSession().commit()

    with status("Inserting dummy prediction... "):
        pr = models.Prediction(project=p, model=m,
                               file_uri='/tmp/blergh.pkl', dataset=d)
        models.DBSession().add(pr)
        models.DBSession().commit()
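# `status` above is baselayer's progress-reporting context manager (imported
# from baselayer.app.model_util in these scripts). For readers running the
# snippet standalone, a plausible minimal equivalent looks like this:
from contextlib import contextmanager

@contextmanager
def status(message):
    # Print the message, run the body, then mark the step as done.
    print(f'[·] {message}', end='', flush=True)
    try:
        yield
    finally:
        print(f'\r[✓] {message}')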
def make_app(cfg, baselayer_handlers, baselayer_settings):
    """Create and return a `tornado.web.Application` object with specified
    handlers and settings.

    Parameters
    ----------
    cfg : Config
        Loaded configuration.  Can be specified with '--config'
        (multiple uses allowed).
    baselayer_handlers : list
        Tornado handlers needed for baselayer to function.
    baselayer_settings : dict
        Settings needed for baselayer to function.

    """
    if baselayer_settings['cookie_secret'] == 'abc01234':
        print('!' * 80)
        print('  Your server is insecure. Please update the secret string ')
        print('  in the configuration file!')
        print('!' * 80)

    for path_name, path in cfg['paths'].items():
        if not os.path.exists(path):
            print("Creating %s" % path)
            try:
                os.makedirs(path)
            except Exception as e:
                print(e)

    handlers = baselayer_handlers + [
        (r'/project(/.*)?', ProjectHandler),
        (r'/dataset(/.*)?', DatasetHandler),
        (r'/features(/[0-9]+)?', FeatureHandler),
        (r'/features/([0-9]+)/(download)', FeatureHandler),
        (r'/precomputed_features(/.*)?', PrecomputedFeaturesHandler),
        (r'/models(/[0-9]+)?', ModelHandler),
        (r'/models/([0-9]+)/(download)', ModelHandler),
        (r'/predictions(/[0-9]+)?', PredictionHandler),
        (r'/predictions/([0-9]+)/(download)', PredictionHandler),
        (r'/predict_raw_data', PredictRawDataHandler),
        (r'/features_list', FeatureListHandler),
        (r'/sklearn_models', SklearnModelsHandler),
        (r'/plot_features/(.*)', PlotFeaturesHandler)
    ]

    settings = baselayer_settings
    # settings.update({})  # Specify additional settings here

    app = tornado.web.Application(handlers, **settings)
    models.init_db(**cfg['database'])
    model_util.create_tables()

    return app
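# Tornado passes each regex capture group as a positional argument to the
# handler method, so a route like r'/features/([0-9]+)/(download)' maps onto
# a two-argument method. A minimal sketch (the real FeatureHandler's base
# class and logic live elsewhere in the app):
import tornado.web

class DownloadDemoHandler(tornado.web.RequestHandler):
    def get(self, feature_id=None, action=None):
        # GET /features/12/download -> feature_id='12', action='download'
        self.write({'id': feature_id, 'action': action})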
def make_app(cfg, baselayer_handlers, baselayer_settings,
             process=None, env=None):
    """Create and return a `tornado.web.Application` object with specified
    handlers and settings.

    Parameters
    ----------
    cfg : Config
        Loaded configuration.  Can be specified with '--config'
        (multiple uses allowed).
    baselayer_handlers : list
        Tornado handlers needed for baselayer to function.
    baselayer_settings : dict
        Settings needed for baselayer to function.
    process : int
        When launching multiple app servers, which number is this?
    env : namespace
        Environment in which the app was launched.  Currently only has one
        attribute, `debug`---True if launched with `--debug`.

    """
    if cfg['cookie_secret'] == 'abc01234':
        print('!' * 80)
        print('  Your server is insecure. Please update the secret string ')
        print('  in the configuration file!')
        print('!' * 80)

    handlers = baselayer_handlers + [
        # (r'/some_url(/.*)?', MyTornadoHandler),
        (r'/example_compute', ExampleComputationHandler),
        (r'/push_notification', PushNotificationHandler)
    ]

    settings = baselayer_settings
    settings.update({})  # Specify any additional Tornado settings here

    app = tornado.web.Application(handlers, **settings)
    models.init_db(**cfg['database'])

    if process == 0:
        model_util.create_tables(add=env.debug)

    app.cfg = cfg

    return app
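# Why the `process == 0` guard: when several app-server processes start
# against the same database, only the first should touch the schema so
# concurrent CREATE TABLE statements don't race. An illustrative call (`env`
# here mimics baselayer's parsed-arguments namespace; real launches obtain it
# from baselayer.app.env.load_env()):
from types import SimpleNamespace

env = SimpleNamespace(debug=True)
app = make_app(cfg, baselayer_handlers, baselayer_settings,
               process=0, env=env)  # process 0 -> runs create_tables(add=True)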
if cfg['data_load'][data_source].get("file") is not None:
    fname = str(topdir / cfg['data_load'][data_source]["file"])
    with open(fname, "r") as f:
        src = yaml.load(f, Loader=Loader)
else:
    src = cfg['data_load'][data_source]

with status(f"Connecting to database {cfg['database']['database']}"):
    init_db(**cfg["database"])

if src.get("drop_tables", False):
    with status("Dropping all tables"):
        drop_tables()

if src.get("create_tables", False):
    with status("Creating tables"):
        create_tables()

if src.get("print_tables", False):
    for model in Base.metadata.tables:
        print(" -", model)

if src.get("create_permissions", False):
    with status("Creating permissions"):
        setup_permissions()

if src.get("users") is not None:
    with status("Creating users & sitewide public group"):
        DBSession().add(Group(name=cfg["misc"]["public_group_name"]))
        DBSession().commit()

        users = []
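# The shape of the `src` mapping consumed above, written as a Python dict for
# illustration (in the repo this normally comes from a YAML file named by
# cfg['data_load']; the keys are inferred from the .get(...) calls in the
# loader, and the users entry format is a hypothetical example):
src = {
    "drop_tables": True,
    "create_tables": True,
    "print_tables": False,
    "create_permissions": True,
    "users": [
        {"username": "admin@example.com"},  # hypothetical entry format
    ],
}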
def make_app(cfg, baselayer_handlers, baselayer_settings,
             process=None, env=None):
    """Create and return a `tornado.web.Application` object with specified
    handlers and settings.

    Parameters
    ----------
    cfg : Config
        Loaded configuration.  Can be specified with '--config'
        (multiple uses allowed).
    baselayer_handlers : list
        Tornado handlers needed for baselayer to function.
    baselayer_settings : dict
        Settings needed for baselayer to function.
    process : int
        When launching multiple app servers, which number is this?
    env : namespace
        Environment in which the app was launched.  Currently only has one
        attribute, `debug`---True if launched with `--debug`.

    """
    if cfg['app.secret_key'] == 'abc01234':
        print('!' * 80)
        print('  Your server is insecure. Please update the secret string ')
        print('  in the configuration file!')
        print('!' * 80)

    handlers = baselayer_handlers + skyportal_handlers

    settings = baselayer_settings
    settings.update(
        {
            'SOCIAL_AUTH_PIPELINE': (
                # Get the information we can about the user and return it in
                # a simple format to create the user instance later.  In some
                # cases the details are already part of the auth response from
                # the provider, but sometimes this could hit a provider API.
                'social_core.pipeline.social_auth.social_details',
                # Get the social uid from whichever service we're authing
                # thru.  The uid is the unique identifier of the given user
                # in the provider.
                'social_core.pipeline.social_auth.social_uid',
                # Verify that the current auth process is valid within the
                # current project; this is where email and domain whitelists
                # are applied (if defined).
                'social_core.pipeline.social_auth.auth_allowed',
                # Check if the current social account is already associated
                # on the site.
                'social_core.pipeline.social_auth.social_user',
                'skyportal.onboarding.get_username',
                'skyportal.onboarding.create_user',
                # Create a user account if we haven't found one yet.
                # 'social_core.pipeline.user.create_user',
                # Create the record that associates the social account with
                # the user.
                'social_core.pipeline.social_auth.associate_user',
                # Populate the extra_data field in the social record with the
                # values specified by settings (and the default ones like
                # access_token, etc.).
                'social_core.pipeline.social_auth.load_extra_data',
                # Update the user record with info from the auth service,
                # only if blank.
                'skyportal.onboarding.user_details',
                'skyportal.onboarding.setup_invited_user_permissions',
            ),
            'SOCIAL_AUTH_NEW_USER_REDIRECT_URL': '/profile?newUser=true',
            'SOCIAL_AUTH_FIELDS_STORED_IN_SESSION': ['invite_token'],
        }
    )

    app = CustomApplication(handlers, **settings)
    init_db(
        **cfg['database'],
        autoflush=False,
        engine_args={'pool_size': 10, 'max_overflow': 15, 'pool_recycle': 3600},
    )

    # If tables are found in the database, new tables will only be added in
    # debug mode.  In production, we leave the tables alone, since migrations
    # might be used.
    create_tables(add=env.debug)
    model_util.refresh_enums()
    model_util.setup_permissions()
    app.cfg = cfg

    admin_token = model_util.provision_token()
    with open('.tokens.yaml', 'w') as f:
        f.write(f'INITIAL_ADMIN: {admin_token.id}\n')
    with open('.tokens.yaml') as f:
        print('-' * 78)
        print('Tokens in .tokens.yaml:')
        print(f.read(), end='')
        print('-' * 78)

    model_util.provision_public_group()
    app.openapi_spec = openapi.spec_from_handlers(handlers)

    return app
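# The provisioned admin token can then drive API calls. A sketch, assuming
# the SkyPortal-style `Authorization: token <id>` header; the host, port,
# and endpoint are illustrative:
import requests
import yaml

with open('.tokens.yaml') as f:
    token = yaml.safe_load(f)['INITIAL_ADMIN']

r = requests.get('http://localhost:5000/api/sysinfo',
                 headers={'Authorization': f'token {token}'})
print(r.status_code, r.json())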
def setup_survey_db():
    # if os.getcwd().endswith('survey_app'):
    #     os.chdir('./cesium_web')
    env, cfg = load_env()
    for data_dir in cfg['paths'].values():
        if not os.path.exists(data_dir):
            os.makedirs(data_dir)

    db_session = init_db(**baselayer.app.config.load_config()['database'])

    # Drop & create tables
    with status('Dropping and re-creating tables'):
        drop_tables()
        create_tables()

    # Add testuser
    with status('Adding testuser'):
        u = models.User(username='******')
        models.DBSession().add(u)
        models.DBSession().commit()

    # Add project
    with status('Adding project'):
        p = models.Project(name='Survey Classifier', users=[u])
        models.DBSession().add(p)
        models.DBSession().commit()

    # Add datasets
    with status('Adding datasets'):
        for dataset_name, ts_data_dir in [
                ['Survey Light Curve Data',
                 'survey_classifier_data/data/lightcurves'],
                ['ASAS', 'survey_classifier_data/data/ASAS_lcs'],
                ['Noisified to CoRoT',
                 'survey_classifier_data/data/noisified_CoRoT_lcs'],
                ['Noisified to HATNet',
                 'survey_classifier_data/data/noisified_HATNet_lcs'],
                ['Noisified to Hipparcos',
                 'survey_classifier_data/data/noisified_Hipparcos_lcs'],
                ['Noisified to KELT',
                 'survey_classifier_data/data/noisified_KELT_lcs'],
                ['Noisified to Kepler',
                 'survey_classifier_data/data/noisified_Kepler_lcs'],
                ['Noisified to LINEAR',
                 'survey_classifier_data/data/noisified_LINEAR_lcs'],
                ['Noisified to OGLE-III',
                 'survey_classifier_data/data/noisified_OGLE-III_lcs'],
                ['Noisified to SuperWASP',
                 'survey_classifier_data/data/noisified_SuperWASP_lcs'],
                ['Noisified to TrES',
                 'survey_classifier_data/data/noisified_TrES_lcs']]:
            ts_paths = []
            # As these are only ever accessed to determine meta features,
            # only copy the first ten (arbitrary) TS
            for src in glob.glob(
                    os.path.join(os.path.abspath(ts_data_dir), '*.npz'))[:10]:
                # Add the path to the copied file in cesium data directory
                ts_paths.append(os.path.abspath(shutil.copy(
                    os.path.abspath(src), cfg['paths']['ts_data_folder'])))
            try:
                meta_features = list(load_ts(ts_paths[0])
                                     .meta_features.keys())
            except IndexError:  # No TS data on disk
                meta_features = None
            files = [models.DatasetFile(uri=ts_path) for ts_path in ts_paths]
            dataset = models.Dataset(name=dataset_name, project=p,
                                     files=files,
                                     meta_features=meta_features)
            models.DBSession().add_all(files + [dataset])
            models.DBSession().commit()
            print(f'Added dataset {dataset.id}')

    # Add featuresets
    fset_dict = {}
    for fset_name, orig_fset_path, features_list in [
            ['Survey LC Cadence/Error Features',
             './survey_classifier_data/data/survey_lc_features.npz',
             CADENCE_FEATS],
            ['ASAS',
             './survey_classifier_data/data/ASAS_features.npz',
             GENERAL_FEATS + LOMB_SCARGLE_FEATS],
            ['CoRoT',
             './survey_classifier_data/data/noisified_CoRoT_features_100.npz',
             GENERAL_FEATS + LOMB_SCARGLE_FEATS],
            ['HATNet',
             './survey_classifier_data/data/noisified_HATNet_features_100.npz',
             GENERAL_FEATS + LOMB_SCARGLE_FEATS],
            ['Hipparcos',
             './survey_classifier_data/data/noisified_Hipparcos_features_100.npz',
             GENERAL_FEATS + LOMB_SCARGLE_FEATS],
            ['KELT',
             './survey_classifier_data/data/noisified_KELT_features_100.npz',
             GENERAL_FEATS + LOMB_SCARGLE_FEATS],
            ['Kepler',
             './survey_classifier_data/data/noisified_Kepler_features_100.npz',
             GENERAL_FEATS + LOMB_SCARGLE_FEATS],
            ['LINEAR',
             './survey_classifier_data/data/noisified_LINEAR_features_100.npz',
             GENERAL_FEATS + LOMB_SCARGLE_FEATS],
            ['OGLE-III',
             './survey_classifier_data/data/noisified_OGLE-III_features_100.npz',
             GENERAL_FEATS + LOMB_SCARGLE_FEATS],
            ['SuperWASP',
             './survey_classifier_data/data/noisified_SuperWASP_features_100.npz',
             GENERAL_FEATS + LOMB_SCARGLE_FEATS],
            ['TrES',
             './survey_classifier_data/data/noisified_TrES_features_100.npz',
             GENERAL_FEATS + LOMB_SCARGLE_FEATS]]:
        fset_path = os.path.abspath(
            shutil.copy(os.path.abspath(orig_fset_path),
                        cfg['paths']['features_folder']))
        fset = models.Featureset(name=fset_name, file_uri=fset_path,
                                 project=p, features_list=features_list,
                                 task_id=None,
                                 finished=datetime.datetime.now())
        models.DBSession().add(fset)
        models.DBSession().commit()
        # fset.task_id = None
        # fset.finished = datetime.datetime.now()
        # fset.save()
        fset_dict[fset_name] = fset
        print(f'Added featureset {fset.id}')

    # Add models
    # TODO: Add actual model params
    for model_name, orig_model_path, model_type, params, fset_name in [
            ['Survey LCs RFC',
             os.path.abspath('./survey_classifier_data/data/survey_classifier.pkl'),
             'RandomForestClassifier', {}, 'Survey LC Cadence/Error Features'],
            ['ASAS',
             os.path.abspath('./survey_classifier_data/data/ASAS_model_compressed.pkl'),
             'RandomForestClassifier', {}, 'ASAS'],
            ['CoRoT',
             os.path.abspath('./survey_classifier_data/data/noisified_CoRoT_model_compressed.pkl'),
             'RandomForestClassifier', {}, 'CoRoT'],
            ['HATNet',
             os.path.abspath('./survey_classifier_data/data/noisified_HATNet_model_compressed.pkl'),
             'RandomForestClassifier', {}, 'HATNet'],
            ['Hipparcos',
             os.path.abspath('./survey_classifier_data/data/noisified_Hipparcos_model_compressed.pkl'),
             'RandomForestClassifier', {}, 'Hipparcos'],
            ['KELT',
             os.path.abspath('./survey_classifier_data/data/noisified_KELT_model_compressed.pkl'),
             'RandomForestClassifier', {}, 'KELT'],
            ['Kepler',
             os.path.abspath('./survey_classifier_data/data/noisified_Kepler_model_compressed.pkl'),
             'RandomForestClassifier', {}, 'Kepler'],
            ['LINEAR',
             os.path.abspath('./survey_classifier_data/data/noisified_LINEAR_model_compressed.pkl'),
             'RandomForestClassifier', {}, 'LINEAR'],
            ['OGLE-III',
             os.path.abspath('./survey_classifier_data/data/noisified_OGLE-III_model_compressed.pkl'),
             'RandomForestClassifier', {}, 'OGLE-III'],
            ['SuperWASP',
             os.path.abspath('./survey_classifier_data/data/noisified_SuperWASP_model_compressed.pkl'),
             'RandomForestClassifier', {}, 'SuperWASP'],
            ['TrES',
             os.path.abspath('./survey_classifier_data/data/noisified_TrES_model_compressed.pkl'),
             'RandomForestClassifier', {}, 'TrES']]:
        model_path = os.path.abspath(
            shutil.copy(orig_model_path, cfg['paths']['models_folder']))
        model = models.Model(name=model_name, file_uri=model_path,
                             featureset_id=fset_dict[fset_name].id,
                             project=p, project_id=p.id, params=params,
                             type=model_type, task_id=None,
                             finished=datetime.datetime.now())
        models.DBSession().add(model)
        models.DBSession().commit()
        # model.task_id = None
        # model.finished = datetime.datetime.now()
        # model.save()
        print(f'Added model {model.id}')

    print(cfg)
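# The meta features extracted above come from cesium's time-series reader. A
# short sketch, assuming `load_ts` is cesium.time_series.load (the .npz files
# are those saved by cesium's TimeSeries.save; the path is hypothetical):
from cesium.time_series import load as load_ts

ts = load_ts('/path/to/ts0.npz')  # hypothetical path
print(sorted(ts.meta_features))   # names of the stored cadence/error metadata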
def make_app(cfg, baselayer_handlers, baselayer_settings):
    """Create and return a `tornado.web.Application` object with specified
    handlers and settings.

    Parameters
    ----------
    cfg : Config
        Loaded configuration.  Can be specified with '--config'
        (multiple uses allowed).
    baselayer_handlers : list
        Tornado handlers needed for baselayer to function.
    baselayer_settings : dict
        Settings needed for baselayer to function.

    """
    if cfg['cookie_secret'] == 'abc01234':
        print('!' * 80)
        print('  Your server is insecure. Please update the secret string ')
        print('  in the configuration file!')
        print('!' * 80)

    handlers = baselayer_handlers + [
        # API endpoints
        (r'/api/allocation(/.*)?', AllocationHandler),
        (r'/api/assignment(/.*)?', AssignmentHandler),
        (r'/api/candidates(/.*)?', CandidateHandler),
        (r'/api/classification(/[0-9]+)?', ClassificationHandler),
        (r'/api/comment(/[0-9]+)?', CommentHandler),
        (r'/api/comment(/[0-9]+)/attachment', CommentAttachmentHandler),
        (r'/api/facility', FacilityMessageHandler),
        (r'/api/filters(/.*)?', FilterHandler),
        (r'/api/followup_request(/.*)?', FollowupRequestHandler),
        (r'/api/groups/public', PublicGroupHandler),
        (r'/api/groups(/[0-9]+)/streams(/[0-9]+)?', GroupStreamHandler),
        (r'/api/groups(/[0-9]+)/users(/.*)?', GroupUserHandler),
        (r'/api/groups(/[0-9]+)?', GroupHandler),
        (r'/api/instrument(/[0-9]+)?', InstrumentHandler),
        (r'/api/invitations(/.*)?', InvitationHandler),
        (r'/api/newsfeed', NewsFeedHandler),
        (r'/api/observing_run(/[0-9]+)?', ObservingRunHandler),
        (r'/api/photometry(/[0-9]+)?', PhotometryHandler),
        (r'/api/sharing', SharingHandler),
        (r'/api/photometry/bulk_delete/(.*)', BulkDeletePhotometryHandler),
        (r'/api/sources(/[0-9A-Za-z-_]+)/photometry', ObjPhotometryHandler),
        (r'/api/sources(/[0-9A-Za-z-_]+)/spectra', ObjSpectraHandler),
        (r'/api/sources(/[0-9A-Za-z-_]+)/offsets', SourceOffsetsHandler),
        (r'/api/sources(/[0-9A-Za-z-_]+)/finder', SourceFinderHandler),
        (r'/api/sources(/.*)?', SourceHandler),
        (r'/api/spectrum(/[0-9]+)?', SpectrumHandler),
        (r'/api/streams(/[0-9]+)/users(/.*)?', StreamUserHandler),
        (r'/api/streams(/[0-9]+)?', StreamHandler),
        (r'/api/sysinfo', SysInfoHandler),
        (r'/api/taxonomy(/.*)?', TaxonomyHandler),
        (r'/api/telescope(/[0-9]+)?', TelescopeHandler),
        (r'/api/thumbnail(/[0-9]+)?', ThumbnailHandler),
        (r'/api/user(/.*)?', UserHandler),
        (r'/api/internal/tokens(/.*)?', TokenHandler),
        (r'/api/internal/profile', ProfileHandler),
        (r'/api/internal/dbinfo', DBInfoHandler),
        (r'/api/internal/source_views(/.*)?', SourceViewsHandler),
        (r'/api/internal/source_counts(/.*)?', SourceCountHandler),
        (r'/api/internal/plot/photometry/(.*)', PlotPhotometryHandler),
        (r'/api/internal/plot/spectroscopy/(.*)', PlotSpectroscopyHandler),
        (r'/api/internal/instrument_forms', RoboticInstrumentsHandler),
        (r'/api/internal/plot/airmass/(.*)', PlotAirmassHandler),
        (r'/api/internal/log', LogHandler),
        (r'/api/internal/recent_sources(/.*)?', RecentSourcesHandler),
        (r'/api/.*', InvalidEndpointHandler),
        (r'/become_user(/.*)?', BecomeUserHandler),
        (r'/logout', LogoutHandler),
        # User-facing pages
        (r'/.*', MainPageHandler)  # Route all frontend pages, such as
                                   # `/source/g647ba`, through the main page.
                                   #
                                   # Refer to Main.jsx for routing info.
    ]

    settings = baselayer_settings
    settings.update({
        'SOCIAL_AUTH_PIPELINE': (
            # Get the information we can about the user and return it in a
            # simple format to create the user instance later.  In some cases
            # the details are already part of the auth response from the
            # provider, but sometimes this could hit a provider API.
            'social_core.pipeline.social_auth.social_details',
            # Get the social uid from whichever service we're authing thru.
            # The uid is the unique identifier of the given user in the
            # provider.
            'social_core.pipeline.social_auth.social_uid',
            # Verify that the current auth process is valid within the current
            # project; this is where email and domain whitelists are applied
            # (if defined).
            'social_core.pipeline.social_auth.auth_allowed',
            # Check if the current social account is already associated on
            # the site.
            'social_core.pipeline.social_auth.social_user',
            'skyportal.onboarding.get_username',
            'skyportal.onboarding.create_user',
            # Create a user account if we haven't found one yet.
            # 'social_core.pipeline.user.create_user',
            # Create the record that associates the social account with the
            # user.
            'social_core.pipeline.social_auth.associate_user',
            # Populate the extra_data field in the social record with the
            # values specified by settings (and the default ones like
            # access_token, etc.).
            'social_core.pipeline.social_auth.load_extra_data',
            # Update the user record with info from the auth service, only
            # if blank.
            'skyportal.onboarding.user_details',
            'skyportal.onboarding.setup_invited_user_permissions',
        ),
        'SOCIAL_AUTH_NEW_USER_REDIRECT_URL': '/profile?newUser=true',
        'SOCIAL_AUTH_FIELDS_STORED_IN_SESSION': ['invite_token'],
    })

    app = tornado.web.Application(handlers, **settings)
    models.init_db(**cfg['database'])
    baselayer_model_util.create_tables()
    model_util.setup_permissions()
    app.cfg = cfg

    admin_token = model_util.provision_token()
    with open('.tokens.yaml', 'w') as f:
        f.write(f'INITIAL_ADMIN: {admin_token.id}\n')
    with open('.tokens.yaml', 'r') as f:
        print('-' * 78)
        print('Tokens in .tokens.yaml:')
        print(f.read(), end='')
        print('-' * 78)

    model_util.provision_public_group()
    app.openapi_spec = openapi.spec_from_handlers(handlers)

    return app
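# Custom pipeline steps such as skyportal.onboarding.get_username follow
# python-social-auth's convention: a plain function that receives the
# accumulated pipeline state as keyword arguments and returns a dict that is
# merged into that state. An illustrative sketch (the real skyportal
# implementation differs):
def get_username(backend=None, details=None, *args, **kwargs):
    # Derive the username from the email reported by the auth provider.
    return {'username': details.get('email')}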
# (Imports below are inferred from the usage that follows.)
import pandas as pd
from sqlalchemy import create_engine
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import Session

from baselayer.app import model_util as baselayer_model_util
from baselayer.app.config import load_config
from baselayer.app.models import init_db

# Reflect the existing PTF database so its tables are available as mapped
# classes.
pBase = automap_base()
pengine = create_engine("postgresql://skyportal:@localhost:5432/ptf")
pBase.prepare(pengine, reflect=True)
pBase.metadata.bind = pengine
pBase.metadata.create_all()

pSource = pBase.classes.sources
pPhotometry = pBase.classes.phot
pTelescope = pBase.classes.telescopes
pInstrument = pBase.classes.instruments

psession = Session(pengine)

init_db(**load_config()['database'])
baselayer_model_util.create_tables()


def import_table(
    ptf_table,
    skyportal_table,
    columns=None,
    column_map={},
    condition=None,
    dedupe=[],
    sql_statement=None,
):
    df = pd.read_sql(
        sql_statement if sql_statement is not None else ptf_table,
        pengine,
        columns=columns,
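# Hypothetical usage of import_table: copy the PTF `telescopes` table into
# SkyPortal's `telescopes` table, renaming one column on the way. The column
# names are illustrative, not the actual PTF schema, and the body of
# import_table is truncated above.
import_table(
    'telescopes', 'telescopes',
    columns=['name', 'lat', 'lon'],
    column_map={'lon': 'longitude'},  # hypothetical rename
)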