def LoadAction(cls, action_name):
    """Instantiate and return the Action implementation for *action_name*.

    Dynamically imports ``actions.lib.<action_name>.action`` and
    constructs its ``Action`` class with no arguments.
    """
    Logger.Info('%s - ActionController.LoadAction - started' % __name__)
    Logger.Info('%s - ActionController.LoadAction - started with action_name:%s' % (__name__, action_name))
    action_module = my_import('actions.lib.%s.action' % action_name)
    action_instance = getattr(action_module, 'Action')()
    Logger.Info('%s - ActionController.LoadAction - finished' % __name__)
    return action_instance
def LoadVisualization(cls, viz_name):
    """Instantiate and return the Visualization implementation for *viz_name*.

    Dynamically imports ``metalayercore.visualizations.lib.<viz_name>.visualization``
    and constructs its ``Visualization`` class with no arguments.
    """
    Logger.Info('%s - VisualizationController.LoadVisualization - started' % __name__)
    # FIX: the detail log previously labelled the value "action_name" — a
    # copy-paste from ActionController.LoadAction; it now names the real parameter.
    Logger.Info('%s - VisualizationController.LoadVisualization - started with viz_name:%s' % (__name__, viz_name))
    viz_module = my_import('metalayercore.visualizations.lib.%s.visualization' % viz_name)
    viz = getattr(viz_module, 'Visualization')()
    Logger.Info('%s - VisualizationController.LoadVisualization - finished' % __name__)
    return viz
def get_experiment_obj(experiment):
    """Resolve *experiment* to an experiment object.

    A string is treated as a dotted module path whose ``experiment``
    attribute is returned; any other value is assumed to already be an
    experiment object and is passed through unchanged.
    """
    if not isinstance(experiment, str):
        return experiment
    return my_import(experiment).experiment
def LoadDataPoint(cls, data_point_name):
    """Instantiate and return the DataPoint implementation for *data_point_name*.

    Dynamically imports ``datapoints.lib.<data_point_name>.datapoint`` and
    constructs its ``DataPoint`` class with no arguments.
    """
    Logger.Info('%s - DataPointController.LoadDataPoint - started' % __name__)
    Logger.Debug('%s - DataPointController.LoadDataPoint - started with data_point_name:%s' % (__name__, data_point_name))
    datapoint_module = my_import('datapoints.lib.%s.datapoint' % data_point_name)
    datapoint_instance = getattr(datapoint_module, 'DataPoint')()
    Logger.Info('%s - DataPointController.LoadDataPoint - finished' % __name__)
    return datapoint_instance
def LoadOutput(cls, output_name):
    """Instantiate and return the Output implementation for *output_name*.

    Dynamically imports ``outputs.lib.<output_name>.output`` and constructs
    its ``Output`` class with no arguments.
    """
    Logger.Info('%s - OutputController.LoadOutput - started' % __name__)
    Logger.Info('%s - OutputController.LoadOutput - started with output_name:%s' % (__name__, output_name))
    output_module = my_import('outputs.lib.%s.output' % output_name)
    output_instance = getattr(output_module, 'Output')()
    Logger.Info('%s - OutputController.LoadOutput - finished' % __name__)
    return output_instance
def ExtractAPIKeyHelp(cls, action_name):
    """Return the help text of the ``api_key`` config element for *action_name*.

    Loads the action's unconfigured config and pulls the ``help`` value of
    the element named ``api_key``. Raises IndexError if the action declares
    no such element.
    """
    Logger.Info('%s - ActionController.ExtractAPIKeyHelp - started' % __name__)
    Logger.Info('%s - ActionController.ExtractAPIKeyHelp - started with action_name:%s' % (__name__, action_name))
    action_module = my_import('actions.lib.%s.action' % action_name)
    action_instance = getattr(action_module, 'Action')()
    api_key_elements = [element for element in action_instance.get_unconfigured_config()['elements'] if element['name'] == 'api_key']
    help_text = api_key_elements[0]['help']
    Logger.Info('%s - ActionController.ExtractAPIKeyHelp - finished' % __name__)
    return help_text
def ExtractAPIKeyHelp(cls, data_point_name):
    """Return the help text of the ``api_key`` config element for *data_point_name*.

    Loads the data point's unconfigured config and pulls the ``help`` value
    of the element named ``api_key``. Raises IndexError if the data point
    declares no such element.
    """
    Logger.Info('%s - DataPointController.ExtractAPIKeyHelp - started' % __name__)
    Logger.Debug('%s - DataPointController.ExtractAPIKeyHelp - started with data_point_name:%s' % (__name__, data_point_name))
    datapoint_module = my_import('datapoints.lib.%s.datapoint' % data_point_name)
    datapoint_instance = getattr(datapoint_module, 'DataPoint')()
    api_key_elements = [element for element in datapoint_instance.get_unconfigured_config()['elements'] if element['name'] == 'api_key']
    help_text = api_key_elements[0]['help']
    Logger.Info('%s - DataPointController.ExtractAPIKeyHelp - finished' % __name__)
    return help_text
def login_or_register(request):
    # Django view: a GET renders the combined login/registration form;
    # a POST handles whichever of the two submit buttons was pressed.
    if not request.method == 'POST':
        # GET: prefill optional registration code / email from the query string.
        template_data = {
            'code':request.GET['code'] if 'code' in request.GET else '',
            'email':request.GET['email'] if 'email' in request.GET else '',
        }
        return render_to_response(
            'thecommunity/login_or_register/login_or_register.html',
            template_data,
            context_instance=RequestContext(request)
        )
    else:
        if 'login' in request.POST:
            # Login branch: validate credentials via the UserController.
            username = request.POST.get('login_username')
            password = request.POST.get('login_password')
            passed, errors = UserController.LoginUser(request, username, password)
            if not passed:
                # Re-render the form with the login errors.
                return render_to_response(
                    'thecommunity/login_or_register/login_or_register.html',
                    {
                        'login_errors':errors
                    },
                    context_instance=RequestContext(request)
                )
            user = UserController.GetUserByUserName(username)
            # Delegate the post-login response/redirect to the configured login policy.
            login_policy = getattr(my_import(settings.LOGIN_POLICY['module']), 'LoginPolicy')()
            return login_policy.process_login(user, request)
        elif 'register' in request.POST:
            # Registration branch: create the account, then log the user in.
            username = request.POST.get('register_username')
            password1 = request.POST.get('register_password1')
            password2 = request.POST.get('register_password2')
            registration_code = request.POST.get('register_code')
            passed, errors = UserController.RegisterUser(request, username, password1, password2, registration_code)
            if not passed:
                # Re-render the form with the registration errors.
                return render_to_response(
                    'thecommunity/login_or_register/login_or_register.html',
                    {
                        'register_errors':errors
                    },
                    context_instance=RequestContext(request)
                )
            user = UserController.GetUserByUserName(username)
            # Run every active registration policy's post-registration hook.
            for registration_policy in [getattr(my_import(rp['module']), 'RegistrationPolicy')() for rp in settings.REGISTRATION_POLICIES.values() if rp['active']]:
                registration_policy.post_registration(user, {'register_code':registration_code})
            login_policy = getattr(my_import(settings.LOGIN_POLICY['module']), 'LoginPolicy')()
            # Third argument presumably marks this as a first login right after
            # registration — TODO confirm against process_login's signature.
            return login_policy.process_login(user, request, True)
        # NOTE(review): a POST with neither 'login' nor 'register' falls
        # through and returns None — confirm that cannot happen in practice.
def UserCanBeLoggedInAndRedirected(cls, request, username, password):
    """Authenticate the credentials, log the user in, and find a redirect.

    Returns a 3-tuple ``(passed, errors, redirect)``: on success
    ``(True, [], url)``; on any failure ``(False, [message], None)``.
    """
    user = authenticate(username=username, password=password)
    if user is None:
        # Bad credentials.
        return False, [constants.TEMPLATE_STRINGS['login']['form_errors_incorrect_username_or_password']], None
    elif not user.is_active:
        # Account exists but is disabled.
        return False, [constants.TEMPLATE_STRINGS['login']['form_errors_user_inactive']], None
    # Session is established BEFORE the redirection policies run.
    login(request, user)
    # First policy that yields a redirect wins; later policies are not consulted.
    for policy in settings.LOGIN_AND_REDIRECTION_POLICIES:
        policy_module = my_import('enterprise.userprofiles.loginandredirectionpolicies.%s' % policy)
        policy_function = getattr(policy_module, 'process_login_and_get_redirect')
        user_redirect = policy_function(request, user)
        if user_redirect:
            return True, [], user_redirect
    # NOTE(review): at this point the user HAS been logged in yet we report
    # failure because no policy produced a redirect — confirm this is intended.
    return False, [constants.TEMPLATE_STRINGS['login']['form_errors_user_type_not_supported']], None
def AllAvailableUploaders(cls):
    """Class Method:
    Return instances of every DataUploader available on disk.

    Scans the ``datauploader.lib`` package directory and instantiates the
    ``DataUploader`` class of each sub-package found.

    Returns
    -------
    List of DataUploader instances, one per uploader package

    Raises
    ------
    AttributeError: if the package and/or module structure on disk is corrupt
    """
    import datauploader.lib as data_uploader_lib
    lib_path = os.path.dirname(data_uploader_lib.__file__)
    # Entries without a dot in the name are treated as uploader packages
    # (this skips regular files such as __init__.py).
    uploader_names = [entry for entry in os.listdir(lib_path) if not re.search(r'\.', entry)]
    return [
        getattr(my_import('datauploader.lib.%s.datauploader' % name), 'DataUploader')()
        for name in uploader_names
    ]
('all_selected', "Datasets.selected", "Experiments.all_experiments", False, False), ('selected_baseline', 'Datasets.selected', "Experiments.baselines", True, False), ('selected_sparsity', 'Datasets.selected', "Experiments.baseline_sparsity", True, False), ('selected_all_presented', 'Datasets.selected', "Experiments.all_presented", False, False), ('selected_competence_counting', 'Datasets.selected', "Experiments.competence_counting", True, False), ('selected_competence_hybrids', 'Datasets.selected', "Experiments.competence_hybrids", True, False), ('selected_competence_with_similarity', 'Datasets.selected', "Experiments.competence_with_similarity", True, False), ('selected_competence_with_sparsity', 'Datasets.selected', "Experiments.competence_with_sparsity", True, False) ] runs = [(cat_name, os.path.join(STORAGE_DIR, cat_name), dsfn, expn, do_plots, gen_stats) for (cat_name, dsfn, expn, do_plots, gen_stats) in runs] for (_, d, dsfn, _, _, _) in runs: named_data_sets = my_import(dsfn).named_data_sets ds_names = [nds[0] for nds in named_data_sets] shutil.rmtree(d, True) for dsn in ds_names: shutil.copytree(os.path.join(ALL_DIR, dsn), os.path.join(d, dsn)) for (cat_name, d, dsfn, expn, do_plots, gen_stats) in runs: maybe_make_dirs(d) report_dir = os.path.join(REPORT_DIR, cat_name) maybe_make_dirs(report_dir) logging.info("Beginning experiment execution on %s" % dsfn) my_call(["python", "-O", "test.py", "--nocolour", "--docreatesummary", "--latexencode", expn, dsfn, d] + (["--docreateplots", "--keyonlast"] if do_plots else [])) for fn in glob(os.path.join(d, "*.pdf")): shutil.copyfile(fn, os.path.join(report_dir, os.path.basename(fn))) for (extra_opts_name, extra_opts) in [('_abbreviated', ["--abbreviatepast", "35"]), ("", [])]:
else: logging.info("Failed for %s" % h) return called_on if __name__ == '__main__': parser = optparse.OptionParser("usage: %prog [options]") parser.add_option('--remote', dest='remote', default=False, action='store_true') parser.add_option('--hosts', dest='hosts', default="", action='store') parser.add_option('--upto', dest='upto', type="int", default=10, action='store') parser.add_option('--password', help='Password to use when performing distributed computation', dest='password', default="changeme", action='store') parser.add_option('--nohead', dest='no_head', default=False, action='store_true') (options, args) = parser.parse_args() if options.hosts: hosts = my_import(options.hosts).hosts logging.basicConfig(format='%(asctime)s %(message)s',level=logging.DEBUG) CODE_DIR = os.path.expanduser(CODE_DIR) #Host will be head if nothing else provided . . . host = args[0] if len(args) > 0 else None head = args[-1] if len(args) > 0 else None os.chdir(CODE_DIR) if not options.remote: cat_name, dsfn = 'all', "Datasets.all" d = os.path.join(STORAGE_DIR, cat_name) logging.info("Beginning experiment execution on %s" % dsfn)
def get_named_data_sets_obj(named_data_sets):
    """Resolve *named_data_sets* to the actual named-data-sets value.

    A string is treated as a dotted module path whose ``named_data_sets``
    attribute is returned; any other value is passed through unchanged.
    """
    if not isinstance(named_data_sets, str):
        return named_data_sets
    return my_import(named_data_sets).named_data_sets
default=None, action="store", ) (options, args) = parser.parse_args() data_info_name, input_results_file, output_file = args cv_no = options.cv input_results_file = os.path.expanduser(input_results_file) output_file = os.path.expanduser(output_file) data_sets_dict = dict(chain(non_textual_data_sets, textual_data_sets)) data_info = data_sets_dict[data_info_name]() if options.experiment: experiment_obj = my_import(options.experiment).experiment var_names = [name for (name, variation) in experiment_obj.named_experiment_variations] variation_files = [os.path.join(input_results_file, var_name + ".tar.gz") for var_name in var_names] elif os.path.isdir(input_results_file): variation_files = glob(os.path.join(input_results_file, ".tar.gz")) else: variation_files = [input_results_file] get_var_name = lambda f: os.path.splitext(os.path.splitext(os.path.basename(f))[0])[0] vn_to_fsg_list = [(get_var_name(f), partial(open, f, "r")) for f in variation_files] exp_result = ExperimentResult() exp_result.load_from_csvs(vn_to_fsg_list) def my_stream_getter(var_name, cv): # There's only going to be one variation, so only concerned with cv
def GetAnalyzerByName(cls, name, request):
    """Import the module at dotted path *name* and return its ``Analyzer``
    constructed with *request*."""
    analyzer_module = my_import(name)
    analyzer_class = getattr(analyzer_module, 'Analyzer')
    return analyzer_class(request)