def test_ssh(nels_id):
    """Exercise the ssh use case for a NeLS user.

    Fetches the user's ssh credentials, lists the remote home folder and
    verifies that the expected "Personal" and "Projects" folders exist.

    :param nels_id: NeLS user id whose ssh access is tested.
    :return: True when the remote listing succeeded, False otherwise.
    """
    feed_utils.heading("ssh use case. nels_id: %s" % nels_id)
    credential = ssh_utils.get_ssh_credentials(nels_id)
    if not credential:
        feed_utils.failed("fetching key")
        return False
    feed_utils.ok("ssh key fetched")
    (host, username, key_file) = credential
    try:
        (status, items, error) = run_utils.launch_remote_with_key(
            key_file, username, host, "ls ")
        if status != 0:
            feed_utils.error(error)
            return False
        if "Personal" in items:
            feed_utils.ok("Personal folder found")
        else:
            feed_utils.failed("Personal folder not found")
        if "Projects" in items:
            feed_utils.ok("Projects folder found")
        else:
            feed_utils.failed("Projects folder not found")
        return True
    finally:
        # The private key must never be left on disk, even when the remote
        # command fails (the original leaked it on the error path).
        feed_utils.info("cleaning key file")
        run_utils.launch_cmd("rm -f %s" % key_file)
def launch_cmd(cmd, as_user=''):
    """Run a shell command and capture its output.

    :param cmd: command line to execute through the shell.
    :param as_user: when non-empty, wrap the command in ``su - <user> -c``.
    :return: list ``[exit_code, stdout, stderr]``.
    """
    effective_command = cmd
    if as_user != '':
        # Escape quotes/spaces for the inner shell spawned by `su`.
        # (The original used "\'" which is a no-op escape in Python.)
        effective_command = "su - %s -c '%s'" % (
            as_user, cmd.replace("'", "\\'").replace(" ", "\\ "))
    feed_utils.info(effective_command)
    # Bug fix: the original built effective_command but then executed the
    # unwrapped `cmd`, so the as_user feature never took effect.
    if os.name != 'nt':
        process = subprocess.Popen(effective_command, stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE, shell=True)
    else:
        # Hide the console window that would otherwise flash up on Windows.
        startupinfo = subprocess.STARTUPINFO()
        startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
        process = subprocess.Popen(effective_command, stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE, shell=True,
                                   startupinfo=startupinfo)
    (output, err) = process.communicate()
    exit_code = process.wait()
    result = [exit_code, output, err]
    feed_utils.info(result)
    return result
def get(self):
    """OAuth2 authorize endpoint: validate parameters and dispatch.

    Rejects requests without a valid ``response_type`` (only ``token`` and
    ``code`` are accepted), then hands the request to the oauth controller
    and maps its response onto the tornado response.
    """
    try:
        response_type = self.get_argument("response_type", None)
        if not response_type:
            tornado_utils.return_with_status(self, 401, "error",
                                             "missing parameters")
            # Bug fix: without this return the handler kept processing
            # and wrote a second response on top of the error.
            return
        if response_type not in ["token", "code"]:
            tornado_utils.return_with_status(self, 401, "error",
                                             "missing parameters")
            return
        params = {}
        # Tornado stores each query argument as a list; flatten singletons.
        for k, v in self.request.arguments.items():
            params[k] = v[0] if len(v) == 1 else None
        if 'state' not in params.keys():
            params['state'] = None
        feed_utils.info(params)
        # dispatch request
        oauth_request = self.request
        oauth_request.get_param = lambda key: params[key]
        oauth_response = self.controller.dispatch(oauth_request, environ={})
        # map response
        for name, value in list(oauth_response.headers.items()):
            self.set_header(name, value)
        feed_utils.info("%s : %s" % (oauth_response.status_code,
                                     oauth_response.body))
        self.set_status(oauth_response.status_code)
        self.write(oauth_response.body)
    except Exception as err:
        self.set_header('Content-Type', 'application/json')
        self.set_status(401)
        self.finish(json.dumps({'error': str(err)}))
def project_rename(pid, new_name):
    """Rename a project, detaching and re-attaching its members.

    The underlying group must be renamed with no members attached, so all
    users are removed first and added back afterwards.

    :param pid: project id.
    :param new_name: desired (uncleaned) project name.
    """
    if not project_exists(pid):
        run_utils.exit_fail("Project not found")
    new_name = cleanup_project_name(new_name)
    if new_name in all_project_names():
        run_utils.exit_fail('name already used')
    # remove users from the project
    [admin_users, power_users, member_users] = [
        project_members(pid, MembershipRoles.ADMIN),
        project_members(pid, MembershipRoles.POWERUSER),
        project_members(pid, MembershipRoles.MEMBER)
    ]
    # Bug fix: the original only removed admins and merely logged the other
    # roles, yet re-added every role below — leaving power/member users
    # attached during the rename.
    for uid in admin_users:
        project_user_remove(pid, uid)
    for uid in power_users:
        feed_utils.info(uid)
        project_user_remove(pid, uid)
    for uid in member_users:
        feed_utils.info(uid)
        project_user_remove(pid, uid)
    # set the new name of project
    set_project_name(project_home(pid), new_name)
    # add users back to the project
    for uid in admin_users:
        project_user_add(pid, uid, MembershipRoles.ADMIN)
    for uid in power_users:
        project_user_add(pid, uid, MembershipRoles.POWERUSER)
    for uid in member_users:
        project_user_add(pid, uid, MembershipRoles.MEMBER)
    print("project renamed successfully")
def get_subtypes_in_dataset_in_project(project_id, dataset_id):
    """Return the subtype ids of a dataset in an SBI project.

    :return: list of subtype ids ([] when the dataset has none),
        or None on HTTP/connection error.
    """
    try:
        ids = []
        response = requests.get(
            config.sbi_url("projects/%s/datasets/%s/subtypes"
                           % (project_id, dataset_id)),
            auth=(config.SBI_KEY, config.SBI_SECRET))
        if response.status_code == requests.codes.ok:
            json_response = response.json()
            if json_response == []:
                # project dataset without subtypes
                return json_response
            for uid in json_response:
                if u'id' in uid:
                    feed_utils.info(uid)
                    ids.append(uid[u'id'])
            return ids
        feed_utils.debug(response.status_code)
        feed_utils.info(response.text)
    # Bug fix: bare `except:` also swallowed SystemExit/KeyboardInterrupt.
    except Exception:
        feed_utils.error(sys.exc_info()[0])
    return None
def add_dataset_to_project(project_id, federated_id):
    """Create a test dataset of the first available type in a project.

    :param project_id: target SBI project id.
    :param federated_id: federated id sent in the request header.
    :return: True when created, None on any failure.
    """
    try:
        json_dataset_type_array = get_dataset_types()
        json_body = {
            "data_set_type_id": json_dataset_type_array["data"][0]["id"],
            "name": "dataset integration",
            "description": "test"
        }
        headers = {'federated-id': federated_id}
        response = requests.post(
            config.sbi_url("projects/%s/datasets" % (project_id)),
            headers=headers, json=json_body,
            auth=(config.SBI_KEY, config.SBI_SECRET))
        if response.status_code == requests.codes.created:
            return True
        feed_utils.info(response.status_code)
    # Bug fix: bare `except:` also swallowed SystemExit/KeyboardInterrupt.
    except Exception:
        feed_utils.error(sys.exc_info()[0])
    return None
def all_project_names():
    """Return the human-readable names of every project on disk."""
    feed_utils.info("getting names of all projects")
    projects_dir = path.join(storage_facade.PROJECTS_ROOT_DIR)
    readable = (project_name_by_pname(p) for p in os.listdir(projects_dir))
    return [name for name in readable if name != '']
def get_ssh_credentials(nels_id):
    """Fetch a user's ssh credential and materialise the key on disk.

    :param nels_id: NeLS user id.
    :return: [host, username, key_file_path], or None when no credential
        is stored for the user.
    """
    credentail = storage.get_ssh_credential(nels_id)
    if not credentail:
        return None
    host, username, key_data = credentail[0], credentail[1], credentail[2]
    feed_utils.info("fetching keys. host: %s, username:%s " % (host, username))
    key_file = path.join(config.TEMP_DIR, "%s.nels" % nels_id)
    feed_utils.info("writing key file: %s" % key_file)
    file_utils.write_to_file(key_file, key_data)
    # ssh refuses keys that are readable by others.
    run_utils.launch_cmd("chmod 600 %s" % key_file)
    return [host, username, key_file]
def test_users_list():
    """List all SBI user ids, reporting the outcome to the feed."""
    feed_utils.heading("Trying sbi user ids")
    user_ids = sbi_users.get_user_ids()
    if not user_ids:
        feed_utils.failed("get user ids")
        return
    for uid in user_ids:
        feed_utils.info(uid)
    feed_utils.ok("")
def delete_dataset_in_project(project_id, dataset_id):
    """Delete a dataset from an SBI project.

    :return: True on success (204 No Content), None on any failure.
    """
    try:
        response = requests.delete(
            config.sbi_url("projects/%s/datasets/%s"
                           % (project_id, dataset_id)),
            auth=(config.SBI_KEY, config.SBI_SECRET))
        if response.status_code == requests.codes.no_content:
            return True
        feed_utils.info(response.status_code)
        feed_utils.info(response.text)
    # Bug fix: bare `except:` also swallowed SystemExit/KeyboardInterrupt.
    except Exception:
        feed_utils.error(sys.exc_info()[0])
    return None
def configure():
    """Load config.json into the module-level configuration globals."""
    global PORT, ENCRYPTION_KEY, PORTAL_URL, CLIENT_CREDENTIAL_CLIENTS, IMPLICIT_CLIENTS
    config_pth = path.join(ROOT_DIR, "config.json")
    if not path.exists(config_pth):
        run_utils.exit_fail("missing configuration file")
    cfg = json.loads(file_utils.read_file_content(config_pth))
    feed_utils.info(cfg)
    PORT = int(cfg[ConfigKeys.port])
    ENCRYPTION_KEY = cfg[ConfigKeys.encrytion_key]
    PORTAL_URL = cfg[ConfigKeys.portal_url]
    CLIENT_CREDENTIAL_CLIENTS = cfg[ConfigKeys.oauth2_client_credentail_clients]
    IMPLICIT_CLIENTS = cfg[ConfigKeys.oauth2_implicit_clients]
def recompute_project_disc_usage():
    """Ask the SBI API to repopulate project disk usage figures.

    :return: True when the API accepted the request, False otherwise.
    """
    try:
        json_body = {"method": "re_populate_project_disk_usage"}
        response = requests.post(config.sbi_url("projects/do"),
                                 json=json_body,
                                 auth=(config.SBI_KEY, config.SBI_SECRET))
        if response.status_code == requests.codes.ok:
            # The original returned `status == ok` inside this branch,
            # which is always True here.
            return True
        feed_utils.info(response.status_code)
    # Bug fix: bare `except:` also swallowed SystemExit/KeyboardInterrupt.
    except Exception:
        feed_utils.error(sys.exc_info()[0])
    return False
def test_quota_search(name):
    """Search quotas by name; return (id, quota_id) when exactly one matches.

    :param name: quota name to search for.
    :return: (id, quota_id) tuple when the search yields a single quota,
        otherwise None.
    """
    feed_utils.heading("Trying quota search")
    result = sbi_quotas.search_quotas(name)
    if not result:
        feed_utils.failed("search quotas failed")
        return None
    feed_utils.ok("found %s quotas from the search" % result[u'count'])
    last = None
    for entry in result[u'data']:
        last = entry
        feed_utils.info("id: %d, quota_id: %d"
                        % (int(entry[u'id']), int(entry[u'quota_id'])))
    # The original relied on the loop variable leaking out of the loop;
    # `last` makes that explicit.
    if result[u'count'] == 1:
        return int(last[u'id']), int(last[u'quota_id'])
    return None
def test_project_list_all_users(project_id):
    """List the users of a project, reporting the outcome to the feed.

    :param project_id: SBI project id to inspect.
    """
    feed_utils.heading("Trying list users in project. project_id: %s"
                       % project_id)
    project_info = sbi_project.get_users_in_project(project_id)
    # Idiom fix: compare to None with `is`, not `==`.
    if project_info is None:
        feed_utils.failed("failed getting project users")
    elif project_info == []:
        feed_utils.ok("project without users: %s" % project_info)
    else:
        feed_utils.ok("number of project users: %s" % len(project_info))
        for user in project_info:
            feed_utils.info("federated_id : %s" % user[u'federated_id'])
def authenticate(self, request, environ, scopes, client):
    """Authenticate a GET request carrying an encrypted ``nels_token``.

    :return: (expanded_token, nels_id) on success.
    :raises UserNotAuthenticated: when no valid token is presented.
    """
    if request.method == "GET":
        if "nels_token" in request.query_arguments.keys():
            feed_utils.info("nels_token %s" % request.get_param("nels_token"))
            try:
                expanded_token = json.loads(
                    security_utils.decrypt(
                        config.ENCRYPTION_KEY,
                        request.get_param("nels_token")))
                feed_utils.info(expanded_token)
                return (expanded_token, expanded_token["nels_id"])
            except Exception as ex:
                # Bug fix: `ex.message` does not exist on Python 3
                # exceptions and raised AttributeError here.
                feed_utils.error(str(ex))
    raise UserNotAuthenticated
def get_users_in_project(project_id):
    """Return the users of an SBI project.

    :return: decoded JSON list of users, or None on any failure.
    """
    try:
        response = requests.get(
            config.sbi_url("projects/%s/users" % (project_id)),
            auth=(config.SBI_KEY, config.SBI_SECRET))
        if response.status_code == requests.codes.ok:
            json_response = response.json()
            return json_response
        feed_utils.info(response.text)
    # Bug fix: bare `except:` also swallowed SystemExit/KeyboardInterrupt.
    except Exception:
        feed_utils.error(sys.exc_info()[0])
    return None
def add_quota(name, federated_id):
    """Create a 1 TB quota for integration testing.

    :param name: quota name.
    :param federated_id: owner's federated id.
    :return: requests.codes.created (201) on success, None otherwise.
    """
    try:
        json_body = {u'name': u'%s' % name,
                     u'description': u'added from integration test',
                     u'federated_id': u'%s' % federated_id,
                     u'quota_size': 1000000000000}
        response = requests.post(config.sbi_url("quotas/"), json=json_body,
                                 auth=(config.SBI_KEY, config.SBI_SECRET))
        if response.status_code == requests.codes.created:
            return requests.codes.created
        feed_utils.info(response.text)
    # Bug fix: bare `except:` also swallowed SystemExit/KeyboardInterrupt.
    except Exception:
        feed_utils.error(sys.exc_info()[0])
    return None
def delete_project(federated_id, project_id):
    """Delete an SBI project on behalf of a federated user.

    :return: True on success (204 No Content), None on any failure.
    """
    try:
        json_body = {u'federated_id': u'%s' % federated_id}
        response = requests.post(config.sbi_url("projects/%s" % project_id),
                                 json=json_body,
                                 auth=(config.SBI_KEY, config.SBI_SECRET))
        if response.status_code == requests.codes.no_content:
            return True
        feed_utils.debug(response.status_code)
        feed_utils.info(response.text)
    # Bug fix: bare `except:` also swallowed SystemExit/KeyboardInterrupt.
    except Exception:
        feed_utils.error(sys.exc_info()[0])
    return None
def get_projects_in_quota(quota_id):
    """Return the project ids attached to a quota.

    :return: list of project ids ([] when the quota is empty),
        or None on any failure.
    """
    try:
        ids = []
        response = requests.get(
            config.sbi_url("quotas/%s/projects" % (quota_id)),
            auth=(config.SBI_KEY, config.SBI_SECRET))
        # Bug fix: the original called response.json() before checking the
        # status code, which raises on non-JSON error bodies.
        if response.status_code == requests.codes.ok:
            json_response = response.json()
            feed_utils.info(json_response)
            if json_response[u'count'] > 0:
                for uid in json_response[u'data']:
                    ids.append(uid[u'project_id'])
            else:
                feed_utils.ok("No projects in this quota")
            return ids
    # Bug fix: bare `except:` also swallowed SystemExit/KeyboardInterrupt.
    except Exception:
        feed_utils.error(sys.exc_info()[0])
    return None
def print_config():
    """Dump the active service configuration to the feed."""
    feed_utils.heading("Configurations")
    feed_utils.info("port: %s" % PORT)
    feed_utils.info("implicit clients")
    feed_utils.push_in()
    for implicit_client in IMPLICIT_CLIENTS:
        feed_utils.info(implicit_client)
    feed_utils.push_out()
def get_quota_ids():
    """Return the ids of all quotas known to the SBI API.

    :return: list of quota ids, or None on any failure.
    """
    try:
        ids = []
        response = requests.get(config.sbi_url("quotas/"),
                                auth=(config.SBI_KEY, config.SBI_SECRET))
        feed_utils.info(response.headers)
        feed_utils.info(response.json())
        if response.status_code == requests.codes.ok:
            json_response = response.json()
            for uid in json_response[u'data']:
                ids.append(uid[u'id'])
            feed_utils.info(ids)
            return ids
    # Bug fix: bare `except:` also swallowed SystemExit/KeyboardInterrupt.
    except Exception:
        feed_utils.error(sys.exc_info()[0])
    return None
# Parse CLI options (the OptionParser is built above this fragment).
(options, args) = parser.parse_args()
feed_utils.VERBOSE = options.verbose
# Exactly one positional argument is required: the numeric project id.
args_utils.require_args_length(parser, args, 1)
args_utils.require_arg_number(parser, args, 0, "project_id")
pid = args[0]
config.init()
feed_utils.heading("project info")
if not project_facade.project_exists(pid):
    run_utils.exit_fail("project not found")
feed_utils.ok("project found\nname:\t%s\nhome folder:\t%s" %
              (project_facade.project_name_by_id(pid),
               project_facade.project_home(pid)))
# NOTE(review): this push_in has no matching push_out at the end of the
# fragment — confirm indentation is intentionally left increased.
feed_utils.push_in()
# Print the member list for each membership role.
for role in [
        project_facade.MembershipRoles.ADMIN,
        project_facade.MembershipRoles.POWERUSER,
        project_facade.MembershipRoles.MEMBER
]:
    feed_utils.push_in()
    members = project_facade.project_members(pid, role)
    feed_utils.info("role:\t%s (members count: %s)" % (role, len(members)))
    for uid in members:
        feed_utils.info("\tnels_id:\t%s\tusername:\t%s" %
                        (uid, user_facade.nels_id_to_username(uid)))
    feed_utils.push_out()
dest="verbose", action="store_true", help='turn verbosity on', default=False) parser.add_option('-d', '--dryrun', dest="dryrun", action="store_true", help='Dry run, don' 't affect persistence', default=False) # get options and arguments (options, args) = parser.parse_args() config.DRY_RUN = options.dryrun feed_utils.VERBOSE = options.verbose config.init() feed_utils.heading("running project disk usage statistics") total_size = 0 for project_id in projects.get_project_ids(): try: size = disk_facade.get_folder_size( storage_facade.get_project_home(project_id)) total_size = total_size + size feed_utils.info("%s:%s" % (project_id, size)) if not config.DRY_RUN: stats.add_stat(stats.PROJECT_DISK_USAGE, project_id, size) except Exception as ex: feed_utils.error("error processing project id: %s" % project_id) if not config.DRY_RUN: stats.add_stat(stats.PROJECT_DISK_USAGE_SUMMARY, -1, total_size)
# CLI: a single optional -v/--verbose flag controls feed verbosity.
parser = OptionParser(usage='usage: %prog project_id')
parser.add_option('-v', '--verbose', dest="verbose", action="store_true",
                  help='turn verbosity on', default=False)
(options, args) = parser.parse_args()
feed_utils.VERBOSE = options.verbose
test_projects_list()
feed_utils.heading("")
# NOTE(review): hard-coded ids of an existing project/dataset/subtype in the
# SBI test environment — confirm they still exist before running.
project_id = 1125311
dataset_id = 1124859
subtype_id = 1124887
test_project_display(project_id)
test_project_list_all_users(project_id)
test_project_list_all_datasets(project_id)
json_dataset_type_array = sbi_project.get_dataset_types()
feed_utils.info(json_dataset_type_array)
feed_utils.heading("")
test_project_display_dataset(project_id, dataset_id)
test_project_list_all_subtypes(project_id, dataset_id)
feed_utils.heading("")
test_project_display_subtype(project_id, dataset_id, subtype_id)
def file_owner_get(directory):
    """Return the owner of *directory* as reported by `stat`.

    NOTE(review): `stat -f '%Su'` is BSD/macOS syntax; on GNU/Linux the
    equivalent is `stat -c '%U'` — confirm the deployment platform.
    """
    cmd = "stat -f '%Su' " + directory
    feed_utils.info(cmd)
    # launch_cmd returns [exit_code, stdout, stderr].
    result = run_utils.launch_cmd(cmd)
    # caution: this is not safe. Should be changed
    # NOTE(review): result[1][0] takes only the FIRST element of stdout —
    # on Python 2 that is the first character of the owner name, on
    # Python 3 (bytes stdout) it is an int and .replace would raise.
    # Presumably result[1].strip() was intended — TODO confirm.
    return result[1][0].replace('\n', '')
action="store_true", help='turn verbosity on', default=False) parser.add_option('-d', '--dryrun', dest="dryrun", action="store_true", help='Dry run, don' 't affect persistence', default=False) # get options and arguments (options, args) = parser.parse_args() config.DRY_RUN = options.dryrun feed_utils.VERBOSE = options.verbose config.init() feed_utils.heading("running user disk usage statistics") total_size = 0 for nels_id in users.get_nels_ids(): try: size = disk_facade.get_folder_size( storage_facade.get_user_home(nels_id)) total_size = total_size + size feed_utils.info("%s:%s" % (nels_id, size)) if not config.DRY_RUN: stats.add_stat(stats.PERSONAL_DISK_USAGE, nels_id, size) except Exception as ex: feed_utils.error("error processing nels id: %s" % nels_id) if not config.DRY_RUN: stats.add_stat(stats.PERSONAL_DISK_USAGE_SUMMARY, -1, total_size)
def project_exists(pid):
    """Check whether a project's system group exists.

    :param pid: project id, mapped to a group name by project_id_to_name.
    :return: True when `getent group` exits 0 (group found).
    """
    # Fixed "cheking" typo in the log message.
    feed_utils.info("checking project existence. pid: %s" % pid)
    return run_utils.launch_cmd(
        '/usr/bin/getent group %s' % project_id_to_name(pid))[0] == 0