def wrapper(*args, **kwargs):
    """Resolve the machine-name view argument against the configured systems.

    Works for class-based views (args = [self, request, machine_name, ...])
    and plain view functions (args = [request, machine_name, ...]).
    Returns an error json_response when the request object is missing or the
    machine name is unknown; otherwise delegates to the wrapped view.
    """
    # Locate the HttpRequest positionally. ROBUSTNESS FIX: guard the arg
    # count — the original indexed args[0]/args[1]/args[2] unconditionally
    # and could raise IndexError instead of returning a clean error.
    if len(args) >= 2 and isinstance(args[0], HttpRequest):
        request = args[0]
        machine_name = args[1]
    elif len(args) >= 3 and isinstance(args[1], HttpRequest):
        request = args[1]
        machine_name = args[2]
    else:
        return json_response(status="ERROR", status_code=500,
                             error="Missing request object")
    hostname = None
    conf = settings.NEWT_CONFIG
    if not isinstance(machine_name, str):
        return json_response(status="ERROR", status_code=500,
                             error="Missing machine name")
    # Look the machine up in the configured SYSTEMS table.
    for s in conf['SYSTEMS']:
        if machine_name == s['NAME']:
            hostname = s['HOSTNAME']
            break
    if hostname is None:
        return json_response(status="ERROR", status_code=404,
                             error="Unrecognized system: %s" % machine_name)
    return view_func(*args, **kwargs)
def get_user_info(user_name, uid):
    """Look up a single user by id (preferred) or by username.

    Keyword arguments:
    user_name -- username
    uid -- user id

    Returns the user's fields as a dict with the password removed, or a
    404 error json_response when no matching user exists.
    """
    try:
        if uid:
            found = User.objects.get(pk=uid)
        elif user_name:
            found = User.objects.get(username=user_name)
        else:
            raise Exception()
        info = model_to_dict(found)
        del info["password"]
        return info
    except Exception:
        logger.debug("No user found matching query: %s"
                     % (user_name if user_name else uid))
        if user_name:
            return json_response(
                status="ERROR",
                status_code=404,
                error="User not found: No user has the username %s" % user_name)
        return json_response(
            status="ERROR",
            status_code=404,
            error="User not found: No user has the id %s" % uid)
def put_file(request, machine, path, local=False):
    """Accept an uploaded file body and stage it for transfer.

    Keyword arguments:
    request -- Django HttpRequest whose body is the file content
    machine -- target machine name
    path    -- destination path ('/~'-prefixed paths are made relative)
    local   -- when True, just move the temp file into tempdir instead of
               dispatching the async transfer task

    Returns an ACCEPT json_response carrying the async task id, or a dict
    with the local file location when local=True.
    """
    content_length = int(request.META.get('CONTENT_LENGTH', 0))
    # Reject bodies larger than ~1 MB; bigger files must use another channel.
    if content_length > 1048000:
        return json_response(
            status="ERROR",
            status_code=500,
            error="The file is too big ,please use other ways to upload!")
    data = request.read()
    if path.startswith('/~'):
        path = path[1:]
    # Persist the upload to a named temp file the async worker can read.
    tmp_file = tempfile.NamedTemporaryFile(prefix="newt_", dir=tempdir,
                                           delete=False)
    tmp_file.write(data)
    tmp_file.file.flush()
    tmp_file.close()
    if not local:
        src = tmp_file.name
        dest = path
        # BUG FIX: `temphost` was referenced below but its assignment was
        # commented out, producing a NameError on this code path.
        temphost = socket.gethostname()
        taskenv = {"user": request.user.username, "machine": machine}
        rest = put_file_task.delay(taskenv, temphost,
                                   os.path.basename(src), dest)
        cache.set("async-" + rest.id, "AsyncJob", 3600)
        return json_response(status="ACCEPT", status_code=201, error="",
                             content=rest.id)
    else:
        os.rename(tmp_file.name, os.path.join(tempdir, os.path.basename(path)))
        return {'location': tmp_file.name}
def get_dir(request, machine_name, path):
    """Return a parsed `ls -la` listing of *path* as a list of dicts.

    Each entry carries perms/hardlinks/user/group/size/date/name plus a
    'symlink' key (empty string for non-links). Errors are reported as an
    error json_response.
    """
    try:
        command = 'ls -la %s' % path
        (output, error, retcode) = run_command(command)
        if retcode != 0:
            return json_response(content=output, status="ERROR",
                                 status_code=500, error=error)
        lines = [line.strip() for line in output.split('\n')]
        # "awesome" regular expression that captures ls output of the form:
        # drwxrwxr-x 4 shreyas newt 32768 Apr 15 10:59 home
        patt = re.compile(r'(?P<perms>[\+\w@-]{10,})\s+(?P<hardlinks>\d+)\s+(?P<user>\S+)\s+(?P<group>\S+)\s+(?P<size>\d+)\s+(?P<date>\w{3}\s+\d+\s+[\d\:]+)\s+(?P<name>.+)$')
        # BUG FIX: the original chained map()/filter() and then iterated the
        # result in the loop below; under Python 3 those are lazy iterators,
        # so the loop exhausted them and the function returned an empty
        # iterator. Materialize a real list instead.
        output = [patt.match(line).groupdict()
                  for line in lines if patt.match(line)]
        for line in output:
            if line['perms'].startswith('l'):
                name, symlink = line['name'].split(' -> ')
                line['name'] = name
                line['symlink'] = symlink
            else:
                line['symlink'] = ""
        return output
    except Exception as e:
        logger.error("Could not get directory %s" % str(e))
        return json_response(status="ERROR", status_code=500,
                             error="Could not get directory: %s" % str(e))
def get_user_info(user_name, uid):
    """Fetch a user by id (takes precedence) or username.

    Keyword arguments:
    user_name -- username
    uid -- user id

    Returns a dict of the user's fields without the password, or a 404
    error json_response when no match exists.
    """
    try:
        if uid:
            match = User.objects.get(pk=uid)
        elif user_name:
            match = User.objects.get(username=user_name)
        else:
            raise Exception()
        result = model_to_dict(match)
        del result["password"]
        return result
    except Exception:
        logger.debug("No user found matching query: %s"
                     % (user_name if user_name else uid))
        if user_name:
            detail = "User not found: No user has the username %s" % user_name
        else:
            detail = "User not found: No user has the id %s" % uid
        return json_response(status="ERROR", status_code=404, error=detail)
def patch(self, user, pk):
    # Partially update the model row identified by pk and owned by user from
    # the JSON request body; returns the serialized object (200) or an error
    # message (400).
    # NOTE(review): `request` here is assumed to be the framework's request
    # proxy (flask-style global) — confirm against this module's imports.
    data = request.json
    # Never let the payload reassign the owning user.
    data.pop("user", "")
    if not data:
        return json_response({"message": "Cannot update obj without data"},
                             400)
    if not pk:
        return json_response(
            {"message": "Cannot update obj pk is invalid"}, 400)
    fields = data.keys()
    # Fetch scoped to the requesting user so users cannot patch others' rows.
    obj = self.Meta.model.get(self.Meta.model.id == pk,
                              self.Meta.model.user == user)
    # Copy only keys that are declared model fields.
    for key in fields:
        if key in self.Meta.model._meta.fields.keys():
            setattr(obj, key, data[key])
    # Re-assert the primary key so save() performs an update, not an insert.
    setattr(obj, "id", pk)
    obj.save()
    json_obj = model_to_dict(obj)
    json_obj["user"] = user.id
    # Optional extra / replacement fields declared on the serializer's Meta.
    if self.Meta.fields:
        for field in self.Meta.fields:
            json_obj[field] = getattr(obj, field)
    if self.Meta.replace_fields:
        for field in self.Meta.replace_fields:
            json_obj[field["field"]] = deepgetattr(obj, field["attr"])
    # Post-patch hook for subclasses.
    self.pos_patch(obj)
    return json_response(json_obj, 200)
def get_group_info(group_name, gid):
    """Look up a group by id (preferred) or name.

    Keyword arguments:
    group_name -- group name
    gid -- group id

    Returns the group's fields plus a 'users' list of member summaries, or
    a 404 error json_response when no group matches.
    """
    try:
        if gid:
            found = Group.objects.get(pk=gid)
        elif group_name:
            found = Group.objects.get(name=group_name)
        else:
            raise Exception()
        info = model_to_dict(found)
        members = []
        for member in found.user_set.all():
            members.append({"id": member.id, "username": member.username})
        info['users'] = members
        return info
    except Exception:
        if group_name:
            return json_response(
                status="ERROR",
                status_code=404,
                error="Group not found: No group matches the name %s"
                      % group_name)
        return json_response(
            status="ERROR",
            status_code=404,
            error="Group not found: No group matches the id %s" % gid)
def store_update(request, store_name, obj_id, data):
    """Updates the contents of a given document; returns the oid.

    Keyword arguments:
    request -- Django HttpRequest object
    store_name -- the name of the store
    obj_id -- ID of the document in the store
    data -- updated data of the document
    """
    try:
        store = Store.objects.get(name=store_name)
    except ObjectDoesNotExist:
        return json_response(status="ERROR", status_code=404,
                             error="Store does not exist: %s" % store_name)
    if not data:
        return json_response(status="ERROR", status_code=400,
                             error="No data received.")
    # ROBUSTNESS FIX: a missing document previously raised an unhandled
    # ObjectDoesNotExist; report it as a 404 like the missing-store case.
    try:
        doc = store.documents.get(oid=obj_id)
    except ObjectDoesNotExist:
        return json_response(status="ERROR", status_code=404,
                             error="Document does not exist: %s" % obj_id)
    doc.data = data
    doc.save()
    return str(doc.oid)
def get_info(request, machine_name, job_id):
    """Gets the information of a job, given the id

    Keyword arguments:
    machine_name -- name of the machine
    job_id -- the job id

    Returns the parsed sacct rows as a list of dicts, or an error
    json_response.
    """
    # CLEANUP: removed a dead `pass` statement that preceded the body.
    machine = slurmutil.GRID_RESOURCE_TABLE.get(machine_name, None)
    if not machine:
        return json_response(status="ERROR", status_code=400,
                             error="Invalid machine name: %s" % machine_name)
    # NOTE(review): env is fetched but not passed to run_command below —
    # confirm whether the ssh invocation needs the credential environment.
    env = slurmutil.get_cred_env(request.user)
    mycmd = "ssh " + machine["hostname"] + " ' " + ' sacct -j ' + job_id + " '"
    (output, error, retcode) = run_command(mycmd)
    if retcode != 0:
        return json_response(status="ERROR", status_code=500,
                             error="Unable to get queue: %s" % error)
    patt = re.compile(
        r'(?P<jobid>[^\s]+)\s+(?P<jobname>[^\s]+)\s+(?P<partition>[^\s]+)\s+(?P<account>[^\s]+)\s+(?P<alloccpus>[^\s]+)\s+(?P<state>[^\s]+)\s+(?P<exitcode>.*)$'
    )
    output = output.splitlines()
    output = [x.strip() for x in output]
    output = filter(lambda line: patt.match(line), output)
    # Drop the two sacct header rows (column names and separator line).
    output = list(map(lambda x: patt.match(x).groupdict(), output))[2:]
    return (output)
def delete_job(request, machine_name, job_id):
    """Delete (qdel) a job on machine_name via its fork jobmanager.

    Keyword arguments:
    machine_name -- name of the machine
    job_id -- the job id
    """
    machine = gridutil.GRID_RESOURCE_TABLE.get(machine_name, None)
    if not machine:
        return json_response(status="ERROR", status_code=400,
                             error="Invalid machine name: %s" % machine_name)
    flags = ""
    jobmanager = machine['jobmanagers']['fork']['url']
    qdel = machine['qdel']['bin']
    scheduler = machine['qdel']['scheduler']
    cmd = "%s %s" % (qdel, job_id)
    # Set environment flags for qsub
    if scheduler == "sge":
        sge_env_str = "-env SGE_ROOT=%s -env SGE_QMASTER_PORT=%s -env SGE_EXECD_PORT=%s" % (gridutil.SGE_ROOT, gridutil.SGE_QMASTER_PORT, gridutil.SGE_EXECD_PORT)
        flags += " " + sge_env_str
    # Non-SGE schedulers need a login shell wrapper.
    if scheduler != "sge":
        cmd = '/bin/bash -l -c "%s"' % cmd
    try:
        runner = GlobusHelper(request.user)
        (output, error, retcode) = runner.run_job(cmd, jobmanager, flags)
    # SYNTAX FIX: `except Exception, ex` is Python-2-only syntax; the `as`
    # form works on Python 2.6+ and Python 3.
    except Exception as ex:
        return json_response(status="ERROR", status_code=500,
                             error="qsub failed with error: %s" % str(ex))
    # NOTE(review): nothing is returned on success (implicit None) — confirm
    # whether callers expect the qdel output here.
def view_queue(request, machine_name):
    """Returns the current state of the queue in a list

    Keyword arguments:
    request -- Django HttpRequest
    machine_name -- name of the machine
    """
    machine = gridutil.GRID_RESOURCE_TABLE.get(machine_name, None)
    if not machine:
        return json_response(status="ERROR", status_code=400,
                             error="Invalid machine name: %s" % machine_name)
    env = gridutil.get_cred_env(request.user)
    (output, error, retcode) = run_command(
        gridutil.GLOBUS_CONF['LOCATION'] +
        "bin/globus-job-run %s /project/projectdirs/osp/newt_tools/qs_moab.sh"
        % (machine['hostname']),
        env=env)
    patt = re.compile(r'(?P<jobid>[^\s]+)\s+(?P<status>[^\s]+)\s+(?P<user>[^\s]+)\s+(?P<job_name>[^\s]+)\s+(?P<nodes>\d+)\s+(?P<walltime>[^\s]+)\s+(?P<time_use>[^\s]+)\s+(?P<time_submit>\w{3}\s\d{1,2}\s[\d\:]+)\s+(?P<rank>[^\s]+)\s+(?P<queue>[^\s]+)\s+(?P<q_state>[^\s]+)\s+(?P<processors>[^\s]+)\s*(?P<details>.*)$')
    if retcode != 0:
        return json_response(status="ERROR", status_code=500,
                             error="Unable to get queue: %s" % error)
    # filter out stuff that doesn't match pattern
    output = output.splitlines()
    output = [x.strip() for x in output]
    output = filter(lambda line: patt.match(line), output)
    # BUG FIX: under Python 3, map() is a lazy iterator; materialize a real
    # list so callers get a sequence (consistent with the slurm view_queue).
    output = list(map(lambda x: patt.match(x).groupdict(), output))
    return output
def execute(request, machine_name, command):
    """Run *command* on *machine_name* via globus-job-run.

    Keyword arguments:
    machine_name -- name of the machine
    command -- command to run

    Returns {"output", "error", "retcode"} on success, or an error
    json_response when the machine is unknown or the run fails.
    """
    machine = gridutil.GRID_RESOURCE_TABLE.get(machine_name, None)
    if not machine:
        return json_response(status="ERROR", status_code=400,
                             error="Invalid machine name: %s" % machine_name)
    # Grid credentials for the requesting user.
    env = gridutil.get_cred_env(request.user)
    try:
        full_cmd = gridutil.GLOBUS_CONF['LOCATION'] + \
            "bin/globus-job-run %s %s" % (machine['hostname'], command)
        (output, error, retcode) = run_command(full_cmd, env=env)
        return {"output": output, "error": error, "retcode": retcode}
    except Exception as e:
        logger.error("Could not run command: %s" % str(e))
        return json_response(error="Could not run command: %s" % str(e),
                             status="ERROR", status_code=500)
def _wrapper(*args, **kwargs):
    # Authenticate the JWT, then authorize: the current user must hold, via
    # one of their roles, a permission whose description matches the closure
    # variable `description` at any of three levels of the permission tree.
    # On success the wrapped `fn` is invoked with the user prepended.
    try:
        verify_jwt_in_request()
    except NoAuthorizationError as e:
        _ = e
        return json_response(message="not authorization", status=401)
    user = get_current_user()
    if not user:
        return json_response(message="account is disabled", status=403)
    # Self-join Permission twice so a single query can match the description
    # at the first, second, or third level of the hierarchy.
    second_permission_alias = aliased(Permission, name="second_permission")
    third_permission_alias = aliased(Permission, name="third_permission")
    permission = db.session.query(Permission.id) \
        .outerjoin(second_permission_alias,
                   Permission.id == second_permission_alias.parent_id) \
        .outerjoin(third_permission_alias,
                   second_permission_alias.id == third_permission_alias.parent_id) \
        .join(RolePermission,
              or_(RolePermission.permission_id == Permission.id,
                  RolePermission.permission_id == second_permission_alias.id,
                  RolePermission.permission_id == third_permission_alias.id)) \
        .join(Role, and_(RolePermission.role_id == Role.id)) \
        .join(UserRole, and_(Role.id == UserRole.role_id,
                             UserRole.user_id == user.id)) \
        .filter(Permission.parent_id.is_(None),
                or_(Permission.description == description,
                    second_permission_alias.description == description,
                    third_permission_alias.description == description))\
        .order_by(Permission.id, second_permission_alias.id,
                  third_permission_alias.id) \
        .first()
    if permission:
        return fn(user, *args, **kwargs)
    else:
        return json_response(message='permission denied', status=401)
def wrapper(*args, **kwargs):
    # Task guard: validate the task environment, then drop privileges to the
    # requesting user before running the wrapped task function.
    # args[1] must be the taskenv dict carrying "user" and "machine".
    if not isinstance(args[1], dict):
        return json_response(status="ERROR", status_code=500,
                             error="not a safty task: no taskenv")
    if ("user" not in args[1].keys()) or ("machine" not in args[1].keys()):
        return json_response(status="ERROR", status_code=500,
                             error="not a safty task: no taskenv")
    # chuid :
    username = args[1]["user"]
    ngid = getpwnam(username).pw_gid
    nuid = getpwnam(username).pw_uid
    # Refuse to run anything as root.
    if nuid == 0:
        return json_response(status="ERROR", status_code=500,
                             error="dangerous action ! ")
    if nuid != os.getuid():
        # Order matters: clear supplementary groups and set the gid while we
        # still have privileges, then give up the uid last.
        os.setgroups([])
        os.setgid(ngid)
        os.setuid(nuid)
    # Run the task from the target user's home with a restrictive umask so
    # any files it creates are private.
    os.putenv("HOME", getpwnam(username).pw_dir)
    os.chdir(getpwnam(username).pw_dir)
    old_umask = os.umask(0o077)
    #os.seteuid(nuid)
    #os.setegid(nuid)
    return [username, task_func(*args, **kwargs)]
def get_group_info(group_name, gid):
    """Return a group's fields and member list, selected by id or name.

    Keyword arguments:
    group_name -- group name
    gid -- group id

    Returns the group dict (with a 'users' member list) or a 404 error
    json_response when no group matches.
    """
    try:
        if gid:
            grp = Group.objects.get(pk=gid)
        elif group_name:
            grp = Group.objects.get(name=group_name)
        else:
            raise Exception()
        payload = model_to_dict(grp)
        payload['users'] = [{"id": u.id, "username": u.username}
                            for u in grp.user_set.all()]
        return payload
    except Exception:
        if group_name:
            msg = "Group not found: No group matches the name %s" % group_name
        else:
            msg = "Group not found: No group matches the id %s" % gid
        return json_response(status="ERROR", status_code=404, error=msg)
def view_queue(request, machine_name):
    """Returns the current state of the queue in a list

    Keyword arguments:
    request -- Django HttpRequest
    machine_name -- name of the machine
    """
    # CLEANUP: removed a dead `pass` statement that preceded the body.
    machine = slurmutil.GRID_RESOURCE_TABLE.get(machine_name, None)
    if not machine:
        return json_response(status="ERROR", status_code=400,
                             error="Invalid machine name: %s" % machine_name)
    # NOTE(review): env is fetched but not passed to run_command below —
    # confirm whether the ssh invocation needs the credential environment.
    env = slurmutil.get_cred_env(request.user)
    mycmd = "ssh " + machine["hostname"] + " ' " + machine["qstat"][
        "bin"] + " '"
    (output, error, retcode) = run_command(mycmd)
    if retcode != 0:
        return json_response(status="ERROR", status_code=500,
                             error="Unable to get queue: %s" % error)
    patt = re.compile(
        r'(?P<jobid>[^\s]+)\s+(?P<partition>[^\s]+)\s+(?P<job_name>[^\s]+)\s+(?P<user>[^\s]+)\s+(?P<state>[^\s]+)\s+(?P<time>[^\s]+)\s+(?P<nodes>\d+)\s+(?P<nodelist>.*)$'
    )
    output = output.splitlines()
    output = [x.strip() for x in output]
    output = filter(lambda line: patt.match(line), output)
    output = map(lambda x: patt.match(x).groupdict(), output)
    return list(output)
def execute(request, machine_name, command):
    # Run *command* on the named system over ssh and return
    # {"output", "error", "retcode"}; unknown systems yield a 404.
    conf = settings.NEWT_CONFIG
    try:
        # Resolve the machine name against the configured SYSTEMS table.
        hostname = None
        for s in conf['SYSTEMS']:
            if machine_name == s['NAME']:
                hostname = s['HOSTNAME']
                break
        if hostname is None:
            return json_response(status="ERROR", status_code=404,
                                 error="Unrecognized system: %s" % machine_name)
        #user = request.POST.get('sudo_user')
        logger.debug("Running command(ssh): %s (@ %s)" % (command, machine_name))
        #command = "sudo -u %s %s " % (user, command)
        # SECURITY NOTE(review): `command` is interpolated directly into the
        # ssh command line — a caller-supplied string can inject arbitrary
        # shell commands. Confirm upstream validation or switch to an
        # argument-list invocation.
        command = 'ssh %s " %s " ' % (hostname, command)
        (output, error, retcode) = run_command(command)
        response = {'output': output, 'error': error, 'retcode': retcode}
        return response
    except Exception as e:
        logger.error("Could not run command: %s" % str(e))
        return json_response(error="Could not run command: %s" % str(e),
                             status="ERROR", status_code=500)
def store_insert(request, store_name, initial_data):
    """Creates a new document in the store with initial_data; Returns the
    oid of the new document.

    Keyword arguments:
    request -- Django HttpRequest object
    store_name -- the name of the store
    initial_data -- document data
    """
    try:
        target_store = Store.objects.get(name=store_name)
    except ObjectDoesNotExist:
        return json_response(status="ERROR", status_code=404,
                             error="Store does not exist: %s" % store_name)
    data = initial_data
    if not data:
        return json_response(status="ERROR", status_code=400,
                             error="No data received.")
    # The next oid is simply the current document count.
    oid = target_store.documents.count()
    Document(oid=oid, data=data, store=target_store).save()
    return str(oid)
def execute(request, machine_name, command):
    """Execute *command* on *machine_name* through globus-job-run.

    Keyword arguments:
    machine_name -- name of the machine
    command -- command to run

    Returns a dict with "output", "error" and "retcode", or an error
    json_response on failure.
    """
    machine = gridutil.GRID_RESOURCE_TABLE.get(machine_name, None)
    if not machine:
        return json_response(status="ERROR", status_code=400,
                             error="Invalid machine name: %s" % machine_name)
    # Fetch the user's grid credentials for the remote invocation.
    env = gridutil.get_cred_env(request.user)
    try:
        globus_cmd = (gridutil.GLOBUS_CONF['LOCATION'] +
                      "bin/globus-job-run %s %s"
                      % (machine['hostname'], command))
        (output, error, retcode) = run_command(globus_cmd, env=env)
        response = {"output": output, "error": error, "retcode": retcode}
        return response
    except Exception as e:
        logger.error("Could not run command: %s" % str(e))
        return json_response(error="Could not run command: %s" % str(e),
                             status="ERROR", status_code=500)
def post(self, phone, password):
    # Log in: match phone + password, returning an access token (200) or a
    # 403 on mismatch.
    # SECURITY NOTE(review): the password is compared directly against the
    # stored column — if User.password holds plaintext this needs hashing;
    # confirm how the value is written at registration.
    user = db.session.query(User).filter(
        User.phone == phone, User.password == password).first()
    if not user:
        return json_response(message='username or password error', status=403)
    return json_response(data={"access_token": get_access_token(user)})
def submit_job(request, machine_name):
    """Submits a job to the queue

    Keyword arguments:
    request -- Django HttpRequest
    machine_name -- name of the machine
    """
    machine = gridutil.GRID_RESOURCE_TABLE.get(machine_name, None)
    if not machine:
        return json_response(status="ERROR", status_code=400,
                             error="Invalid machine name: %s" % machine_name)
    flags = ""
    jobmanager = machine['jobmanagers']['fork']['url']
    qsub = machine['qsub']['bin']
    scheduler = machine['qsub']['scheduler']
    # Set environment flags for qsub
    if scheduler == "sge":
        sge_env_str = "-env SGE_ROOT=%s -env SGE_QMASTER_PORT=%s -env SGE_EXECD_PORT=%s" % (gridutil.SGE_ROOT, gridutil.SGE_QMASTER_PORT, gridutil.SGE_EXECD_PORT)
        flags += " " + sge_env_str
    if request.POST.get("jobfile", False):
        # Create command for qsub on an existing pbs file
        job_file_path = request.POST.get("jobfile")
        cmd = "%s %s" % (qsub, job_file_path)
    elif request.POST.get("jobscript", False):
        # Create command for qsub from stdin data
        job_script = request.POST.get("jobscript")
        # Creates a temporary job file
        tmp_job_file = tempfile.NamedTemporaryFile(prefix="newt_")
        tmp_job_file.write(job_script)
        tmp_job_file.flush()
        # Stages the temporary job file and pass it as to stdin to qsub
        flags += " -stdin -s %s" % tmp_job_file.name
        cmd = qsub
    else:
        return json_response(status="ERROR", status_code=400,
                             error="No data received")
    if scheduler != "sge":
        cmd = '/bin/bash -l -c "%s"' % cmd
    try:
        runner = GlobusHelper(request.user)
        (output, error, retcode) = runner.run_job(cmd, jobmanager, flags)
    # SYNTAX FIX: `except Exception, ex` is Python-2-only syntax; the `as`
    # form works on Python 2.6+ and Python 3.
    except Exception as ex:
        return json_response(status="ERROR", status_code=500,
                             error="qsub failed with error: %s" % str(ex))
    # NOTE(review): nothing is returned on success (implicit None) — confirm
    # whether callers expect the qsub output here.
def post(self, **kwargs):
    """Register a new user keyed by phone; returns an access token."""
    # Refuse duplicate registrations for the same phone number.
    already = db.session.query(
        User.id).filter_by(phone=kwargs["phone"]).first()
    if already:
        return json_response(message="user exists")
    new_user = User(**kwargs)
    db.session.add(new_user)
    db.session.commit()
    return json_response(message="user create success",
                         data={"access_token": get_access_token(new_user)})
def wrapper(*args, **kwargs):
    """Require a valid JWT and an enabled account, then call fn(user=...)."""
    try:
        verify_jwt_in_request()
    except NoAuthorizationError as err:
        _ = err
        return json_response(message="not authorization", status=401)
    current = get_current_user()
    if current:
        return fn(user=current, *args, **kwargs)
    return json_response(message="account is disabled", status=403)
def submit_job(request, machine_name):
    """Submits a job to the queue

    Keyword arguments:
    request -- Django HttpRequest
    machine_name -- name of the machine

    Returns {"jobid": <id>} on success, or an error json_response.
    """
    # CLEANUP: removed a dead `pass` statement and two debug print() calls.
    machine = slurmutil.GRID_RESOURCE_TABLE.get(machine_name, None)
    if not machine:
        return json_response(status="ERROR", status_code=400,
                             error="Invalid machine name: %s" % machine_name)
    qsub = machine['qsub']['bin']
    env = slurmutil.get_cred_env(request.user)
    user = request.user  # User.objects.get(username=username)
    if request.POST.get("jobfile", False):
        # Create command for sbatch on an existing slurm file
        job_file_path = request.POST.get("jobfile")
        jobfile = job_file_path
        cmd = "%s %s" % (qsub, job_file_path)
    elif request.POST.get("jobscript", False):
        # Create command for qsub from stdin data
        job_script = request.POST.get("jobscript").encode()
        # Creates a temporary job file.
        # NOTE(review): the temp directory is a hard-coded site-specific
        # path; consider making it configurable.
        tmp_job_file = tempfile.NamedTemporaryFile(
            prefix="newt_", dir='/HOME/nscc-gz_jiangli/tmp', delete=False)
        tmp_job_file.write(job_script)
        tmp_job_file.flush()
        jobfile = tmp_job_file.name
        cmd = "%s %s" % (qsub, tmp_job_file.name)
    else:
        return json_response(status="ERROR", status_code=400,
                             error="No data received")
    # Record the submission before attempting it.
    job = HPCJob(user=user, jobfile=jobfile, machine=machine_name)
    job.save()
    try:
        #runner = GlobusHelper(request.user)
        cmd_str = "ssh " + machine["hostname"] + ' " ' + cmd + ' " '
        (output, error, retcode) = run_command(cmd_str, env=env)
    except Exception as ex:
        return json_response(status="ERROR", status_code=500,
                             error="qsub failed with error: %s" % str(ex))
    if retcode != 0:
        return json_response(status="ERROR", status_code=500,
                             error="qsub failed with error: %s" % error)
    # sbatch prints "Submitted batch job <id>"; the id is the last token.
    job.jobid = output.strip().split(' ')[-1]
    job.save()
    return {"jobid": job.jobid}
def submit_job(request, machine_name):
    """Submits a job to the queue (dispatched as an async Celery task).

    Keyword arguments:
    request -- Django HttpRequest
    machine_name -- name of the machine

    Returns an ACCEPT json_response carrying the async task id.
    """
    # CLEANUP: removed a dead leading `pass` and a debug print() call.
    user = request.user  # User.objects.get(username=username)
    job = HPCJob(user=user, jobfile='', machine=machine_name)
    jobfilepath = request.POST.get("jobfilepath", None)
    if request.POST.get("jobfile", False):
        # Create command for sbatch on an existing slurm file
        job_file_path = request.POST.get("jobfile")
        job.jobfile = job_file_path
        job.state = "unsubmit"
    elif request.POST.get("jobscript", False):
        # Create command for qsub from stdin data
        job_script = request.POST.get("jobscript").encode()
        # Creates a temporary job file
        tmp_job_file = tempfile.NamedTemporaryFile(prefix="newt_",
                                                   dir=tempdir,
                                                   delete=False)
        tmp_job_file.write(job_script)
        tmp_job_file.flush()
        tmp_job_file.close()
        job.jobfile = tmp_job_file.name
        job.state = "tempfile"
        username = user.username  #taskenv["user"]
    else:
        return json_response(status="ERROR", status_code=400,
                             error="No data received")
    if request.POST.get("jobconf", False):
        try:
            job.configure(json.loads(request.POST.get("jobconf")))
        except Exception as ex:
            return json_response(status="ERROR", status_code=403,
                                 error="Error jobconf : %s" % ex)
    job.save()
    taskenv = {"user": request.user.username, "machine": machine_name}
    # NOTE(review): this branch is currently an empty placeholder — tempfile
    # jobs receive no special handling before dispatch; confirm intent.
    if job.state == "tempfile":
        pass
    taskenv["host"] = socket.gethostname()
    rest = submit_job_task.delay(taskenv, job.id, jobfilepath)
    cache.set("async-" + rest.id, "AsyncJob", 3600)
    return json_response(status="ACCEPT", status_code=201, error="",
                         content=rest.id)
def download_path(request, machine_name, path):
    # Serve a file download. For paths whose dirname is '/', stream the
    # staged copy in tempdir directly; otherwise dispatch an async task that
    # stages the file and return the task id.
    #return json_response(status="ERROR",
    #                     status_code=500,
    #                     error="This API is forbidden yet. ")
    try:
        # '/~user/...' paths are made relative by stripping the leading '/'.
        if path.startswith('/~'):
            path = path[1:]
        taskenv = {
            "user": request.user.username,
            "machine": machine_name,
            "host": settings.TASKENV_HOST
        }
        #if not os.path.isfile( path ) :
        #    return json_response(status="ERROR",
        #                         status_code=500,
        #                         error=" no such file ")
        #if not is_readable( path , request.user.username ):
        #    return json_response(status="ERROR",
        #                         status_code=403,
        #                         error="file not readable ")
        # Ensure the staging directory exists and is world-accessible so the
        # unprivileged task worker can write into it.
        if not os.path.isdir(tempdir):
            os.makedirs(tempdir)
            os.chmod(tempdir, stat.S_IWOTH + stat.S_IXOTH + stat.S_IROTH)
        # Root-level paths are assumed to be already staged in tempdir and
        # are streamed back immediately.
        if os.path.dirname(path) == '/':
            tmpfile = os.path.join(tempdir, os.path.basename(path))
            logger.error("tempfile %s" % tmpfile)
            #if not is_readable( tmpfile , request.user.username ):
            #    return json_response(status="ERROR",
            #                         status_code=403,
            #                         error="file not readable ")
            # could download tmpfile ^ ^
            file_handle = open(tmpfile, 'r')
            content_type = get_mime_type(machine_name, tmpfile, file_handle)
            logger.debug("File download requested: %s" % path)
            if content_type is None:
                content_type = "application/octet-stream"
            return StreamingHttpResponse(file_handle,
                                         content_type=content_type)
        #file_handle = open(path, 'r')
        #content_type = get_mime_type(machine_name, path, file_handle)
        #logger.debug("File download requested: %s" % path)
        #if content_type is None:
        #    content_type = "application/octet-stream"
        #return StreamingHttpResponse(file_handle, content_type=content_type)
        # Otherwise hand the staging work to the async task.
        rest = download_path_task.delay(taskenv, path)
        cache.set("async-" + rest.id, "AsyncJob", 3600)
        return json_response(status="ACCEPT", status_code=201, error="",
                             content=rest.id)
    except Exception as e:
        logger.error("Could not get file %s" % str(e))
        return json_response(status="ERROR", status_code=500, error=str(e))
def submit_job(request, machine_name):
    """Submits a job to the queue

    Keyword arguments:
    request -- Django HttpRequest
    machine_name -- name of the machine
    """
    machine = gridutil.GRID_RESOURCE_TABLE.get(machine_name, None)
    if not machine:
        return json_response(status="ERROR", status_code=400,
                             error="Invalid machine name: %s" % machine_name)
    flags = ""
    jobmanager = machine['jobmanagers']['fork']['url']
    qsub = machine['qsub']['bin']
    scheduler = machine['qsub']['scheduler']
    # Set environment flags for qsub
    if scheduler == "sge":
        sge_env_str = "-env SGE_ROOT=%s -env SGE_QMASTER_PORT=%s -env SGE_EXECD_PORT=%s" % (gridutil.SGE_ROOT, gridutil.SGE_QMASTER_PORT, gridutil.SGE_EXECD_PORT)
        flags += " " + sge_env_str
    if request.POST.get("jobfile", False):
        # Create command for qsub on an existing pbs file
        job_file_path = request.POST.get("jobfile")
        cmd = "%s %s" % (qsub, job_file_path)
    elif request.POST.get("jobscript", False):
        # Create command for qsub from stdin data
        job_script = request.POST.get("jobscript")
        # Creates a temporary job file
        tmp_job_file = tempfile.NamedTemporaryFile(prefix="newt_")
        tmp_job_file.write(job_script)
        tmp_job_file.flush()
        # Stages the temporary job file and pass it as to stdin to qsub
        flags += " -stdin -s %s" % tmp_job_file.name
        cmd = qsub
    else:
        return json_response(status="ERROR", status_code=400,
                             error="No data received")
    if scheduler != "sge":
        cmd = '/bin/bash -l -c "%s"' % cmd
    try:
        runner = GlobusHelper(request.user)
        (output, error, retcode) = runner.run_job(cmd, jobmanager, flags)
    # SYNTAX FIX: `except Exception, ex` is Python-2-only syntax; the `as`
    # form works on Python 2.6+ and Python 3.
    except Exception as ex:
        return json_response(status="ERROR", status_code=500,
                             error="qsub failed with error: %s" % str(ex))
    # NOTE(review): nothing is returned on success (implicit None) — confirm
    # whether callers expect the qsub output here.
def delete_job(request, machine_name, job_id):
    """Queue an async task that cancels job_id on machine_name."""
    if not slurmutil.GRID_RESOURCE_TABLE.get(machine_name, None):
        return json_response(status="ERROR", status_code=400,
                             error="Invalid machine name: %s" % machine_name)
    env_for_task = {"user": request.user.username, "machine": machine_name}
    task = delete_job_task.delay(env_for_task, job_id)
    # Remember the pending task for one hour so its status can be polled.
    cache.set("async-" + task.id, "AsyncJob", 3600)
    return json_response(status="ACCEPT", status_code=201, error="",
                         content=task.id)
def delete_job(request, machine_name, job_id):
    """Cancel (scancel) job_id on machine_name over ssh.

    Returns the scancel output on success, or an error json_response.
    """
    machine = slurmutil.GRID_RESOURCE_TABLE.get(machine_name, None)
    if not machine:
        return json_response(status="ERROR", status_code=400,
                             error="Invalid machine name: %s" % machine_name)
    # NOTE(review): env is fetched but not passed to run_command below —
    # confirm whether the ssh invocation needs the credential environment.
    env = slurmutil.get_cred_env(request.user)
    mycmd = "ssh " + machine["hostname"] + " ' " + ' scancel ' + job_id + " '"
    (output, error, retcode) = run_command(mycmd)
    if retcode != 0:
        # BUG FIX: the message previously said "Unable to get queue" (copied
        # from view_queue); this path deletes a job.
        return json_response(status="ERROR", status_code=500,
                             error="Unable to delete job: %s" % error)
    return (output)
def get_image(query):
    """Proxy an image lookup from the NIM service.

    Returns the raw photo response for '/photo' queries, the JSON 'items'
    list for other successful lookups, or an error json_response on
    404/other failures.
    """
    import re
    r = requests.get(nim_base_url + "/info/json/image/" + query)
    if r.status_code == 200:
        if re.search(r'\/photo\/?$', query):
            return HttpResponse(r.content,
                                content_type=r.headers['content-type'])
        return r.json()['items']
    elif r.status_code == 404:
        # BUG FIX: this branch referenced an undefined name `path`, raising
        # NameError instead of reporting the 404; the requested location is
        # `query`.
        return json_response(status="ERROR", status_code=404,
                             error="Image not found at: " + query)
    else:
        return json_response(status="ERROR", status_code=500,
                             error="Invalid image location: " + query)
def update_store_perms(request, store_name, perms):
    """Updates the permissions of the given store with perms; Returns the
    id of the store.

    Keyword arguments:
    request -- Django HttpRequest object
    store_name -- the name of the store
    perms -- list of the new permissions in the form of:
        [ { "user": <user_to_be_updated>, "perms": <updated_perms> }, ... ]
    """
    try:
        store = Store.objects.get(name=store_name)
    except ObjectDoesNotExist:
        return json_response(status="ERROR", status_code=404,
                             error="Store does not exist: %s" % store_name)
    for entry in perms:
        # Reuse the existing permission row for this user, or create one.
        try:
            perm = Permission.objects.get(user__username=entry['name'],
                                          store=store)
        except ObjectDoesNotExist:
            owner = User.objects.get(username=entry['name'])
            perm = Permission(store=store, user=owner)
        perm.type = ",".join(entry['perms'])
        perm.save()
    return store_name
def get_store_perms(request, store_name):
    """Returns a dictionary of permissions of the store in the form of:
        { "name": <store_name>,
          "perms": [ { "user": <associated_user>,
                       "perms": <permissions_of_user> }, ... ] }

    Keyword arguments:
    request -- Django HttpRequest object
    store_name -- the name of the store
    """
    storedb = redis.Redis(host=HOST, db=STOREDB)
    if store_name not in get_store(request):
        return json_response(status="ERROR", status_code=404,
                             error="Store does not exist.")
    # Permission entries are keys of the form "<store>:perms:<user>"; the
    # offset of 7 skips the ":perms:" separator after the store name.
    entries = storedb.lrange(store_name + ":perms", 0, -1)
    perms_list = [{"name": entry[len(store_name) + 7:],
                   "perms": storedb.lrange(entry, 0, -1)}
                  for entry in entries]
    return {"name": store_name, "perms": perms_list}
def update_store_perms(request, store_name, perms):
    """Updates the permissions of the given store with perms; Returns the
    list of updated permission entries.

    Keyword arguments:
    request -- Django HttpRequest object
    store_name -- the name of the store
    perms -- list of the new permissions in the form of:
        [ { "user": <user_to_be_updated>, "perms": <updated_perms> }, ... ]
    """
    storedb = redis.Redis(host=HOST, db=STOREDB)
    if store_name not in get_store(request):
        return json_response(status="ERROR", status_code=404,
                             error="Store does not exist.")
    updated = []
    for entry in perms:
        key = store_name + ":perms:" + entry['name']
        # Replace any existing permission list wholesale.
        if storedb.lrange(key, 0, -1):
            storedb.delete(key)
        for p in entry['perms']:
            storedb.rpush(key, p)
        updated.append({
            "user": entry['name'],
            "perms": storedb.lrange(key, 0, -1),
        })
    return updated
def get_store_contents(request, store_name):
    """Returns a list containing all the contents of the store in the form:
        [ { "oid": <document_id>, "data": <document_data> }, ... ]

    Keyword arguments:
    request -- Django HttpRequest object
    store_name -- the name of the store
    """
    storedb = redis.Redis(host=HOST, db=STOREDB)
    if store_name not in get_store(request):
        return json_response(status="ERROR", status_code=404,
                             error="Store does not exist.")
    # Document keys have the form "<store>:<oid>"; strip the store prefix.
    doc_keys = storedb.lrange(store_name + ":docs", 0, -1)
    return [{"oid": key[len(store_name) + 1:], "data": storedb.get(key)}
            for key in doc_keys]
def get_store_contents(request, store_name):
    """Returns a list containing all the contents of the store in the form:
        [ { "oid": <document_id>, "data": <document_data> }, ... ]

    Keyword arguments:
    request -- Django HttpRequest object
    store_name -- the name of the store
    """
    try:
        store = Store.objects.get(name=store_name)
    except ObjectDoesNotExist:
        return json_response(status="ERROR", status_code=404,
                             error="Store does not exist: %s" % store_name)
    contents = []
    for doc in store.documents.all():
        contents.append({"oid": str(doc.oid), "data": doc.data})
    return contents
def store_insert(request, store_name, initial_data):
    """Creates a new document in the store with initial_data; Returns the
    oid of the new document.

    Keyword arguments:
    request -- Django HttpRequest object
    store_name -- the name of the store
    initial_data -- document data
    """
    storedb = redis.Redis(host=HOST, db=STOREDB)
    # The next document index is the current count of docs in the store.
    index_num = len(storedb.lrange(store_name + ":docs", 0, -1))
    docname = store_name + ":" + str(index_num)
    # The payload comes from the POST body, not the initial_data argument.
    data = request.POST.get("data", None)
    if not data:
        return json_response(status="ERROR", status_code=400,
                             error="No data received.")
    storedb.set(docname, data)
    storedb.rpush(store_name + ":docs", docname)
    return str(index_num)
def get_store_perms(request, store_name):
    """Returns a dictionary of permissions of the store in the form of:
    {
        "name": <store_name>,
        "perms": [
            {
                "user": <associated_user>,
                "perms": <permissions_of_user>,
            },
            ...
        ],
    }

    Keyword arguments:
    request -- Django HttpRequest object
    store_name -- the name of the store
    """
    # Return the permissions of the store
    db = MongoClient()['stores']
    store = db["permissions"]
    res = store.find_one({"name": store_name}, {"_id": 0})
    if res:
        return res
    # Bug fix: status_code was the string "404"; every other handler in
    # this module passes an int.
    return json_response(status="ERROR", status_code=404,
                         error="Store not found")
def get_store_contents(request, store_name):
    """Returns a list containing all the contents of the store in the form of:
    [
        {
            "oid": <document_id>,
            "data": <document_data>,
        },
        ...
    ]

    Keyword arguments:
    request -- Django HttpRequest object
    store_name -- the name of the store
    """
    # Check existance of the store
    if store_name not in get_stores(request):
        return json_response(status="ERROR", status_code=404,
                             error="Store does not exist: %s" % store_name)
    # Check privlages of user attempting to access store
    # Get and return contents of the store
    collection = MongoClient()['stores'][store_name]
    cursor = collection.find({}, {"_id": 0, "data": 1, "oid": 1})
    contents = []
    for doc in cursor:
        contents.append({"oid": doc["oid"], "data": doc['data']})
    return contents
def get_store_perms(request, store_name):
    """Returns a dictionary of permissions of the store in the form of:
    {
        "name": <store_name>,
        "perms": [
            {
                "user": <associated_user>,
                "perms": <permissions_of_user>,
            },
            ...
        ],
    }

    Keyword arguments:
    request -- Django HttpRequest object
    store_name -- the name of the store
    """
    # Return the permissions of the store
    db = MongoClient()['stores']
    store = db["permissions"]
    res = store.find_one({"name": store_name}, {"_id": 0})
    if res:
        return res
    # Bug fix: status_code was the string "404"; every other handler in
    # this module passes an int.
    return json_response(status="ERROR", status_code=404,
                         error="Store not found")
def get_store_contents(request, store_name):
    """Returns a list containing all the contents of the store in the form of:
    [
        {
            "oid": <document_id>,
            "data": <document_data>,
        },
        ...
    ]

    Keyword arguments:
    request -- Django HttpRequest object
    store_name -- the name of the store
    """
    # Check existance of the store
    if store_name not in get_stores(request):
        return json_response(status="ERROR", status_code=404,
                             error="Store does not exist: %s" % store_name)
    # Check privlages of user attempting to access store
    # Get and return contents of the store
    collection = MongoClient()['stores'][store_name]
    contents = []
    for doc in collection.find({}, {"_id": 0, "data": 1, "oid": 1}):
        contents.append({"oid": doc["oid"], "data": doc['data']})
    return contents
def get_store_contents(request, store_name):
    """Returns a list containing all the contents of the store in the form of:
    [
        {
            "oid": <document_id>,
            "data": <document_data>,
        },
        ...
    ]

    Keyword arguments:
    request -- Django HttpRequest object
    store_name -- the name of the store
    """
    storedb = redis.Redis(host=HOST, db=STOREDB)
    if store_name not in get_store(request):
        return json_response(status="ERROR", status_code=404,
                             error="Store does not exist.")
    # Keys are "<store_name>:<oid>"; everything past the colon is the oid.
    oid_start = len(store_name) + 1
    return [{"oid": doc[oid_start:], "data": storedb.get(doc)}
            for doc in storedb.lrange(store_name + ":docs", 0, -1)]
def put_file(request, machine, path):
    """Writes the uploaded file to path and returns the path

    Keyword arguments:
    request -- HttpRequest containing the data
    machine -- name of the machine
    path -- path to file
    """
    # Get data from request body
    data = request.read()
    # Stage the upload in a temp file so globus-url-copy can read it from disk.
    # TODO: Get temporary path from settings.py
    tmp_file = tempfile.NamedTemporaryFile(prefix="newt_")
    try:
        tmp_file.write(data)
        tmp_file.file.flush()
        src = "file:///%s" % tmp_file.name
        env = gridutil.get_cred_env(request.user)
        dest = gridutil.get_grid_path(machine, path)
        logger.debug("Putting file to location: %s" % dest)
        (output, error, retcode) = run_command(
            gridutil.GLOBUS_CONF['LOCATION']
            + "bin/globus-url-copy %s %s" % (src, dest), env=env)
        if retcode != 0:
            return json_response(content=output, status="ERROR",
                                 status_code=500, error=error)
    finally:
        # Always close (and thereby delete) the temp file — the original
        # leaked it when the copy failed, returning before close().
        tmp_file.close()
    return {'location': path}
def get_usage(path):
    """Fetch usage items for `path` from the NIM service.

    Returns the parsed 'items' list on success, otherwise a 500 JSON
    error response.
    """
    response = requests.get(nim_base_url + "/usage/" + path + "/json/")
    if response.status_code != 200:
        return json_response(status="ERROR", status_code=500,
                             error="Invalid resource location: " + path)
    return response.json()['items']
def post(self, request, machine_name):
    """Execute the form-encoded 'command' on machine_name via the adapter."""
    logger.debug("Entering %s:%s" % (self.__class__.__name__, __name__))
    # Bug fix: use .get() so a missing "command" key reaches the intended
    # 400 response instead of raising KeyError before the check.
    command = urllib.unquote(request.POST.get('command', ''))
    if not command:
        return json_response(status="ERROR", status_code=400,
                             error="No command received.")
    return command_adapter.execute(request, machine_name, command)
def get_user_info(user_name=None, uid=None):
    """Look up a user resource by id (preferred) or by username.

    Keyword arguments:
    user_name -- username
    uid -- numeric user id

    Returns the user resource, or a 400 JSON error response when neither
    identifier is supplied.
    """
    if uid:
        return get_resource("user/id/%d/" % uid)
    if user_name:
        return get_resource("user/%s/" % user_name)
    # Bug fix: `status=ERROR` referenced an undefined name (NameError at
    # runtime); it must be the string "ERROR" like every other handler.
    return json_response(status="ERROR", status_code=400,
                         error="No data received.")
def wrapper(*args, **kwargs):
    """Reject unauthenticated requests before invoking the wrapped view."""
    # If view_func is a class view, args = [self, request, ...]
    # If view_func is a regular function, args = [request, ...]
    # Bug fix: the original hardcoded args[1] as the request, which breaks
    # for plain function views; detect it like the sibling wrappers do.
    if isinstance(args[0], HttpRequest):
        request = args[0]
    elif isinstance(args[1], HttpRequest):
        request = args[1]
    else:
        return json_response(status="ERROR", status_code=500,
                             error="Missing request object")
    if request.user.is_authenticated():
        return view_func(*args, **kwargs)
    return json_response(status="ERROR", status_code=403,
                         error="You must be logged in to access this.",
                         content=json.dumps({"login_url": "/api/auth/"}))
def query_store(request, store_name, query):
    """Queries the store; Returns the result of the query in the form of:
    [
        {
            "oid": <document_id>,
            "data": <document_data>,
        },
        ...
    ]

    Keyword arguments:
    request -- Django HttpRequest object
    store_name -- the name of the store
    query -- a query string
    """
    if store_name not in get_store(request):
        return json_response(status="ERROR", status_code=404,
                             error="Store does not exist.")
    # Querying is not supported by this backend yet.
    return json_response(status="ERROR", status_code=501,
                         error="Method not implemented.")
def execute(request, machine_name, command):
    """Run `command` locally and return its output, error text and retcode."""
    try:
        logger.debug("Running command: %s" % command)
        output, error, retcode = run_command(command)
        return {"output": output, "error": error, "retcode": retcode}
    except Exception as e:
        logger.error("Could not run command: %s" % str(e))
        return json_response(error="Could not run command: %s" % str(e),
                             status="ERROR", status_code=500)
def wrapper(*args, **kwargs):
    """Require an authenticated user before calling the wrapped view."""
    # Class-based views receive [self, request, ...]; plain view
    # functions receive [request, ...]. Find the request either way.
    if isinstance(args[0], HttpRequest):
        request = args[0]
    elif isinstance(args[1], HttpRequest):
        request = args[1]
    else:
        return json_response(status="ERROR", status_code=500,
                             error="Missing request object")
    if not request.user.is_authenticated():
        return json_response(status="ERROR", status_code=403,
                             error="You must be logged in to access this.",
                             content=json.dumps({"login_url": "/api/auth/"}))
    return view_func(*args, **kwargs)
def download_path(request, machine_name, path):
    """Stream the file at `path` back to the client.

    Keyword arguments:
    request -- Django HttpRequest
    machine_name -- name of the machine (used for MIME type lookup)
    path -- absolute path of the file to download
    """
    try:
        # Bug fix: open in binary mode — downloads may be arbitrary binary
        # content, and text mode can corrupt or fail to decode it.
        file_handle = open(path, 'rb')
        content_type = get_mime_type(machine_name, path, file_handle)
        logger.debug("File download requested: %s" % path)
        return StreamingHttpResponse(file_handle, content_type=content_type)
    except Exception as e:
        logger.error("Could not get file %s" % str(e))
        return json_response(status="ERROR", status_code=500, error=str(e))
def execute(request, machine_name, command):
    """Run `command` as the POSTed `sudo_user` via sudo.

    Keyword arguments:
    request -- Django HttpRequest (must carry a "sudo_user" POST field)
    machine_name -- name of the machine
    command -- the shell command to run
    """
    try:
        user = request.POST.get("sudo_user")
        if not user:
            # Bug fix: without this guard, a missing field silently ran
            # "sudo -u None <cmd>"; fail fast with a clear client error.
            return json_response(status="ERROR", status_code=400,
                                 error="No sudo_user received.")
        logger.debug("Running command: %s as %s" % (command, user))
        command = "sudo -u %s %s " % (user, command)
        output, error, retcode = run_command(command)
        return {"output": output, "error": error, "retcode": retcode}
    except Exception as e:
        logger.error("Could not run command: %s" % str(e))
        return json_response(error="Could not run command: %s" % str(e),
                             status="ERROR", status_code=500)
def dispatch(self, request, *args, **kwargs):
    """
    Override the dispatch method of the class view
    """
    # Run the normal dispatch, then JSON-encode any result that is not
    # already an HTTP response object.
    result = super(JSONRestView, self).dispatch(request, *args, **kwargs)
    if isinstance(result, HttpResponseBase):
        return result
    return json_response(result)
def extras_router(request, query):
    """Dispatch `query` to the first registered pattern that matches it.

    Handlers registered with req=True also receive the request object.
    Falls through to a 501 response when nothing matches.
    """
    for pattern, func, req in patterns:
        match = pattern.match(query)
        if match:
            kwargs = match.groupdict()
            return func(request, **kwargs) if req else func(**kwargs)
    return json_response(status="Unimplemented", status_code=501,
                         error="", content="query: %s" % query)
def get_dir(request, machine_name, path):
    """Returns a directory listing of path (as an array)

    Keyword arguments:
    machine_name -- name of the machine
    path -- path to file
    """
    try:
        env = gridutil.get_cred_env(request.user)
        path = gridutil.get_grid_path(machine_name, path)
        output, error, retcode = run_command(
            gridutil.GLOBUS_CONF['LOCATION'] + "bin/uberftp -ls %s" % path,
            env=env)
        if retcode != 0:
            return json_response(content=output, status="ERROR",
                                 status_code=500, error=error)
        # regular expression that captures ls output of the form:
        # drwxrwxr-x  4 shreyas   newt      32768 Apr 15 10:59 home
        patt = re.compile(
            r'(?P<perms>[\+\w@-]{10,})\s+(?P<hardlinks>\d+)\s+(?P<user>\S+)'
            r'\s+(?P<group>\S+)\s+(?P<size>\d+)'
            r'\s+(?P<date>\w{3}\s+\d+\s+[\d\:]+)\s+(?P<name>.+)$')
        entries = []
        for raw_line in output.splitlines():
            match = patt.match(raw_line.strip())
            if not match:
                # Skip header/total lines that don't look like ls entries.
                continue
            entry = match.groupdict()
            if entry['perms'].startswith('l'):
                # Symlinks are listed as "name -> target"; split them apart.
                name, symlink = entry['name'].split(' -> ')
                entry['name'] = name
                entry['symlink'] = symlink
            else:
                entry['symlink'] = ""
            entries.append(entry)
        return entries
    except Exception as e:
        logger.error("Could not get directory %s" % str(e))
        return json_response(status="ERROR", status_code=500,
                             error="Could not get directory: %s" % str(e))
def submit_job(request, machine_name):
    """Submits a job to the queue

    Keyword arguments:
    request -- Django HttpRequest
    machine_name -- name of the machine
    """
    # Get data from POST: either a server-side job file or an inline script.
    if request.POST.get("jobfile", False):
        try:
            # Bug fix: the original's try/finally called f.close() even when
            # open() itself failed, raising NameError (f unbound) and masking
            # the intended 400 response. `with` closes the file safely.
            with open(request.POST.get("jobfile"), 'r') as f:
                data = f.read()
        except Exception:
            return json_response(
                status="ERROR", status_code=400,
                error="Unable to open job file. Be sure you gave an absolute path.")
    elif request.POST.get("jobscript", False):
        data = request.POST.get("jobscript")
    else:
        return json_response(status="ERROR", status_code=400,
                             error="No data received")
    # Generate unique outfile name
    tmp_job_name = str(ObjectId())
    # Get job emulator path
    job_emu = settings.PROJECT_DIR + "/job/adapters/emulate_job_run.sh"
    # Run job with the commands in data
    job = Popen([job_emu, tmp_job_name, request.user.username, data],
                stdout=PIPE)
    # Get/return the job_id from stdout
    job_id = job.stdout.readline().rstrip()
    logger.debug("Spawned process: %s" % job_id)
    return {"jobid": job_id}