def run_query(self, query, user):
    """Compile *query* with RestrictedPython and execute it for *user*.

    Returns a tuple ``(json_data, error)`` — serialized results and ``None``
    on success, ``None`` and an error string on failure.
    """
    self._current_user = user
    try:
        error = None
        code = compile_restricted(query, '<string>', 'exec')

        # Start from a copy of the safe builtins and wire in the guard
        # hooks the restricted bytecode expects.
        builtins = safe_builtins.copy()
        builtins["_write_"] = self.custom_write
        builtins["__import__"] = self.custom_import
        builtins["_getattr_"] = getattr
        builtins["getattr"] = getattr
        builtins["_setattr_"] = setattr
        builtins["setattr"] = setattr
        builtins["_getitem_"] = self.custom_get_item
        builtins["_getiter_"] = self.custom_get_iter
        builtins["_print_"] = self._custom_print

        # Layer in our own additional set of builtins that we have
        # considered safe.
        for key in self.safe_builtins:
            builtins[key] = __builtins__[key]

        restricted_globals = {
            "__builtins__": builtins,
            "get_query_result": self.get_query_result,
            "get_source_schema": self.get_source_schema,
            "get_current_user": self.get_current_user,
            "execute_query": self.execute_query,
            "add_result_column": self.add_result_column,
            "add_result_row": self.add_result_row,
            "disable_print_log": self._custom_print.disable,
            "enable_print_log": self._custom_print.enable,
            # Supported data types
            "TYPE_DATETIME": TYPE_DATETIME,
            "TYPE_BOOLEAN": TYPE_BOOLEAN,
            "TYPE_INTEGER": TYPE_INTEGER,
            "TYPE_STRING": TYPE_STRING,
            "TYPE_DATE": TYPE_DATE,
            "TYPE_FLOAT": TYPE_FLOAT,
        }

        # TODO: Figure out the best way to have a timeout on a script
        # One option is to use ETA with Celery + timeouts on workers
        # And replacement of worker process every X requests handled.
        exec(code, restricted_globals, self._script_locals)

        result = self._script_locals['result']
        result['log'] = self._custom_print.lines
        json_data = json_dumps(result)
    except KeyboardInterrupt:
        error = "Query cancelled by user."
        json_data = None
    except Exception as e:
        error = str(type(e)) + " " + str(e)
        json_data = None

    return json_data, error
def test_compile__invalid_syntax():
    """compile_restricted() raises SyntaxError for an invalid assignment."""
    with pytest.raises(SyntaxError) as err:
        compile_restricted(INVALID_ASSINGMENT, '<string>', 'exec')
    # CPython changed the wording of this message in 3.8.
    expected = (
        "cannot assign to literal at statement:"
        if IS_PY38_OR_GREATER
        else "can't assign to literal at statement:"
    )
    assert expected in str(err.value)
def test_compile_CPython_warning(recwarn, mocker):  # pragma: no cover
    """It warns when using another Python implementation than CPython."""
    assert platform.python_implementation() != 'CPython'
    with pytest.warns(RuntimeWarning) as record:
        compile_restricted('42')
    assert len(record) == 1
    expected = (
        'RestrictedPython is only supported on CPython: use on other Python '
        'implementations may create security issues.'
    )
    assert str(record[0].message) == expected
def syntax_error_check(expr):
    """Validate *expr* as an eval-mode expression.

    Compiles under RestrictedPython first; on a SyntaxError, re-compiles
    with the stock compiler so the caller gets CPython's clearer message.
    """
    if not expr:
        return
    cleaned = strip_query(expr)
    try:
        compile_restricted(cleaned, filename='<inline code>', mode='eval')
    except SyntaxError:
        # Let the standard compiler raise its own (clearer) SyntaxError.
        compile(cleaned, filename='<inline code>', mode='eval')
def test_compile___compile_restricted_mode__1(recwarn, mocker):
    """It warns when using another Python implementation than CPython."""
    if platform.python_implementation() == 'CPython':  # pragma: no cover
        # Using CPython we have to fake the check:
        mocker.patch('RestrictedPython.compile.IS_CPYTHON', new=False)
    compile_restricted('42')
    assert len(recwarn) == 1
    warning = recwarn.pop()
    assert warning.category == RuntimeWarning
    assert str(warning.message) == (
        'RestrictedPython is only supported on CPython: use on other Python '
        'implementations may create security issues.'
    )
def test_submission(self, submission_code_str):
    """Judge *submission_code_str* against every test case of the problem.

    One verdict string per test case accumulates in self.submission_result.
    Sets self.compile_error_flag / self.compile_error_info when the code
    fails to compile under RestrictedPython.
    """
    self.submission_result = []
    self.compile_error_flag = False
    if not self.problem_dict:
        return

    try:
        compile_restricted(submission_code_str, '<inline>', 'exec')
    except Exception as e:
        self.compile_error_flag = True
        self.compile_error_info = repr(e)
        return

    for test_case in self.problem_dict['test_cases']:
        print('testing test case:', test_case, sep='\n')
        # Append a driver line so the sandboxed process stores the answer.
        suffix = '\noutput = main_function' + str(tuple(test_case[0]))
        try:
            manager = Manager()
            py_code = submission_code_str + suffix
            ret_dict = manager.dict()
            p = Process(target=target_function, args=(py_code, ret_dict))
            p.start()
            # BUGFIX: join() with a timeout instead of an unconditional
            # sleep(time_limit) + terminate(), so fast submissions no
            # longer burn the full time limit before being judged.
            p.join(self.time_limit)
            if p.is_alive():
                p.terminate()
                p.join()
            if not ret_dict:
                self.submission_result.append('服务器资源不足!')
                return
            print('submission result: ', ret_dict['output'])
            if ret_dict['RE_flag']:
                self.submission_result.append('Runtime Error! ' + ret_dict['RE_info'])
            elif ret_dict['TLE_flag']:
                self.submission_result.append('Time Limit Exceeded! ')
            elif ret_dict['output'] == test_case[1]:
                self.submission_result.append('Accepted! ')
            else:
                self.submission_result.append(
                    'Wrong Answer! ')  # add error types here maybe
        except Exception as e:
            print(repr(e))
def evalPythonString(string):
    '''
    Executes strings of python code

    Takes in a string and evaluates it. Returns a tuple of the print
    output and a dictionary of the variables as they existed at the end
    of the code execution.
    '''
    # Capture whatever the code printed into a well-known variable.
    string += "\ncodeShellOutput_ = printed\n"
    code = compile_restricted(string, '<string>', 'exec')
    restrictedScope = {
        '_print_': PrintCollector,
        '__builtins__': safe_builtins,
        'codeShellOutput_': '',
        '_getiter_': iter,
        '__name__': '<string>',
        '_getattr_': getattr,
        '_write_': lambda obj: obj,
    }
    codeOutput = ""
    try:
        exec(code, restrictedScope)
        codeOutput = restrictedScope['codeShellOutput_']
        # Strip the sandbox plumbing out of the returned namespace.
        # BUGFIX: pop() with a default instead of del, so a missing key
        # cannot raise an uncaught KeyError (only ImportError is handled
        # below).
        restrictedScope.pop('codeShellOutput_', None)
        restrictedScope.pop('_print_', None)
        # These are apparently created by exec or something
        restrictedScope.pop('__builtins__', None)
        restrictedScope.pop('_print', None)
    except ImportError:
        codeOutput = "ImportError. Importing is disallowed"
    return (codeOutput, restrictedScope)
def bytecode(self):
    """Compile this validator's stored code into restricted bytecode."""
    filename = "<EntityValidator {}>".format(self["name"])
    return compile_restricted(self["code"], filename=filename, mode="exec")
def safeEvaluate(src, stdout=sys.__stdout__, stderr=sys.__stderr__):
    """Safely evaluate untrusted python code.

    Returns the ``result`` global set by the code, ``None`` when nothing
    was set, or the exception instance (with augmented args) on failure.
    """
    try:
        code = compile_restricted(src, '<string>', 'exec')
    except SyntaxError as e:
        # Normalise the SyntaxError args to (..., message, line, column).
        if len(e.args) == 2:
            e.args = e.args + (e.args[0], e.args[1][1], e.args[1][2])
        else:
            m = re.search('^Line (\d+): (.*)$', e.args[0])
            if m:
                e.args = e.args + (m.group(2), int(m.group(1)), -1)
        return e

    class GeneralNonCollector:
        '''Redirect text to stdout'''

        def __init__(self):
            self.buffer = str()

        def write(self, text):
            self.buffer += text
            if text[-1] == "\n":
                stdout.write(self.buffer[:-1])
                self.buffer = str()

    restricted_globals = dict(
        __builtins__=Guards.safe_builtins,
        _print_=GeneralNonCollector,
        _getiter_=list.__iter__,
        _write_=Guards.full_write_guard,
        _getattr_=getattr,
        _getitem_=getitem,
        _inplacevar_=inplacevar_wrapper,
        list=list,
        dict=dict,
        enumerate=enumerate,
        math=math,
        reduce=reduce,
        map=map,
        sum=sum)
    # Python 2: dict.items() returns lists, so + concatenates them.
    restricted_globals = dict(restricted_globals.items() + safe_cadmium().items())

    (ostdout, ostderr) = (sys.stdout, sys.stderr)
    (sys.stdout, sys.stderr) = (stdout, stderr)
    ex = None
    try:
        exec(code) in restricted_globals
    except (AttributeError, NameError, TypeError, ImportError) as e:
        (typ, val, tb) = sys.exc_info()
        e.args = e.args + (e.args[0], traceback.extract_tb(tb)[-1][1], -1)
        ex = e
    finally:
        # Always restore the real stdout/stderr.
        (sys.stdout, sys.stderr) = (ostdout, ostderr)
    if ex:
        return ex
    if 'result' in restricted_globals:
        return restricted_globals['result']
    return None
def Main():
    """Bind a TCP server socket on port 2357 and hand each client to a thread."""
    host = ""
    port = 2357
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.bind((host, port))
    print("socket bound to port", port)

    # put the socket into listening mode
    s.listen()
    print("socket is listening")

    # Smoke-test the sandbox: file access should be rejected under
    # safe_globals.
    try:
        loc = {}
        source_code = """
open('requirements.txt', 'w')"""
        byte_code = compile_restricted(source_code, '<inline>', 'exec')
        exec(byte_code, safe_globals, loc)
    except Exception as e:
        # BUGFIX: report the failure instead of printing a blank line and
        # silently discarding the exception.
        print("sandbox check:", e)

    # a forever loop until client wants to exit
    while True:
        # establish connection with client
        c, addr = s.accept()
        # lock acquired by client
        print_lock.acquire()
        print('Connected to :', addr[0], ':', addr[1])
        # Start a new thread and return its identifier
        start_new_thread(threaded, (c,))

    s.close()  # unreachable; kept to mirror the original structure
def threaded(c):
    """
    function handles each client separately, so using threads
    :param c: socket object usable to send and receive data on the connection
    :return: none
    """
    c.send("server connected".encode())
    while True:
        # data received from client
        try:
            data = c.recv(1024)
            if not data:
                print('Disconnecting from client...')
                # lock released on exit
                print_lock.release()
                break
            with stdoutIO() as s:
                # BUGFIX: the original passed safe_globals as the
                # *filename* argument of compile_restricted and never
                # executed the bytecode, so clients always received an
                # empty reply. Compile, then exec in the restricted
                # globals.
                byte_code = compile_restricted(data, '<inline>', 'exec')
                exec(byte_code, safe_globals, {})
            # send the captured stdout back to the client
            c.send(s.getvalue().encode())
        except ConnectionResetError:
            print("Client lost connection...")
            print_lock.release()
            break
        except Exception as e:
            print(e.args)
    # connection closed
    c.close()
def __load_tfs(self):
    """
    Loads and connects all transfer functions
    """
    self._notify("Loading transfer functions")
    for tf in self.sim_config.transfer_functions:
        self._notify("Loading transfer function: {}".format(tf.name))
        #tf.code = correct_indentation(tf.code, 0)
        tf.code = tf.code.strip() + "\n"
        logger.debug("TF: " + tf.name + "\n" + tf.code + '\n')

        try:
            new_code = compile_restricted(tf.code, '<string>', 'exec')
        # pylint: disable=broad-except
        except Exception as e:
            # Compilation failed — register it as flawed and move on.
            logger.error(
                "Error while compiling the transfer function {name} in restricted "
                "mode with error {err}".format(name=tf.name, err=str(e)))
            tfm.set_flawed_transfer_function(tf.code, tf.name, e)
            continue

        try:
            tfm.set_transfer_function(tf.code, new_code, tf.name, tf.active)
        except tfm.TFLoadingException as loading_e:
            logger.error(loading_e)
            tfm.set_flawed_transfer_function(tf.code, tf.name, loading_e)
def submit():
    """Store a code submission, run it in a restricted sandbox, and check
    the data set's assertions against it. Returns JSON {"vals": [...]}."""
    data_set = request.form['dataset']
    successes = {"vals": []}
    submission = Submission(
        code=request.form['text'],
        data_set=data_set,
        user=current_user.email
    )
    db.session.add(submission)
    db.session.commit()

    try:
        code = compile_restricted(request.form['text'], '<string>', 'exec')
        restricted_globals = dict(__builtins__=safe_builtins)
        # BUGFIX: "exec(code) in restricted_globals" is Python 2 syntax; in
        # Python 3 it executed the code in THIS function's scope (no
        # sandboxing) and then evaluated a meaningless membership test.
        exec(code, restricted_globals)
    except Exception as x:
        print("Error here")
        error = "Error when defining function:\n" + str(x)
        return jsonify({"vals": []})

    _asserts = Assert.query.filter_by(data_set=int(data_set)).all()
    for item in _asserts:
        try:
            exec(str(item), restricted_globals)
            successes["vals"].append(["1"])
        except AssertionError:
            successes["vals"].append(["0", "Assertion not satisfied"])
        except Exception as e:
            print(sys.exc_info()[2].tb_lineno)
            successes["vals"].append(["0", str(e)])
    return jsonify(successes)
def __load_tfs(self):
    """
    Loads and connects all transfer functions
    """
    self._notify("Loading transfer functions")
    # Create transfer functions
    import_referenced_python_tfs(self.bibi, self.exc.dir)

    for i, tf in enumerate(self.bibi.transferFunction):
        self._notify("Generating transfer function: %i" % (i + 1))
        tf_code = generate_tf(tf, self.bibi)
        self._notify("Loading transfer function: %s" % tf.name)
        tf_code = correct_indentation(tf_code, 0)
        tf_code = tf_code.strip() + "\n"
        logger.debug("TF: " + tf.name + "\n" + tf_code + '\n')

        try:
            new_code = compile_restricted(tf_code, '<string>', 'exec')
        # pylint: disable=broad-except
        except Exception as e:
            # Compilation failed — register as flawed and continue.
            message = "Error while compiling the updated transfer function named " \
                + tf.name + \
                " in restricted mode.\n" \
                + str(e)
            logger.error(message)
            nrp.set_flawed_transfer_function(tf_code, tf.name, e)
            continue

        try:
            nrp.set_transfer_function(tf_code, new_code, tf.name)
        except nrp.TFLoadingException as loading_e:
            logger.error(loading_e)
            nrp.set_flawed_transfer_function(tf_code, tf.name, loading_e)
def execute_schema_code(branch, schema_code, context=None):
    """Execute user schema code under RestrictedPython and return the
    validated schema produced by its stage() function."""
    # TODO: use starlark-go for executing schema code
    # for now RestrictedPython is used
    byte_code = compile_restricted(schema_code, '<inline>', 'exec')
    my_globals = {
        '__builtins__': limited_builtins,
        '_getattr_': RestrictedPython.Guards.safer_getattr,
        '_getiter_': RestrictedPython.Eval.default_guarded_getiter,
        '_iter_unpack_sequence_': RestrictedPython.Guards.guarded_iter_unpack_sequence,
    }
    my_locals = {}
    exec(byte_code, my_globals, my_locals)  # pylint: disable=exec-used
    # Promote the script's definitions so stage() can see them.
    my_globals.update(my_locals)

    if context is None:
        context = {
            'is_ci': True,
            'is_dev': False,
        }
    ctx = SchemaCodeContext(branch.name, context)
    my_globals['ctx'] = ctx

    stage_locals = {}
    exec('schema = stage(ctx)', my_globals, stage_locals)  # pylint: disable=exec-used
    schema = stage_locals['schema']

    error = schemaval.validate(schema)
    if error:
        raise Exception(error)
    return schema
def create_rmodule(): global rmodule fn = os.path.join(_HERE, 'restricted_module.py') f = open(fn, 'r') source = f.read() f.close() # Sanity check compile(source, fn, 'exec') # Now compile it for real code = compile_restricted(source, fn, 'exec') rmodule = { '__builtins__': { '__import__': __import__, 'None': None, '__name__': 'restricted_module' } } builtins = getattr(__builtins__, '__dict__', __builtins__) for name in ( 'map', 'reduce', 'int', 'pow', 'range', 'filter', 'len', 'chr', 'ord', ): rmodule[name] = builtins[name] exec code in rmodule
def run_python(code, accession):
    """
    Run some restricted Python code for constructing the labels of accessions
    """
    if code.startswith("python:"):
        # The python code should be stripped
        raise AttributeError
    # In order to get the result of the Python code out, we have to wrap it like this
    code = 'print ' + code + ';result = printed'
    # We compile the code in a restricted environment
    compiled = compile_restricted(code, '<string>', 'exec')

    # The getter is needed so that attributes from the accession can be used for the labels
    def mygetitem(obj, attr):
        return obj[attr]

    # The following globals are usable from the restricted Python code
    restricted_globals = dict(
        __builtins__=safe_builtins,  # Use only some safe Python builtins
        accession=accession,         # The accession is needed for the labels
        _print_=PrintCollector,      # Pass this to get hold of the result
        _getitem_=mygetitem,         # Needed for accessing the accession
        _getattr_=getattr)           # Pass the standard getattr
    # The code is now executed in the restricted environment
    exec(compiled) in restricted_globals
    # We collect the result variable from the restricted environment
    return restricted_globals['result']
def exec_code(code: str):
    """Compile *code* with RestrictedPython and execute it in this module's
    global namespace, which is then returned.

    NOTE(security): ``policy=None`` disables RestrictedPython's policy
    transformations, and executing into ``globals()`` pollutes this
    module's namespace — this is effectively unrestricted exec. Review
    before using on untrusted input.
    """
    compiled = compile_restricted(code, filename='<inline code>', mode='exec', policy=None)
    exec(compiled, globals(), globals())
    return globals()
def run_request_script(
    script: str,
    request_body: str,
    log_id: Optional[int] = None,
) -> None:
    """Task for run custom scripts from http stubs.

    :param log_id: LogEntry.id
    :param script: HTTPStub.request_script
    :param request_body: text body from a request
    """
    log: Optional[LogEntry] = None
    if log_id is not None:
        log = LogEntry.objects.get(pk=log_id)

    # The script sees the request body plus the restricted builtins.
    exec_scope = {'request_body': request_body, **restricted_builtins}
    byte_code = compile_restricted(script)
    try:
        exec(byte_code, exec_scope, None)  # noqa: S102, WPS421
    except SoftTimeLimitExceeded:
        log_msg = 'Error: Execution time limit exceeded'
    except Exception as err:
        log_msg = f'Error: {err}'
    else:
        log_msg = 'Done'

    if log is not None:
        log.result_script = log_msg
        log.save()
def compile_python_code(request):
    """ Function for compiling Python code """
    # Get the submitted untrusted code
    untrustedCode = request.GET.get('untrustedCode')

    try:
        # Get the function name from untrusted code - ### Can be changed to use actual lesson title from ajax call ###
        # BUGFIX: name extraction now lives inside the try block and handles
        # a non-matching regex; previously code without a "def" crashed the
        # view with an uncaught AttributeError on None.
        match = re.search('def (.*)():', untrustedCode)
        if match is None:
            raise SyntaxError("no function definition found")
        lessonTitle = match.group(1).replace('(', '').replace(')', '')

        loc = {}
        byteCode = compile_restricted(untrustedCode, '<inline>', 'exec')
        exec(byteCode, safe_globals, loc)
        result = loc[lessonTitle]()
        data = {'output': result}
    except SyntaxError as e:
        data = {
            'output': "Error with the input code. Take another look at your code." + str(e)
        }
    except Exception:
        data = {
            'output': "Error with the input code. Take another look at your code."
        }
    return JsonResponse(data)
def safe_exec(script, _globals=None, _locals=None, restrict_commit_rollback=False):
    """Execute *script* compiled by RestrictedPython inside frappe's safe
    globals. Returns (exec_globals, _locals).

    Raises ServerScriptNotEnabled when server scripts are disabled via
    site_config.json.
    """
    # server scripts can be disabled via site_config.json
    # they are enabled by default
    if 'server_script_enabled' in frappe.conf:
        enabled = frappe.conf.server_script_enabled
    else:
        enabled = True
    if not enabled:
        frappe.throw(_('Please Enable Server Scripts'), ServerScriptNotEnabled)

    # build globals
    exec_globals = get_safe_globals()
    if _globals:
        exec_globals.update(_globals)

    if restrict_commit_rollback:
        exec_globals.frappe.db.pop('commit', None)
        exec_globals.frappe.db.pop('rollback', None)

    # execute script compiled by RestrictedPython
    frappe.flags.in_safe_exec = True
    try:
        exec(compile_restricted(script), exec_globals, _locals)  # pylint: disable=exec-used
    finally:
        # BUGFIX: always clear the flag, even when the script raises;
        # previously an exception left in_safe_exec stuck at True.
        frappe.flags.in_safe_exec = False

    return exec_globals, _locals
def __create_fun(self, eoperation):
    """Compile *eoperation*'s generated code and attach the resulting
    function to the generated python class under its normalized name."""
    name = eoperation.normalized_name()
    namespace = {}
    # code = compile(eoperation.to_code(), "<str>", "exec")
    # exec(code, namespace)
    code = compile_restricted(eoperation.to_code(), '<inline>', 'exec')
    # NOTE(review): safe_builtins is used directly as the globals dict —
    # exec may therefore mutate it. Verify this is intentional.
    exec(code, safe_builtins, namespace)
    setattr(self.python_class, name, namespace[name])
def bytecode(self):
    """Compile this endpoint handler's stored code into restricted bytecode."""
    filename = "<Endpoint Handler {} {}>".format(
        self["method"], self.endpoint()["name"])
    return compile_restricted(self["code"], filename=filename, mode="exec")
def readDict(source_code):
    """Execute *source_code* in a restricted namespace and return the ``d``
    object it defines (an empty list when no ``d`` was produced)."""
    namespace = {}
    byte_code = compile_restricted(source=source_code,
                                   filename='<inline>',
                                   mode='exec')
    exec(byte_code, safe_builtins, namespace)
    # [] (not {}) preserves the original contract for the missing case.
    return namespace.get('d', [])
def compile_callbacks_file(source_code):
    """Execute a callbacks file in a restricted namespace and return its
    ``objects`` mapping (empty dict when none was defined)."""
    namespace = {}
    byte_code = compile_restricted(source=source_code,
                                   filename='<inline>',
                                   mode='exec')
    exec(byte_code, safe_builtins, namespace)
    return namespace.get('objects', {})
def Execute_User_Code():
    """Compile the module-level user code once, then evaluate the feature
    it defines for every time series, appending each value to
    New_feature_vector."""
    # Append a driver line that calls the user's feature function.
    user_code = usercode + "\nresult = {0}(*args, **kwargs)".format(featurename)
    byte_code = compile_restricted(user_code, filename="<user_code>", mode="exec")
    for series in tqdm(Alltimeseries):
        New_feature_vector.append(execute_user_code(byte_code, series))
def _compile_file(self, name):
    """Read, restricted-compile and verify the module file *name* from _HERE."""
    path = os.path.join(_HERE, name)
    with open(path, "r") as f:
        source = f.read()
    co = compile_restricted(source, path, "exec")
    verify.verify(co)
    return co
def run_query(self, query, user):
    """Compile *query* with RestrictedPython and execute it.

    Returns a tuple ``(json_data, error)`` — serialized results and ``None``
    on success, ``None`` and an error string on failure.
    """
    try:
        error = None
        code = compile_restricted(query, '<string>', 'exec')

        # Copy the safe builtins and wire in the guard hooks the
        # restricted bytecode expects.
        builtins = safe_builtins.copy()
        builtins["_write_"] = self.custom_write
        builtins["__import__"] = self.custom_import
        builtins["_getattr_"] = getattr
        builtins["getattr"] = getattr
        builtins["_setattr_"] = setattr
        builtins["setattr"] = setattr
        builtins["_getitem_"] = self.custom_get_item
        builtins["_getiter_"] = self.custom_get_iter
        builtins["_print_"] = self._custom_print

        # Layer in our own additional set of builtins that we have
        # considered safe.
        for key in self.safe_builtins:
            builtins[key] = __builtins__[key]

        restricted_globals = {
            "__builtins__": builtins,
            "get_query_result": self.get_query_result,
            "get_source_schema": self.get_source_schema,
            "execute_query": self.execute_query,
            "add_result_column": self.add_result_column,
            "add_result_row": self.add_result_row,
            "disable_print_log": self._custom_print.disable,
            "enable_print_log": self._custom_print.enable,
            # Supported data types
            "TYPE_DATETIME": TYPE_DATETIME,
            "TYPE_BOOLEAN": TYPE_BOOLEAN,
            "TYPE_INTEGER": TYPE_INTEGER,
            "TYPE_STRING": TYPE_STRING,
            "TYPE_DATE": TYPE_DATE,
            "TYPE_FLOAT": TYPE_FLOAT,
        }

        # TODO: Figure out the best way to have a timeout on a script
        # One option is to use ETA with Celery + timeouts on workers
        # And replacement of worker process every X requests handled.
        exec(code, restricted_globals, self._script_locals)

        result = self._script_locals['result']
        result['log'] = self._custom_print.lines
        json_data = json_dumps(result)
    except KeyboardInterrupt:
        error = "Query cancelled by user."
        json_data = None
    except Exception as e:
        error = str(type(e)) + " " + str(e)
        json_data = None

    return json_data, error
def from_directory_dict(cls, dic):
    """Build an instance from a {filename: source} mapping by
    restricted-compiling and instrumenting each module."""
    code = {}
    for filename, source in dic.items():
        module_name = filename.split('.py')[0]
        compiled = compile_restricted(source, filename, 'exec')
        code[module_name] = Instrument.instrument(compiled)
    return cls(code)
def __get_line_number_of_error(self, code): test = TestErrorLineExtraction.ret_none restricted = compile_restricted(code, '<string>', 'exec') exec restricted try: return test() except Exception: tb = sys.exc_info()[2] return extract_line_number(tb)
def checkUnpackSequenceExpression(self): co = compile_restricted("[x for x, y in [(1, 2)]]", "<string>", "eval") verify.verify(co) calls = [] def getiter(s): calls.append(s) return list(s) globals = {"_getiter_": getiter} exec co in globals, {} self.assertEqual(calls, [[(1,2)], (1, 2)])
def checkUnpackSequenceSingle(self): co = compile_restricted("x, y = 1, 2", "<string>", "single") verify.verify(co) calls = [] def getiter(s): calls.append(s) return list(s) globals = {"_getiter_": getiter} exec co in globals, {} self.assertEqual(calls, [(1, 2)])
def run_query(self, query, user):
    """Compile and execute a restricted query script.

    Returns (json_data, error); exactly one of the two is None.
    """
    try:
        error = None
        code = compile_restricted(query, '<string>', 'exec')

        # BUGFIX: work on a copy — the original mutated the module-level
        # safe_builtins dict, leaking per-instance hooks (bound methods of
        # self) into every other user of safe_builtins.
        builtins = safe_builtins.copy()
        builtins["_write_"] = self.custom_write
        builtins["__import__"] = self.custom_import
        builtins["_getattr_"] = getattr
        builtins["getattr"] = getattr
        builtins["_setattr_"] = setattr
        builtins["setattr"] = setattr
        builtins["_getitem_"] = self.custom_get_item
        builtins["_getiter_"] = self.custom_get_iter
        builtins["_print_"] = self._custom_print

        restricted_globals = dict(__builtins__=builtins)
        restricted_globals["get_query_result"] = self.get_query_result
        restricted_globals["execute_query"] = self.execute_query
        restricted_globals["add_result_column"] = self.add_result_column
        restricted_globals["add_result_row"] = self.add_result_row
        restricted_globals["disable_print_log"] = self._custom_print.disable
        restricted_globals["enable_print_log"] = self._custom_print.enable

        # Supported data types
        restricted_globals["TYPE_DATETIME"] = TYPE_DATETIME
        restricted_globals["TYPE_BOOLEAN"] = TYPE_BOOLEAN
        restricted_globals["TYPE_INTEGER"] = TYPE_INTEGER
        restricted_globals["TYPE_STRING"] = TYPE_STRING
        restricted_globals["TYPE_DATE"] = TYPE_DATE
        restricted_globals["TYPE_FLOAT"] = TYPE_FLOAT
        restricted_globals["sorted"] = sorted
        restricted_globals["reversed"] = reversed
        restricted_globals["min"] = min
        restricted_globals["max"] = max

        # TODO: Figure out the best way to have a timeout on a script
        # One option is to use ETA with Celery + timeouts on workers
        # And replacement of worker process every X requests handled.
        exec(code) in restricted_globals, self._script_locals

        result = self._script_locals['result']
        result['log'] = self._custom_print.lines
        json_data = json.dumps(result)
    except KeyboardInterrupt:
        error = "Query cancelled by user."
        json_data = None
    except Exception as e:
        error = str(type(e)) + " " + str(e)
        json_data = None

    return json_data, error
def run_query(self, query):
    """Compile and execute a restricted query script.

    Returns (json_data, error); exactly one of the two is None.
    """
    try:
        error = None
        code = compile_restricted(query, '<string>', 'exec')

        # BUGFIX: work on a copy — the original mutated the module-level
        # safe_builtins dict, leaking per-instance hooks (bound methods of
        # self) into every other user of safe_builtins.
        builtins = safe_builtins.copy()
        builtins["_write_"] = self.custom_write
        builtins["__import__"] = self.custom_import
        builtins["_getattr_"] = getattr
        builtins["getattr"] = getattr
        builtins["_setattr_"] = setattr
        builtins["setattr"] = setattr
        builtins["_getitem_"] = self.custom_get_item
        builtins["_getiter_"] = self.custom_get_iter
        builtins["_print_"] = self._custom_print

        restricted_globals = dict(__builtins__=builtins)
        restricted_globals["get_query_result"] = self.get_query_result
        restricted_globals["execute_query"] = self.execute_query
        restricted_globals["add_result_column"] = self.add_result_column
        restricted_globals["add_result_row"] = self.add_result_row
        restricted_globals["disable_print_log"] = self._custom_print.disable
        restricted_globals["enable_print_log"] = self._custom_print.enable

        # Supported data types
        restricted_globals["TYPE_DATETIME"] = TYPE_DATETIME
        restricted_globals["TYPE_BOOLEAN"] = TYPE_BOOLEAN
        restricted_globals["TYPE_INTEGER"] = TYPE_INTEGER
        restricted_globals["TYPE_STRING"] = TYPE_STRING
        restricted_globals["TYPE_DATE"] = TYPE_DATE
        restricted_globals["TYPE_FLOAT"] = TYPE_FLOAT
        restricted_globals["sorted"] = sorted
        restricted_globals["reversed"] = reversed
        restricted_globals["min"] = min
        restricted_globals["max"] = max

        # TODO: Figure out the best way to have a timeout on a script
        # One option is to use ETA with Celery + timeouts on workers
        # And replacement of worker process every X requests handled.
        exec(code) in restricted_globals, self._script_locals

        result = self._script_locals['result']
        result['log'] = self._custom_print.lines
        json_data = json.dumps(result)
    except KeyboardInterrupt:
        error = "Query cancelled by user."
        json_data = None
    except Exception as e:
        error = str(e)
        json_data = None

    return json_data, error
def _compile_str(self, text, name):
    """Restricted-compile *text* and build safe globals for executing it.

    Returns a (code, globals) pair.
    """
    from RestrictedPython import compile_restricted
    from AccessControl.ZopeGuards import get_safe_globals, guarded_getattr

    code = compile_restricted(text, name, 'exec')
    g = get_safe_globals()
    g['_getattr_'] = guarded_getattr
    g['__debug__'] = 1        # so assert statements are active
    g['__name__'] = __name__  # so classes can be defined in the script
    return code, g
def _get_migrate_function(code):
    """Compile *code* under RestrictedPython and return the ``migrate``
    callable it defines."""
    byte_code = compile_restricted(code, filename="<inline code>", mode="exec")
    glob = safe_globals.copy()
    # Libraries the migration script is allowed to use.
    glob.update(
        sa=sqlalchemy,
        sqlalchemy_jsonfield=sqlalchemy_jsonfield,
        morpfw=morpfw,
        sqlalchemy_utils=sqlalchemy_utils,
    )
    loc = {}
    exec(byte_code, glob, loc)
    return loc["migrate"]
def checkUnpackSequenceExpression(self): co = compile_restricted("[x for x, y in [(1, 2)]]", "<string>", "eval") verify.verify(co) calls = [] def getiter(s): calls.append(s) return list(s) globals = {"_getiter_": getiter} exec co in globals, {} self.assertEqual(calls, [[(1, 2)], (1, 2)])
def _exec(device, code, **args):
    """Run *code* — remotely on *device* when one is given, otherwise
    locally under RestrictedPython — and return the result of its ``func``.

    Returns None (after logging) on any failure.
    """
    try:
        if device:
            return device.execute(code, **args)
        # BUGFIX: collect definitions in an explicit namespace. The
        # original did "func = None; exec(res); if func:", which cannot
        # work in Python 3 — exec() cannot rebind a function-local, so
        # func was always None and the compiled code's function never ran.
        namespace = dict(globals())
        compiled = compile_restricted(code, '<string>', 'exec')
        exec(compiled, namespace)
        func = namespace.get('func')
        if func:
            return func(**args)
        log_err(None, 'failed to execute, no function')
    except Exception:
        # BUGFIX: narrowed from a bare except so KeyboardInterrupt /
        # SystemExit are no longer swallowed.
        log_err(None, 'failed to execute')
def py27(request): """ { "status":"ok", "counter":13934721, "data":[ "something for data" ], "events":[ { "message":{ "id":"14333031", "room":"bgnori", "public_session_id":"IrrMxt", "icon_url":"http://www.gravatar.com/avatar/a00efd2efcb4f4efb65f01efd366f4b2.jpg", "type":"user", "speaker_id":"bgnori", "nickname":"bgnori", "text":".", "timestamp":"2013-03-17T07:38:51Z", "local_id":"pending-IrrMxt-4"}, "event_id":13934721 } ]} """ try: data = json.loads(request.data) except: return Response('bad json', mimetype="text/plain") userdata = None for evt in data['events']: userdata = evt.get("data", None) g = build_global(userdata) #print >> sys.stderr, g for evt in data['events']: msg = evt.get("message", None) if msg and msg["text"].startswith(MAGIC): source = msg["text"][len(MAGIC)+1:] try: r = eval(compile_restricted(source, "<string>", "eval"), g) except Exception, e: return Response(str(e), mimetype="text/plain") return Response(str(r), mimetype="text/plain") else: print >> sys.stderr, "nothing to send" break
def run_query(self, query): try: error = None code = compile_restricted(query, '<string>', 'exec') safe_builtins["_write_"] = custom_write safe_builtins["__import__"] = custom_import safe_builtins["_getattr_"] = getattr safe_builtins["getattr"] = getattr safe_builtins["_setattr_"] = setattr safe_builtins["setattr"] = setattr safe_builtins["_getitem_"] = custom_get_item safe_builtins["_getiter_"] = custom_get_iter script_locals = { "result" : { "rows" : [], "columns" : [] } } restricted_globals = dict(__builtins__=safe_builtins) restricted_globals["get_query_result"] = get_query_result restricted_globals["execute_query"] = execute_query restricted_globals["add_result_column"] = add_result_column restricted_globals["add_result_row"] = add_result_row restricted_globals["TYPE_DATETIME"] = TYPE_DATETIME restricted_globals["TYPE_BOOLEAN"] = TYPE_BOOLEAN restricted_globals["TYPE_INTEGER"] = TYPE_INTEGER restricted_globals["TYPE_STRING"] = TYPE_STRING restricted_globals["TYPE_DATE"] = TYPE_DATE restricted_globals["TYPE_FLOAT"] = TYPE_FLOAT # TODO: Figure out the best way to have a timeout on a script # One option is to use ETA with Celery + timeouts on workers # And replacement of worker process every X requests handled. exec(code) in restricted_globals, script_locals if script_locals['result'] is None: raise Exception("result wasn't set to value.") json_data = json.dumps(script_locals['result']) except KeyboardInterrupt: error = "Query cancelled by user." json_data = None except Exception as e: raise sys.exc_info()[1], None, sys.exc_info()[2] return json_data, error
def create_rmodule(): global rmodule fn = os.path.join(here, 'restricted_module.py') f = open(fn, 'r') source = f.read() f.close() # Sanity check compile(source, fn, 'exec') # Now compile it for real code = compile_restricted(source, fn, 'exec') rmodule = {'__builtins__':{'__import__':__import__, 'None':None}} builtins = getattr(__builtins__, '__dict__', __builtins__) for name in ('map', 'reduce', 'int', 'pow', 'range', 'filter', 'len', 'chr', 'ord', ): rmodule[name] = builtins[name] exec code in rmodule
def compile_indexer(self, indexer_name, func):
    """Compile *func* (indexer source) and return its ``fun`` callable.

    When self.restrict is set, the code is compiled and evaluated under
    RestrictedPython guards; otherwise it is compiled normally.
    """
    safe_globals = dict(LuceneDocument=LuceneDocument)
    safe_locals = {}
    eval_fn = "<Eval: %s>" % indexer_name
    if self.restrict:
        # NOTE(review): this mutates the shared safe_builtins dict —
        # presumably intentional (one import hook per process); verify.
        safe_builtins['__import__'] = _import_(self.allowed_modules)
        safe_globals.update(dict(
            _print_=_print_(indexer_name),
            _getattr_=getattr,
            _getitem_=_getitem_,
            __builtins__=safe_builtins)
        )
        obj = compile_restricted(func, eval_fn, "exec")
    else:
        obj = compile(func, eval_fn, "exec")
    eval(obj, safe_globals, safe_locals)
    return safe_locals.get("fun")
def r_eval(caller, src, environment={}, filename='<string>', runtype="eval"): """ Evaluate an expression in the provided environment. """ def _writer(s, is_error=False): if(s.strip()): write(environment.get('parser'))(caller, s, is_error=is_error) env = get_restricted_environment(_writer, environment.get('parser')) env['runtype'] = runtype env['caller'] = caller env.update(environment) code = compile_restricted(src, filename, 'eval') try: value = eval(code, env) except errors.UsageError, e: if(caller): _writer(str(e), is_error=True) else: raise e
def r_exec(caller, src, environment={}, filename='<string>', runtype="exec"): """ Execute an expression in the provided environment. """ def _writer(s, is_error=False): if(s.strip()): write(environment.get('parser'))(caller, s, is_error=is_error) env = get_restricted_environment(_writer, environment.get('parser')) env['runtype'] = runtype env['caller'] = caller env.update(environment) code = compile_restricted(massage_verb_code(src), filename, 'exec') try: exec code in env except errors.UsageError, e: if(caller): _writer(str(e), is_error=True) else: raise e
def checkSyntaxSecurity(self): # Ensures that each of the functions in security_in_syntax.py # throws a SyntaxError when using compile_restricted. fn = os.path.join(here, 'security_in_syntax.py') f = open(fn, 'r') source = f.read() f.close() # Unrestricted compile. code = compile(source, fn, 'exec') m = {'__builtins__':None} exec code in m for k, v in m.items(): if hasattr(v, 'func_code'): filename, source = find_source(fn, v.func_code) # Now compile it with restrictions try: code = compile_restricted(source, filename, 'exec') except SyntaxError: # Passed the test. pass else: raise AssertionError, '%s should not have compiled' % k
def run(code, test_code): # buffer = StringIO() # sys.stdin = buffer # sys.stdout = buffer try: safe_builtins['_print_'] = SafePrintCollector.blank() safe_builtins['__import__'] = safe_import safe_builtins['__name__'] = '__safe_bulitin__' safe_builtins['_getattr_'] = getattr safe_builtins['_getiter_'] = list safe_builtins['_write_'] = full_write_guard safe_builtins['input'] = safe_input restricted_globals = dict(__builtins__=safe_builtins) code += '\nresult = printed' code = compile_restricted(code, '<string>', 'exec') result = timelimit(0.5, exec_code, kwargs={ 'code': code, '_globals': restricted_globals }) or restricted_globals['result'] except Exception, e: result = '[Error] %s: %s' % (type(e).__name__, e)
def start_run_task(task_id):
    # Run the restricted script stored on a DareBigJobTask and submit the
    # compute units it defines to the task's pilot (Python 2 code).
    taskinfo = DareBigJobTask.objects.get(id=task_id)
    if len(taskinfo.dare_bigjob_pilot.pilot_url) > 0:
        code = compile_restricted(taskinfo.script, '<string>', 'exec')
        restricted_globals = dict(__builtins__=safe_builtins)
        _print_ = PrintCollector
        _write_ = full_write_guard
        _getattr_ = getattr
        # NOTE(review): ``global`` after the names were already assigned is a
        # SyntaxWarning on Py2 (and a SyntaxError on Py3); the locals above
        # are promoted so the exec'd bytecode can resolve the guard names.
        global _getiter_, _getattr_, _write_, _print_, restricted_globals
        _getiter_ = list
        # exec with no explicit namespace: the script runs in this frame and
        # is expected to define tasks() here.
        exec(code)
        cus = tasks()
        pilot_compute = PilotCompute(pilot_url=str(taskinfo.dare_bigjob_pilot.pilot_url))
        taskinfo.cu_url = ''
        for cu in cus:
            compute_unit = pilot_compute.submit_compute_unit(cu)
            print "Started ComputeUnit: %s" % (compute_unit.get_url())
            # Accumulate submitted unit URLs, '@@@'-separated.
            taskinfo.cu_url += '@@@' + compute_unit.get_url()
        taskinfo.status = 'Submitted'
        taskinfo.save()
        # Returns only the last submitted compute unit; returns None (falls
        # through) when the pilot URL is empty.
        return compute_unit
def _checkSyntaxSecurity(self, mod_name): # Ensures that each of the functions in security_in_syntax.py # throws a SyntaxError when using compile_restricted. fn = os.path.join(_HERE, mod_name) f = open(fn, 'r') source = f.read() f.close() # Unrestricted compile. code = compile(source, fn, 'exec') m = {'__builtins__': {'__import__':minimal_import}} exec code in m for k, v in m.items(): if hasattr(v, 'func_code'): filename, source = find_source(fn, v.func_code) # Now compile it with restrictions try: code = compile_restricted(source, filename, 'exec') except SyntaxError: # Passed the test. pass else: self.fail('%s should not have compiled' % k)
def process_plugin(plugin_class):
    # Compile and exec plugin source (``plugin_class`` is source text, not a
    # class object) into the module-level ``restricted_globals``, then
    # instantiate and run the resulting MyPlugin class (Python 2 code).
    print "Starting dir", dir()
    print "Creating the code object..."
    code = compile_restricted(plugin_class, '<string>', 'exec')
    print "Excuting the code..."
    # Py2 exec statement form; populates the shared restricted_globals dict.
    exec(code) in restricted_globals
    #print "res glob", restricted_globals
    print "NEW dir after exec:", dir()
    print "Creating new object"
    # The exec'd source is expected to have defined a class named MyPlugin.
    MyPlugin = restricted_globals.get("MyPlugin")
    print "NAME:", MyPlugin.__name__
    if not issubclass(MyPlugin, Plugin):
        raise NotAPluginClassError("This is not a subclass of Plugin")
    if hasattr(MyPlugin, "blah"):
        print "HAS IT"
    else:
        print "NO GO"
    mp = MyPlugin()
    print "Running the func"
    try:
        result = mp.my_func()
        print result
    except NameError, ne:
        # A NameError here means the plugin referenced something the
        # restricted environment does not provide.
        raise NotAvailableError("Not available because %s" % ne)
def refresh_monitor_units(request):
    # Django view: re-evaluate the monitoring units requested by the client
    # (built-in units via a SQL query over mon_units, plugin units via
    # RestrictedPython scripts) and return their rendered data as JSON.
    v_return = {}
    v_return['v_data'] = ''
    v_return['v_error'] = False
    v_return['v_error_id'] = -1
    #Invalid session
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    json_object = json.loads(request.POST.get('data', None))
    v_database_index = json_object['p_database_index']
    v_tab_id = json_object['p_tab_id']
    v_ids = json_object['p_ids']
    v_database_orig = v_session.v_tab_connections[v_tab_id]
    # Fresh database handle so monitoring scripts don't share the tab's
    # live connection object.
    v_database = OmniDatabase.Generic.InstantiateDatabase(
        v_database_orig.v_db_type,
        v_database_orig.v_connection.v_host,
        str(v_database_orig.v_connection.v_port),
        v_database_orig.v_active_service,
        v_database_orig.v_active_user,
        v_database_orig.v_connection.v_password,
        v_database_orig.v_conn_id,
        v_database_orig.v_alias,
        p_conn_string = v_database_orig.v_conn_string,
        p_parse_conn_string = False
    )
    v_return['v_data'] = []
    if len(v_ids) > 0:
        v_first = True
        v_query = ''
        for v_id in v_ids:
            #save new user/connection unit
            if v_id['saved_id'] == -1:
                try:
                    v_session.v_omnidb_database.v_connection.Open()
                    v_session.v_omnidb_database.v_connection.Execute('BEGIN TRANSACTION;')
                    # NOTE(review): SQL built with str.format from client-
                    # supplied JSON values — SQL injection risk; should use
                    # bound parameters.
                    v_session.v_omnidb_database.v_connection.Execute('''
                        insert into units_users_connections values (
                        (select coalesce(max(uuc_id), 0) + 1 from units_users_connections),
                        {0},
                        {1},
                        {2},
                        {3},
                        '{4}');
                    '''.format(v_id['id'],v_session.v_user_id,v_database_orig.v_conn_id,v_id['interval'],v_id['plugin_name']));
                    # NOTE(review): max(uuc_id) re-read after insert; racy
                    # without the surrounding transaction.
                    v_id['saved_id'] = v_session.v_omnidb_database.v_connection.ExecuteScalar('''
                        select coalesce(max(uuc_id), 0)
                        from units_users_connections
                    ''')
                    v_session.v_omnidb_database.v_connection.Execute('COMMIT;')
                    v_session.v_omnidb_database.v_connection.Close()
                except Exception as exc:
                    v_return['v_data'] = str(exc)
                    v_return['v_error'] = True
                    return JsonResponse(v_return)
            if v_id['plugin_name']=='':
                # Built-in unit: accumulate one UNION ALL branch per id.
                if not v_first:
                    v_query += ' union all '
                v_first = False
                # NOTE(review): same injection concern — ids interpolated
                # directly into the query text.
                v_query += '''
                    select unit_id,
                           {0} as 'sequence',
                           {1} as rendered,
                           {2} as saved_id,
                           script_chart,
                           script_data,
                           type,
                           title,
                           interval
                    from mon_units
                    where unit_id = '{3}'
                '''.format(v_id['sequence'], v_id['rendered'], v_id['saved_id'], v_id['id'])
            #plugin unit
            else:
                #search plugin data
                unit_data = None
                for mon_unit in monitoring_units:
                    if mon_unit['id'] == v_id['id'] and mon_unit['plugin_name'] == v_id['plugin_name']:
                        unit_data = mon_unit
                        break
                try:
                    v_unit_data = {
                        'v_saved_id': v_id['saved_id'],
                        'v_id': unit_data['id'],
                        'v_sequence': v_id['sequence'],
                        'v_type': unit_data['type'],
                        'v_title': unit_data['title'],
                        'v_interval': unit_data['interval'],
                        'v_object': None,
                        'v_error': False
                    }
                    # Scripts see the DB connection as a local and a copy of
                    # safe_builtins (used directly as globals) with iteration
                    # and item access enabled.
                    loc = {"connection": v_database.v_connection}
                    builtins = safe_builtins.copy()
                    builtins['_getiter_'] = iter
                    builtins['_getitem_'] = default_guarded_getitem
                    byte_code = compile_restricted(unit_data['script_data'], '<inline>', 'exec')
                    exec(byte_code, builtins, loc)
                    # Script contract: stores its output in ``result``.
                    data = loc['result']
                    if unit_data['type'] == 'grid' or v_id['rendered'] == 1:
                        v_unit_data['v_object'] = data
                    else:
                        # Chart units: run the chart script and attach the
                        # data script's output under 'data'.
                        byte_code = compile_restricted(unit_data['script_chart'], '<inline>', 'exec')
                        exec(byte_code, builtins, loc)
                        result = loc['result']
                        result['data'] = data
                        v_unit_data['v_object'] = result
                    v_return['v_data'].append(v_unit_data)
                except Exception as exc:
                    # Per-unit failure: report the error for this unit only.
                    v_unit_data = {
                        'v_saved_id': v_id['saved_id'],
                        'v_id': unit_data['id'],
                        'v_sequence': v_id['sequence'],
                        'v_type': unit_data['type'],
                        'v_title': unit_data['title'],
                        'v_interval': unit_data['interval'],
                        'v_object': None,
                        'v_error': True,
                        'v_message': str(exc)
                    }
                    v_return['v_data'].append(v_unit_data)
        if v_query != '':
            try:
                v_units = v_session.v_omnidb_database.v_connection.Query(v_query)
                for v_unit in v_units.Rows:
                    try:
                        v_unit_data = {
                            'v_saved_id': v_unit['saved_id'],
                            'v_id': v_unit['unit_id'],
                            'v_sequence': v_unit['sequence'],
                            'v_type': v_unit['type'],
                            'v_title': v_unit['title'],
                            'v_interval': v_unit['interval'],
                            'v_object': None,
                            'v_error': False
                        }
                        loc = {"connection": v_database.v_connection}
                        builtins = safe_builtins.copy()
                        builtins['_getiter_'] = iter
                        builtins['_getitem_'] = default_guarded_getitem
                        byte_code = compile_restricted(v_unit['script_data'], '<inline>', 'exec')
                        exec(byte_code, builtins, loc)
                        data = loc['result']
                        if v_unit['type'] == 'grid' or v_unit['rendered'] == 1:
                            v_unit_data['v_object'] = data
                        else:
                            byte_code = compile_restricted(v_unit['script_chart'], '<inline>', 'exec')
                            exec(byte_code, builtins, loc)
                            result = loc['result']
                            result['data'] = data
                            v_unit_data['v_object'] = result
                        v_return['v_data'].append(v_unit_data)
                    except Exception as exc:
                        v_unit_data = {
                            'v_saved_id': v_unit['saved_id'],
                            'v_id': v_unit['unit_id'],
                            'v_sequence': v_unit['sequence'],
                            'v_type': v_unit['type'],
                            'v_title': v_unit['title'],
                            'v_interval': v_unit['interval'],
                            'v_object': None,
                            'v_error': True,
                            'v_message': str(exc)
                        }
                        v_return['v_data'].append(v_unit_data)
            except Exception as exc:
                # Query-level failure: replace the whole payload with the error.
                v_return['v_data'] = str(exc)
                v_return['v_error'] = True
                return JsonResponse(v_return)
        return JsonResponse(v_return)
    return JsonResponse(v_return)
# Module-level application loader (Python 2): for each non-OSIRIS config
# section, compile the named module from <config_dir>/app (restricted when
# sandbox is enabled) and register it under its domain.
restricted_globals["_getitem_"] = _getitem_
sys.path.append(config_dir + "/app")
exec_app = {}     # domain -> loaded module object
host_mod = {}     # domain -> module name
run_once = {}     # domain -> cached runonce() result (0 on failure)
depend_app = {}   # domain -> per-domain dependency modules
for i in range(len(config.sections())):
    if config.sections()[i] != "OSIRIS":
        _domain = config.sections()[i]
        _mod_name = config.get(config.sections()[i], "mod")
        mod_src = open(config_dir + "/app/" + _mod_name + ".py")
        if sandbox:
            # Sandboxed deployments compile app code with RestrictedPython.
            mod_bytecode = compile_restricted(mod_src.read(), "<string>", "exec")
        else:
            mod_bytecode = compile(mod_src.read(), "<string>", "exec")
        mod_src.close()
        # Materialize the app as a real module so it can be reload()ed later.
        exec_app[_domain] = imp.new_module(_mod_name)
        sys.modules[_mod_name] = exec_app[_domain]
        exec mod_bytecode in exec_app[_domain].__dict__
        host_mod[_domain] = _mod_name
        try:
            run_once[_domain] = exec_app[_domain].runonce()
        except:
            # Best-effort: apps without runonce() default to 0.
            run_once[_domain] = 0
        # NOTE(review): snippet appears truncated here — the except for this
        # try is outside the visible chunk.
        try:
            depend_app[_domain] = {}
def test_monitor_script(request):
    """Django view: run a user-submitted monitoring script once and return
    the rendered result (grid data, or chart definition with data attached)
    as JSON for preview purposes."""
    response = {}
    response['v_data'] = ''
    response['v_error'] = False
    response['v_error_id'] = -1

    # Reject requests without an active OmniDB session.
    session = request.session.get('omnidb_session')
    if not session:
        response['v_error'] = True
        response['v_error_id'] = 1
        return JsonResponse(response)

    payload = json.loads(request.POST.get('data', None))
    database_index = payload['p_database_index']
    tab_id = payload['p_tab_id']
    script_chart = payload['p_script_chart']
    script_data = payload['p_script_data']
    unit_type = payload['p_type']
    database = session.v_tab_connections[tab_id]

    # Bail out if the database password prompt has timed out.
    timeout = session.DatabaseReachPasswordTimeout(int(database_index))
    if timeout['timeout']:
        response['v_data'] = {'password_timeout': True, 'message': timeout['message'] }
        response['v_error'] = True
        return JsonResponse(response)

    response['v_data'] = { 'v_object': None, 'v_error': False }
    try:
        # The script sees the DB connection as a local; a copy of
        # safe_builtins (used directly as the globals mapping) allows
        # iteration and guarded item access.
        script_locals = {"connection": database.v_connection}
        script_builtins = safe_builtins.copy()
        script_builtins['_getiter_'] = iter
        script_builtins['_getitem_'] = default_guarded_getitem
        compiled = compile_restricted(script_data, '<inline>', 'exec')
        exec(compiled, script_builtins, script_locals)
        # Script contract: the output is left in a local named ``result``.
        data = script_locals['result']
        if unit_type == 'grid':
            response['v_data']['v_object'] = data
        else:
            # Chart preview: run the chart script too and attach the data.
            compiled = compile_restricted(script_chart, '<inline>', 'exec')
            exec(compiled, script_builtins, script_locals)
            chart = script_locals['result']
            chart['data'] = data
            response['v_data']['v_object'] = chart
    except Exception as exc:
        response['v_data'] = { 'v_object': None, 'v_error': True, 'v_message': str(exc) }
    return JsonResponse(response)
def compile(self, code, filename, mode):
    """Compile *code* through RestrictedPython's restricted compiler,
    mirroring the builtin compile() signature."""
    bytecode = compile_restricted(code, filename, mode)
    return bytecode
def respond(self, buf, addy):
    # Dispatch a raw HTTP request buffer to the per-hostname application
    # module and build the response (Python 2 code). Relies on module-level
    # state: exec_app, host_mod, run_once, depend_app, persist, debug,
    # sandbox, proxy, config_dir.
    global persist
    global config_dir
    gen_head = self.gen_head
    hostname = self.hostname(buf).lower().strip()
    header2dict = self.header2dict
    self.srv_str += "Date: %s\r\n" % time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime())
    # Unknown hosts fall back to the "fallback" app, if configured.
    if hostname not in exec_app:
        hostname = "fallback"
        logging.error("Hostname not found, fell to fallback")
    if hostname not in exec_app:
        logging.error("No fallback found")
        return self.html(500, "No fallback configured :(\r\n", head_str="")
    if debug:
        # Debug mode: recompile the app module on every request so edits
        # take effect immediately (restricted when sandbox is on).
        mod_src = open(config_dir + "/app/" + host_mod[hostname] + ".py")
        if sandbox:
            mod_bytecode = compile_restricted(mod_src.read(), "<string>", "exec")
        else:
            mod_bytecode = compile(mod_src.read(), "<string>", "exec")
        mod_src.close()
        exec mod_bytecode in exec_app[hostname].__dict__
    try:
        # Split head/body on the first blank line (CRLF CRLF).
        try:
            buf_head = buf.split(
                """\r
\r
""",
                1,
            )[0]
            buf_body = buf.split(
                """\r
\r
""",
                1,
            )[1]
        except:
            # NOTE(review): when there is no blank line, buf_head stays
            # unset and header2dict(buf_head) below raises NameError —
            # caught by the outer bare except as a 500.
            buf_body = buf
        app_header = header2dict(buf_head)
        # Determine the real client IP when behind a reverse proxy.
        if proxy:
            if "X-Real-IP" in app_header:
                addr_real = app_header["X-Real-IP"]
            else:
                addr_real = addy[0]
                logging.error("Reverse proxy not found!")
        else:
            addr_real = addy[0]
        # Honor Do-Not-Track if any header named "dnt" is present.
        for opt in app_header:
            if opt.lower() == "dnt":
                dnt = 1
        try:
            dnt
        except:
            dnt = 0
        if hostname not in persist:
            persist[hostname] = 0
        payload = {
            "header": app_header,
            "body": buf_body,
            "ip": addr_real,
            "runonce": run_once[hostname],
            "depends": depend_app[hostname],
            "dnt": dnt,
            "persist": persist[hostname],
        }
        # Only HTTP/1.1 is served directly; proxied requests are trusted.
        if payload["header"]["PROTOCOL"] == "HTTP/1.1" or proxy:
            try:
                data = exec_app[hostname].reply(payload)
            except:
                err = sys.exc_info()
                logging.error(err)
                data = {"code": 500, "msg": "A module error occured, details printed to logging"}
        else:
            data = {"code": 505, "msg": "Error, only HTTP/1.1 is supported.\r\n"}
        # App-requested side effects: refresh runonce cache, (re)load deps,
        # reload the module, persist state across requests.
        if "runonce" in data:
            try:
                run_once[hostname] = exec_app[hostname].runonce()
            except:
                run_once[hostname] = 0
        if "modload" in data:
            for dep in data["modload"]:
                # NOTE(review): ``_domain`` is not defined in this method —
                # this branch depends on a leftover module-level name and
                # likely should use ``hostname``.
                try:
                    reload(depend_app[_domain][dep])
                except:
                    try:
                        depend_app[_domain][dep] = __import__(dep)
                    except:
                        depend_app[_domain][dep] = 0
        if "reload" in data:
            reload(exec_app[hostname])
        if "persist" in data:
            persist[hostname] = data["persist"]
        # Response body: either a file from the app's directory or the
        # literal "msg" value.
        if "file" in data:
            file_path = os.path.join(config_dir, "app", host_mod[hostname], data["file"])
            if os.path.isfile(file_path):
                msg_file = open(file_path, "r")
                msg = str(msg_file.read())
                msg_file.close()
            else:
                if debug:
                    msg = "Attempted to template a file that does not exist"
                else:
                    msg = "An error occured with the application"
                data["code"] = 500
                logging.error(file_path + " does not exist")
        else:
            msg = data["msg"]
        # Simple {placeholder} templating driven by data["template"].
        if "template" in data:
            temp_opt = data["template"]
            try:
                for (entry, temp_opt_list) in temp_opt.iteritems():
                    msg = msg.strip(" ").replace("{" + entry + "}", temp_opt[entry])
            except:
                pass
        # Strip <xopt> blocks unless explicitly enabled, and <tracker>
        # blocks when the client sent Do-Not-Track.
        if "xopt" in data:
            xopt = data["xopt"]
        else:
            xopt = 0
        if xopt == 0:
            msg = re.sub(r"<xopt>.*?</xopt>", "", msg, flags=re.DOTALL)
        if dnt:
            msg = re.sub(r"<tracker>.*?</tracker>", "", msg, flags=re.DOTALL)
        if "code" in data:
            code = data["code"]
        else:
            code = 200
        # Content type: explicit override, else guessed from the path for
        # 200s, else text/html.
        if "type" in data:
            mime = data["type"]
        else:
            if code == 200:
                mime = mimetypes.guess_type(payload["header"]["PATH"])[0]
                if mime == None:
                    mime = "text/html"
            else:
                mime = "text/html"
        self.srv_str += "Content-Type: %s\r\n" % (mime)
        try:
            head_str = gen_head(data["header"])
        except:
            head_str = self.srv_str
    except:
        # Any failure above becomes a 500; debug mode includes the payload
        # and raw response for diagnosis.
        code = 500
        if debug:
            msg = (
                "The response sent from the module was invalid<br><br>Payload:<br><code>%s</code><br><br>Response:<br><code>%s</code>"
                % (payload, data)
            )
        else:
            msg = "A module error occured\r\n"
        head_str = self.srv_str + "Content-Type: text/html\r\n"
    return self.html(code, msg, head_str)
def newImport(string, a, b, c):
    # Replacement __import__ for the sandbox: reads the module file named by
    # ``string``, compiles it restricted, and execs it into the module-level
    # restricted_globals (defined below). Extra args match __import__'s
    # signature and are ignored. NOTE(review): f.close() is skipped if
    # compile/exec raises.
    f = open(string, 'r')
    src = f.read()
    code = compile_restricted(src, '<string>', 'exec')
    exec(code) in restricted_globals
    f.close()

def newOpen(string, mode):
    # Replacement open(): only allow paths inside the current directory.
    if is_in_directory(string, "."):
        return open(string, mode)
    else:
        raise IOError("permission denied can't access " + string)

def doPrint(*kargs):
    # Replacement _print_ hook: print only the first argument, if any.
    if len(kargs) != 0:
        string = kargs[0]
        print string

# Wire the replacements into safe_builtins and build the sandbox globals,
# then run the script named on the command line via "import malicious".
safe_builtins['open'] = newOpen
safe_builtins['__import__'] = newImport
safe_builtins['_print_'] = doPrint
safe_builtins['_getattr_'] = getattr
restricted_globals = dict(__builtins__ = safe_builtins)
sys.path.append(os.path.dirname(os.path.realpath(sys.argv[1])))
src = "import malicious"
code = compile_restricted(src, '<string>', 'exec')
exec(code) in restricted_globals
def newImport(string, a, b, c):
    """
    Replacement __import__ for the sandbox: read the file named by
    ``string``, compile it with RestrictedPython, and exec it into the
    module-level ``restricted_globals``. The extra positional arguments
    mirror __import__'s signature and are ignored.

    Fix vs. previous revision: the file was closed only after exec, so the
    handle leaked whenever compile_restricted/exec raised; a ``with`` block
    guarantees closure, and the file is now closed before the (potentially
    slow) exec runs.
    """
    with open(string, 'r') as f:
        src = f.read()
    code = compile_restricted(src, '<string>', 'exec')
    exec(code) in restricted_globals
# Minimal RestrictedPython demo (Python 2): compile a script restricted,
# exec it in the current namespace, and show its collected print output.
from RestrictedPython import compile_restricted

result = ""
# The sandboxed script's prints are captured; RestrictedPython exposes the
# accumulated text via the ``printed`` name, which the script copies into
# ``result``.
src = """
print "Hello World!"
result = printed
"""
code = compile_restricted(src, "<string>", "exec")
exec (code)
print result