def test_edit_target_form():
    """Simulate editing an existing target and writing it back to
    Brain.Targets twice; the second write must report one replaced row.
    """
    last_target_id = ""
    # Walk all current targets and keep the id of the last one seen.
    for existing_target in get_brain_targets():
        last_target_id = existing_target["id"]
    plugin_name = "Plugin1"
    location_num = location_generated_num("172.16.5.")
    port_num = "8005"
    optional_char = "optional here"
    # First update: rewrite every editable field (including the id itself).
    r.db("Brain").table("Targets").get(last_target_id).update({
        "PluginName": plugin_name,
        "id": last_target_id,
        "Location": location_num,
        "Port": port_num,
        "Optional": optional_char,
    }).run(connect())
    # Second update: change only the port ("80" + "1") so the document
    # actually differs and RethinkDB counts it as replaced.
    second_update = r.db("Brain").table("Targets").get(last_target_id).update({
        "PluginName": plugin_name,
        "Location": location_num,
        "Port": port_num[:2] + "1",
        "Optional": optional_char,
    }).run(connect())
    assert second_update['replaced'] == 1
def confirm_db_info():
    """Print a header, open a connection, and run every db confirm helper."""
    print("\nlog: ###### DB Logs ######")
    connect()
    confirm_brain_db_info()
    confirm_plugin_db_info()
def test_add_plugin_ok(rf):
    """POSTing a valid new-plugin form to update_plugin returns HTTP 200.

    Replicates the user clicking 'Execute Sequence' at the bottom right
    of W3.
    """
    endpoint = "/update_plugin/NEW/"
    payload = {
        "id": "NEW",
        "Name": "Plugin3",
        "ServiceName": "Plugin3-4243tcp",
        "ServiceID": "cheeto3",
        "State": "Stopped",
        "DesiredState": "",
        "OS": "all",
        "Interface": "1.1.1.1",
        "Environment": [],
        "Environment[]": "",
        "ExternalPorts": ["12/tcp"],
        "ExternalPorts[]": "12/tcp",
        "InternalPorts": ["12/tcp"],
        "InternalPorts[]": "12/tcp",
        "Extra": True,
    }
    # Seed a ports row for the same interface before posting the form
    # (presumably consumed by the view's validation — TODO confirm).
    RPP.insert({"Interface": "1.1.1.1", "TCPPorts": ["11"]}).run(connect())
    response = post_test(endpoint, payload, update_plugin, rf, target_id="NEW")
    assert response.status_code == 200
def _put_blob(serial, file_id):
    """Store a hex-encoded blob from the request form and notify the client.

    :param serial: client serial, validated via parse_serial
    :param file_id: key under which the decoded bytes are buffered
    :return: the string "1"
    """
    validated = parse_serial(serial)
    hex_payload = request.form['data']
    conn = connect(host=environ['RETHINK_HOST'])
    put_buffer(file_id, bytes.fromhex(hex_payload), conn=conn)
    # The client response gets the raw hex text, not the decoded bytes.
    _handle_client_response(validated['Location'], hex_payload)
    return "1"
def tables_check(database, tables):
    """Takes a list of tables and checks for them.

    Tables that already exist in the database are cleared via
    ``table_clear``; tables that do not exist are collected and returned.

    Arguments:
        database {string} -- a string denoting the name of the database.
        tables {list<str>} -- a list of table names to check for.

    Returns:
        {list} -- a list of tables that do not exist in the database.
    """
    db_con_var = connect()
    # Legacy special case: always clear Plugin2 when it is requested.
    if "Plugin2" in tables:
        print("\nlog: db {}.{} table exist locally".format(
            database, "Plugin2"))
        table_clear(database, "Plugin2")
    # BUG FIX: the original did `del tables[i]` while iterating the same
    # list with enumerate(), which skips the element following every
    # deletion, so existing tables could be returned as "missing".
    # Build the result list instead of mutating the input.
    missing = []
    for table_name in tables:
        if rtdb.db(database).table_list().contains(table_name).run(db_con_var):
            # {database}.{table_name} does exist
            print("\nlog: db {}.{} table exist locally".format(
                database, table_name))
            table_clear(database, table_name)
        else:
            print("log: db {}.{} doesnt exist".format(database, table_name))
            missing.append(table_name)
    return missing
def confirm_brain_db_info():
    """Ensure the Brain db and its tables exist for local development.

    Does nothing outside a development environment. Creates the Brain db
    and the Targets/Jobs/Outputs tables when missing; otherwise checks and
    (re)creates only the missing tables. Finally seeds Brain.Targets with
    dummy data.

    :return: nothing at the moment
    """
    # Check for Development Environment
    if not check_dev_env():
        return
    conn = connect()
    if rtdb.db_list().contains("Brain").run(conn) is not True:
        print("log: db Brain doesn't exist locally")
        rtdb.db_create("Brain").run(conn)
        print("log: db Brain was created to locally since it didn't exist")
        # create local Brain tables
        tables_create("Brain", ["Targets", "Jobs", "Outputs"])
    else:
        # if Brain does exist locally
        print("log: db Brain exist locally")
        missing = tables_check("Brain", ["Targets", "Jobs", "Outputs"])
        tables_create("Brain", missing)
    rtdb.db("Brain").table("Targets").insert(_TEST_TARGETS).run(conn)
    print("log: db Dummy data was inserted to Brain.Targets locally")
def create_plugin(self, plugin_data):
    """Creates a plugin in the 'Plugins' table of the 'Controller' database.

    plugin_data should contain the following:
    {
        Name<str>,
        State<str> [default: "Available"],
        DesiredState<str> [default: ""],
        Interface<str>(IP) [default: ""],
        ExternalPort<list<str> >,
        InternalPort<list<str> >
    }

    Arguments:
        plugin_data {dict} -- data for plugin
    """
    if plugin_data["Name"] == "":
        # Report a synthetic error result instead of touching the db.
        fake_result = {
            "errors": 1,
            "first_error": "Plugin must be given a valid name!"
        }
        return self._check_db_errors(fake_result)
    conn = brain.connect(host=self.rethink_host)
    outcome = brain.queries.create_plugin_controller(plugin_data, conn=conn)
    return self._check_db_errors(outcome)
def test_execute_w3_data():
    """Replicate clicking 'Execute Sequence' in W3 by inserting one job
    row into Brain.Jobs and checking it was inserted.
    """
    plugin_table = "Plugin1"
    job_command = ""
    # Take the last matching "echo" command for the plugin.
    for found_command in get_specific_command(plugin_table, "echo"):
        job_command = found_command
    target_item = ""
    # Only the first matching target is needed.
    for target_item in get_specific_brain_targets(plugin_table):
        break
    result = rtdb.db("Brain").table("Jobs").insert([{
        "id": ECHO_JOB_ID,
        "JobTarget": target_item,
        "Status": "Ready",
        "StartTime": 0,
        "JobCommand": job_command,
    }]).run(connect())
    assert result['inserted'] == 1
def val_edit_target_form(request, target_id):
    """Validate the edit-target form and persist the new field values.

    :param request: user request
    :param target_id: target id
    :return: redirect to the home page after a successful update,
        otherwise back to the edit form for this target
    """
    if request.method == 'POST':
        # edit form template
        form = TargetForm(request.POST)
        if form.is_valid():
            conn = brain.connect()
            brain.r.db("Brain").table("Targets").get(str(target_id)).update({
                "PluginName": str(request.POST.get('plugin_name')),
                "Location": str(request.POST.get('location_num')),
                "Port": str(request.POST.get('port_num')),
                "Optional": {
                    'init': str(request.POST.get('optional_char'))
                }
            }).run(conn)
            return redirect('/')
    else:
        form = TargetForm()
    return redirect('/edit_target_form/{}/'.format(target_id))
def main():
    """Main server entry point.

    Installs the SIGTERM handler, loads plugins from the manifest file,
    verifies the harness, then runs the monitor loop forever. A
    KeyboardInterrupt stops all plugin containers and exits with status 0.
    """
    signal(SIGTERM, sigterm_handler)
    check_dev_env()
    PLUGIN_CONTROLLER.load_plugins_from_manifest(MANIFEST_FILE)
    check_harness()
    brain_connection = connect(host=RETHINK_HOST)
    while True:
        # --- This main control loop monitors the running   ---
        # --- plugin containers. It takes the following     ---
        # --- actions:                                      ---
        # --- 1) Update the running plugin container states ---
        # --- 2) Query the plugin entries table             ---
        # --- 3) Check the DesiredState agains the State    ---
        # --- 3.1) If they differ, take appropriate action  ---
        try:
            sleep(0.3)  # throttle the polling loop
            update_states()
            cursor = queries.RPC.run(brain_connection)
            check_states(cursor)
        except KeyboardInterrupt:
            PLUGIN_CONTROLLER.stop_all_containers()
            exit(0)
def gens(file_id):
    """Yield the stored file's content as a stream of hex text.

    Emits a leading "0x" then one two-character lowercase hex chunk per
    byte; yields nothing at all when the file id is not found.
    """
    conn = connect(host=environ['RETHINK_HOST'])
    file_dict = get(file_id, conn=conn)
    if not file_dict:
        return
    yield "0x"
    for next_byte in file_dict['Content']:
        yield format(next_byte, "02x")
def test_display_capability_list():
    """Clicking a plugin in W1 should yield its command list for W2:
    every plucked row must be a dict.
    """
    fields = ('OptionalInput', 'Inputs', 'Tooltip', 'CommandName', 'Output')
    cursor = rtdb.db("Plugins").table("Plugin1").pluck(*fields).run(connect())
    for plugin_item in cursor:
        assert isinstance(plugin_item, dict)
def confirm_plugin_db_info():
    """Check (and, in development, create and seed) the Plugins db.

    In production this only logs whether the Plugins db and its tables
    exist. In development it creates the Plugins db and Plugin1/Plugin2
    tables when missing, then seeds test data into Plugins.*,
    Controller.Plugins, Controller.Ports, Brain.Logs and Brain.UIW2.
    """
    db_con_var = connect()
    if check_prod_env():  # For Production Environment: log-only, no writes
        if rtdb.db_list().contains("Plugins").run(db_con_var):
            print("\nlog: db Plugins exist")
            if rtdb.db("Plugins").table_list().run(db_con_var):
                print(
                    "log: db Plugins tables are listed down below:\n{}".format(
                        rtdb.db("Plugins").table_list().run(db_con_var)))
            else:
                print("log: db Plugins tables don't exist\n")
        else:
            print("\nlog: db Plugins DOESN'T exist\n")
    else:  # is check_dev_env()
        if rtdb.db_list().contains("Plugins").run(db_con_var) is not True:
            print("\nlog: db Plugins doesn't exist locally")
            rtdb.db_create("Plugins").run(db_con_var)
            print("log: db Plugins didn't exist, was created to locally")
            tables_create("Plugins", ["Plugin1", "Plugin2"])
        else:  # if Plugins does exit locally
            print("\nlog: db Plugins exist locally")
            # Clear existing tables, create only the missing ones.
            non_existing_tables = tables_check("Plugins",
                                               ["Plugin1", "Plugin2"])
            tables_create("Plugins", non_existing_tables)
        # Seed command data for both plugin tables.
        rtdb.db("Plugins").table("Plugin1").insert(_TEST_COMMANDS).run(
            db_con_var)
        rtdb.db("Plugins").table("Plugin2").insert(_TEST_COMMANDS2).run(
            db_con_var)
        # Reset controller state: delete-then-insert to avoid duplicates.
        rtdb.db("Controller").table("Plugins").delete().run(db_con_var)
        rtdb.db("Controller").table("Plugins")\
            .insert(plugins).run(db_con_var)
        rtdb.db("Controller").table("Ports").delete().run(db_con_var)
        rtdb.db("Controller").table("Ports") \
            .insert([TEST_PORT_DATA, TEST_PORT_DATA2]).run(db_con_var)
        # Brain.Logs
        rtdb.db("Brain").table("Logs").delete().run(db_con_var)
        rtdb.db("Brain").table("Logs").insert(
            gen_logs_data(50)).run(db_con_var)
        print("\nlog: db Dummy data was inserted to Brain.Logs locally\n")
        # Brain.UIW2: clear it when present, create it otherwise.
        if rtdb.db("Brain").table_list().contains("UIW2").run(db_con_var):
            rtdb.db("Brain").table("UIW2").delete().run(db_con_var)
        else:
            rtdb.db("Brain").table_create("UIW2").run(db_con_var)
        rtdb.db("Brain").table("UIW2").insert(TEST_SAVED_COMMANDS).run(
            db_con_var)
        print("log: db Dummy data was inserted to Plugins.Plugin1 locally\n")
def dummy_output_data():
    """Fixture: seed Brain.Jobs and Brain.Outputs with sample rows so other
    tests have data to work with; wipe both tables on teardown.
    """
    connection = connect()
    for table, sample in (("Jobs", SAMPLE_JOB), ("Outputs", SAMPLE_OUTPUT)):
        r.db("Brain").table(table).insert(sample).run(connection)
    yield
    # teardown: remove everything the tests may have touched
    for table in ("Outputs", "Jobs"):
        r.db("Brain").table(table).delete().run(connection)
def test_target_list(self):
    """Querying Brain.Targets for the W1 list: each plucked row is a dict."""
    cursor = rtdb.db("Brain").table("Targets").pluck(
        'PluginName', 'Location').run(connect())
    for target_row in cursor:
        assert isinstance(target_row, dict)
def delete_specific_target(request, target_id):
    """Delete one target from Brain.Targets, then return to the home page.

    :param request: user request
    :param target_id: id of the target to remove
    :return: redirect to the home page
    """
    if request.method == 'GET':
        conn = brain.connect()
        target_row = brain.r.db("Brain").table("Targets").get(str(target_id))
        target_row.delete(return_changes=True).run(conn)
    return redirect('/')
def start(self, *args):
    """The entrypoint for the docker container.

    Connects to the brain (127.0.0.1 in the TESTING stage, otherwise the
    RETHINK_HOST env var with a "rethinkdb" fallback), advertises this
    plugin's functionality, then hands off to _start. Exits with status 1
    when the brain is not ready.
    """
    if environ["STAGE"] == "TESTING":
        host = "127.0.0.1"
    else:
        host = environ.get("RETHINK_HOST", "rethinkdb")
    try:
        self.db_conn = connect(host=host)
    except BrainNotReady:
        self._log("Brain is not ready.", 50)
        exit(1)
    self._advertise_functionality()
    self._start(args)
def update_plugin(self, plugin_data):
    """Sync a plugin's db entry with its container and remember the container.

    Arguments:
        plugin_data {dict} -- data for plugin.
    """
    conn = brain.connect(host=self.rethink_host)
    outcome = brain.queries.update_plugin_controller(plugin_data, conn=conn)
    name = plugin_data["Name"]
    self.container_mapping[name] = self.get_container_from_name(name,
                                                                timeout=3)
    return self._check_db_errors(outcome)
def __init__(self, file_name, new_file):
    """Open (or create) the backing brain database and initialize state."""
    QtCore.QObject.__init__(self)
    open_existing = 0 if new_file else 1
    self._conn = brain.connect(None, file_name, open_existing=open_existing)
    self._db = brain.CachedConnection(self._conn)
    # for debug purposes: start every session from an empty database
    for existing_obj in self._db.search():
        self._db.delete(existing_obj)
    self._metaclass_id = None
    self._default_class_id = None
    self._findRoot()
    self._testInit()  # for debug purposes
def clear_dbs():
    """Fixture: after the test runs, empty every mutable table, skipping
    plugin tables whose name contains "test_table".
    """
    yield
    sleep(1)
    conn = brain.connect()
    for db_name, table_name in (
            ("Brain", "Targets"),
            ("Brain", "Outputs"),
            ("Brain", "Jobs"),
            ("Audit", "Jobs"),
            ("Controller", "Plugins"),
            ("Controller", "Ports")):
        brain.r.db(db_name).table(table_name).delete().run(conn)
    for table_name in brain.r.db("Plugins").table_list().run(conn):
        if "test_table" not in table_name:
            brain.r.db("Plugins").table(table_name).delete().run(conn)
    sleep(1)
def test_minimal_jobs(rethink):
    """Run the self test alongside an async signal setter, then verify that
    at least one job finished and at least one output was produced.
    """
    pool = Pool(processes=2)
    pool.apply_async(set_signal_true, [])
    self_test(ext_signal=EXT_SIGNAL)
    conn = connect()
    # verify they all got done
    job_count = 0
    for job in RBJ.run(conn):
        job_count += 1
        assert job["Status"] == "Done"
    assert job_count > 0
    output_count = 0
    for output in RBO.run(conn):
        output_count += 1
        assert output["Content"][0] == "<"
    assert output_count > 0
def _wait_for_output(job_key, timeout=30):
    """Poll the brain for the output content of *job_key*.

    Returns the content as soon as it appears, or None after *timeout*
    seconds.
    """
    deadline = time() + timeout
    while time() < deadline:
        out = brain.queries.get_output_content(job_key, conn=brain.connect())
        if out is not None:
            return out
        sleep(1)
    return None


def test_linharn(startup_brain, proc, linux_harn):
    """End-to-end Harness test: a linux client executes an echo job and
    then a sleep job; each job's output is awaited and checked.

    The original duplicated the poll-until-output loop verbatim for both
    jobs; that logic now lives in _wait_for_output.
    """
    # create the processes that will contact the Harness plugin
    linux_harn.add_proc(Linharn_proc.wrap_loop)
    # start the Harness plugin
    proc.start()
    while not proc.is_alive():
        sleep(.5)
    # start linux client
    linux_harn.procs[0].start()
    sleep(3)
    # insert an echo job into database
    echo = brain.queries.get_plugin_command("Harness", "echo",
                                            brain.connect())
    echo_job = {
        "Status": "Waiting",
        "StartTime": time(),
        "JobTarget": SAMPLE_TARGET,
        "JobCommand": echo
    }
    echo_job["JobCommand"]["Inputs"][0]["Value"] = "Hello World"
    inserted = brain.queries.insert_jobs([echo_job], True, brain.connect())
    # wait for the client to complete the job and get the result
    out = _wait_for_output(inserted["generated_keys"][0])
    assert out == "Hello World"
    # insert a sleep job
    sleep_job = {
        "Status": "Waiting",
        "StartTime": time(),
        "JobTarget": SAMPLE_TARGET,
        "JobCommand": brain.queries.get_plugin_command("Harness", "sleep",
                                                       brain.connect())
    }
    sleep_job["JobCommand"]["Inputs"][0]["Value"] = "3000"
    inserted = brain.queries.insert_jobs([sleep_job], True, brain.connect())
    # wait for the client to complete the job and get the result
    out = _wait_for_output(inserted["generated_keys"][0])
    assert out == ""
def table_clear(database, table):
    """Clears data from a table.

    Clears all data from a given table in a database, logging the outcome.

    Arguments:
        database {str} -- database name
        table {str} -- name of the table to clear.
    """
    db_con_var = connect()
    try:
        rtdb.db(database).table(table).delete().run(db_con_var)
        print("log: db {}.{} table has been cleared.".format(database, table))
    except rtdb.ReqlError as err:
        # BUG FIX: the original immediately overwrote `err` with
        # sys.exc_info()[0] (the exception *class*), so the log line only
        # ever showed the type. Print the caught exception itself.
        print("EXCEPT == {}".format(err))
def plugin_status(self, plugin_data):
    """Return the status of a plugin container.

    Arguments:
        plugin_data {dict} -- plugin data

    Returns:
        {str} -- "Active", "Restarting", or "Stopped"; None when the
        plugin is not found in the database.
    """
    name = plugin_data["Name"]
    cursor = brain.queries.get_plugin_by_name_controller(
        name, conn=brain.connect(host=self.rethink_host))
    try:
        return cursor.next()["State"]
    except brain.r.ReqlCursorEmpty:
        self.log(30, name + " not found in database!")
        return None
def _create_port(self, port_data):
    """Creates a port entry in the 'Ports' table of the 'Controller' database.

    port_data should contain the following:
    {
        InterfaceName<str> [default: "All"],
        Address<str>(IP) [default: ""],
        TCPPorts<list<str> >,
        UDPPorts<list<str> >
    }

    Arguments:
        port_data {dict} -- data for port
    """
    conn = brain.connect(host=self.rethink_host)
    outcome = brain.queries.create_port_controller(port_data, conn=conn)
    return self._check_db_errors(outcome)
def test_display_w4_data():
    """Replicate the W4 display after 'Execute Sequence': every output row
    filtered by the echo job id must be a dict.
    """
    # run the job-completion switcher in the background
    worker = Process(target=switch_to_done)
    worker.start()
    sleep(2)
    cursor = rtdb.db("Brain").table("Outputs").filter(
        {"JobCommand": {'id': ECHO_JOB_ID}}).run(connect())
    for output_row in cursor:
        assert isinstance(output_row, dict)
    worker.terminate()
    os.kill(worker.pid, signal.SIGKILL)
def test_add_target():
    """A validated add-target form inserts one new row into Brain.Targets."""
    new_target = {
        "PluginName": "Plugin1",
        "Location": location_generated_num("172.16.5."),
        "Port": "8002",
        "Optional": "",
    }
    outcome = rtdb.db("Brain").table("Targets").insert(
        [new_target]).run(connect())
    assert outcome['inserted'] == 1
def edit_target_form(request, target_id):
    """Render the edit form after the user clicks edit in the Target List
    (W1), pre-filled with the selected target's data.

    :param request: user request
    :param target_id: target id
    :return: the edit-form template with target info as placeholders
    """
    conn = brain.connect()
    target_cursor = brain.r.db("Brain").table("Targets").filter(
        {"id": str(target_id)}).run(conn)
    print("\nget_brain_target:\n{}\n".format(target_cursor))
    template = loader.get_template('pcp_app/edit_target_form.html')
    context = {
        "edit_target_dict": target_cursor,
        'plugin_list': get_plugin_list_query(),
    }
    return HttpResponse(template.render(context=context, request=request))
def tables_create(database, tables):
    """Create a list of tables in the database.

    Creates tables in a database from provided list, logging each creation
    or failure.

    Arguments:
        database {str} -- a string denoting the name of the database.
        tables {list<str>} -- a list of table names to check for.
    """
    db_con_var = connect()
    for table_name in tables:
        try:
            rtdb.db(database).table_create(table_name).run(db_con_var)
            # BUG FIX: the original split this literal with a backslash
            # continuation *inside* the string, embedding a run of source
            # indentation spaces in the printed message.
            print("log: db {}.{} table was created to locally "
                  "since it didn't exist".format(database, table_name))
        except rtdb.ReqlError as err:
            # BUG FIX: `err` was overwritten with sys.exc_info()[0] (the
            # exception class); print the caught exception itself.
            print("EXCEPT == {}".format(err))
def test_the_Harness_app(startup_brain, proc):
    """Drive the Harness app end-to-end.

    Inserts one job per entry in TEST_COMMANDS, runs the pretend app, then
    raises KeyboardInterrupt to unwind; the finally block terminates the
    plugin process and treats a SystemExit("0") from teardown as success.
    """
    environ["STAGE"] = "TESTING"
    environ["PORT"] = "5000"
    proc.start()
    sleep(3)  # give the plugin process time to come up
    try:
        from brain import connect, r
        conn = connect()
        sleep(5)
        job_start = 0  # used as a monotonically increasing StartTime
        for command in TEST_COMMANDS:
            job_start += 1
            job_target = {
                "PluginName": "Harness",
                "Location": "127.0.0.1",
                "Port": "5000"
            }
            job = {
                "JobTarget": job_target,
                "Status": "Ready",
                "StartTime": job_start,
                "JobCommand": command
            }
            print(job)
            r.db("Brain").table("Jobs").insert(job).run(conn)
        sleep(4)
        the_pretend_app()
        sleep(5)
        # deliberately unwind through the except/finally path below
        raise KeyboardInterrupt
    except KeyboardInterrupt:
        pass
    finally:
        try:
            proc.terminate()
            sleep(2)
        except SystemExit as ex:
            # teardown is expected to exit cleanly
            assert str(ex) == "0"
def test_many(startup_brain, proc, linux_harn):
    """Stress test: seven harness client processes work through 25 echo jobs.

    Waits up to 120 seconds for every job to leave the non-terminal states
    (Waiting/Ready/Pending/Active), then asserts each job is done and
    produced output.
    """
    proc.start()
    while not proc.is_alive():
        sleep(.5)
    print("testing a lot of processes")
    job_list = []
    # spin up seven client processes
    for i in range(0, 7):
        print("creating process " + str(i))
        linux_harn.add_proc(Linharn_proc.wrap_loop)
        linux_harn.procs[i].start()
    echo = brain.queries.get_plugin_command("Harness", "echo",
                                            brain.connect())
    echo_job = {
        "Status": "Waiting",
        "StartTime": time(),
        "JobTarget": SAMPLE_TARGET,
        "JobCommand": echo
    }
    # build 25 distinct echo jobs (deepcopy: the template dict is reused)
    for i in range(0, 25):
        echo_job["JobCommand"]["Inputs"][0]["Value"] = "Hello World" + str(i)
        job_list.append(deepcopy(echo_job))
    inserted = brain.queries.insert_jobs(job_list, True, brain.connect())
    NOW = time()
    while time() - NOW < 120:
        # stop waiting once no job remains in a non-terminal state
        if brain.rethinkdb.db("Brain").table("Jobs").filter(
                (brain.rethinkdb.row["Status"] == "Waiting") |
                (brain.rethinkdb.row["Status"] == "Ready") |
                (brain.rethinkdb.row["Status"] == "Pending") |
                (brain.rethinkdb.row["Status"] == "Active")).is_empty().run(
                    brain.connect()):
            break
        sleep(16)
    # every inserted job must be done and have output content
    for i in inserted["generated_keys"]:
        print(brain.queries.get_job_by_id(i, brain.connect()))
        print(time())
        assert brain.queries.is_job_done(i, brain.connect())
        assert brain.queries.get_output_content(i, conn=brain.connect())
def _runTests(objects, actions, verbosity):
    """Main test function. Create several objects and perform random actions
    on them.

    Each action is mirrored on a real brain connection and a fake
    Python-powered connection; after every action the two resulting states
    and return values are compared, and any mismatch aborts the run.

    Returns a dict mapping action method names to accumulated wall-clock
    time spent performing them on the real connection.
    """
    # using default engine, because we are testing only DB logic here
    # NOTE(review): engine_tag is never used afterwards.
    engine_tag = brain.getDefaultEngineTag()
    # create brain connection and fake Python-powered connection
    conn = brain.connect(None, name=None)
    fake_conn = FakeConnection()
    objs = []
    fake_objs = []
    times = {}
    # create objects
    for i in range(objects):
        data = getRandomNonTrivialData(STARTING_DEPTH)
        try:
            objs.append(conn.create(data))
        except:
            print("Error creating object: " + str(data))
            raise
        if verbosity > 2:
            print("Object " + str(i) + " created" +
                  (", initial state: " + repr(data) if verbosity > 3 else ""))
        fake_objs.append(fake_conn.create(data))
    # perform test
    for c in range(actions):
        for i in range(objects):
            # copy original state in case we delete object or some error occurs
            fake_state_before = copy.deepcopy(fake_conn.read(fake_objs[i]))
            # try to read the real object from the database
            try:
                state_before = conn.read(objs[i])
            except:
                print("Error reading object " + str(i) +
                      (": " + str(fake_state_before)
                       if verbosity > 3 else ""))
                if verbosity > 3:
                    conn._engine.dump()
                raise
            # create random action and test it on fake object
            action = RandomAction(state_before)
            fake_exception = None
            # some fuzz actions can lead to exceptions
            try:
                fake_result = action(fake_conn, fake_objs[i])
            except brain.BrainError as e:
                fake_exception = e
            # if object gets deleted, return its state to original
            fake_state_after = fake_conn.read(fake_objs[i])
            if fake_state_after is None:
                fake_conn.modify(fake_objs[i], [], fake_state_before)
                continue
            # try to perform action on a real object, timing it per method
            try:
                starting_time = time.time()
                result = action(conn, objs[i])
                action_time = time.time() - starting_time
                method = action.getMethod()
                if method not in times.keys():
                    times[method] = action_time
                else:
                    times[method] += action_time
            except brain.BrainError as e:
                # a BrainError is acceptable only when the fake connection
                # raised an exception of the same type
                if not (fake_exception is not None and
                        type(e) == type(fake_exception)):
                    raise
            except:
                print("Error performing action on object " + str(i) +
                      ": " + action.dump(verbosity))
                if verbosity > 3:
                    print("State before: " + str(fake_state_before))
                    conn._engine.dump()
                raise
            if verbosity > 2:
                print("Object " + str(i) + ", " + action.dump(verbosity))
            # try to read resulting object state
            try:
                state_after = conn.read(objs[i])
            except:
                print("Error reading object " + str(i) +
                      ((": " + str(fake_state_after))
                       if verbosity > 3 else ""))
                print("After action: " + action.dump(verbosity))
                if verbosity > 3:
                    print("On state: " + str(fake_state_before))
                    conn._engine.dump()
                raise
            # compare resulting states of real and fake objects
            if state_after != fake_state_after:
                print("Action results are different:")
                print("State before: " + repr(fake_state_before))
                print("Action: " + action.dump(verbosity))
                print("Main state after: " + repr(state_after))
                print("Fake state after: " + repr(fake_state_after))
                if verbosity > 3:
                    conn._engine.dump()
                raise Exception("Functionality error")
            # compare action return values (if any)
            if result != fake_result and fake_exception is None:
                print("Action return values are different:")
                print("State before: " + repr(fake_state_before))
                print("Action: " + action.dump(verbosity))
                print("Main return value: " + repr(result))
                print("Fake return value: " + repr(fake_result))
                if verbosity > 3:
                    conn._engine.dump()
                raise Exception("Functionality error")
    return times
def export_connect(self, *args, **kwds):
    """Open a new brain connection, register it, and return its session id.

    The id is an 8-character alphanumeric token. When a db_path was
    configured on this object it is forwarded to brain.connect().
    """
    import secrets
    import string
    # SECURITY FIX: the original used random.sample(), which is both
    # predictable (Mersenne Twister) and samples *without* replacement,
    # shrinking the id space. Use the secrets module for session tokens.
    alphabet = string.ascii_letters + string.digits
    session_id = "".join(secrets.choice(alphabet) for _ in range(8))
    if self._db_path is not None:
        kwds['db_path'] = self._db_path
    self._sessions[session_id] = brain.connect(*args, **kwds)
    return session_id
def connect(self, *args, **kwds):
    """Open a brain connection wrapped in a CachedConnection.

    The cache size threshold is deliberately set to a very low value (1)
    for testing purposes only.
    """
    raw_conn = brain.connect(*args, **kwds)
    return brain.CachedConnection(raw_conn, size_threshold=1)