def delete(self):
    """
    {
        file_name:
        replicate:
    }
    """
    data = request.json
    file_name = data['file_name']
    replicate = data['replicate']
    if does_file_exist(file_name, FILE_SERVER_PATH):
        try:
            delete_file(file_name, FILE_SERVER_PATH)
            print('File deleted from', SERVER_PORT)
            # Alert dir server
            if replicate:
                dir_port = get_port('dir_server')
                req = format_file_req(dir_port)
                data = {
                    'file_name': file_name,
                    'file_server_port': str(SERVER_PORT)
                }
                requests.delete(req, data=json.dumps(data), headers=cf.JSON_HEADER)
        except Exception:
            print('Unable to delete file')
            abort(409)
def get_model_template(addr, session, model, revision):
    url = 'http://%s:%s/api/template' % (addr, utils.get_port())
    headers = {'User-Agent': utils.get_user_agent()}
    cookies = {'medlinkToken': session}
    payload = {'modelSeries': model, 'revision': revision}
    i = 0
    while i < 3:
        r = ensure_success(lambda: requests.get(url, headers=headers,
                                                params=payload,
                                                cookies=cookies,
                                                verify=False))
        content = r.content.decode("UTF-8")
        try:
            if content is not None:
                return json.loads(content)
        except Exception:
            print(i)
            i += 1
    print(r)
    return Verdict.MUMBLE("Check api/template")
def get_file_list_route():
    app.logger.debug(request.form)
    ip = dict(request.form)['ip'][0]
    dir_name = dict(request.form)['dir'][0]
    port = get_port(conf, ip)
    file_list = get_file_list(conf, ip, dir_name, port)
    return " ".join(file_list)
def get_supported_bodies(addr, session):
    url = 'http://%s:%s/api/bodymodels' % (addr, utils.get_port())
    headers = {'User-Agent': utils.get_user_agent()}
    cookies = {'medlinkToken': session}
    r = ensure_success(lambda: requests.get(
        url, headers=headers, cookies=cookies, verify=False))
    return json.loads(r.content.decode("UTF-8"))
def get_file_version(file_name):
    dir_server_port = get_port('dir_server')
    req = format_version_req(dir_server_port)
    data = {'file_name': file_name}
    resp = json.loads(
        requests.get(req, data=json.dumps(data),
                     headers=cf.JSON_HEADER).content.decode())
    version = resp['file_version']
    return version
def delete(self):
    # Alert directory server first
    dir_port = get_port('dir_server')
    if dir_port:
        req = format_node_req(SERVER_PORT, dir_port)
        requests.delete(req)
    request.environ.get('werkzeug.server.shutdown')()
    response = {'state': 'shutting down'}
    return response
def register_user(addr):
    url = 'http://%s:%s/api/signin' % (addr, utils.get_port())
    headers = {'User-Agent': utils.get_user_agent()}
    user = {"login": utils.get_name(), "password": utils.get_password()}
    r = ensure_success(
        lambda: requests.post(url, headers=headers, data=user, verify=False))
    user["session"] = r.content.decode("UTF-8")
    return user
def __init__(self, path, mode='rtc'):
    self.mode = mode
    self.path = path
    host, port = utils.get_port(_config['nameserver'])
    self.server = utils.getServer(path, host, port)
    if self.server is None:
        print('File not found')
    self.modified = None
    SpooledTemporaryFile.__init__(self, _config['max_size'], mode.replace('c', ''))
    host, port = utils.get_port(_config['lockserver'])
    if utils.Locked(path, host, port):
        print('File is locked')
    if 'w' not in mode:
        host, port = utils.get_port(self.server)
        with closing(HTTPConnection(host, port)) as con:
            con.request('GET', path)  # 'filepath' in the original is undefined; 'path' is assumed here
            response = con.getresponse()
            self.modified = response.getheader('Last-Modified')
            status = response.status
            if status not in (200, 204):
                print('Error occurred', status)
            if status != 204:
                self.write(response.read())
        if 'r' in mode:
            self.seek(0)
    self.lockid = ''
    if 'a' in mode or 'w' in mode:
        host, port = utils.get_port(_config['lockserver'])
        self.lockid = int(utils.Locked(path, host, port))
    if 'c' in mode:
        File._cache[path] = self
def initFileServer():
    ## Initialises File Server
    host, port = utils.get_port(config['nameserver'])
    with closing(HTTPConnection(host, port)) as con:
        data = 'srv='
        data += config['server']
        data += '&dirs='
        data += '/n'
        data += config['directories']
        con.request('POST', '/', data)
def test_dir(self):
    utils.create_dir("dir1")
    response = self.client.request("GET", "/dir1")
    (status, reason, body, headers) = response
    location = utils.get_header("location", headers)
    self.assertEqual(307, status)
    self.assertEqual("http://localhost:%s/dir1/" % utils.get_port(), location)
def main(name='noname'):
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
        sock.connect((socket.gethostbyname('localhost'), get_port()))
        proof(name)
        sock.sendall(str.encode(name))
        print('Message sent successfully')
        reply = sock.recv(4096)
        return reply.decode() == 'ok'
def put_body(addr, session, body_model, token):
    url = 'http://%s:%s/api/bodymodel' % (addr, utils.get_port())
    headers = {'User-Agent': utils.get_user_agent()}
    cookies = {'medlinkToken': session}
    payload = {'vendorToken': token}
    r = ensure_success(lambda: requests.put(url, headers=headers,
                                            json=body_model, params=payload,
                                            cookies=cookies, verify=False))
def admin_login(admin_username, admin_password):
    ''' returns true if login is successful '''
    auth_port = get_port('auth_server')
    req = format_admin_req(auth_port)
    data = {'admin_username': admin_username, 'admin_password': admin_password}
    response = json.loads(
        requests.get(req, data=json.dumps(data),
                     headers=cf.JSON_HEADER).content.decode())
    return response['valid_admin']
def is_valid_file(file_name):
    db_f = file_map.find_one({'file_name': file_name})
    version = db_f['file_version']
    dir_server_port = get_port('dir_server')
    req = format_version_req(dir_server_port)
    data = {'file_name': file_name}
    resp = json.loads(
        requests.get(req, data=json.dumps(data),
                     headers=cf.JSON_HEADER).content.decode())
    latest_file_version = resp['file_version']
    return version == latest_file_version
def show():
    ''' Shows files available '''
    dir_port = get_port('dir_server')
    check_port(dir_port)
    show_url = format_show_files(dir_port)
    response = json.loads(requests.get(show_url).content.decode())
    file_list = response['file_list']
    print('available files:')
    for f in file_list:
        print(f)
def get_body(addr, session, model, revision, token):
    url = 'http://%s:%s/api/bodymodel' % (addr, utils.get_port())
    headers = {'User-Agent': utils.get_user_agent()}
    cookies = {'medlinkToken': session}
    payload = {
        'modelSeries': model,
        'revision': revision,
        'vendorToken': token
    }
    r = ensure_success(lambda: requests.get(
        url, headers=headers, params=payload, cookies=cookies, verify=False))
    return json.loads(r.content.decode("UTF-8"))
def show_users(admin_username, admin_password):
    ''' prints existing users if admin credentials are accurate '''
    auth_port = get_port('auth_server')
    req = format_users_req(auth_port)
    data = {'admin_username': admin_username, 'admin_password': admin_password}
    response = json.loads(
        requests.get(req, data=json.dumps(data),
                     headers=cf.JSON_HEADER).content.decode())
    user_list = response['user_list']
    print('Users:')
    for u in user_list:
        print(u)
def create_file(file_name):
    """ Creates file """
    file_server = get_port('file_server')
    check_port(file_server)
    data = {
        "file_name": file_name,
        "file_content": " ",
        "replicate": True,
        "new_file": True
    }
    req = format_file_req(file_server)
    requests.post(req, data=json.dumps(data), headers=cf.JSON_HEADER)
def delete_file(file_name):
    """ Deletes file """
    dir_server_port = get_port('dir_server')
    check_port(dir_server_port)
    try:
        file_port = get_file_port(file_name, dir_server_port)
    except Exception:
        print("File doesn't exist")
        return
    req = format_file_req(file_port)
    data = {'file_name': file_name, 'replicate': True}
    try:
        requests.delete(req, data=json.dumps(data), headers=cf.JSON_HEADER)
    except Exception:
        print('Unable to delete file')
def add_user(admin_username, admin_password, new_username, new_password, new_privilege):
    ''' adds user to db if admin cred are accurate '''
    auth_port = get_port('auth_server')
    req = format_admin_req(auth_port)
    data = {
        'admin_username': admin_username,
        'admin_password': admin_password,
        'new_username': new_username,
        'new_password': new_password,
        'new_privilege': new_privilege
    }
    response = json.loads(
        requests.post(req, data=json.dumps(data),
                      headers=cf.JSON_HEADER).content.decode())
    return response['success']
def test_browser(self):
    src_dir = os.path.dirname(__file__)
    dst_dir = utils.get_root()
    shutil.copyfile("%s/test_browser.html" % src_dir,
                    "%s/test_browser.html" % dst_dir)
    shutil.copyfile("%s/test_browser.js" % src_dir,
                    "%s/test_browser.js" % dst_dir)
    utils.create_dir("a-dir")
    webbrowser.open("http://localhost:%d/test_browser.html" % utils.get_port())
    print("waiting for response from browser test: ")
    wait = 5
    while wait > 0:
        contents = utils.read_file("test_browser.results.txt")
        if contents:
            break
        print("%d" % wait)
        time.sleep(1)
        wait -= 1
    self.assertTrue(contents is not None)
    self.assertEqual("OK", contents)
def test_secure_customized_port(self):
    with self.patch_config({"port": 4242, "secure": True}):
        self.assertEqual(4242, get_port())

def test_secure_none_port(self):
    with self.patch_config({"port": None, "secure": True}):
        self.assertEqual(443, get_port())

def test_secure_missing_port(self):
    with self.patch_config({"secure": True}):
        self.assertEqual(443, get_port())
hadoop_bin = hdp_select.get_hadoop_dir("sbin")
hadoop_bin_dir = hdp_select.get_hadoop_dir("bin")
hadoop_home = hdp_select.get_hadoop_dir("home")
hadoop_secure_dn_user = hdfs_user
hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
hadoop_conf_secure_dir = os.path.join(hadoop_conf_dir, "secure")
hadoop_lib_home = hdp_select.get_hadoop_dir("lib")

# hadoop parameters for 2.2+
if Script.is_hdp_stack_greater_or_equal("2.2"):
    mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce-client/*"

    if not security_enabled:
        hadoop_secure_dn_user = '******'
    else:
        dfs_dn_port = utils.get_port(dfs_dn_addr)
        dfs_dn_http_port = utils.get_port(dfs_dn_http_addr)
        dfs_dn_https_port = utils.get_port(dfs_dn_https_addr)
        # We try to avoid inability to start datanode as a plain user due to usage of root-owned ports
        if dfs_http_policy == "HTTPS_ONLY":
            secure_dn_ports_are_in_use = utils.is_secure_port(dfs_dn_port) or utils.is_secure_port(dfs_dn_https_port)
        elif dfs_http_policy == "HTTP_AND_HTTPS":
            secure_dn_ports_are_in_use = utils.is_secure_port(dfs_dn_port) or utils.is_secure_port(dfs_dn_http_port) or utils.is_secure_port(dfs_dn_https_port)
        else:  # params.dfs_http_policy == "HTTP_ONLY" or not defined:
            secure_dn_ports_are_in_use = utils.is_secure_port(dfs_dn_port) or utils.is_secure_port(dfs_dn_http_port)

        if secure_dn_ports_are_in_use:
            hadoop_secure_dn_user = hdfs_user
        else:
            hadoop_secure_dn_user = '******'

ambari_libs_dir = "/var/lib/ambari-agent/lib"
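# A minimal sketch (not the Ambari originals) of what utils.get_port() and
# utils.is_secure_port() are assumed to do in the block above: pull the port
# out of a "host:port" address string, and treat ports below 1024 as
# root-owned ("secure") ports that a plain user cannot bind.
def get_port(address):
    # e.g. "0.0.0.0:50010" -> 50010; a missing address stays None
    if address is None:
        return None
    return int(address.split(':')[-1])

def is_secure_port(port):
    return port is not None and int(port) < 1024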
# # fake gps over launch site
# print("Calling Path Prediction.")
# try:
#     subprocess.Popen('~/HAB/Prediction_Autologger/build/BallARENA-dev -36.71131 142.19981 4200 6.9 1612614684', shell=True)
# except Exception as e:
#     print("Exception: " + str(e.__class__))
#     print(e)
#     print("Error calling path prediction script. Will continue and call again later.")

# the main file code starts here
if __name__ == '__main__':

    # set Port
    GlobalVals.GPS_UART_PORT = get_port('GPS')
    print('PORT: ' + GlobalVals.GPS_UART_PORT)
    GlobalVals.GPS_UART_PORT = "/dev/ttyUSB0"
    GlobalVals.GPS_UART_BAUDRATE = 38400

    try:
        os.makedirs("../datalog")
    except FileExistsError:
        pass

    file_name = "../datalog/" + time.strftime("%Y%m%d-%H%M%S") + "-GPSLoggerUblox.csv"
    GlobalVals.GPS_LOGGER_FILE = file_name
    logString = "epoch, lon, lat, alt, ascent_rate \n"
def write_file(file_name):
    """Allows user to write to file of a particular name"""
    file_name_data = {'file_name': file_name}

    # Get server ports from registry
    cache_port = get_port('cache_server')
    check_port(cache_port)
    dir_port = get_port('dir_server')
    check_port(dir_port)
    lock_port = get_port('lock_server')
    check_port(lock_port)

    # Get lock
    lock_url = format_lock_req('0', lock_port)
    response = json.loads(
        requests.get(lock_url, data=json.dumps(file_name_data),
                     headers=cf.JSON_HEADER).content.decode())
    lock_acquired = response['lock_acquired']
    client_id = response['client_id']
    lock_url = format_lock_req(client_id, lock_port)
    while not lock_acquired:
        time.sleep(1)
        response = json.loads(
            requests.get(lock_url, data=json.dumps(file_name_data),
                         headers=cf.JSON_HEADER).content.decode())
        lock_acquired = response['lock_acquired']

    # Read file in
    read_file(file_name)
    input('Press enter to write file back to server')

    # Get file server port from directory server
    url = format_file_req(dir_port)
    response = json.loads(
        requests.get(url, data=json.dumps(file_name_data),
                     headers=cf.JSON_HEADER).content.decode())
    file_server_port = response['file_server_port']

    # Post to file server
    url = format_file_req(file_server_port)
    script_dir = os.path.dirname(__file__)
    abs_file_path = script_dir + '/temp'
    file_content = get_file_read(file_name, abs_file_path)
    data = {
        "file_name": file_name,
        "file_content": file_content,
        "replicate": True,
        "new_file": False
    }
    headers = cf.JSON_HEADER
    response = json.loads(
        requests.post(url, data=json.dumps(data),
                      headers=headers).content.decode())
    file_version = response['file_version']

    # Release lock
    requests.post(lock_url, data=json.dumps(file_name_data), headers=cf.JSON_HEADER)

    # Update cache
    data = {
        'file_name': file_name,
        'file_content': file_content,
        'file_version': file_version
    }
    url = format_file_req(cache_port)
    response = requests.post(url, json.dumps(data), headers=headers)
def put_telemetry(addr, session, telemetry):
    url = 'http://%s:%s/api/telemetry' % (addr, utils.get_port())
    headers = {'User-Agent': utils.get_user_agent()}
    cookies = {'medlinkToken': session}
    ensure_success(lambda: requests.put(
        url, headers=headers, cookies=cookies, json=telemetry, verify=False))
def test_customized_port(self):
    with self.patch_config({'port': 4747, 'secure': False}):
        self.assertEqual(4747, get_port())

def test_secure_missing_port(self):
    with self.patch_config({'secure': True}):
        self.assertEqual(443, get_port())

def test_none_port(self):
    with self.patch_config({'port': None, 'secure': False}):
        self.assertEqual(80, get_port())

def test_secure_none_port(self):
    with self.patch_config({'port': None, 'secure': True}):
        self.assertEqual(443, get_port())

def test_customized_port(self):
    with self.patch_config({"port": 4747, "secure": False}):
        self.assertEqual(4747, get_port())

def test_missing_port(self):
    with self.patch_config({"secure": False}):
        self.assertEqual(80, get_port())
    fileObj = open(GlobalVals.GROUND_STATION_LOG_FILE, "a")
    fileObj.write(logString)
    fileObj.close()
except Exception as e:
    print("Exception: " + str(e.__class__))
    print(e)
    print("Error using GPS data log file")

#=====================================================
# Thread starter
#=====================================================
if __name__ == '__main__':

    # set Port
    GlobalVals.PORT = get_port('RFD900')
    print('PORT: ' + GlobalVals.PORT)

    try:
        os.makedirs("../datalog")
    except FileExistsError:
        pass

    GlobalVals.ERROR_LOG_FILE = "../datalog/" + time.strftime("%Y%m%d-%H%M%S") + "-ErrorLog.txt"
    GlobalVals.PING_LOG_FILE = "../datalog/" + time.strftime("%Y%m%d-%H%M%S") + "-PingLog.txt"
    GlobalVals.PACKET_STATS_FILE = "../datalog/" + time.strftime("%Y%m%d-%H%M%S") + "-PacketStats.txt"
    GlobalVals.GROUND_STATION_LOG_FILE = "../datalog/" + time.strftime("%Y%m%d-%H%M%S") + "-GPS_GroundStationLogger.txt"
def test_secure_customized_port(self):
    with self.patch_config({'port': 4242, 'secure': True}):
        self.assertEqual(4242, get_port())

def test_none_port(self):
    with self.patch_config({"port": None, "secure": False}):
        self.assertEqual(80, get_port())
# check if this script will start the handshake or not
# if sys.argv[1] == 'start':
#     starter = True
# elif sys.argv[1] == 'wait':
#     starter = False
# else:
#     print("Incorrect first arg.")
starter = False
#     sys.exit()

# use the third argument as the com port
if numArgs == 3:
    GlobalVals.PORT = sys.argv[2]
else:
    GlobalVals.PORT = get_port('Lora')
print('PORT: ' + GlobalVals.PORT)

# create log file string
try:
    os.makedirs("../datalog")
except FileExistsError:
    pass

file_name = "../datalog/" + time.strftime("%Y%m%d-%H%M%S") + "-LoraRSSI.csv"
GlobalVals.RSSI_LOG_FILE = file_name
logString = "epoch, rssi, filtered_RSSI, distance \n"

try:
def test_missing_port(self):
    with self.patch_config({'secure': False}):
        self.assertEqual(80, get_port())
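# A minimal sketch of a get_port() consistent with the port tests above,
# assuming patch_config() swaps in the module-level `config` dict read here.
# The name `config` and this exact fallback logic are illustrative, not the
# tested implementation.
def get_port():
    port = config.get("port")
    if port is not None:
        return port
    # fall back to the scheme default: 443 for HTTPS, 80 for HTTP
    return 443 if config.get("secure") else 80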
hadoop_home = stack_select.get_hadoop_dir("home")
hadoop_secure_dn_user = hdfs_user
hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
hadoop_conf_secure_dir = os.path.join(hadoop_conf_dir, "secure")
hadoop_lib_home = stack_select.get_hadoop_dir("lib")

# hadoop parameters for stacks that support rolling_upgrade
if stack_version_formatted and check_stack_feature(
        StackFeature.ROLLING_UPGRADE, stack_version_formatted):
    mapreduce_libs_path = format("{stack_root}/current/hadoop-mapreduce-client/*")

    if not security_enabled:
        hadoop_secure_dn_user = '******'
    else:
        dfs_dn_port = utils.get_port(dfs_dn_addr)
        dfs_dn_http_port = utils.get_port(dfs_dn_http_addr)
        dfs_dn_https_port = utils.get_port(dfs_dn_https_addr)
        # We try to avoid inability to start datanode as a plain user due to usage of root-owned ports
        if dfs_http_policy == "HTTPS_ONLY":
            secure_dn_ports_are_in_use = utils.is_secure_port(dfs_dn_port) or utils.is_secure_port(dfs_dn_https_port)
        elif dfs_http_policy == "HTTP_AND_HTTPS":
            secure_dn_ports_are_in_use = utils.is_secure_port(dfs_dn_port) or utils.is_secure_port(dfs_dn_http_port) or utils.is_secure_port(dfs_dn_https_port)
        else:  # params.dfs_http_policy == "HTTP_ONLY" or not defined:
            secure_dn_ports_are_in_use = utils.is_secure_port(dfs_dn_port) or utils.is_secure_port(dfs_dn_http_port)

        if secure_dn_ports_are_in_use: