def __init__(self, conf, logger=None):
    """
    Constructor for account updater.

    :param conf: configuration of account-updater
    :param logger: optional pre-built logger; created from conf when omitted
    """
    self.logger = logger or SimpleLogger(conf).get_logger_object()
    Daemon.__init__(self, conf, self.logger)
    libraryUtils.OSDLoggerImpl(
        "account-updater-monitoring").initialize_logger()
    # Recovery file marks the service as "starting" until init completes.
    create_recovery_file('account-updater-server')
    self.conf = conf
    self.__interval = int(conf.get('interval', 1800))
    self.__ll_port = int(conf.get('llport', 61014))
    self.__account_updater_port = int(
        conf.get('account_updater_port', 61009))
    self.__service_id = "%s_%s_account-updater-server" \
        % (gethostname(), self.__ll_port)
    self.__param = self.__get_param(conf)
    self.msg = GlobalVariables(self.logger)
    self.msg.set_service_id(self.__service_id)
    self.walker_map = WalkerMap()
    self.reader_map = ReaderMap()
    # Start sending health to local leader.
    self.logger.info("Loading health monitoring library")
    self.health_instance = healthMonitoring(
        self.__get_node_ip(gethostname()), self.__account_updater_port,
        self.__ll_port, self.__service_id)
    self.logger.info("Loaded health monitoring library")
    remove_recovery_file('account-updater-server')
    # Load global map; exit if it cannot be fetched at startup.
    if not self.msg.load_gl_map():
        sys.exit(130)
    self.logger.info("Account updater started")
def do_STOP_SERVICE(self):
    """
    Handle COMPLETE ALL REQUEST HTTP request.

    Signals the complete-all event, waits for all four worker threads to
    acknowledge via the queue, then replies with the current ownership list.
    """
    self.logger = SimpleLogger(conf=None).get_logger_object()
    self.conf = SimpleLogger(conf=None).get_conf()
    self.msg = GlobalVariables(self.logger)
    all_transfer_event_received = []
    self.logger.info("Account-updater received STOP_SERVICE request")
    complete_all_request_event = self.msg.get_complete_all_event()
    complete_all_request_event.set()
    try:
        # Wait for all four worker threads to acknowledge the stop event.
        while len(all_transfer_event_received) != 4:
            all_transfer_event_received.append(self.msg.get_from_Queue())
        self.logger.info("Completed STOP_SERVICE request")
        # Bug fix: the status line must be written before any header --
        # send_response() first, then send_header()/end_headers().
        self.send_response(HTTP_OK)
        self.send_header('Message-Type', typeEnums.BLOCK_NEW_REQUESTS_ACK)
        self.send_header('Ownership-List', self.msg.get_ownershipList())
        self.end_headers()
        self.wfile.write(self.msg.get_ownershipList())
        return
    except Exception as err:
        self.logger.exception('Exception raised in'
                              ' STOP_SERVICE error :%s' % err)
        self.send_response(HTTP_INTERNAL_SERVER_ERROR)
        self.end_headers()
def __init__(self, conf, logger):
    """
    Constructor of AccountServiceCommunicator.

    :param conf: updater configuration
    :param logger: logger shared with the rest of the service
    """
    self.logger = logger
    self.msg = GlobalVariables(self.logger)
    self.__conn_creator = ConnectionCreator(conf, self.logger)
def __init__(self, osd_dir, logger):
    """
    Constructor for UpdaterRing.

    Builds the container and account rings used for node lookups.
    """
    self.logger = logger
    self.container_ring = ContainerRing(osd_dir, logger)
    self.account_ring = AccountRing(osd_dir, logger)
    self.msg = GlobalVariables(self.logger)
    # Hash shift used when mapping a hash onto a component number.
    self.shift_param = 512
def __init__(self, walker_map, reader_map, conf, logger):
    """Wire the sweeper thread to the shared walker/reader maps."""
    Thread.__init__(self)
    self.logger = logger
    self.msg = GlobalVariables(self.logger)
    self._file_location = conf['stat_file_location']
    self.__interval = conf['interval']
    self._walker_map = walker_map
    self._reader_map = reader_map
    self.logger.info("ContainerSweeper constructed")
def __init__(self, walker_map, reader_map, conf, logger):
    """Wire the reader thread to the shared maps and service events."""
    Thread.__init__(self)
    self.logger = logger
    self.logger.info("Reader constructed")
    self._walker_map = walker_map
    self._reader_map = reader_map
    self._file_location = conf['stat_file_location']
    self.msg = GlobalVariables(self.logger)
    self._complete_all_event = self.msg.get_complete_all_event()
    self._transfer_cmp_event = self.msg.get_transfer_cmp_event()
    # Guards against acknowledging the same transfer event twice.
    self.put_queue_flag = False
class TestReader(unittest.TestCase): def setUp(self): self.msg = GlobalVariables(logger) walker_map[:] = ['1/2015-09-08.05:51:24'] self.reader_obj = Reader(walker_map, reader_map, conf, logger) def test_reader_run(self): print "test_reader_run" self.reader_obj.setDaemon(True) self.reader_obj.start() time.sleep(0.5) self.assertEquals(self.reader_obj._reader_map, {'1': {'2015-09-08.05:51:24': {'da5599f9ed2104bd4cfbbfa723b45fd8': ['945c7a7b3a4330dfcc8f269e5752ebea'], 'e3891c4148ce912256a07a50b69a8f48': ['082287da66aa3c25b281e06ae721a6d0', '5df2383886036d49657789bda3702b44']}}}) fake_kill_thread(self.reader_obj) def test_reader_run_exception(self): print "test_reader_run_exception" with mock.patch('osd.accountUpdaterService.walker.os.path.exists', side_effect=Exception): self.reader_obj.setDaemon(True) self.reader_obj.start() time.sleep(0.5) self.assertEquals(self.reader_obj._reader_map, {'1': {'2015-09-08.05:51:24': {}}}) fake_kill_thread(self.reader_obj) def test_reader_stop_service_event(self): print "test_Reader_stop_service_event" self.reader_obj._complete_all_event.set() self.reader_obj.setDaemon(True) self.reader_obj.start() time.sleep(0.5) self.assertEquals(self.reader_obj._reader_map, {}) fake_kill_thread(self.reader_obj) self.reader_obj._complete_all_event.clear() def test_reader_transfer_comp_event(self): print "test_Reader_transfer_comp_event" self.msg.set_ownershipList(range(1,10)) self.reader_obj._transfer_cmp_event.set() self.reader_obj.setDaemon(True) self.reader_obj.start() time.sleep(0.5) self.assertEquals(self.reader_obj._reader_map, {'1': {'2015-09-08.05:51:24': {'da5599f9ed2104bd4cfbbfa723b45fd8': ['945c7a7b3a4330dfcc8f269e5752ebea'], 'e3891c4148ce912256a07a50b69a8f48': ['082287da66aa3c25b281e06ae721a6d0', '5df2383886036d49657789bda3702b44']}}}) fake_kill_thread(self.reader_obj) self.reader_obj._transfer_cmp_event.clear() def test_reader_transfer_comp_event_without_new_ownership(self): print "test_Reader_transfer_comp_event_without_new_ownership" 
self.msg.set_ownershipList([]) self.reader_obj._transfer_cmp_event.set() self.reader_obj.setDaemon(True) self.reader_obj.start() time.sleep(0.5) self.assertEquals(self.reader_obj._reader_map, {}) fake_kill_thread(self.reader_obj) self.reader_obj._transfer_cmp_event.clear()
def __init__(self, conf, logger):
    """
    Constructor for Communicator.

    :param conf: configuration (needs osd_dir, conn_timeout, node_timeout)
    :param logger: shared logger
    """
    self.conf = conf
    self.logger = logger
    self.__service_locator = ServiceLocator(conf['osd_dir'], logger)
    self.__account_updater_timeout = AccountUpdaterTimeout()
    self.__html_header_builder = HtmlHeaderBuilder()
    self.__conn_timeout = int(conf['conn_timeout'])
    self.__node_timeout = int(conf['node_timeout'])
    self.msg = GlobalVariables(self.logger)
def __init__(self, conf, logger):
    """Wire the account sweeper thread to paths, events and the ring."""
    Thread.__init__(self)
    self.conf = conf
    self.logger = logger
    self.logger.info("AccountSweep constructed")
    self.root = conf['filesystems']
    # Temporary account area under the configured filesystem.
    self._file_location = os.path.join(conf['filesystems'], FS, 'tmp',
                                       'account')
    self.del_path = 'del'
    self.msg = GlobalVariables(self.logger)
    self._ownership_list = self.msg.get_ownershipList()
    self.put_queue_flag = False
    self._complete_all_event = self.msg.get_complete_all_event()
    self._transfer_cmp_event = self.msg.get_transfer_cmp_event()
    self._account_ring = AccountRing(conf['osd_dir'], self.logger)
def __init__(self, walker_map, reader_map, conf, logger):
    """Wire the updater thread to shared maps, events and dispatcher."""
    Thread.__init__(self)
    self.conf = conf
    self.logger = logger
    self.logger.info("Updater constructed")
    self.msg = GlobalVariables(self.logger)
    self._walker_map = walker_map
    self._reader_map = reader_map
    self.__complete_all_request = False
    self._complete_all_event = self.msg.get_complete_all_event()
    self._transfer_cmp_event = self.msg.get_transfer_cmp_event()
    self.__container_dispatcher = ContainerDispatcher(self.conf,
                                                      self.logger)
    self._updater_map = {}
    self.__other_file_list = []
    self.put_queue_flag = False
class ServiceLocator:
    """
    Give the node information.
    """

    def __init__(self, osd_dir, logger):
        """
        Constructor for UpdaterRing.
        """
        self.logger = logger
        self.container_ring = ContainerRing(osd_dir, logger)
        self.account_ring = AccountRing(osd_dir, logger)
        self.msg = GlobalVariables(self.logger)
        # Hash shift used when mapping a hash onto a component number.
        self.shift_param = 512

    def get_service_details(self, service_obj):
        """Return {'ip', 'port'} extracted from a service object."""
        return {'ip': service_obj.get_ip(), 'port': service_obj.get_port()}

    def get_container_from_ring(self, account_hash, container_hash):
        """get container node info from ring"""
        # TODO: Needs to modify to get complete node info(i.e. fs, dir)
        comp_no = Calculation.evaluate(container_hash, self.shift_param) - 1
        node = self.get_service_details(
            self.msg.get_container_map()[comp_no])
        node['fs'] = self.container_ring.get_filesystem()
        node['dir'] = self.get_cont_dir_by_hash(account_hash, container_hash)
        return node

    def get_account_from_ring(self, account_hash):
        """get account node info from ring"""
        # TODO: Needs to modify to get complete node info(i.e. fs)
        comp_no = Calculation.evaluate(account_hash, self.shift_param) - 1
        node = self.get_service_details(
            self.msg.get_account_map()[comp_no])
        node['fs'] = self.account_ring.get_filesystem()
        node['account'] = account_hash
        node['dir'] = self.get_acc_dir_by_hash(account_hash)
        return node

    def get_acc_dir_by_hash(self, key):
        """Resolve the account directory from the account hash."""
        return self.account_ring.get_directory_by_hash(key)

    def get_cont_dir_by_hash(self, acc_hash, cont_hash):
        """Resolve the container directory from account+container hashes."""
        return self.container_ring.get_directory_by_hash(acc_hash, cont_hash)
def __init__(self, walker_map, conf, logger):
    """Read tuning knobs from conf and wire the walker to shared state."""
    Thread.__init__(self)
    self.logger = logger
    self.msg = GlobalVariables(self.logger)
    # Stat-file scan location and batching/expiry limits.
    self._file_location = conf['stat_file_location']
    self._max_update_files = conf['max_update_files']
    self._max_files_stored_in_cache = conf['max_files_stored_in_cache']
    self._expire_delta_time = conf['updaterfile_expire_delta_time']
    self._ownership_list = self.msg.get_ownershipList()
    self._walker_map = walker_map
    self.logger.info("Walker constructed")
    self._complete_all_event = self.msg.get_complete_all_event()
    self._transfer_cmp_event = self.msg.get_transfer_cmp_event()
    # Guards against acknowledging the same transfer event twice.
    self.put_queue_flag = False
def do_TRANSFER_COMPONENTS(self):
    """
    Handle TRANSFER COMPONENTS HTTP request.

    Receives a serialized component-transfer map from GL, acknowledges
    with 100-Continue, and prepares the bookkeeping structures for the
    transfer.
    """
    self.logger = SimpleLogger(conf=None).get_logger_object()
    self.conf = SimpleLogger(conf=None).get_conf()
    self.msg = GlobalVariables(self.logger)
    self._request_handler = Req()
    # Bug fix: the config key previously contained raw indentation from a
    # backslash-newline continuation inside the string literal, so the
    # configured value was never found and the 600s default always won.
    transfer_component_timeout = int(
        self.conf.get('transfer_component_timeout', 600))
    self.ll_port = int(self.conf.get('llport', 61014))
    self.service_id = self.msg.get_service_id()
    self.deleted_comp_list = []
    # {dest_node_obj: [comp_list]} e.g.
    # {('169.254.1.12', '61009', 'HN0101_61014_account-updater'): ['1', '2']}
    transfer_component_map = {}
    # Final component status list sent to GL: [(1, True), (2, False), ...]
    self.final_transfer_status_list = []
    self.final_status = False  # final response to GL
    # Per-destination completion status, e.g.
    # {dest_node_obj1: True, dest_node_obj2: "Failed", dest_node_obj3: False}
    self.check_transfer_component_map = {}
    all_transfer_event_received = []
    self.protocol_version = "HTTP/1.1"
    self.logger.info("Account-updater received Transfer component request")
    try:
        content_length = int(self.headers['Content-Length'])
        # Acknowledge TRANSFER_COMPONENT to GL before reading the body.
        self.send_response(HTTP_CONTINUE, "Continue\r\n\r\n")
        data = self.rfile.read(content_length)
        message = TransferCompMessage()
        message.de_serialize(data)
        transfer_component_map = message.service_comp_map()
        self.logger.info("Deserialized transfer component map:%s"
                         % transfer_component_map)
    except Exception as ex:
        self.logger.error("Exception: %s" % ex)
        # Bug fix: status line must be written before the headers.
        self.send_response(HTTP_INTERNAL_SERVER_ERROR)
        self.send_header('Message-Type',
                         typeEnums.TRANSFER_COMPONENT_RESPONSE)
        self.end_headers()
        return
def __init__(self, container_name, container_path, account_name, conf,
             logger):
    """
    Constructor for RecordReader.

    :param container_name: container name
    :param container_path: container path
    """
    self.conf = conf
    self.logger = logger
    self.__container_name = container_name
    self.__container_path = container_path
    self.__account_name = account_name
    self.stat_info = {}
    self.msg = GlobalVariables(self.logger)
    self.retry_count = 3  # HEAD retries on redirect/unavailable
    self.connection_creator = ConnectionCreator(self.conf, self.logger)
    self.__is_completed = False
def do_ACCEPT_COMPONENT_TRANSFER(self):
    """
    Handle ACCEPT COMPONENT TRANSFER HTTP request
    """
    try:
        self.logger = SimpleLogger(conf=None).get_logger_object()
        self.conf = SimpleLogger(conf=None).get_conf()
        self.msg = GlobalVariables(self.logger)
        self.ll_port = int(self.conf.get('llport', 61014))
        self.logger.info("Account-updater received ACCEPT_COMPONENT_"
                         "TRANSFER request")
        content_length = int(self.headers['Content-Length'])
        self.logger.debug("Headers:%s" % self.headers)
        # Intermediate (connection created) acknowledgement to source node.
        self.send_response(HTTP_CONTINUE)
        self.end_headers()
        # Receive and parse the new ownership list.
        payload = self.rfile.read(content_length)
        add_comp_list = ast.literal_eval(payload)
        self.logger.info("Accepting new component ownership: %s"
                         % add_comp_list)
        # Update the global map for the new ownership in the background.
        worker = threading.Thread(target=self.update_my_ownership,
                                  args=(add_comp_list,))
        worker.start()
        self.logger.info("Completed ACCEPT_COMPONENTS_TRANSFER request")
        self.send_response(HTTP_OK)
        self.end_headers()
        return
    except Exception as err:
        self.logger.exception('Exception raised in'
                              'ACCEPT_COMPONENTS_TRANSFER error :%s' % err)
        self.send_response(HTTP_INTERNAL_SERVER_ERROR)
        self.end_headers()
class ContainerSweeper(Thread):
    """
    Container Updater remover from the queue
    """

    def __init__(self, walker_map, reader_map, conf, logger):
        """Wire the sweeper thread to the shared walker/reader maps."""
        Thread.__init__(self)
        self.logger = logger
        self.msg = GlobalVariables(self.logger)
        self._file_location = conf['stat_file_location']
        self.__interval = conf['interval']
        self._walker_map = walker_map
        self._reader_map = reader_map
        self.logger.info("ContainerSweeper constructed")

    def run(self):
        """Thread entry point; any unhandled error aborts the process."""
        try:
            self.__perform_container_sweeping_task()
        except Exception as ex:
            self.logger.error("error occured : %s" % ex)
            os._exit(130)
        except:
            self.logger.error("Unknown exception occured")
            os._exit(130)

    def __perform_container_sweeping_task(self):
        """Endless loop: delete processed stat files and purge map entries."""
        while True:
            self.logger.info("Container sweeper list : %s"
                             % self.msg.get_sweeper_list())
            pending = self.msg.get_sweeper_list()
            try:
                for comp, stat_name in pending:
                    remove_file("%s/%s/%s" % (self._file_location,
                                              comp, stat_name))
                    if comp in self._reader_map.keys():
                        for obj in self._reader_map[comp].keys():
                            if obj.get_stat_file() == stat_name:
                                # Decrease the container count for every
                                # account tracked under this stat file.
                                for account, cont_list in \
                                        self._reader_map[comp][obj].items():
                                    self._reader_map.decrementContainerCount(
                                        len(cont_list))
                                del self._reader_map[comp][obj]
                        # Drop components whose stat-file dict became empty.
                        for key, val in self._reader_map.items():
                            if not val:
                                self._reader_map.pop(key)
                    entry = comp + "/" + stat_name
                    if entry in self._walker_map:
                        self._walker_map.remove(entry)
                    self.logger.debug("After deletion reader map:%s , "
                                      "walker map:%s"
                                      % (self._reader_map, self._walker_map))
                self.msg.pop_from_sweeper_list([])
            except Exception as ex:
                self.logger.error("Exception occured :%s" % ex)
                self.msg.pop_from_sweeper_list([])
            time.sleep(20)
class ConnectionCreator:
    """
    Established the connection.
    """

    def __init__(self, conf, logger):
        """
        Constructor for Communicator.

        :param conf: configuration (osd_dir, conn_timeout, node_timeout)
        :param logger: shared logger
        """
        self.__service_locator = ServiceLocator(conf['osd_dir'], logger)
        self.__account_updater_timeout = AccountUpdaterTimeout()
        self.__html_header_builder = HtmlHeaderBuilder()
        self.__conn_timeout = int(conf['conn_timeout'])
        self.__node_timeout = int(conf['node_timeout'])
        self.conf = conf
        self.logger = logger
        self.msg = GlobalVariables(self.logger)

    def connect_container(self, method, account_name, container_name,
                          container_path):
        """
        Open an HTTP connection to the container service node and send
        the request.

        :param method: HTTP method (e.g. 'HEAD')
        :param account_name: account hash for ring lookup
        :param container_name: container hash for ring lookup
        :param container_path: request path on the container node
        :return: httplib.HTTPConnection with the request already sent
        """
        self.logger.debug('Enter in connect_container')
        use_hub("selects")
        node = self.__service_locator.get_container_from_ring(
            account_name, container_name)
        conn = None
        headers = None
        shift_param = 512
        with self.__account_updater_timeout.get_connection_timeout(
                self.__conn_timeout):
            headers = self.__html_header_builder.get_headers(None)
            headers['x-updater-request'] = True
            headers['x-component-number'] = Calculation.evaluate(
                container_name, shift_param) - 1
            headers['x-global-map-version'] = \
                self.msg.get_global_map_version()
            conn = httplib.HTTPConnection(node['ip'], node['port'])
            conn.request(method, container_path, '', headers)
        return conn

    def get_http_connection_instance(self, info, data):
        """
        Send a PUT_CONTAINER_RECORD request to the container or account
        service, depending on info['flag'].

        :param info: dict with account_name, entity_name, body_size, flag
        :param data: serialized stat info used as the request body
        :return conn: connection object to service.
        """
        self.logger.debug('Enter in get_http_connection_instance')
        headers = None
        conn = None
        shift_param = 512
        if info['flag']:
            node = self.__service_locator.get_container_from_ring(
                info['account_name'], data.keys()[0])
        else:
            node = self.__service_locator.get_account_from_ring(
                info['account_name'])
        headers = self.__html_header_builder.get_headers(None)
        info['entity_name'] = info['entity_name'].encode("utf-8")
        with self.__account_updater_timeout.get_connection_timeout(
                self.__conn_timeout):
            # Bug fix: the log format string previously embedded a
            # backslash-newline continuation inside the literal, injecting
            # raw indentation whitespace into the logged message.
            self.logger.debug('Connecting to : node: %s, fs: %s, '
                              'account: %s, message body size: %s'
                              % (node, node['fs'], info['account_name'],
                                 info['body_size']))
            headers['account'] = info['account_name']
            headers['filesystem'] = node['fs']
            headers['dir'] = node['dir']
            headers['x-component-number'] = Calculation.evaluate(
                info['account_name'], shift_param) - 1
            headers['x-global-map-version'] = \
                self.msg.get_global_map_version()
            conn = httplib.HTTPConnection(node['ip'], node['port'])
            conn.request("PUT_CONTAINER_RECORD", '', data, headers)
        self.logger.debug('Exit from get_http_connection_instance')
        return conn

    def get_http_response_instance(self, conn):
        """
        Get the response from service.

        :return resp: response from service.
        """
        self.logger.debug('Enter in get_http_response_instance')
        with self.__account_updater_timeout.get_node_timeout(
                self.__node_timeout):
            resp = conn.getresponse()
        return resp
def setUp(self):
    """Build a Reader wired to the shared module-level test fixtures."""
    self.msg = GlobalVariables(logger)
    walker_map[:] = ['1/2015-09-08.05:51:24']
    self.reader_obj = Reader(walker_map, reader_map, conf, logger)
class ContainerStatReader:
    """
    Read the container stat info from container file.
    """

    def __init__(self, container_name, container_path, account_name, conf,
                 logger):
        """
        Constructor for RecordReader.

        :param container_name: container name
        :param container_path: container path
        :param account_name: account the container belongs to
        :param conf: updater configuration
        :param logger: shared logger
        """
        self.__container_name = container_name
        self.__container_path = container_path
        self.__account_name = account_name
        self.stat_info = {}
        self.conf = conf
        self.logger = logger
        self.msg = GlobalVariables(self.logger)
        self.retry_count = 3  # HEAD retries on redirect/unavailable
        self.connection_creator = ConnectionCreator(self.conf, self.logger)
        self.__is_completed = False

    def __repr__(self):
        return "container : %s account: %s" % (self.__container_name,
                                               self.__account_name)

    def is_completed(self):
        """True once the stats were read (or the container was absent)."""
        return self.__is_completed

    def read_container_stat_info(self, account_map):
        """
        Get the stat info of container using H-File interface.

        Retries up to self.retry_count times while the response is neither
        204 nor 404, reloading the global map on 301/307/503 first.
        """
        try:
            conn = None
            self.logger.debug(
                "Enter in read_container_stat_info for container")
            conn = self.connection_creator.connect_container(
                'HEAD', self.__account_name, self.__container_name,
                self.__container_path)
            resp = self.connection_creator.get_http_response_instance(conn)
            while resp.status != 204 and resp.status != 404 and \
                    self.retry_count != 0:
                self.retry_count -= 1
                # Bug fix: the original condition was missing an 'or'
                # before the 503 check (a SyntaxError).
                if resp.status == 301 or resp.status == 307 or \
                        resp.status == 503:
                    self.msg.load_gl_map()
                conn = self.connection_creator.connect_container(
                    'HEAD', self.__account_name, self.__container_name,
                    self.__container_path)
                resp = self.connection_creator.get_http_response_instance(
                    conn)
            if resp.status == 204:
                headers = dict(resp.getheaders())
                self.stat_info[self.__container_name] = \
                    {'container': headers['x-container'],
                     'put_timestamp': headers['x-put-timestamp'],
                     'object_count': headers['x-container-object-count'],
                     'bytes_used': headers['x-container-bytes-used'],
                     'delete_timestamp': '0', 'deleted': False}
                account_map[self.__account_name, self.__container_name] = \
                    "success"
                self.__is_completed = True
                self.logger.info(
                    "container stats for %s: %s"
                    % (self.__container_name,
                       self.stat_info[self.__container_name]))
            elif resp.status == 404:
                self.logger.debug("Container info file %s not found"
                                  % self.__container_path)
                self.stat_info[self.__container_name] = \
                    {'container': '',
                     'put_timestamp': '0',
                     'object_count': 0,
                     'bytes_used': 0,
                     'delete_timestamp': '0', 'deleted': True}
                account_map[self.__account_name, self.__container_name] = \
                    "success"
                self.__is_completed = True
            else:
                self.logger.warning(
                    "Could not read stats form container: %s"
                    % self.__container_name)
        except Exception as ex:
            self.logger.error(
                "While getting container stats for:%s, Error: %s"
                % (self.__container_name, ex))
class Walker(Thread):
    # Scans per-component stat-file directories and publishes the batch of
    # files to process into the shared walker map.
    # NOTE(review): formatting reconstructed from a whitespace-mangled
    # source; the statement nesting below is the most plausible reading --
    # confirm against the original file.

    def __init__(self, walker_map, conf, logger):
        Thread.__init__(self)
        # Stat-file location and batching/expiry limits from configuration.
        self._file_location = conf['stat_file_location']
        self._max_update_files = conf['max_update_files']
        self._max_files_stored_in_cache = conf['max_files_stored_in_cache']
        self._expire_delta_time = conf['updaterfile_expire_delta_time']
        self.logger = logger
        self.msg = GlobalVariables(self.logger)
        # Components currently owned by this updater instance.
        self._ownership_list = self.msg.get_ownershipList()
        self._walker_map = walker_map
        self.logger.info("Walker constructed")
        self._complete_all_event = self.msg.get_complete_all_event()
        self._transfer_cmp_event = self.msg.get_transfer_cmp_event()
        # Guards against acknowledging the same transfer event twice.
        self.put_queue_flag = False

    """ Service Stop Event Handling """

    def __walker_check_complete_all_event(self):
        """ Walker complete all request (Stop) Event check """
        if self._complete_all_event.is_set():
            self.logger.info("Received complete all request event in walker")
            self._walker_map = []
            self.msg.put_into_Queue()
            return True

    """ Get Component Map Event Handling """

    def __walker_get_new_ownership(self):
        """ Walker transfer/accept component request """
        if self._transfer_cmp_event.is_set() and not self.put_queue_flag:
            self.logger.info(
                "Received transfer/accept request event in walker")
            self._ownership_list = self.msg.get_ownershipList()
            self.msg.put_into_Queue()
            self.put_queue_flag = True
        elif not self._transfer_cmp_event.is_set():
            self.put_queue_flag = False

    def run(self):
        # Any unhandled error aborts the whole process (exit code 130).
        try:
            self.__perform_walker_operation()
        except Exception as ex:
            self.logger.error("error occured : %s" % ex)
            os._exit(130)
        except:
            self.logger.error("Unknown exception occured")
            os._exit(130)

    def __perform_walker_operation(self):
        # old_stat_files: {"comp/filename": mtime} candidates carried over
        # between iterations until they are handed to the walker map.
        old_stat_files = {}
        while True:
            self.logger.debug("Walker started")
            temp_structure = []
            self._ownership_list = self.msg.get_ownershipList()
            if self.__walker_check_complete_all_event():
                break
            self.__walker_get_new_ownership()
            try:
                # Refill the candidate cache only once it has drained.
                if not old_stat_files:
                    old_stat_files = {}
                    all_stat_files = {}
                    for comp in self._ownership_list:
                        if os.path.exists(
                                os.path.join(self._file_location,
                                             str(comp))):
                            for file in os.listdir(
                                    '%s/%s' % (self._file_location, comp)):
                                all_stat_files[os.path.join(
                                    self._file_location, str(comp),
                                    file)] = \
                                    os.path.getmtime(os.path.join(
                                        self._file_location, str(comp),
                                        file))
                            # Keep the oldest files (the newest one is
                            # excluded by [:-1] -- it may still be written).
                            for old_file, modtime in sorted(
                                    all_stat_files.items(),
                                    key=lambda file:
                                    file[1])[:-1][
                                        :self._max_files_stored_in_cache]:
                                old_stat_files[os.path.join(
                                    old_file.split('/')[-2:-1][0],
                                    os.path.basename(old_file))] = \
                                    all_stat_files[old_file]
                            # A lone remaining file is only taken once it
                            # is older than the configured expiry delta.
                            if len(os.listdir(
                                    '%s/%s' % (self._file_location,
                                               comp))) == 1:
                                left_file = os.listdir(
                                    '%s/%s' % (self._file_location,
                                               comp))[0]
                                totalsecond_diff = respondsecondsDiff(
                                    left_file)
                                if totalsecond_diff > \
                                        self._expire_delta_time:
                                    old_stat_files[os.path.join(
                                        str(comp), left_file)] = \
                                        os.path.getmtime(os.path.join(
                                            self._file_location,
                                            str(comp), left_file))
                                else:
                                    self.logger.debug(
                                        "Stat file %s not Expired: "
                                        % os.path.join(
                                            self._file_location,
                                            str(comp), left_file))
                            all_stat_files = {}
                        else:
                            self.logger.debug(
                                "Directory :%s not exists"
                                % os.path.join(self._file_location,
                                               str(comp)))
                # Empty stat files carry no work: queue them for sweeping.
                for stat_file in old_stat_files.keys():
                    if os.stat(os.path.join(
                            self._file_location,
                            stat_file)).st_size == 0:
                        comp_tuple = (stat_file.split('/')[0],
                                      stat_file.split('/')[1])
                        self.logger.info(
                            "Stat file :%s is empty therefore deleting it"
                            % os.path.join(self._file_location, stat_file))
                        self.msg.create_sweeper_list(comp_tuple)
                        del old_stat_files[stat_file]
                # Publish the oldest files, bounded by _max_update_files.
                for stat_file, modtime in sorted(
                        old_stat_files.items(),
                        key=lambda file:
                        file[1])[:self._max_update_files]:
                    if stat_file not in temp_structure:
                        temp_structure.append(stat_file)
                        del old_stat_files[stat_file]
                self._walker_map[:] = \
                    temp_structure[:self._max_update_files]
                self.logger.debug(
                    "State files to be processed at walker :%s"
                    % self._walker_map)
            except Exception as ex:
                self.logger.error("Exception occured :%s", ex)
            self.logger.debug("Exiting walker")
            time.sleep(30)
class AccountServiceCommunicator:
    """
    Communicate with the account service.
    """

    def __init__(self, conf, logger):
        """
        Constructor of AccountServiceCommunicator.
        """
        self.logger = logger
        self.__conn_creator = ConnectionCreator(conf, self.logger)
        self.msg = GlobalVariables(self.logger)

    def __send_http_request_to_account_service(self, account_name,
                                               stat_info):
        """
        Send the request to the account service.

        :param account_name: account whose container records are updated
        :param stat_info: container stat payload (stringified as the body)
        :return: connection object with the request already sent
        """
        request_info = {
            'recovery_flag': False,
            'method_name': 'PUT_CONTAINER_RECORD',
            'entity_name': '',
            'body_size': len(str(stat_info)),
            'account_name': account_name,
            'flag': False,
        }
        return self.__conn_creator.get_http_connection_instance(
            request_info, str(stat_info))

    def update_container_stat_info(self, account_name=None,
                                   stat_info=None):
        """
        Update the container information to account service.

        Retries up to three times while the response is neither 202 nor
        404, reloading the global map first on 301/307.  Returns True
        when the service finally answered 202 or 404.
        """
        updated = False
        attempts_left = 3
        self.logger.debug(
            'Sending http request to account service for updation.')
        conn = None
        try:
            conn = self.__send_http_request_to_account_service(
                account_name, stat_info)
            resp = self.__conn_creator.get_http_response_instance(conn)
            self.logger.info("Response from account service: status: %s,"
                             " message: %s" % (resp.status, resp.read()))
            while resp.status != 202 and resp.status != 404 \
                    and attempts_left != 0:
                attempts_left -= 1
                if resp.status == 301 or resp.status == 307:
                    self.msg.load_gl_map()
                conn = self.__send_http_request_to_account_service(
                    account_name, stat_info)
                resp = self.__conn_creator.get_http_response_instance(conn)
                self.logger.info(
                    "Response from account service: status: %s,"
                    " message: %s" % (resp.status, resp.read()))
            if resp.status == 202 or resp.status == 404:
                updated = True
        except Exception as ex:
            self.logger.error(
                "Error while updating container stat info: %s" % ex)
        finally:
            if conn:
                conn.close()
        return updated
def setUp(self):
    """Create the GlobalVariables fixture with a known service id."""
    self.global_var = GlobalVariables(logger)
    self.global_var.set_service_id("service_id")
class Reader(Thread):
    # Consumes the stat files listed in the walker map and builds the nested
    # reader map: {component: {StatFile: {account: [container, ...]}}}.
    # NOTE(review): formatting reconstructed from a whitespace-mangled
    # source; the statement nesting is the most plausible reading.

    def __init__(self, walker_map, reader_map, conf, logger):
        Thread.__init__(self)
        self._walker_map = walker_map
        self._reader_map = reader_map
        self.logger = logger
        self.logger.info("Reader constructed")
        self._file_location = conf['stat_file_location']
        self.msg = GlobalVariables(self.logger)
        self._complete_all_event = self.msg.get_complete_all_event()
        self._transfer_cmp_event = self.msg.get_transfer_cmp_event()
        # Guards against acknowledging the same transfer event twice.
        self.put_queue_flag = False

    def __reader_check_complete_all_event(self):
        """ Reader complete all request (Stop) Event check """
        if self._complete_all_event.is_set():
            self.logger.info("Received complete all request event in reader")
            self._reader_map = {}
            self.msg.put_into_Queue()
            return True

    def __reader_get_new_ownership(self):
        """ Reader transfer/accept component request """
        if self._transfer_cmp_event.is_set() and not self.put_queue_flag:
            self.logger.info(
                "Received transfer/accept request event in reader")
            # Drop map entries for components no longer owned.
            for key in self._reader_map.keys():
                if int(key) not in self.msg.get_ownershipList():
                    del self._reader_map[key]
            self.msg.put_into_Queue()
            self.put_queue_flag = True
        elif not self._transfer_cmp_event.is_set():
            self.put_queue_flag = False

    def read_stat_file(self, comp, stat_file):
        # Generator yielding lists of "account/container" entries read in
        # MAX_CHUNK_SIZE slices; the trailing element after the final comma
        # is dropped by [:-1].
        # NOTE(review): raising StopIteration inside a generator is a
        # Python 2 idiom; under PEP 479 (Python 3.7+) it becomes a
        # RuntimeError -- confirm target interpreter.
        if not os.path.exists(
                os.path.join(self._file_location, comp, stat_file)):
            raise StopIteration
        with open(os.path.join(self._file_location, comp, stat_file),
                  'rt') as fd:
            while True:
                data = fd.read(MAX_CHUNK_SIZE)
                if not data:
                    break
                yield data.split(',')[:-1]

    def check_if_container_limit_exceeded(self):
        # Block while the reader map holds more containers than allowed,
        # giving the updater time to drain it.
        while self._reader_map.getContainerCount() > maxContainerEntries:
            self.logger.info("Container limit exceeded :%s therefore wait till"\
                "update completed" %(self._reader_map.getContainerCount()))
            time.sleep(10)

    def run(self):
        # Any unhandled error aborts the whole process (exit code 130).
        try:
            self.__perform_reader_operation()
        except Exception as ex:
            self.logger.error("error occured : %s" % ex)
            os._exit(130)
        except:
            self.logger.error("Unknown exception occured")
            os._exit(130)

    def __perform_reader_operation(self):
        while True:
            self.logger.debug("Reader started")
            if self.__reader_check_complete_all_event():
                break
            try:
                self.__reader_get_new_ownership()
                for entry in self._walker_map.getEntries():
                    comp, stat_file = entry.split("/")
                    self._stat_obj = StatFile(stat_file)
                    self.check_if_container_limit_exceeded()
                    self.logger.debug("Processing start for component:%s stat"
                                      " files: %s" % (comp, stat_file))
                    for file_content in self.read_stat_file(comp, stat_file):
                        # Lock the stat file object while its chunk is being
                        # folded into the reader map.
                        self._stat_obj.set_lock_flag(True)
                        for data in file_content:
                            account_name, container_name = data.split("/")
                            if comp not in self._reader_map.keys():
                                self._reader_map[comp] = {}
                            if self._stat_obj not in self._reader_map[
                                    comp].keys():
                                self._reader_map[comp][self._stat_obj] = {}
                            if account_name not in self._reader_map[comp][
                                    self._stat_obj].keys():
                                self._reader_map[comp][
                                    self._stat_obj][account_name] = []
                            self._reader_map[comp][self._stat_obj][
                                account_name].append(container_name)
                            self._reader_map.incrementContainer()
                        self._stat_obj.set_lock_flag(False)
                    self.logger.info(
                        "Stat File:%s processing done for component:%s count:%s" \
                        %(stat_file, comp,
                          self._reader_map.getContainerCount()))
                self.logger.debug(
                    "After completing all stat file processing, reader_map = \
%s" % (self._reader_map))
            except Exception as ex:
                self.logger.error("Exception occured :%s" % ex)
            self.logger.debug("Exiting Reader")
            time.sleep(35)
class TestGlobalVariables(unittest.TestCase):
    """Unit tests for GlobalVariables: global-map loading, ownership
    handling and the service/account/container map accessors."""

    @mock.patch("osd.accountUpdaterService.monitor.Req", mockReq)
    @mock.patch("osd.accountUpdaterService.monitor.Resp", mockResp)
    @mock.patch("osd.accountUpdaterService.monitor.ServiceInfo", mockServiceInfo)
    def setUp(self):
        self.global_var = GlobalVariables(logger)
        self.global_var.set_service_id("service_id")

    def test_load_gl_map(self):
        """load_gl_map() must fail when the connector or the response
        status code indicates an error."""
        with mock.patch(
                'osd.accountUpdaterService.unitTests.mockReq.connector',
                return_value=None):
            self.assertFalse(self.global_var.load_gl_map())
        with mock.patch(
                'osd.accountUpdaterService.unitTests.mockStatus.get_status_code',
                return_value="Resp.FAILURE"):
            self.assertFalse(self.global_var.load_gl_map())

    def test_load_ownership(self):
        """Ownership list stays empty on failures and is filled with all
        512 component ids on success."""
        with mock.patch(
                'osd.accountUpdaterService.unitTests.mockReq.connector',
                return_value=None):
            self.global_var.load_ownership()
            # assertEquals is a deprecated alias (removed in Python 3.12)
            self.assertEqual(self.global_var.get_ownershipList(), [])
        with mock.patch(
                'osd.accountUpdaterService.unitTests.mockStatus.get_status_code',
                return_value="Resp.FAILURE"):
            self.global_var.load_ownership()
            self.assertEqual(self.global_var.get_ownershipList(), [])
        self.global_var.load_ownership()
        # list(range(...)) keeps this assertion valid on both Py2 and Py3
        self.assertEqual(self.global_var.get_ownershipList(),
                         list(range(1, 513)))

    def test_get_account_map(self):
        self.assertEqual(
            [obj.get_id() for obj in self.global_var.get_account_map()],
            acc_id)
        self.assertEqual(
            [obj.get_ip() for obj in self.global_var.get_account_map()],
            acc_ip)
        self.assertEqual(
            [obj.get_port() for obj in self.global_var.get_account_map()],
            acc_port)

    def test_get_container_map(self):
        self.assertEqual(
            [obj.get_id() for obj in self.global_var.get_container_map()],
            cont_id)
        self.assertEqual(
            [obj.get_ip() for obj in self.global_var.get_container_map()],
            cont_ip)
        self.assertEqual(
            [obj.get_port() for obj in self.global_var.get_container_map()],
            cont_port)

    def test_get_acc_updater_map(self):
        self.assertEqual(
            [obj.get_id() for obj in self.global_var.get_acc_updater_map()],
            au_id)
        self.assertEqual(
            [obj.get_ip() for obj in self.global_var.get_acc_updater_map()],
            au_ip)
        self.assertEqual(
            [obj.get_port() for obj in self.global_var.get_acc_updater_map()],
            au_port)

    def test_get_acc_updater_map_version(self):
        self.assertEqual(self.global_var.get_acc_updater_map_version(),
                         "5.0")

    def test_get_global_map_version(self):
        self.assertEqual(self.global_var.get_global_map_version(), "5.0")

    def test_get_service_id(self):
        self.assertEqual(self.global_var.get_service_id(), "service_id")
class HttpListener(BaseHTTPRequestHandler):
    """HTTP handler for account-updater control requests coming from the
    global leader (GL) and peer nodes: STOP_SERVICE,
    ACCEPT_COMPONENT_TRANSFER and TRANSFER_COMPONENTS."""

    def do_STOP_SERVICE(self):
        """
        Handle COMPLETE ALL REQUEST HTTP request

        Signals the complete-all event, waits for the worker threads to
        acknowledge via the queue, then replies with the ownership list.
        """
        self.logger = SimpleLogger(conf=None).get_logger_object()
        self.conf = SimpleLogger(conf=None).get_conf()
        self.msg = GlobalVariables(self.logger)
        all_transfer_event_received = []
        self.logger.info("Account-updater received STOP_SERVICE request")
        complete_all_request_event = self.msg.get_complete_all_event()
        complete_all_request_event.set()
        try:
            # Four worker threads each put one acknowledgement on the queue
            while len(all_transfer_event_received) != 4:
                all_transfer_event_received.append(self.msg.get_from_Queue())
            self.logger.info("Completed STOP_SERVICE request")
            # BUGFIX: the status line must be sent before any header;
            # the original emitted headers first, producing a malformed
            # HTTP response.
            self.send_response(HTTP_OK)
            self.send_header('Message-Type', typeEnums.BLOCK_NEW_REQUESTS_ACK)
            self.send_header('Ownership-List', self.msg.get_ownershipList())
            self.end_headers()
            self.wfile.write(self.msg.get_ownershipList())
            return
        except Exception as err:
            self.logger.exception('Exception raised in' \
                ' STOP_SERVICE error :%s' % err)
            self.send_response(HTTP_INTERNAL_SERVER_ERROR)
            self.end_headers()

    def do_ACCEPT_COMPONENT_TRANSFER(self):
        """
        Handle ACCEPT COMPONENT TRANSFER HTTP request
        """
        try:
            self.logger = SimpleLogger(conf=None).get_logger_object()
            self.conf = SimpleLogger(conf=None).get_conf()
            self.msg = GlobalVariables(self.logger)
            self.ll_port = int(self.conf.get('llport', 61014))
            self.logger.info("Account-updater received ACCEPT_COMPONENT_" \
                "TRANSFER request")
            length = int(self.headers['Content-Length'])
            self.logger.debug("Headers:%s" % self.headers)
            # sending intermediate (connection created) acknowledgement
            # to source node
            self.send_response(HTTP_CONTINUE)
            self.end_headers()
            # receiving new ownership list
            pickled_string = self.rfile.read(length)
            add_comp_list = ast.literal_eval(pickled_string)
            self.logger.info("Accepting new component ownership: %s"
                % add_comp_list)
            # updating global map for new ownership in the background
            thread = threading.Thread(target = self.update_my_ownership, \
                args=(add_comp_list,))
            thread.start()
            self.logger.info("Completed ACCEPT_COMPONENTS_TRANSFER request")
            self.send_response(HTTP_OK)
            self.end_headers()
            return
        except Exception as err:
            self.logger.exception('Exception raised in' \
                'ACCEPT_COMPONENTS_TRANSFER error :%s' % err)
            self.send_response(HTTP_INTERNAL_SERVER_ERROR)
            self.end_headers()

    def do_TRANSFER_COMPONENTS(self):
        """
        Handle TRANSFER COMPONENTS HTTP request

        Removes its own ownership of the listed components, pushes them to
        the destination nodes, waits for completion (bounded by
        transfer_component_timeout) and reports per-component status to GL.
        """
        self.logger = SimpleLogger(conf=None).get_logger_object()
        self.conf = SimpleLogger(conf=None).get_conf()
        self.msg = GlobalVariables(self.logger)
        self._request_handler = Req()
        # BUGFIX: the original key contained a line continuation inside the
        # string literal ('\<newline>...transfer_component_timeout'), so the
        # lookup never matched and the default was always used.
        transfer_component_timeout = int(
            self.conf.get('transfer_component_timeout', 600))
        self.ll_port = int(self.conf.get('llport', 61014))
        self.service_id = self.msg.get_service_id()
        self.deleted_comp_list = []
        # dictionary containing {(dest_node_obj): [comp_list]}
        # eg: {('169.254.1.12', '61009', 'HN0101_61014_account-updater'):
        #      ['1', '2']}
        transfer_component_map = {}
        # final component status list which will be sent to GL,
        # e.g. [(1, True), (2, False), (3, True)]
        self.final_transfer_status_list = []
        self.final_status = False  # final response to GL
        # to check if component transfer completed or failed:
        # {dest_node_obj1: True, dest_node_obj2: "Failed", ...}
        self.check_transfer_component_map = {}
        all_transfer_event_received = []
        self.protocol_version = "HTTP/1.1"
        self.logger.info("Account-updater received Transfer component request")
        try:
            content_length = int(self.headers['Content-Length'])
            # sending acknowledgement of TRANSFER_COMPONENT to GL
            self.send_response(HTTP_CONTINUE, "Continue\r\n\r\n")
            data = self.rfile.read(content_length)
            message = TransferCompMessage()
            message.de_serialize(data)
            transfer_component_map = message.service_comp_map()
            self.logger.info("Deserialized transfer component map:%s" \
                %transfer_component_map)
        except Exception as ex:
            self.logger.error("Exception: %s" % ex)
            # BUGFIX: status line before headers (see do_STOP_SERVICE)
            self.send_response(HTTP_INTERNAL_SERVER_ERROR)
            self.send_header('Message-Type', \
                typeEnums.TRANSFER_COMPONENT_RESPONSE)
            self.end_headers()
            return
        # if transfer_component_map is empty then send HTTP_OK to GL
        if not transfer_component_map:
            self.logger.info(
                "Deserialized transfer component map is empty, return HTTP OK!"
            )
            comp_transfer_response = TransferCompResponseMessage(\
                self.final_transfer_status_list, True)
            serialized_body = comp_transfer_response.serialize()
            self.send_response(HTTP_OK)
            self.send_header('Message-Type', \
                typeEnums.TRANSFER_COMPONENT_RESPONSE)
            self.send_header('Content-Length', len(serialized_body))
            self.end_headers()
            self.wfile.write(serialized_body)
            return
        # Mark every requested component as pending (False) until its
        # destination confirms the transfer.
        for dest_node_obj, comp_list in transfer_component_map.items():
            for comp in comp_list:
                self.deleted_comp_list.append(comp)
                self.final_transfer_status_list.append((comp, False))
            self.check_transfer_component_map[dest_node_obj] = False
        self.logger.debug("Ownership for components:%s will be removed"
            % self.deleted_comp_list)
        self.delete_self_ownership()
        transfer_comp_event = self.msg.get_transfer_cmp_event()
        transfer_comp_event.set()
        try:
            # sending accept component request to other nodes
            self.logger.info("Sending accept component request to " \
                "destination nodes")
            for target_service_obj, comp_list in \
                    transfer_component_map.items():
                thread_connecting_node = threading.Thread(target = \
                    self.send_accept_component_request, args = \
                    ("ACCEPT_COMPONENT_TRANSFER", target_service_obj, \
                    comp_list, ))
                thread_connecting_node.setDaemon(True)
                thread_connecting_node.start()
            # Checking if transfer component completed and intermediate
            # response sent or not
            self.logger.info("Checking if transfer component completed")
            thread_check_transfer_status = threading.Thread(target = \
                self.check_component_transfer_completion)
            thread_check_transfer_status.setDaemon(True)
            thread_check_transfer_status.start()
            thread_check_transfer_status.join(transfer_component_timeout)
            # sending final response to GL
            self.logger.info("Sending final response to GL :%s" \
                % self.final_transfer_status_list)
            if self.final_transfer_status_list:
                for comp_status_tuple in self.final_transfer_status_list:
                    if not comp_status_tuple[1]:
                        self.logger.warning("Final transfer component list having failed" \
                            "component:%s, %s" %(comp_status_tuple[0],
                            comp_status_tuple[1]))
                        # roll back to the ownership saved by
                        # delete_self_ownership()
                        self.msg.set_ownershipList(self.old_ownership_list)
                        break
                else:
                    # no failed entry: the whole transfer succeeded
                    self.final_status = True
            comp_transfer_response = TransferCompResponseMessage(\
                self.final_transfer_status_list, self.final_status)
            serialized_body = comp_transfer_response.serialize()
            while len(all_transfer_event_received) != 4:
                all_transfer_event_received.append(self.msg.get_from_Queue())
            self.logger.info("Completed TRANSFER_COMPONENTS request")
            self.send_response(HTTP_OK)
            self.send_header('Message-Type', \
                typeEnums.TRANSFER_COMPONENT_RESPONSE)
            self.send_header('Content-Length', len(serialized_body))
            self.end_headers()
            self.wfile.write(serialized_body)
            transfer_comp_event.clear()
            return
        except Exception as ex:
            self.logger.error("Exception raised: %s" % ex)
            transfer_comp_event.clear()
            self.msg.set_ownershipList(self.old_ownership_list)
            self.logger.error("old_ownership_list:%s"
                % self.old_ownership_list)
            self.send_response(HTTP_INTERNAL_SERVER_ERROR)
            self.send_header('Message-Type', \
                typeEnums.TRANSFER_COMPONENT_RESPONSE)
            self.end_headers()
class Updater(Thread):
    """Background thread that consumes the reader map: performs container
    HEAD requests per account, then issues account-update operations and
    builds sweeper lists for fully-processed stat files."""

    def __init__(self, walker_map, reader_map, conf, logger):
        """
        :param walker_map: shared map of discovered stat files
        :param reader_map: shared map produced by the Reader thread
        :param conf: account-updater parameter dict
        :param logger: logger object
        """
        Thread.__init__(self)
        self.conf = conf
        self.logger = logger
        self.logger.info("Updater constructed")
        self.msg = GlobalVariables(self.logger)
        self.__complete_all_request = False
        self._walker_map = walker_map
        self._reader_map = reader_map
        self._complete_all_event = self.msg.get_complete_all_event()
        self._transfer_cmp_event = self.msg.get_transfer_cmp_event()
        self.__container_dispatcher = ContainerDispatcher(self.conf,
            self.logger)
        # {(component, stat_file): {account_name: [container_name, ...]}}
        self._updater_map = {}
        # (component, stat_file) tuples captured in the current pass
        self.__other_file_list = []
        # Ensures put_into_Queue() is signalled once per transfer event
        self.put_queue_flag = False

    def populate_updater_map(self):
        """Refresh the updater map from reader-map entries whose stat
        object is not currently locked by the Reader."""
        self.__other_file_list = []
        for component, stat_data in self._reader_map.items():
            for stat_obj, data in stat_data.items():
                # Skip stat files the Reader is still filling (locked)
                if not stat_obj.get_lock_flag():
                    stat_file = stat_obj.get_stat_file()
                    if not self._updater_map.has_key((component, stat_file)):
                        self._updater_map[(component, stat_file)] = {}
                    if self._reader_map.get(component, None):
                        self._updater_map[(component, stat_file)] = self._reader_map[component][stat_obj]
                    self.__other_file_list.append((component, stat_file))
        self.logger.info("Updater map : %s" %self._updater_map)

    def __updater_check_complete_all_event(self):
        """
        Updater complete all request (Stop) Event check
        """
        if self._complete_all_event.is_set():
            self.logger.info("Received complete all request event in updater")
            self._updater_map = {}
            self.msg.put_into_Queue()
            return True

    def __updater_get_new_ownership(self):
        """
        Updater transfer/accept component request

        Drops map entries for components no longer owned, then signals
        the queue exactly once per transfer event.
        """
        if self._transfer_cmp_event.is_set() and not self.put_queue_flag:
            self.logger.info("Received transfer/accept request event in updater")
            for comp_tuple in self._updater_map.keys():
                # comp_tuple = (component, stat_file)
                if int(comp_tuple[0]) not in self.msg.get_ownershipList():
                    del self._updater_map[comp_tuple]
            self.msg.put_into_Queue()
            self.put_queue_flag = True
        elif not self._transfer_cmp_event.is_set():
            self.put_queue_flag = False

    def __other_file(self, comp, file):
        """Return account names appearing in every OTHER stat file than
        (comp, file); such accounts must wait so only one update runs."""
        listOfAccounts = []
        index = self.__other_file_list.index((comp, file))
        rest_comp_tuple_list = self.__other_file_list[:index] + self.__other_file_list[index+1:]
        for comp_tuple in rest_comp_tuple_list:
            listOfAccounts.extend(self._updater_map[comp_tuple].keys())
        #self.logger.debug("Current component :%s and other stat files having accounts %s" %(comp, listOfAccounts))
        return listOfAccounts

    def run(self):
        """Main loop: two passes per cycle — first update accounts unique
        to one stat file, then drain the accounts that had to wait
        because they appear in several stat files."""
        try:
            while True:
                self.logger.debug("Updater started")
                self.populate_updater_map()
                self.__updater_get_new_ownership()
                if self.__updater_check_complete_all_event():
                    self.__complete_all_request = True
                    break
                waitList = []
                delete_list=[]  # NOTE(review): unused
                accObjectDict = {}
                # Container list contains AccountInfo objects where HEAD
                # has been done on Accounts (NOTE(review): unused here)
                container_list = []
                for comp_tuple, map_ in self._updater_map.items():
                    #NOTE: compTuple = (componentNum, Filename)
                    #NOTE: map_ = {account name : [container name]}
                    self.logger.info("Processing start for stat file : %s, map : %s" %(comp_tuple, map_))
                    if self.__updater_check_complete_all_event():
                        break
                    for account, cont_list in map_.items():
                        #NOTE: loop to perform container head for a account
                        # and lastly perform account update
                        self.logger.info("Processing account: %s, container_list:%s" %(account, cont_list))
                        if account not in accObjectDict.keys():
                            #NOTE: in case account object is already created,
                            # no need to re-create the object
                            account_instance = AccountInfo(account, self.conf, self.logger)
                            accObjectDict[account] = account_instance
                        else:
                            account_instance = accObjectDict[account]
                        for container in cont_list:
                            #NOTE: for all container in the account perform
                            # container head request
                            if not account_instance.is_container(container):
                                #NOTE: in case container is already existing
                                # that means container head has already been
                                # performed.
                                #contObj = ContainerClass(container)
                                account_instance.add_container(container)
                        try:
                            #NOTE: perform container HEAD
                            self.logger.debug("Processing for container HEAD request")
                            account_instance.get_container_path()
                            self.__container_dispatcher.dispatcher_executer(account_instance)
                            # If any container HEAD fails then do not perform
                            # account update operation and iterate next stat
                            # file
                            if "fail" in account_instance.account_map.values():
                                self.logger.info("Container HEAD is failing on %s, \
                                    skipping stat file %s" %(account, comp_tuple))
                                break
                            #NOTE: if the account is present in the other
                            # stat files than we should not perform account
                            # updater in this iteration. We should add it to
                            # waitList and will update account at the end.
                            if account_instance.getAccountName() in self.__other_file(\
                                comp_tuple[0], comp_tuple[-1]):
                                if account_instance.getAccountName() not in waitList:
                                    self.logger.info("Adding account :%s in the waitlist"\
                                        %account_instance.getAccountName())
                                    waitList.append(account_instance.getAccountName())
                                continue
                            # Performing account update operation if this
                            # account is not present in other stat files
                            if self.__container_dispatcher.account_update(account_instance):
                                account_instance.remove_container()
                            else:
                                self.logger.info("Account %s can not be modified" % account)
                                break
                        except Exception as err:
                            self.logger.error("Exception occured :%s", err)
                    # do not create sweeper list of stat_file if any account
                    # update(wait list) of that stat_file remains
                    for account in map_.keys():
                        if account in waitList or ('fail' in accObjectDict[account].account_map.values()) or account_instance.acc_update_failed:
                            break
                    else:
                        # for-else: every account fully updated — mark the
                        # stat file for sweeping and drop it from the map
                        self.msg.create_sweeper_list(comp_tuple)
                        del self._updater_map[comp_tuple]
                        self.__other_file_list.remove(comp_tuple)
                # Sending account update request for accounts exist in
                # waitList at the end
                for comp_tuple, map_ in self._updater_map.items():
                    for account in map_.keys():
                        if account in waitList:
                            if self.__container_dispatcher.account_update(accObjectDict[account]):
                                accObjectDict[account].remove_container()
                                waitList.remove(account)
                            else:
                                self.logger.info("Account %s can not be modified" % account)
                                break
                        elif not accObjectDict.get(account):
                            # account never HEADed in this pass; skip
                            continue
                        elif ('fail' in accObjectDict[account].account_map.values()):
                            self.logger.info("Container HEAD is failing on %s, \
                                skipping stat file %s" %(account, comp_tuple))
                            break
                        elif not accObjectDict[account].acc_update_failed:
                            self.logger.info("Account : %s already updated" %account)
                        elif accObjectDict[account].acc_update_failed:
                            self.logger.info("Account update for %s failed, \
                                skipping stat file %s" %(account, comp_tuple))
                            break
                    else:
                        # for-else: stat file fully drained via waitList
                        self.msg.create_sweeper_list(comp_tuple)
                        del self._updater_map[comp_tuple]
                        self.__other_file_list.remove(comp_tuple)
                #self._updater_map = {} # Will be clear after one time processing
                self.logger.debug("Exiting Updater")
                time.sleep(40)
        except Exception as ex:
            self.logger.error("exception occured : %s" %ex)
            os._exit(130)
        except:
            self.logger.error("unkown exception occured")
            os._exit(130)
class AccountUpdater(Daemon):
    """
    Update container information in account listings.

    Daemon that wires together the walker/reader/updater/sweeper threads
    and serves control requests (stop, ownership transfer) over HTTP.
    """
    def __init__(self, conf, logger=None):
        """
        constructor for account updater

        :param conf: configuration of account-updater
        :param logger: optional logger; a SimpleLogger is created when None
        """
        self.logger = logger or SimpleLogger(conf).get_logger_object()
        Daemon.__init__(self, conf, self.logger)
        libraryUtils.OSDLoggerImpl(
            "account-updater-monitoring").initialize_logger()
        # Recovery marker flags an in-progress start; removed below once
        # health monitoring is up.
        create_recovery_file('account-updater-server')
        self.conf = conf
        self.__interval = int(conf.get('interval', 1800))
        self.__ll_port = int(conf.get('llport', 61014))
        self.__account_updater_port = int(\
            conf.get('account_updater_port', 61009))
        # Service id format: <hostname>_<ll_port>_account-updater-server
        self.__service_id = gethostname() + "_" + str(self.__ll_port) + \
            "_account-updater-server"
        self.__param = self.__get_param(conf)
        self.msg = GlobalVariables(self.logger)
        self.msg.set_service_id(self.__service_id)
        self.walker_map = WalkerMap()
        self.reader_map = ReaderMap()
        # Start sending health to local leader
        self.logger.info("Loading health monitoring library")
        self.health_instance = healthMonitoring(self.__get_node_ip\
            (gethostname()), self.__account_updater_port, \
            self.__ll_port, self.__service_id)
        self.logger.info("Loaded health monitoring library")
        remove_recovery_file('account-updater-server')
        # load global map; startup is aborted when the global leader map
        # cannot be fetched (exit code 130 — NOTE(review): meaning of the
        # code is assumed from sibling threads, confirm)
        if not self.msg.load_gl_map():
            sys.exit(130)
        self.logger.info("Account updater started")

    def __get_node_ip(self, hostname):
        """
        Get internal node ip on which service is running

        :param hostname: host name to look up in /etc/hosts
        :returns: ip address string, or "" on failure
        """
        try:
            # grep -w avoids matching a prefix of a longer hostname
            command = "grep -w " + hostname + " /etc/hosts | awk {'print $1'}"
            child = subprocess.Popen(command, stdout = subprocess.PIPE, \
                stderr = subprocess.PIPE, shell = True)
            std_out, std_err = child.communicate()
            return std_out.strip()
        except Exception as err:
            self.logger.error("Error occurred while getting ip of node:%s" % err)
            return ""

    def __get_param(self, conf):
        """
        Getting parameters through config file

        :param conf: configuration file of account-updater
        :returns: dict of tuning parameters with defaults applied
        """
        return {
            'filesystems': conf.get('filesystems', '/export'),
            'stat_file_location': conf.get('stat_file_location', \
                '/export/OSP_01/updater'),
            'file_expire_time': conf.get('file_expire_time', 900),
            'interval': conf.get('interval', 600),
            'conn_timeout': float(conf.get('conn_timeout', 10)),
            'node_timeout': float(conf.get('node_timeout', 10)),
            'osd_dir': conf.get('osd_dir', '/export/.osd_meta_config'),
            'reader_max_count': int(conf.get('reader_max_count', 10)),
            'max_update_files': int(conf.get('max_update_files', 10)),
            'max_files_stored_in_cache':
                int(conf.get('max_files_stored_in_cache', 100)),
            'updaterfile_expire_delta_time':
                int(conf.get('updaterfile_expire_delta_time', 1020))
        }

    def run_forever(self, *args, **kwargs):
        """
        Run the updator continuously.

        Starts the ownership watcher, walker, reader, account sweeper,
        updater and container sweeper daemon threads, then serves control
        requests over HTTP until stopped.
        """
        try:
            self.logger.debug('Begin account update')
            # get account-updater server ownership
            self.get_ownership_obj = threading.Thread(
                target=self.msg.get_my_ownership)
            self.get_ownership_obj.setDaemon(True)
            self.get_ownership_obj.start()
            # Walker discovers stat files for owned components
            self.walker_obj = Walker(self.walker_map, self.__param,
                self.logger)
            self.walker_obj.setDaemon(True)
            self.walker_obj.start()
            self.logger.info("Walker Started")
            # Reader parses stat files into the shared reader map
            self.reader_obj = Reader(self.walker_map, self.reader_map, \
                self.__param, self.logger)
            self.reader_obj.setDaemon(True)
            self.reader_obj.start()
            self.logger.info("Reader Started")
            self.account_sweeper = AccountSweep(self.__param, self.logger)
            self.account_sweeper.setDaemon(True)
            self.account_sweeper.start()
            self.logger.info("Account Sweeper Started")
            # Updater performs container HEADs and account updates
            self.updater_obj = Updater(self.walker_map, self.reader_map, \
                self.__param, self.logger)
            self.updater_obj.setDaemon(True)
            self.updater_obj.start()
            self.logger.info("Updater Started")
            self.container_sweeper = ContainerSweeper(self.walker_map, \
                self.reader_map, self.__param, self.logger)
            self.container_sweeper.setDaemon(True)
            self.container_sweeper.start()
            self.logger.info("Container Sweeper Started")
            # Control-plane HTTP server (STOP_SERVICE / transfers)
            account_updater_server = ThreadedAccountUpdaterServer(\
                (self.__get_node_ip(gethostname()), \
                self.__account_updater_port), HttpListener)
            account_updater_server.serve_forever()
        except Exception as ex:
            self.logger.error("Exception occured: %s" % ex)
class AccountSweep(Thread): """ Sweep to deleted account """ def __init__(self, conf, logger): Thread.__init__(self) self._file_location = os.path.join(conf['filesystems'], FS, 'tmp', \ 'account') self.del_path = 'del' self.conf = conf self.logger = logger self.logger.info("AccountSweep constructed") self.root = conf['filesystems'] self.msg = GlobalVariables(self.logger) self._ownership_list = self.msg.get_ownershipList() self.put_queue_flag = False self._complete_all_event = self.msg.get_complete_all_event() self._transfer_cmp_event = self.msg.get_transfer_cmp_event() self._account_ring = AccountRing(conf['osd_dir'], self.logger) def _get_del_files(self): """Get the list of deleted temp account file""" del_files = {} try: for comp in self._ownership_list: if os.path.exists(os.path.join(self._file_location, \ str(comp), self.del_path)): #self.logger.debug("listing deleted tmp account files on " \ # "location: %s/%s/%s" \ # % (self._file_location, str(comp), self.del_path)) del_files[comp] = os.listdir(os.path.join(\ self._file_location, str(comp), self.del_path)) return del_files except Exception as ex: self.logger.error("While listing directory %s : %s" % \ (self._file_location, ex)) return del_files def _get_account_name(self, account_file): """Split name of account from file""" return account_file.split('.info')[0] def _delete_dir(self, path): """Delete dir""" if os.path.exists(path): running_procs = [Popen(['rm', '-rf', path], stdout=PIPE, \ stderr=PIPE)] while running_procs: for proc in running_procs: retcode = proc.poll() if retcode is not None: running_procs.remove(proc) break else: time.sleep(.2) continue if retcode != 0: for line in proc.stderr.readlines(): self.logger.error("Error in proc.stderr.readlines()") return False else: self.logger.debug("Path %s does not exist" % path) return True def _clean_account_data(self, deleted_account, fs_list): status = True acc_dir = self._account_ring.get_directory_by_hash(\ self._get_account_name(deleted_account)) for 
fs in fs_list[::-1]: account_path = os.path.join(self.root, fs, acc_dir, \ self._get_account_name(deleted_account)) try: if not self._delete_dir(account_path): status = False except Exception, ex: self.logger.error("while deleting dir: %s : %s" \ % (account_path, ex)) status = False return status