def RebalanceCommit(self): """Tell servers to commit rebalance changes.""" # Save rebalance information to a file, so we can recover later. rebalance.SaveCommitInformation(self.rebalance) body = self.rebalance.SerializeToString() size = len(body) headers = {"Content-Length": size} for i, pool in enumerate(self.rebalance_pool): try: res = pool.urlopen("POST", "/rebalance/perform", headers=headers, body=body) if res.status != constants.RESPONSE_OK: logging.error("Server %d failed to perform transaction %s", i, self.rebalance.id) self.CancelRebalancing() return None stat = rdfvalue.DataServerState() stat.ParseFromString(res.data) data_server = self.servers[i] data_server.UpdateState(stat) except urllib3.exceptions.MaxRetryError: self.CancelRebalancing() return None # Update server intervals. mapping = self.rebalance.mapping for i, serv in enumerate(list(self.mapping.servers)): serv.interval = mapping.servers[i].interval self.rebalance.mapping = self.mapping self.service.SaveServerMapping(self.mapping) # We can finally delete the temporary file, since we have succeeded. rebalance.DeleteCommitInformation(self.rebalance) rebalance.RemoveDirectory(self.rebalance) self.CancelRebalancing() return self.mapping
def SyncMapping(self, skip=None):
  """Syncs mapping with other servers.

  Refreshes our own state first, then pushes the current mapping to every
  other known server (optionally skipping some) and records the state that
  each one reports back.

  Args:
    skip: Optional collection of servers to leave out of the sync.

  Returns:
    True if every contacted server accepted the mapping, False otherwise.
  """
  open_pools = []
  try:
    # Update my state.
    self._PeriodicThread()
    # Skip ourselves (index 0) and anything the caller asked us to skip.
    targets = [server for server in self.servers[1:]
               if not (skip and server in skip)]
    for server in targets:
      conn = connectionpool.HTTPConnectionPool(server.Address(),
                                               port=server.Port())
      open_pools.append((server, conn))
    payload = self.mapping.SerializeToString()
    request_headers = {"Content-Length": len(payload)}
    for server, conn in open_pools:
      reply = conn.urlopen("POST", "/servers/sync", headers=request_headers,
                           body=payload)
      if reply.status != constants.RESPONSE_OK:
        logging.warning("Could not sync with server %s:%d", server.Address(),
                        server.Port())
        return False
      reported = rdfvalue.DataServerState()
      reported.ParseFromString(reply.data)
      server.UpdateState(reported)
  except urllib3.exceptions.MaxRetryError:
    return False
  finally:
    # Always release every connection pool we opened.
    for _, conn in open_pools:
      conn.close()
  return True
def GetStatistics():
  """Build statistics object for the server.

  Returns:
    A DataServerState describing the current size and component layout of
    the global SERVICE, with status AVAILABLE and load 0.
  """
  available = rdfvalue.DataServerState.Status.AVAILABLE
  components, mean_component = SERVICE.GetComponentInformation()
  return rdfvalue.DataServerState(size=SERVICE.Size(),
                                  load=0,
                                  status=available,
                                  num_components=components,
                                  avg_component=mean_component)
def _PeriodicThread(self):
  """Periodically update our state and store the mappings.

  Builds a fresh DataServerState from the service, applies it to our own
  server record, and persists the current mapping.
  """
  available = rdfvalue.DataServerState.Status.AVAILABLE
  components, mean_component = self.service.GetComponentInformation()
  current = rdfvalue.DataServerState(size=self.service.Size(),
                                     load=0,
                                     status=available,
                                     num_components=components,
                                     avg_component=mean_component)
  self.myself.UpdateState(current)
  self.service.SaveServerMapping(self.mapping)
def __init__(self, location, index):
  """Set up a data server record from its location URL and index.

  Args:
    location: Server location, e.g. "host:port" or "http://host:port".
    index: Numeric index of this server in the mapping.
  """
  # Parse location.
  parsed = urlparse.urlparse(location, scheme="http")
  # Servers start out OFFLINE until they report a real state.
  initial_state = rdfvalue.DataServerState(
      size=0,
      load=0,
      status=rdfvalue.DataServerState.Status.OFFLINE)
  self.server_info = rdfvalue.DataServerInformation(index=index,
                                                    address=parsed.hostname,
                                                    port=parsed.port,
                                                    state=initial_state)
  self.registered = False
  self.removed = False
  logging.info("Configured DataServer on %s:%d", self.Address(), self.Port())
def HandleState(self):
  """Respond to /server/state.

  Only the master accepts state updates, and the posting server must have
  registered first. On success the reply body is the serialized mapping.
  """
  if not MASTER:
    self._EmptyResponse(constants.RESPONSE_NOT_MASTER_SERVER)
    return
  if not self.data_server:
    logging.error(
        "Server %s attempting to update its state but "
        "is not registered yet", self.client_address)
    self._EmptyResponse(constants.RESPONSE_SERVER_NOT_REGISTERED)
    return
  reported_state = rdfvalue.DataServerState(self.post_data)
  self.data_server.UpdateState(reported_state)
  logging.info("Received new state from server %s", self.client_address)
  # Respond with our mapping.
  response_body = MAPPING.SerializeToString()
  self._Response(constants.RESPONSE_OK, response_body)