Example 1
def main():
    '''Main entrypoint for gRPC server.'''

    args = setup_arguments()
    log = Logger()

    if args.file_type == "local":
        file_system = FileSystem(fstype=FileSystemType.FSTYPE_LOCAL)
        file_system.impl.remote_root_dirpath = tempfile.mkdtemp()
        dao = FileDataStore(file_system=file_system)
    else:
        dao = FileDataStore()

    # create a gRPC server
    server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
    handler = AlamedaServicer(dao)
    # use the generated helper `add_AlamendaAIServiceServicer_to_server`
    # to register the servicer with the created server
    ai_service_pb2_grpc.add_AlamendaAIServiceServicer_to_server(
        handler, server)

    # listen on port 50051
    log.info('Starting server. Listening on port 50051.')
    server.add_insecure_port('[::]:50051')
    server.start()

    # server.start() does not block, so a sleep loop
    # keeps the process alive
    try:
        while True:
            time.sleep(_ONE_DAY_IN_SECONDS)
    except KeyboardInterrupt:
        server.stop(0)
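
Note: newer grpcio releases also provide server.wait_for_termination(), which makes the sleep loop unnecessary; a minimal sketch of that variant:

    server.start()
    try:
        # blocks the calling thread until the server stops
        server.wait_for_termination()
    except KeyboardInterrupt:
        server.stop(0)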
Example 2
    def test_delete_pods(self):
        # set up the pod data
        self.test_create_pods()

        dao = FileDataStore(file=self.file, file_system=self.fs)
        delete_pods = self._get_pod_data()
        dao.delete_pod(delete_pods)

        pods = dao.read_data()
        # expect an empty dict after deletion
        self.assertFalse(pods)
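
Note: calling test_create_pods() from another test works but couples the two cases; one common alternative is a shared setup helper, sketched here with a hypothetical name:

    def _create_pods(self):
        # seed the datastore with the sample pod data
        dao = FileDataStore(file=self.file, file_system=self.fs)
        dao.create_pod(self._get_pod_data())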
Example 3
    def test_create_pods(self):

        dao = FileDataStore(file=self.file, file_system=self.fs)

        pods = self._get_pod_data()
        res = dao.create_pod(pods)

        # generate the expected pod keys
        expected_keys = map(lambda p: (p["namespace"], p["uid"], p["name"]),
                            pods)
        result = dao.read_data()
        for key in expected_keys:
            self.assertIn(key, result)
Example 4
    def test_delete_nonexistent_pods(self):
        # set up the pod data
        self.test_create_pods()
        dao = FileDataStore(file=self.file, file_system=self.fs)

        dao.delete_pod([self._get_non_existence_pod()])

        pods = dao.read_data()
        # expect the original pods to remain
        self.assertTrue(pods)

        # verify the original keys are still present
        expected_keys = map(lambda p: (p["namespace"], p["uid"], p["name"]),
                            self._get_pod_data())
        for key in expected_keys:
            self.assertIn(key, pods)
Example 5
def main():
    '''Main entrypoint for workload prediction.'''

    log = Logger()
    log.info("Start workload prediction.")

    # workload predictor
    predictor_log = Logger(name='workload_prediction',
                           logfile='/var/log/workload_prediction.log',
                           level=LogLevel.LV_DEBUG)
    predictor = WorkloadPredictor(log=predictor_log)

    # file datastore to get pod list
    dao = FileDataStore()

    while True:

        pod_list = dao.read_data()
        for k, v in pod_list.items():
            try:
                pod = {
                    "namespace": k[0],
                    "uid": k[1],
                    "pod_name": k[2],
                    "type": v["type"],
                    "policy": v["policy"]
                }
            except (IndexError, KeyError):
                log.error(
                    "Not predicting pod %s:%s "
                    "due to malformed pod info.", k, v)
                continue

            predictor.predict(pod)

        time.sleep(60)

    log.info("Workload prediction is completed.")
Example 6
class FileDAOTestCase(unittest.TestCase):
    def setUp(self):
        logging.disable(logging.CRITICAL)  # suppress log
        self.fs = Mock()
        self.file = tempfile.NamedTemporaryFile()
        self.default_data = {("ns1", "id", "name"): "POD"}
        self.decoder = Mock()

        self.dao = FileDataStore(file=self.file.name,
                                 file_system=self.fs,
                                 decoder=self.decoder)
        # write the default data
        with open(self.file.name, "wb") as f:
            pickle.dump(self.default_data, f)

    def tearDown(self):
        logging.disable(logging.NOTSET)

    def _setup_read_data(self):
        self.fs.is_file.return_value = True
        self.fs.get_file.return_value = self.file.name, None

    def _get_pod_data(self):
        return [
            {
                "namespace": "ns2",
                "uid": "id1",
                "name": "pod_name",
                "type": "POD"
            },
        ]

    def test_read_data_creation(self):
        self.decoder.load.return_value = self.default_data
        self._setup_read_data()
        data = self.dao.read_data()
        self.assertEqual(data, self.default_data,
                         "The result is not as expected")
        self.fs.get_file.assert_called_once()
        self.decoder.load.assert_called_once()

    def test_read_data_exception(self):
        self.decoder.load.return_value = self.default_data
        self.fs.is_file.return_value = True
        self.fs.get_file.side_effect = Exception("test")
        data = self.dao.read_data()
        self.assertEqual(data, {}, "The result is not as expected")
        self.fs.get_file.assert_called_once()

    def test_create_pods(self):
        self._setup_read_data()
        self.decoder.load.return_value = self.default_data
        self.decoder.dump.return_value = None
        self.fs.update_file.return_value = True, ""
        res = self.dao.create_pod(self._get_pod_data())
        self.fs.update_file.assert_called_once()
        self.assertTrue(res[0])

    def test_delete_pods(self):
        self._setup_read_data()
        self.fs.update_file.return_value = True, ""
        self.decoder.load.return_value = self.default_data
        self.decoder.dump.return_value = None
        res = self.dao.delete_pod(self._get_pod_data())
        self.fs.update_file.assert_called_once()
        self.assertTrue(res[0])
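
Taken together, the examples exercise a small surface of FileDataStore. A rough sketch of that interface, inferred from usage alone (the real class definition and defaults may differ):

    class FileDataStore:
        def __init__(self, file=None, file_system=None, decoder=None):
            # file: path or file object backing the store
            # file_system: abstraction used to fetch and update the file
            # decoder: pickle-like serializer with load/dump
            ...

        def read_data(self):
            """Return a dict keyed by (namespace, uid, name) tuples."""

        def create_pod(self, pods):
            """Store a list of pod dicts; appears to return (success, message)."""

        def delete_pod(self, pods):
            """Delete pods by key; nonexistent keys are ignored."""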