Example #1
class PageBase:
    """
    @DynamicAttrs loaded as part of load()
    """
    def __init__(self, browser: Browser, locators: LocatorsBase, url: str = None, timeout: int = None):
        self.browser = browser
        self.locators = locators
        self.url = url
        self.timeout = timeout
        self.elements: List[Element] = []
        browser.set_timeout(timeout)

        # error messages that will make tests fail
        self.error_messages = []

        # log messages to be used for logging in pytest results
        self.output_messages = []

        # Other useful declarations for the children to use
        self.logger = Logger(name=self.__class__.__name__)

    def exception_to_error_message(self, e):
        html_file = self.browser.save_html(self.__class__.__name__)
        screenshot_file = self.browser.save_screenshot(self.__class__.__name__)
        message = "[{}] {} - Saved HTML in {} and screenshot in {}".format(
            self.__class__.__name__,
            e,
            html_file,
            screenshot_file
        )
        self.error_messages.append(message)
        self.output_messages.append(message)

    def add_to_output(self, message):
        self.output_messages.append("[{}] {}".format(now_to_str(), message))

    def open(self):
        self.browser.open(self.url)
        self.load()
        return self

    def load(self, locators: LocatorsBase = None):
        expected_locators = locators if locators else self.locators
        for name, locator in expected_locators.items():
            self.logger.log("Loading {}".format(name))
            element = None
            try:
                element = Element(self.browser, name, locator, self.timeout)
                self.elements.append(element)
            except Exception as e:
                self.logger.log("{} = NONE, {}".format(name, e))
            self.__dict__[name] = element
        return self

    def load_new_page(self, page_class):
        new_page: PageBase = page_class(self.browser)
        new_page.error_messages += self.error_messages
        new_page.output_messages += self.output_messages
        new_page.load()
        return new_page
Example #2
def main():
    '''Main entrypoint for gRPC server.'''

    args = setup_arguments()
    log = Logger()

    if args.file_type == "local":
        file_system = FileSystem(fstype=FileSystemType.FSTYPE_LOCAL)
        file_system.impl.remote_root_dirpath = tempfile.mkdtemp()
        dao = FileDataStore(file_system=file_system)
    else:
        dao = FileDataStore()

    # create a gRPC server
    server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
    handler = AlamedaServicer(dao)
    # use the generated function `add_AlamendaAIServiceServicer_to_server`
    # to register the handler with the created server
    ai_service_pb2_grpc.add_AlamendaAIServiceServicer_to_server(
        handler, server)

    # listen on port 50051
    log.info('Starting server. Listening on port 50051.')
    server.add_insecure_port('[::]:50051')
    server.start()

    # since server.start() will not block,
    # a sleep-loop is added to keep alive
    try:
        while True:
            time.sleep(_ONE_DAY_IN_SECONDS)
    except KeyboardInterrupt:
        server.stop(0)
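On grpcio 1.23 or newer, grpc.Server.wait_for_termination() can replace the sleep loop. A sketch of the same wiring under that assumption, reusing ai_service_pb2_grpc, AlamedaServicer and FileDataStore as imported in the example above:

from concurrent import futures

import grpc


def serve():
    # Same wiring as main(), but blocking on the server instead of sleeping.
    server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
    ai_service_pb2_grpc.add_AlamendaAIServiceServicer_to_server(
        AlamedaServicer(FileDataStore()), server)
    server.add_insecure_port('[::]:50051')
    server.start()
    try:
        server.wait_for_termination()  # requires grpcio >= 1.23
    except KeyboardInterrupt:
        server.stop(0)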
Example #3
    def __init__(self):
        self._data = {}
        self._logger = Logger(name=self.__class__.__name__, debug=True)
        self._default_values_file = os.path.join(
            os.path.dirname(os.path.abspath(__file__)), "environment.json")
        self.load_environment_json()
        self.check_variables()
Example #4
    def __init__(self, browser: Browser, name, locator, timeout):
        self.browser = browser
        self.name = name
        self.locator = locator
        self.timeout = timeout
        self.logger = Logger(name=name)
        self.element = browser.find_element(*locator)
        self.logger.log("Found")
Example #5
    def __init__(self, config=None):
        ''' The constructor method '''
        if not config:
            config = {
                "metric_server": get_metric_server_address()
            }
        self.config = config
        self.logger = Logger()
        self.logger.info("Metric DAO config: %s", str(self.config))
Example #6
    def __init__(self, log=None, config_file=None):
        """Initialize the predictor.

        Args:
            log: logger instance; a default Logger is created if not given
            config_file: configuration file of parameters

        Returns: none
        """

        self.TIMES = 'times'
        self.VALUES = 'values'
        self.ORDER = 'order'
        self.SEASONAL_ORDER = 'seasonal_order'

        self.log = log or Logger()

        # Get parameters from configuration file.
        if config_file is None:
            config_file = os.path.join(
                os.path.dirname(os.path.abspath(__file__)),
                'config/sarimax_config.yaml')

        try:
            with open(config_file, 'r') as stream:
                self.cfg = yaml.safe_load(stream)
        except FileNotFoundError:
            self.log.error("SARIMAX predictor's configure file not found.")
            raise
Example #7
    def __init__(self, browser: Browser, locators: LocatorsBase, url: str = None, timeout: int = None):
        self.browser = browser
        self.locators = locators
        self.url = url
        self.timeout = timeout
        self.elements: List[Element] = []
        browser.set_timeout(timeout)

        # error messages that will make tests fail
        self.error_messages = []

        # log messages to be used for logging in pytest results
        self.output_messages = []

        # Other useful declarations for the children to use
        self.logger = Logger(name=self.__class__.__name__)
Example #8
    def __init__(self,
                 log=None,
                 dao=None,
                 preprocesser=None,
                 recommender=None):
        # Max filename length of linux is 255;
        # reserve capacity 20 character for further name appending
        # e.g., .prdt_log in _predict_write_influx()
        self.MAX_FILENAME_LENGTH = 235
        self.APP_PATH = os.path.dirname(os.path.abspath(__file__))

        with open(os.path.join(self.APP_PATH,
                               'config/measurement_conf.yaml')) as yaml_file:
            measurement_conf = yaml.safe_load(yaml_file)
        self.measurement_conf = measurement_conf

        with open(os.path.join(self.APP_PATH,
                               'config/granularity_conf.yaml')) as yaml_file:
            granularity_conf = yaml.safe_load(yaml_file)
            granularity_conf = granularity_conf['30s']
        self.granularity_conf = granularity_conf

        self.log = log or Logger(name='workload_prediction',
                                 logfile='/var/log/workload_prediction.log',
                                 level=LogLevel.LV_DEBUG)
        self.dao = dao or MetricDAO()
        self.preprocessor = preprocesser or Preprocessor()
        self.target_metrics = measurement_conf.keys()
        self.recommender = recommender or Recommender(
            self.measurement_conf, log=self.log, dao=self.dao)
Example #9
    def setUp(self):
        self.test_name = self.id().split(".")[-1]
        os.environ[
            "PYTEST_CURRENT_TEST"] = self.test_name  # not needed on pytest 3.2

        self.environment = Environment()
        self.logger = Logger(debug=(self.environment.DEBUG == "true"))

        # Populate variables with data to HTML Report and to JUnit XML
        self._pytest_output = ""
        self._fail_message = []

        # Done
        self.log("Starting test " + self.id())
        self.log_step("Scenario description:")
        self.log("\n" + self.__dict__["_testMethodDoc"])
Example #10
    def __init__(self):
        # error messages that will make tests fail
        self.error_messages = []

        # log messages to be used for logging in pytest results
        self.output_messages = []

        # requests data
        self.expected_status_code = 200
        self.response_code = -1
        self.headers = {}
        self.data = ""
        self.filename = ""
        self.verification_status = False

        # Other useful declarations for the children to use
        self.logger = Logger(name=self.__class__.__name__)
Example #11
class MockMetricDAO(object):
    ''' Mock DAO '''
    def __init__(self):
        self.logger = Logger()

    def write_container_recommendation_result(self, recommendations):
        ''' Write the recommendation '''
        self.logger.info("Write recommendation result: %s", str(recommendations))

    def write_container_prediction_data(self, predictions):
        ''' Write predictions '''
        self.logger.info("Write prediction result: %s", str(predictions))

    def get_container_observed_data(self, metric_type, namespace, pod_name, duration):
        # pylint: disable=line-too-long
        ''' Get metrics '''
        self.logger.info("Get observed data: metric_type=%s, "
                         " namespace=%s, pod_name=%s, duration=%s",
                         str(metric_type), str(namespace),
                         str(pod_name), str(duration))
        return [
            {
                "data": [{"time": 1540970511, "value": 0.11933598524417476}, {"time": 1540970541, "value": 0.12222222222222222}, {"time": 1540970571, "value": 0.1211138025288804}, {"time": 1540970601, "value": 0.12064253815904132}, {"time": 1540970631, "value": 0.11799999999994824}, {"time": 1540970661, "value": 0.1175581679593074}, {"time": 1540970691, "value": 0.11978842563782154}, {"time": 1540970721, "value": 0.11999733339262363}, {"time": 1540970751, "value": 0.1177777777777616}, {"time": 1540970781, "value": 0.11555812351387203}, {"time": 1540970811, "value": 0.1153358963532232}, {"time": 1540970841, "value": 0.115775204995506}, {"time": 1540970871, "value": 0.11378536346865305}, {"time": 1540970901, "value": 0.11199751116643794}, {"time": 1540970931, "value": 0.11200248894413732}, {"time": 1540970961, "value": 0.11422222222220929}, {"time": 1540970991, "value": 0.11044444444447031}, {"time": 1540971021, "value": 0.11088642474615233}, {"time": 1540971051, "value": 0.11486591570570893}, {"time": 1540971081, "value": 0.12333607413496463}, {"time": 1540971111, "value": 0.12533611858039997}, {"time": 1540971141, "value": 0.12222222222222222}, {"time": 1540971171, "value": 0.12333333333331715}, {"time": 1540971201, "value": 0.12421670147993745}, {"time": 1540971231, "value": 0.1242498332962913}, {"time": 1540971261, "value": 0.12088083016684867}, {"time": 1540971291, "value": 0.12177236567262706}, {"time": 1540971321, "value": 0.12444444444449294}, {"time": 1540971351, "value": 0.12422774345526791}, {"time": 1540971381, "value": 0.12112187750023842}, {"time": 1540971411, "value": 0.11888888888885656}, {"time": 1540971441, "value": 0.11644703215630572}, {"time": 1540971471, "value": 0.1171111111111208}, {"time": 1540971501, "value": 0.12021687924980788}, {"time": 1540971531, "value": 0.12088888888885978}, {"time": 1540971561, "value": 0.11955289882449321}, {"time": 1540971591, "value": 0.11533845948714005}, {"time": 1540971621, "value": 0.11353284898577133}, {"time": 1540971651, "value": 0.11400253338957268}, {"time": 1540971681, "value": 0.1135378941052935}, {"time": 1540971711, "value": 0.1135732225012909}, {"time": 1540971741, "value": 0.11418923422121205}, {"time": 1540971771, "value": 0.11734376389017855}, {"time": 1540971801, "value": 0.1191164051735762}, {"time": 1540971831, "value": 0.11825112805359857}, {"time": 1540971861, "value": 0.11752682677569645}, {"time": 1540971891, "value": 0.1177777777777616}, {"time": 1540971921, "value": 0.11911640517349537}, {"time": 1540971951, "value": 0.11914553093108997}, {"time": 1540971981, "value": 0.12266666666667637}, {"time": 1540972011, "value": 0.123333333333398}, {"time": 1540972041, "value": 0.1213252449837292}, {"time": 1540972071, "value": 0.1168888888888533}, {"time": 1540972101, "value": 0.11799999999994824}, {"time": 1540972131, "value": 0.12133872616558795}, {"time": 1540972161, "value": 0.12422222222222544}, {"time": 1540972191, "value": 0.12510833092600207}, {"time": 1540972221, "value": 0.12311658295926203}, {"time": 1540972251, "value": 0.12266121505711827}, {"time": 1540972281, "value": 0.12288888888886301}, {"time": 1540972311, "value": 0.1226639408012444}, {"time": 1540972341, "value": 0.12044979776883176}, {"time": 1540972371, "value": 0.11755555555557495}, {"time": 1540972401, "value": 0.11776730957254714}, {"time": 1540972431, "value": 0.11821959512010198}, {"time": 1540972461, "value": 0.11955555555557819}, {"time": 1540972491, "value": 0.1144546181882349}, {"time": 1540972521, "value": 0.1135580790684367}, {"time": 
1540972551, "value": 0.11133333333329776}, {"time": 1540972581, "value": 0.11666925931687372}, {"time": 1540972611, "value": 0.1191111111110432}, {"time": 1540972641, "value": 0.12042838732617632}, {"time": 1540972671, "value": 0.12177507166506443}, {"time": 1540972701, "value": 0.12218420935708889}, {"time": 1540972731, "value": 0.11912699471041878}, {"time": 1540972761, "value": 0.11464373791907961}, {"time": 1540972791, "value": 0.11247943804742164}, {"time": 1540972821, "value": 0.11578035067444005}, {"time": 1540972851, "value": 0.1169122713432028}, {"time": 1540972881, "value": 0.11510855314326997}, {"time": 1540972911, "value": 0.10976070389047683}, {"time": 1540972941, "value": 0.10822462721391547}, {"time": 1540972971, "value": 0.10778017289269862}, {"time": 1540973001, "value": 0.11223968172825266}, {"time": 1540973031, "value": 0.11221723478954873}, {"time": 1540973061, "value": 0.11421968400700915}, {"time": 1540973091, "value": 0.11333081487082916}, {"time": 1540973121, "value": 0.11089135314121648}, {"time": 1540973151, "value": 0.10466666666672811}, {"time": 1540973181, "value": 0.10732856317492798}, {"time": 1540973211, "value": 0.10955555555556203}],
                "labels": {"namespace": "default", "pod_name": "router-2-m7vx8", "container_name": "router1"}
            },
            {
                "data": [{"time": 1540970511, "value": 0.11933598524417476}, {"time": 1540970541, "value": 0.12222222222222222}, {"time": 1540970571, "value": 0.1211138025288804}, {"time": 1540970601, "value": 0.12064253815904132}, {"time": 1540970631, "value": 0.11799999999994824}, {"time": 1540970661, "value": 0.1175581679593074}, {"time": 1540970691, "value": 0.11978842563782154}, {"time": 1540970721, "value": 0.11999733339262363}, {"time": 1540970751, "value": 0.1177777777777616}, {"time": 1540970781, "value": 0.11555812351387203}, {"time": 1540970811, "value": 0.1153358963532232}, {"time": 1540970841, "value": 0.115775204995506}, {"time": 1540970871, "value": 0.11378536346865305}, {"time": 1540970901, "value": 0.11199751116643794}, {"time": 1540970931, "value": 0.11200248894413732}, {"time": 1540970961, "value": 0.11422222222220929}, {"time": 1540970991, "value": 0.11044444444447031}, {"time": 1540971021, "value": 0.11088642474615233}, {"time": 1540971051, "value": 0.11486591570570893}, {"time": 1540971081, "value": 0.12333607413496463}, {"time": 1540971111, "value": 0.12533611858039997}, {"time": 1540971141, "value": 0.12222222222222222}, {"time": 1540971171, "value": 0.12333333333331715}, {"time": 1540971201, "value": 0.12421670147993745}, {"time": 1540971231, "value": 0.1242498332962913}, {"time": 1540971261, "value": 0.12088083016684867}, {"time": 1540971291, "value": 0.12177236567262706}, {"time": 1540971321, "value": 0.12444444444449294}, {"time": 1540971351, "value": 0.12422774345526791}, {"time": 1540971381, "value": 0.12112187750023842}, {"time": 1540971411, "value": 0.11888888888885656}, {"time": 1540971441, "value": 0.11644703215630572}, {"time": 1540971471, "value": 0.1171111111111208}, {"time": 1540971501, "value": 0.12021687924980788}, {"time": 1540971531, "value": 0.12088888888885978}, {"time": 1540971561, "value": 0.11955289882449321}, {"time": 1540971591, "value": 0.11533845948714005}, {"time": 1540971621, "value": 0.11353284898577133}, {"time": 1540971651, "value": 0.11400253338957268}, {"time": 1540971681, "value": 0.1135378941052935}, {"time": 1540971711, "value": 0.1135732225012909}, {"time": 1540971741, "value": 0.11418923422121205}, {"time": 1540971771, "value": 0.11734376389017855}, {"time": 1540971801, "value": 0.1191164051735762}, {"time": 1540971831, "value": 0.11825112805359857}, {"time": 1540971861, "value": 0.11752682677569645}, {"time": 1540971891, "value": 0.1177777777777616}, {"time": 1540971921, "value": 0.11911640517349537}, {"time": 1540971951, "value": 0.11914553093108997}, {"time": 1540971981, "value": 0.12266666666667637}, {"time": 1540972011, "value": 0.123333333333398}, {"time": 1540972041, "value": 0.1213252449837292}, {"time": 1540972071, "value": 0.1168888888888533}, {"time": 1540972101, "value": 0.11799999999994824}, {"time": 1540972131, "value": 0.12133872616558795}, {"time": 1540972161, "value": 0.12422222222222544}, {"time": 1540972191, "value": 0.12510833092600207}, {"time": 1540972221, "value": 0.12311658295926203}, {"time": 1540972251, "value": 0.12266121505711827}, {"time": 1540972281, "value": 0.12288888888886301}, {"time": 1540972311, "value": 0.1226639408012444}, {"time": 1540972341, "value": 0.12044979776883176}, {"time": 1540972371, "value": 0.11755555555557495}, {"time": 1540972401, "value": 0.11776730957254714}, {"time": 1540972431, "value": 0.11821959512010198}, {"time": 1540972461, "value": 0.11955555555557819}, {"time": 1540972491, "value": 0.1144546181882349}, {"time": 1540972521, "value": 0.1135580790684367}, {"time": 
1540972551, "value": 0.11133333333329776}, {"time": 1540972581, "value": 0.11666925931687372}, {"time": 1540972611, "value": 0.1191111111110432}, {"time": 1540972641, "value": 0.12042838732617632}, {"time": 1540972671, "value": 0.12177507166506443}, {"time": 1540972701, "value": 0.12218420935708889}, {"time": 1540972731, "value": 0.11912699471041878}, {"time": 1540972761, "value": 0.11464373791907961}, {"time": 1540972791, "value": 0.11247943804742164}, {"time": 1540972821, "value": 0.11578035067444005}, {"time": 1540972851, "value": 0.1169122713432028}, {"time": 1540972881, "value": 0.11510855314326997}, {"time": 1540972911, "value": 0.10976070389047683}, {"time": 1540972941, "value": 0.10822462721391547}, {"time": 1540972971, "value": 0.10778017289269862}, {"time": 1540973001, "value": 0.11223968172825266}, {"time": 1540973031, "value": 0.11221723478954873}, {"time": 1540973061, "value": 0.11421968400700915}, {"time": 1540973091, "value": 0.11333081487082916}, {"time": 1540973121, "value": 0.11089135314121648}, {"time": 1540973151, "value": 0.10466666666672811}, {"time": 1540973181, "value": 0.10732856317492798}, {"time": 1540973211, "value": 0.10955555555556203}],
                "labels": {"namespace": "default", "pod_name": "router-2-m7vx8", "container_name": "router2"}
            }
        ]
Example #12
class Element:
    def __init__(self, browser: Browser, name, locator, timeout):
        self.browser = browser
        self.name = name
        self.locator = locator
        self.timeout = timeout
        self.logger = Logger(name=name)
        self.element = browser.find_element(*locator)
        self.logger.log("Found")

    def click(self, timeout=None):
        wait_time = timeout if timeout else self.timeout
        self.browser.set_timeout(wait_time)
        self.wait_for_visible(wait_time)
        try:
            self.logger.log("Click")
            self.element.click()
        except Exception as e:
            self.handle_exception(e)

    def type(self, message, hide=False):
        self.logger.log("Typying {} with hide = {}".format(message if not hide else "*"*len(message), hide))
        self.element.send_keys(message)

    def wait_for_visible(self, timeout=None):
        wait_time = timeout if timeout else self.browser.timeout
        self.logger.log("Waiting for visibility during {}s".format(wait_time))
        try:
            WebDriverWait(self.browser, wait_time).until(expected_conditions.visibility_of_element_located(self.locator))
        except Exception as e:
            self.handle_exception(e)

    def handle_exception(self, e):
        exception_type = e.__class__.__name__
        self.logger.log("{} raised".format(exception_type))
        self.browser.save_screenshot("{}.{}".format(self.name, exception_type))
        self.browser.save_html("{}.{}".format(self.name, exception_type))
        raise Exception("{} on {}".format(exception_type, self.name))
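A short usage sketch for Element. It assumes Element and the Browser wrapper from Example #16 are importable from a hypothetical framework.pages module and that a local chromedriver is available; the URL and locator are placeholders.

from selenium import webdriver
from selenium.webdriver.common.by import By
from framework.pages import Browser, Element  # assumed module path

driver = webdriver.Chrome()                    # assumes a local chromedriver
browser = Browser(driver)                      # wrapper class from Example #16
browser.open("https://example.test/login")

submit = Element(browser, "submit_button", (By.ID, "submit"), timeout=10)
submit.wait_for_visible()                      # saves HTML/screenshot on failure
submit.click()
driver.quit()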
Example #13
    def __init__(self, measurement_conf, log=None, dao=None):
        self.time_scaling_sec = 1
        self.init_stage_duration_sec = 3 * 60
        self.tolerance = 1.2
        self.prdt_tolerance = 1.2
        self.policy_partition = 3
        self.policy = Policy.STABLE

        self.measurement_conf = measurement_conf

        self.log = log or Logger(name='Recommender',
                                 logfile='/var/log/recommender.log',
                                 level=LogLevel.LV_DEBUG)
        self.dao = dao or MetricDAO()
Example #14
def main():
    '''Main entrypoint for workload prediction.'''

    log = Logger()
    log.info("Start workload prediction.")

    # workload predictor
    predictor_log = Logger(name='workload_prediction',
                           logfile='/var/log/workload_prediction.log',
                           level=LogLevel.LV_DEBUG)
    predictor = WorkloadPredictor(log=predictor_log)

    # file datastore to get pod list
    dao = FileDataStore()

    while True:

        pod_list = dao.read_data()
        for k, v in pod_list.items():
            try:
                pod = {
                    "namespace": k[0],
                    "uid": k[1],
                    "pod_name": k[2],
                    "type": v["type"],
                    "policy": v["policy"]
                }
            except (IndexError, KeyError):
                log.error(
                    "Not predicting POD %s:%s, "
                    "due to wrong format of pod info.", k, v)
                continue

            predictor.predict(pod)

        time.sleep(60)

    log.info("Workload prediction is completed.")
Example #15
def main():
    '''Main entrypoint for demo server.'''

    svc_rootpath = '/alameda-ai/services/arima'
    pyscripts = ['grpc_server.py', 'workload_prediction.py']

    log = Logger()
    log.info("Start demo server.")

    for script in pyscripts:
        log.info("Start to run", script)

        script_path = os.path.join(svc_rootpath, script)
        subprocess.Popen(['python3', script_path])

    while True:
        time.sleep(60)

    log.info("Demo server is completed.")
Example #16
class Browser:
    def __init__(self, browser):
        self.browser = browser
        base_directory = os.sep.join(os.path.dirname(__file__).split(os.sep)[:-2])
        self.screenshots_directory = os.path.join(base_directory, "logs")
        self.logger = Logger(name=self.__class__.__name__)
        self.timeout = 0

    def open(self, url):
        if url is not None and url != self.browser.current_url:
            self.browser.get(url)
            self.logger.log("Opened {}".format(url))

    def close(self):
        self.browser.close()

    def set_timeout(self, timeout: int):
        if not timeout or timeout <= 0:
            return
        self.browser.implicitly_wait(timeout)
        self.logger.log("Set implicit wait to {}".format(timeout))
        self.timeout = timeout

    def find_element(self, by, value):
        return self.browser.find_element(by, value)

    def save_screenshot(self, prefix=None):
        filename = "{}.{}.png".format(prefix, now_to_str())
        full_filename = os.path.join(self.screenshots_directory, filename)
        self.browser.save_screenshot(filename=full_filename)
        self.logger.log("Screenshot saved on {}".format(full_filename))
        return full_filename

    def save_html(self, prefix=None):
        filename = "{}.{}.html".format(prefix, now_to_str())
        full_filename = os.path.join(self.screenshots_directory, filename)
        with open(full_filename, "w", encoding="utf-8") as f:
            f.write(self.browser.page_source)
        self.logger.log("HTML saved on {}".format(full_filename))
        return full_filename
Example #17
class ApiBase:
    def __init__(self):
        # error messages that will make tests fail
        self.error_messages = []

        # log messages to be used for logging in pytest results
        self.output_messages = []

        # requests data
        self.expected_status_code = 200
        self.response_code = -1
        self.headers = {}
        self.data = ""
        self.filename = ""
        self.verification_status = False

        # Other useful declarations for the children to use
        self.logger = Logger(name=self.__class__.__name__)

    def log(self, line):
        self.logger.log(line)

    def log_multiple(self, logs: List[str]):
        self.logger.log_multiple(logs)

    def request(self,
                name,
                url,
                method=Requests.METHOD_GET,
                headers={},
                request_headers={},
                body=None,
                json_body=None,
                expected_verification=Requests.DEFAULT_VERIFICATIONS,
                expected_response_code=200,
                override_base_filename=None,
                override_base_folder=None):
        request = Requests(url=url,
                           method=method,
                           expected_headers=headers,
                           request_headers=request_headers,
                           plain_body=body,
                           json_body=json_body,
                           expected_verifications=expected_verification,
                           expected_response_code=expected_response_code,
                           override_base_filename=override_base_filename,
                           override_base_folder=override_base_folder)
        request.request()
        self.verification_status = request.verification()
        printable_attr = "[" + name + " Request] "
        self.error_messages += [
            printable_attr + error for error in request.error_messages
        ]
        self.response_code = int(request.status_code)
        self.headers = request.response_headers
        self.filename = request.filename
        last_raw_data = request.data
        last_dict_data = request.dict_data
        self.data = last_dict_data if last_dict_data else last_raw_data
        request.save()
        request.log_summary()
        return self.data

    def validate_json_schema(self, name, data, schema_file):
        try:
            check = JsonSchemaChecker.validate(data, schema_file)
            return check != {}
        except JsonSchemaCheckerException as e:
            self.error_messages.append("{}: {}".format(name, e))
            return False
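A sketch of a child API class calling request(), assuming ApiBase and the Requests constants from Example #25 are importable from a hypothetical framework.api module; the endpoint URL and header expectation are placeholders.

from framework.api import ApiBase, Requests  # assumed module path


class StatusApi(ApiBase):
    def get_status(self):
        data = self.request(
            name="Status",
            url="https://api.example.test/v1/status",  # placeholder endpoint
            method=Requests.METHOD_GET,
            headers={"Content-Type": "N/A"},            # "N/A" = check presence only
            expected_response_code=200)
        if not self.verification_status:
            self.log_multiple(self.error_messages)
        return data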
Example #18
class LoggerTestCase(unittest.TestCase):
    '''Unit test for Logger class.'''

    UNITTEST_LOGPATH = "./unittest.log"
    TEST_MSG = "This is unittest for logger module."

    def setUp(self):
        '''Setup unittest environment.'''

        self.testitem = Logger('unittest', self.UNITTEST_LOGPATH)

    def tearDown(self):
        '''Clean unittest environment.'''

        os.remove(self.UNITTEST_LOGPATH)

    def __clear_logfile(self):
        '''Clear log file.'''

        open(self.UNITTEST_LOGPATH, 'w').close()

    def __has_log(self, logmsg, loglevel=None):
        '''Check if log message is in the logfile.

        Args:
            logmsg(str): Log message to look for in the logfile.
            loglevel(str): Log level string, ex: 'DEBUG', 'INFO', etc.
        '''

        with open(self.UNITTEST_LOGPATH, 'r') as f_test:
            content = f_test.read()

        if loglevel is None:
            return logmsg in content

        return logmsg in content and loglevel in content

    def test_set_level(self):
        '''Test on set_level() function.

        Test target:
            Test if log level is set.
        '''

        # Log level is used to filter out low-level logs, so here we test on
        # calling log function with the same log level.
        visible_log = "Visible log"
        invisible_log = "Invisible log"

        self.testitem.set_level(LogLevel.LV_DEBUG)
        self.testitem.debug(visible_log)
        self.testitem.set_level(LogLevel.LV_INFO)
        self.testitem.debug(invisible_log)

        self.assertTrue(self.__has_log(visible_log, 'DEBUG'))
        self.assertFalse(self.__has_log(invisible_log, 'DEBUG'))

    def test_debug(self):
        '''Test on debug() function.

        Test target:
            Test if log is written to assigned log file.
        '''

        self.testitem.set_level(LogLevel.LV_DEBUG)
        self.testitem.debug(self.TEST_MSG)
        self.assertTrue(self.__has_log(self.TEST_MSG, 'DEBUG'))
Example #19
    def setUp(self):
        '''Setup unittest environment.'''

        self.testitem = Logger('unittest', self.UNITTEST_LOGPATH)
Example #20
    def __init__(self):
        self.logger = Logger()
Example #21
class MetricDAO(object):
    ''' Metric DAO '''

    def __init__(self, config=None):
        ''' The constructor method '''
        if not config:
            config = {
                "metric_server": get_metric_server_address()
            }
        self.config = config
        self.logger = Logger()
        self.logger.info("Metric DAO config: %s", str(self.config))

    def __get_client(self):
        ''' Get the grpc client '''
        conn_str = self.config["metric_server"]
        channel = grpc.insecure_channel(conn_str)
        return server_pb2_grpc.OperatorServiceStub(channel)

    def __get_metric_type_value(self, metric_type):
        ''' Get the metric type '''
        if metric_type == "cpu":
            key = "CONTAINER_CPU_USAGE_TOTAL"
        elif metric_type == "cpu_rate":
            key = "CONTAINER_CPU_USAGE_TOTAL_RATE"
        elif metric_type == "memory":
            key = "CONTAINER_MEMORY_USAGE"
        else:
            key = "CONTAINER_CPU_USAGE_TOTAL"

        # default return cpu
        return server_pb2.MetricType.Value(key)

    def __get_op_type_value(self, op_type):
        ''' Get the op type '''
        if op_type == "equal":
            key = "Equal"
        else:
            key = "NotEqual"
        return server_pb2.StrOp.Value(key)

    def __parse_prediction(self, data):
        ''' Parse the prediction data '''
        result = server_pb2.PredictData()
        result.time.FromSeconds(data["time"])
        result.value = data["value"]
        return result

    def __parse_recommendation(self, data):
        ''' Parse recommendation '''
        result = server_pb2.Recommendation()
        result.time.FromSeconds(data["time"])
        result.resource.CopyFrom(self.__parse_resource(data["resources"]))
        return result

    def __parse_time_series(self, data):
        ''' Parse time series data '''
        result = server_pb2.TimeSeriesData()
        predict_data = list(map(self.__parse_prediction, data))
        result.predict_data.extend(predict_data)
        return result

    def __parse_container_prediction_data(self, data):
        ''' Parse the container prediction data '''
        if not data:
            return []
        result = server_pb2.PredictContainer()

        if "container_name" in data:
            result.name = data["container_name"]

        if "raw_predict" in data:
            for k, v in data["raw_predict"].items():
                time_series = self.__parse_time_series(v)
                result.row_predict_data[k].CopyFrom(time_series)

        if "recommendations" in data:
            result.recommendations.extend(list(
                map(self.__parse_recommendation, data["recommendations"])
            ))

        if "init_resource" in data:
            result.initial_resource.CopyFrom(
                self.__parse_resource(data["init_resource"])
            )

        return result

    def __parse_resource(self, data):
        ''' Parse the resource data '''
        resource = server_pb2.Resource()

        for k, v in data["limits"].items():
            resource.limit[k] = v

        for k, v in data["requests"].items():
            resource.request[k] = v

        return resource

    def __parse_sample(self, data):
        ''' Parse the sample data '''
        return {
            "time": data.time.seconds,
            "value": data.value
        }

    def __parse_metrics(self, data):
        ''' Parse the metrics '''
        result = []
        if not data:
            return result

        for d in data:
            r = {"labels": {}}
            for k, v in d.labels.items():
                r["labels"][k] = v

            # add the sample data
            r["data"] = list(
                map(self.__parse_sample, d.samples)
            )
            result.append(r)
        return result

    def write_container_prediction_data(self, prediction):
        ''' Write the prediction result to server. '''
        self.logger.info("Write prediction result: %s", str(prediction))
        req = server_pb2.CreatePredictResultRequest()
        pod = req.predict_pods.add()

        pod.uid = prediction["uid"]
        pod.namespace = prediction["namespace"]
        pod.name = prediction["pod_name"]
        pod.predict_containers.extend(list(
            map(self.__parse_container_prediction_data,
                prediction["containers"]
               )
        ))
        try:
            client = self.__get_client()
            resp = client.CreatePredictResult(req)
            if resp.status.code != 0:
                msg = "Write prediction error [code={}]".format(resp.status.code)
                raise Exception(msg)
        except Exception as e:
            self.logger.error("Could not get metrics: %s", str(e))
            raise e

    def write_container_recommendation_result(self, data):
        ''' Write the container recommendation result '''
        self.write_container_prediction_data(data)

    def get_container_observed_data(self, metric_type, namespace_name, pod_name, duration):
        ''' Get the observed metrics '''
        self.logger.info("Get observed data: metric_type=%s, "
                         " namespace=%s, pod_name=%s, duration=%s",
                         str(metric_type), str(namespace_name),
                         str(pod_name), str(duration))
        req = server_pb2.ListMetricsRequest()
        req.metric_type = self.__get_metric_type_value(metric_type)
        req.duration.seconds = duration
        # setup the query conditions
        namespace = req.conditions.add()
        namespace.key = u"namespace"
        namespace.op = self.__get_op_type_value("equal")
        namespace.value = namespace_name
        pod = req.conditions.add()
        pod.key = u"pod_name"
        pod.op = self.__get_op_type_value("equal")
        pod.value = pod_name

        try:
            client = self.__get_client()
            resp = client.ListMetrics(req)
            if resp.status.code == 0:
                return self.__parse_metrics(resp.metrics)
            else:
                msg = "List metric error [code={}]".format(resp.status.code)
                raise Exception(msg)
        except Exception as e:
            self.logger.error("Could not get metrics: %s", str(e))
            raise e
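A usage sketch for MetricDAO. It assumes MetricDAO is importable, that get_metric_server_address() resolves to a reachable gRPC endpoint, and that metric_type is one of the strings handled by __get_metric_type_value(); the namespace and pod name are placeholders.

dao = MetricDAO()
observed = dao.get_container_observed_data(
    metric_type="cpu_rate",          # "cpu", "cpu_rate" or "memory"
    namespace_name="default",
    pod_name="router-2-m7vx8",
    duration=3600)                   # seconds of history to query
for series in observed:
    print(series["labels"], len(series["data"]), "samples")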
Example #22
    def __init__(self, browser):
        self.browser = browser
        base_directory = os.sep.join(os.path.dirname(__file__).split(os.sep)[:-2])
        self.screenshots_directory = os.path.join(base_directory, "logs")
        self.logger = Logger(name=self.__class__.__name__)
        self.timeout = 0
Example #23
class Environment:
    """
    @DynamicAttrs loaded as part of load_environment_json
    """
    def __init__(self):
        self._data = {}
        self._logger = Logger(name=self.__class__.__name__, debug=True)
        self._default_values_file = os.path.join(
            os.path.dirname(os.path.abspath(__file__)), "environment.json")
        self.load_environment_json()
        self.check_variables()

    def __getattr__(self, name):
        """
        Python magic to avoid AttributeError exception

        https://stackoverflow.com/questions/45234632/how-to-prevent-attributeerror-for-undeclared-variables-and-methods-and-fix-get
        :param name:
        :return:
        """
        if name not in self.__dict__:
            return ""

        return self.__dict__[name]

    def check_variables(self):

        variables_to_check = [
            'GITLAB_KEY', 'GITHUB_KEY', 'LOBSTER_EMAIL', 'LOBSTER_PASSWORD'
        ]
        status_variables = {}

        for v in variables_to_check:
            a = getattr(self, v)
            status_variables[v] = True
            if a is None or len(a) <= 0:
                self._logger.log("No {} found - export one".format(v))
                status_variables[v] = False

        self._logger.log("Export variables check")
        self._logger.log(json.dumps(status_variables, indent=4))
        return reduce(lambda x, y: x and y, list(status_variables.values()))

    def load_environment_json(self):
        self._data = {}
        data = {}
        try:
            with open(self._default_values_file) as f:
                data = json.loads(f.read())
        except Exception:
            # a missing or unreadable environment.json is reported just below
            pass

        if not data:
            self._logger.log("Failed to load {}".format(
                self._default_values_file))
            return

        for d in data:
            if d in os.environ:
                self._logger.log("Loaded {} from OS".format(d))
                self.__dict__[d] = os.environ[d]
            else:
                self._logger.log("Loaded {} from JSON".format(d))
                self.__dict__[d] = data[d]
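A sketch of the resolution order: values exported in the OS environment win over the defaults in environment.json, and __getattr__ returns an empty string for anything undeclared. It assumes Environment is importable and an environment.json with a GITLAB_KEY default sits next to it.

import os

os.environ["GITLAB_KEY"] = "token-from-ci"    # OS value overrides the JSON default
env = Environment()
assert env.GITLAB_KEY == "token-from-ci"
assert env.SOME_UNDECLARED_VARIABLE == ""     # no AttributeError for unknown names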
Example #24
    def __init__(self,
                 url,
                 method=METHOD_GET,
                 request_headers={},
                 authorization=None,
                 proxies=None,
                 plain_body=None,
                 json_body=None,
                 ignore_response_body=False,
                 expected_response_code=200,
                 expected_headers=None,
                 expected_verifications=DEFAULT_VERIFICATIONS,
                 override_base_filename=None,
                 override_base_folder=None):
        self.method = method
        self.url = url
        self.body = plain_body
        if not self.body and json_body:
            self.body = json.dumps(json_body)
        self.ignore_response_body = ignore_response_body
        self.request_headers = request_headers
        self.authorization = authorization
        self.proxies = proxies
        self.status_code = 0
        self.expected_response_code = expected_response_code
        self.response_headers = {}
        self.expected_headers = expected_headers
        self.expected_verifications = expected_verifications

        # Attributes to control caching (cached_filename) and where the data is stored when saved (filename)
        self.filename = None
        base_name = (self.__safe_string(self.url) + now_to_str()
                     if not override_base_filename else override_base_filename)
        self._base_name = (os.path.join(
            override_base_folder if override_base_folder else self.BASE_FOLDER,
            base_name))
        self.cached_filename = self._base_name + self.CACHE_EXTENSION

        # Declare response time (time between request and all download) and
        # latency time (time between request and response headers) and
        # full time (time between request and response saved in filesystem)
        self.elapsed_time = 0
        self.latency_time = 0
        self.full_time = 0

        # Declare the response data (in plain, as it came) and the dict representation of it
        # If the response content type is a binary one, nothing is saved in those
        # but a file is created with the proposed filename (and the flag self.is_binary is set to True)
        # Real data content len is also stored, for further checks with Header Content-Length
        self._data = None
        self.is_binary = False
        self._dict = None
        self.content_len = 0

        # Misc stuff
        self.error_messages = []
        self._request_begin_time = None
        self._requested = False
        self._retries = 3
        self._sleep_between_retries = 0.1
        self._logger = Logger(name=self.__class__.__name__)
        logging.getLogger("requests").setLevel(logging.ERROR)
        logging.getLogger("urllib").setLevel(logging.ERROR)
        logging.getLogger("urllib3").setLevel(logging.ERROR)
        requests.packages.urllib3.disable_warnings()
Example #25
class Requests:
    METHOD_GET = "GET"
    METHOD_POST = "POST"
    METHOD_HEAD = "HEAD"
    METHOD_PUT = "PUT"
    TIMEOUT = 30
    VERIFY_STATUS_CODE = "VERIFY_STATUS_CODE"
    VERIFY_HEADERS = "VERIFY_HEADERS"
    VERIFY_BODY_LEN = "VERIFY_BODY_LEN"
    CONTENT_TYPE_XML = "xml"
    CONTENT_TYPE_JSON = "json"
    CONTENT_TYPE_HTML = "html"
    CONTENT_TYPE_JPEG = "jpeg"
    CONTENT_TYPE_JPG = "jpg"
    CONTENT_TYPE_PNG = "png"
    CACHE_MODE_READ = False
    CACHE_MODE_WRITE = False
    CACHE_EXTENSION = ".cached.json"
    BASE_FOLDER = "."
    DEFAULT_VERIFICATIONS = [VERIFY_STATUS_CODE, VERIFY_HEADERS]

    def __init__(self,
                 url,
                 method=METHOD_GET,
                 request_headers={},
                 authorization=None,
                 proxies=None,
                 plain_body=None,
                 json_body=None,
                 ignore_response_body=False,
                 expected_response_code=200,
                 expected_headers=None,
                 expected_verifications=DEFAULT_VERIFICATIONS,
                 override_base_filename=None,
                 override_base_folder=None):
        self.method = method
        self.url = url
        self.body = plain_body
        if not self.body and json_body:
            self.body = json.dumps(json_body)
        self.ignore_response_body = ignore_response_body
        self.request_headers = request_headers
        self.authorization = authorization
        self.proxies = proxies
        self.status_code = 0
        self.expected_response_code = expected_response_code
        self.response_headers = {}
        self.expected_headers = expected_headers
        self.expected_verifications = expected_verifications

        # Attributes to control caching (cached_filename) and where the data is stored when saved (filename)
        self.filename = None
        base_name = (self.__safe_string(self.url) + now_to_str()
                     if not override_base_filename else override_base_filename)
        self._base_name = (os.path.join(
            override_base_folder if override_base_folder else self.BASE_FOLDER,
            base_name))
        self.cached_filename = self._base_name + self.CACHE_EXTENSION

        # Declare response time (time between request and all download) and
        # latency time (time between request and response headers) and
        # full time (time between request and response saved in filesystem)
        self.elapsed_time = 0
        self.latency_time = 0
        self.full_time = 0

        # Declare the response data (in plain, as it came) and the dict representation of it
        # If the response content type is a binary one, nothing is saved in those
        # but a file is created with the proposed filename (and the flag self.is_binary is set to True)
        # Real data content len is also stored, for further checks with Header Content-Length
        self._data = None
        self.is_binary = False
        self._dict = None
        self.content_len = 0

        # Misc stuff
        self.error_messages = []
        self._request_begin_time = None
        self._requested = False
        self._retries = 3
        self._sleep_between_retries = 0.1
        self._logger = Logger(name=self.__class__.__name__)
        logging.getLogger("requests").setLevel(logging.ERROR)
        logging.getLogger("urllib").setLevel(logging.ERROR)
        logging.getLogger("urllib3").setLevel(logging.ERROR)
        requests.packages.urllib3.disable_warnings()

    @property
    def dict_data(self):
        if self._dict:
            return self._dict
        self.__load_payload()
        self._dict = self.__safe_json_load(self._data)
        return self._dict

    @property
    def data(self):
        if self._data:
            return self._data
        self.__load_payload()
        return self._data

    def save(self):
        Requests.CACHE_MODE_WRITE = True
        self.__save_json_data_if_cache_mode_enabled()
        Requests.CACHE_MODE_WRITE = False

    @staticmethod
    def load_from_file(filename):
        r = Requests(url="")
        Requests.CACHE_MODE_READ = True
        r.cached_filename = filename
        r.request()
        Requests.CACHE_MODE_READ = False
        return r

    def to_json(self):
        """
        Return a json representation of the public attributes of this object
        """
        translated_json = {}
        for k, v in self.__dict__.items():
            try:
                if not k.startswith("_"):
                    # json.dumps() only checks that the value is JSON-serializable;
                    # the raw value is what gets stored
                    json.dumps(v)
                    translated_json[k] = v
            except TypeError:
                pass
        return translated_json

    def content_type(self):
        """
        Clear content_type, acquired from our CONTENT_TYPE_* variables
        """
        possible_extensions = [
            v for k, v in Requests.__dict__.items()
            if k.startswith("CONTENT_TYPE")
        ]
        extension = self.__content_type_in(*possible_extensions)
        return extension

    def is_textual_type(self):
        return self.__content_type_in(self.CONTENT_TYPE_XML,
                                      self.CONTENT_TYPE_JSON,
                                      self.CONTENT_TYPE_HTML) is not None

    def is_binary_type(self):
        return self.__content_type_in(self.CONTENT_TYPE_PNG,
                                      self.CONTENT_TYPE_JPEG,
                                      self.CONTENT_TYPE_JPG) is not None

    def copy(self):
        newone = self.__class__(url=self.url)
        for k, v in self.__dict__.items():
            try:
                newone.__dict__[k] = copy.deepcopy(v)
            except TypeError:
                pass
        return newone

    def request(self):
        """
        Use python-request lib to ask for a REST API_Base content and retrieve anything

        :return: The response data (same as self.response)
        """
        if self.__load_json_data_if_cache_mode_enabled():
            return True

        self.__start_timer()
        self.error_messages = []
        r = None
        try:
            r = requests.request(method=self.method,
                                 url=self.url,
                                 data=self.body,
                                 headers=self.request_headers,
                                 auth=self.authorization,
                                 proxies=self.proxies,
                                 verify=False,
                                 timeout=self.TIMEOUT)
            self.status_code = r.status_code
            self.__read_response_body(
                func_read_headers=lambda: {k: v
                                           for k, v in r.headers.items()}
                if r.headers else {},
                func_read_textual_data=lambda: r.text,
                func_read_binary_data=lambda: r.content,
            )
            self.elapsed_time = self.__stop_timer()
            self._requested = True
            self.__save_json_data_if_cache_mode_enabled()
            return True
        except Exception as err:
            self.error_messages.append(str(err))
            return False
        finally:
            if r:
                r.close()

    def urlopen(self):
        """
        Use python-urlib.urlopen to ask for a content. This handles exceptions gracefully and sleeps in bad error situations.

        :param sleep_time_err_case: the amount of time to sleep case a bad thing happens (404s and 500s are not bad things). Defaults to None.
        :return: True if no exception was raised - False otherwise (grab your content from self.response)
        """
        if self.__load_json_data_if_cache_mode_enabled():
            return True
        r = None
        self.__start_timer()
        for _ in range(self._retries):
            try:
                raw_request = urllib.request.Request(
                    self.url, headers=self.request_headers)
                self._requested = True
                r = urllib.request.urlopen(raw_request, timeout=self.TIMEOUT)
                self.status_code = int(r.getcode())
                self.__read_response_body(
                    func_read_headers=lambda: {
                        x[0]: ",".join([
                            h[1].replace(', ', ',') for h in r.getheaders()
                            if h[0] == x[0]
                        ])
                        for x in r.getheaders()
                    },
                    func_read_textual_data=lambda: r.read().decode('utf-8'),
                    func_read_binary_data=lambda: r.read(),
                )
                self.__save_json_data_if_cache_mode_enabled()
                return True
            except urllib.request.HTTPError as err:
                self.status_code = int(err.code)
                self._data = err.read().decode('utf-8')
                return True
            except Exception as err:
                self.error_messages.append(str(err))
                sleep(self._sleep_between_retries)
        return False

    def log_summary(self):
        self._logger.log_debug("{} {} {} {}".format(self.method, self.url,
                                                    self.status_code,
                                                    self.elapsed_time))
        if self.body:
            self._logger.log_debug("Input=====> " + str(self.body))

        to_log_request_headers = {
            k: (v if "KEY" not in k.upper() and "TOKEN" not in k.upper()
                and "AUTHORIZATION" not in k.upper() else "**HIDDEN**")
            for k, v in self.request_headers.items()
        }
        self._logger.log_debug("Headers =====> " + str(to_log_request_headers))
        if self._data or self.filename:
            self._logger.log_debug("Output =====> " + (
                str(self._data) if self._data else " Saved on " +
                self.filename))
        self._logger.log_debug("Output Headers =====> " +
                               str(self.response_headers))
        self._logger.log_debug("")

    def requests_async(self, workers):
        """
        GET many requests for the same URL in asyncronous way - return a list of Request objects from the responses, without data,
        to avoid memory usage to grow.

        For the effect of a larger number of workers, read http://skipperkongen.dk/2016/09/09/easy-parallel-http-requests-with-python-and-asyncio/

        workers: the amount of requests to perform
        """
        # Start as many processes as needed with a small delay
        output = Queue()
        for _ in range(workers):
            Process(target=Requests.__requests_async, args=(
                self,
                output,
            )).start()
            sleep(0.01)
        responses = [output.get() for _ in range(workers)]
        new_requests = []
        for r in responses:
            new_request = self.copy()
            new_request.status_code = r.status_code
            new_request.response_headers = {
                k: v
                for k, v in r.headers.items()
            } if r.headers else {}
            new_requests.append(new_request)
        return new_requests

    def verification(self):
        """
        Verifies the response and populates self.error_messages if needed.

        The following validations are performed:

        * Expected status code returned on the response (default 200).
        * Non-empty data body
        * Expected header values (including that the Server header has the right "VSPP_VERSION")

        :return: True if no error messages exist (same as len(self.error_messages) == 0)
        """
        if not self._requested:
            return len(self.error_messages) == 0

        if self.error_messages:
            return len(self.error_messages) == 0

        if self.__should_verify(self.VERIFY_STATUS_CODE):
            status = self.verify_status_code()
            # If the request has failed, no point in performing further validation.
            # If we try and validate BODY_LEN after a fail and no data returned an exception will be raised.
            #    TypeError: object of type 'NoneType' has no len()
            if not status:
                return status

        if self.__should_verify(self.VERIFY_BODY_LEN):
            self.verify_body_len()

        if self.__should_verify(self.VERIFY_HEADERS):
            self.verify_response_headers()

        return len(self.error_messages) == 0

    def verify_status_code(self):
        status = self.status_code == self.expected_response_code
        if not status:
            self.error_messages.append(
                "Request {0} returned a {1} response code".format(
                    self.url, self.status_code))
        self._logger.log("[{}] Status Code Check".format(
            "OK" if not self.error_messages else "BAD"))
        return len(self.error_messages) == 0

    def verify_response_headers(self):
        """
        Verifies the response expected headers values and populates self.error_messages.

        If the expected header value is N/A, only verify the header existence but not its value

        :param expected_headers: A custom dictionary of expected headers and values. Default to None (will use self.default_headers)

        :return: Error messages (same as self.error_messages)
        """

        for header_field_to_verify in self.expected_headers:
            expected = self.expected_headers[header_field_to_verify]
            if expected == "N/A":
                if header_field_to_verify not in self.response_headers:
                    self.error_messages.append(
                        "\n Expected to find header '{}' but it wasn't in {}".
                        format(header_field_to_verify, self.url))
            else:
                actual = self.response_headers.get(header_field_to_verify,
                                                   "N/A")
                if actual != expected:
                    self.error_messages.append(
                        "Expected value for '{}' is '{}' but response header had '{}'"
                        .format(header_field_to_verify, expected, actual))
        self._logger.log("[{}] Headers Check".format(
            "OK" if not self.error_messages else "BAD"))
        return len(self.error_messages) == 0

    def verify_body_len(self):
        header_content_len = int(self.response_headers.get('Content-Length', 0))
        status = self.content_len == header_content_len
        if not status:
            self.error_messages.append(
                "Request {} returned {} bytes while Content-Length header had {}"
                .format(self.url, self.content_len, header_content_len))
        self._logger.log("[{}] Body Len Check".format(
            "OK" if not self.error_messages else "BAD"))
        return len(self.error_messages) == 0

    @staticmethod
    def __requests_async(request, output):
        response = requests.request(method=Requests.METHOD_GET,
                                    url=request.url,
                                    headers=request.request_headers,
                                    auth=request.authorization,
                                    proxies=request.proxies,
                                    verify=False)
        output.put(response)

    @staticmethod
    def __safe_string(unsafe_string):
        return (unsafe_string.replace(":", ".").replace("/", ".").replace(
            "$",
            ".").replace("?", ".").replace("=", ".").replace("(", ".").replace(
                ")", ".").replace("&", ".").replace("...",
                                                    ".").replace("..", "."))

    def __load_json_data_if_cache_mode_enabled(self):
        if self.CACHE_MODE_READ:
            try:
                with open(self.cached_filename) as f:
                    response = json.load(f)
                    # Only overwrite attributes that already exist on this instance
                    for k in list(self.__dict__):
                        if k in response:
                            self.__dict__[k] = response[k]
                    self._logger.log("Loaded public data from {}".format(
                        self.cached_filename))
                    self._requested = True
                    return True
            except Exception as e:
                # A missing or unreadable cache file should not abort the run:
                # log it and fall back to performing the real request.
                self._logger.log("Could not load cache {}: {}".format(
                    self.cached_filename, e))
        return False

    def __save_json_data_if_cache_mode_enabled(self):
        if self.CACHE_MODE_WRITE:
            content_type = self.content_type()
            if (self._data is not None
                    or self._dict is not None) and content_type:
                self.filename = self._base_name + "." + content_type
                if self._dict is not None:
                    with open(self.filename, "w") as f:
                        f.write(json.dumps(self._dict, indent=4))
                elif self._data is not None and isinstance(self._data, str):
                    with open(self.filename, "w") as f:
                        f.write(self._data)
                elif self._data is not None:
                    with open(self.filename, "wb") as f:
                        f.write(self._data)
            with open(self.cached_filename, "w") as f:
                f.write(
                    json.dumps(self.to_json(),
                               indent=4,
                               ensure_ascii=True,
                               sort_keys=True))
            self._logger.log("Saved public data on {}".format(
                self.cached_filename))
            return True
        return False
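    # CACHE_MODE_READ and CACHE_MODE_WRITE act as record/replay switches for
    # this class. A hypothetical workflow (attribute placement is an assumption;
    # they may be class-level constants rather than instance flags):
    #
    #     api.CACHE_MODE_WRITE = True   # first run: persist responses to disk
    #     ...
    #     api.CACHE_MODE_READ = True    # later runs: replay the saved responses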

    def __start_timer(self):
        self._request_begin_time = datetime.now().timestamp()

    def __stop_timer(self):
        return round(datetime.now().timestamp() - self._request_begin_time,
                     6) if self._request_begin_time else 0

    def __read_response_body(self, func_read_headers, func_read_textual_data,
                             func_read_binary_data):
        """
        Read the response, independently on how it was got

        This is achieved thanks to the functional parameters, functions that allow it to read headers and response.

        The textual function will be used if the content type is either XML, JSON, or HTML. In this case, the
        self.is_binary attribute will be False and self.data will be populated.

        The binary function will be used if the content type is either MP4, MP2T, or image. In this case, the
        self.is_binary attribute will be True and self.data will NOT be populated - a file will be saved with
        the content for later manipulations.

        :param func_read_headers: a function defining how to read the headers of the response
        :param func_read_textual_data: a function defining how to read textual data - will be used depending on the content type
        :param func_read_binary_data: a function defining how to read binary data - will be used depending on the content type
        :return:
        """
        self.response_headers = func_read_headers()
        self.latency_time = self.__stop_timer()
        self._data = None
        self.is_binary = False
        if self.ignore_response_body or self.method == self.METHOD_HEAD:
            return
        textual_type = self.is_textual_type()
        binary_type = self.is_binary_type()
        if textual_type:
            self._data = func_read_textual_data()
            self.elapsed_time = self.__stop_timer()
            self.content_len = len(self._data)
            self._dict = self.__safe_json_load(self._data)
            self.full_time = self.__stop_timer()
        elif binary_type:
            self.is_binary = True
            self._data = func_read_binary_data()
            self.elapsed_time = self.__stop_timer()
            self.content_len = len(self._data)
            self.full_time = self.__stop_timer()
        else:
            # Unknown content type: fall back to reading the body as text.
            self._data = func_read_textual_data()
            self.elapsed_time = self.__stop_timer()
            self.content_len = len(self._data)
            self.full_time = self.__stop_timer()

    def __load_payload(self):
        if self.filename is None:
            return
        if not os.path.exists(self.filename):
            return
        if self._data is not None:
            return

        # Else
        with open(self.filename, 'r', encoding='utf8') as f:
            self._data = f.read()

    def __content_type_in(self, *args):
        content_types = [
            x for x in args
            if x in self.response_headers.get("Content-Type", "").lower()
        ]
        return content_types[0] if content_types else None
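    # For example, __content_type_in("json", "xml") returns "json" when the
    # response Content-Type header is "application/json; charset=utf-8", and
    # None when neither substring is present.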

    def __should_verify(self, verification_constant):
        return verification_constant in self.expected_verifications

    def __safe_json_load(self, json_data):
        """
        Safely (without raising) parses a JSON string into a dictionary.

        :param json_data: JSON data as a string
        :return: the parsed dictionary, or None if parsing fails
        """
        data = None
        try:
            data = json.loads(json_data)
        except Exception:
            # Not valid JSON (or not a string at all): keep data as None.
            pass
        return data
Beispiel #26
0
class ApiTestBase(unittest.TestCase):
    def setUp(self):
        self.test_name = self.id().split(".")[-1]
        # pytest >= 3.2 sets PYTEST_CURRENT_TEST automatically; kept for older versions
        os.environ["PYTEST_CURRENT_TEST"] = self.test_name

        self.environment = Environment()
        self.logger = Logger(debug=(self.environment.DEBUG == "true"))

        # Populate variables with data to HTML Report and to JUnit XML
        self._pytest_output = ""
        self._fail_message = []

        # Done
        self.log("Starting test " + self.id())
        self.log_step("Scenario description:")
        self.log("\n" + self.__dict__["_testMethodDoc"])

    def tearDown(self):
        self.log("Ending test {} - {}".format(
            self.id(), "PASSED" if len(self._fail_message) == 0 else "FAILED"))
        self.log("=" * 50)
        self.log("Log file created on {}.txt".format(
            self.logger.logging_filename))
        self.log("=" * 50)

    def log(self, line):
        self.logger.log(line)

    def log_multiple(self, logs: List[str]):
        self.logger.log_multiple(logs)

    def log_step(self, line):
        self.logger.log_step(line)

    def reset_fail_message(self):
        self._fail_message = []

    def add_fail_message(self, message: str):
        self._fail_message.append(message)

    def add_fail_messages(self, messages: List[str]):
        self._fail_message += messages

    def add_output_message(self, message: str):
        self._pytest_output += message + "\n"
        self.log(message)

    def add_output_messages(self, messages: List[str]):
        if messages:
            message = "\n\t *" + "\n\t *".join(messages) + "\n"
            self._pytest_output += message
            self.log(message)

    def fail_if_message(self):
        fail_log_message = "Fail Message(s) for test [{}]:\n{}".format(
            self.id(), "\n".join(self._fail_message))
        if len(self._fail_message) != 0:
            self.log(fail_log_message)
        assert len(self._fail_message) == 0, "\n" + fail_log_message

    def then_everything_should_be_fine(self, fail_messages=None):
        if fail_messages:
            self.log("Failure messages:")
            self.add_fail_messages(fail_messages)

        self.log_step(
            'Then everything is successfully returned on all requests')
        self.fail_if_message()

    def flush_api_messages(self, api: ApiBase):
        self.add_fail_messages(api.error_messages)
        self.add_output_messages(api.output_messages)

    def flush_page_messages(self, page: PageBase):
        self.add_fail_messages(page.error_messages)
        self.add_output_messages(page.output_messages)
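    # A hypothetical test built on this base class might look like the sketch
    # below; LoginApi and its request() method are illustrative assumptions,
    # standing in for any concrete ApiBase subclass:
    #
    #     class LoginApiTest(ApiTestBase):
    #         def test_login_returns_ok(self):
    #             """The login endpoint answers all requests successfully."""
    #             api = LoginApi()
    #             api.request()
    #             self.flush_api_messages(api)
    #             self.then_everything_should_be_fine()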

    def data_test_file_path(self, filename):
        return os.path.join(os.path.dirname(os.path.abspath(__file__)), "data",
                            filename)