Example #1
    def test_multinode_description(self):
        self.assertIsNotNone(self.client_job)
        allow_missing_path(self.client_job.validate, self, "qemu-system-x86_64")
        # check that the description can be re-loaded as valid YAML
        for action in self.client_job.pipeline.actions:
            data = action.explode()
            data_str = yaml.dump(data)
            yaml_load(data_str)  # nosec not suitable for safe_load
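These snippets come from the LAVA codebase, where yaml_load is a project helper rather than yaml.safe_load (hence the # nosec annotations). A minimal sketch of such a helper, assuming it simply wraps yaml.load with the C-accelerated loader when PyYAML was built against libyaml; the real implementation may differ:

import yaml

try:
    from yaml import CFullLoader as _Loader  # C extension, if available
except ImportError:
    from yaml import FullLoader as _Loader


def yaml_load(data):
    # Full (non-safe) loading, which is why callers carry nosec markers.
    return yaml.load(data, Loader=_Loader)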
Example #2
def test_internal_v1_jobs_get(client, mocker, settings):
    # Create objects
    objs = create_objects(Worker.objects.create(hostname="worker-01"))
    (j1, j2, j3, j4, j5, j6) = objs["jobs"]

    # Test errors
    ret = client.get(reverse("lava.scheduler.internal.v1.jobs",
                             args=["12345"]))
    assert ret.status_code == 404

    ret = client.get(reverse("lava.scheduler.internal.v1.jobs", args=[j1.id]))
    assert ret.status_code == 400
    assert ret.json()["error"] == "Missing 'token'"

    ret = client.get(reverse("lava.scheduler.internal.v1.jobs", args=[j1.id]),
                     HTTP_LAVA_TOKEN="")
    assert ret.status_code == 400
    assert ret.json()["error"] == "Invalid 'token'"

    ret = client.get(
        reverse("lava.scheduler.internal.v1.jobs", args=[j1.id]),
        HTTP_LAVA_TOKEN=j1.token,
    )
    assert ret.status_code == 200
    assert list(ret.json().keys()) == [
        "definition",
        "device",
        "dispatcher",
        "env",
        "env-dut",
    ]
    print(ret.json())
    assert yaml_load(ret.json()["definition"]) == {
        "compatibility": 0,
        "device_type": "qemu",
    }
    assert "hostname: qemu05" not in ret.json()["device"]
    assert "available_architectures:" in ret.json()["device"]

    ret = client.get(
        reverse("lava.scheduler.internal.v1.jobs", args=[j6.id]),
        HTTP_LAVA_TOKEN=j6.token,
    )
    assert ret.status_code == 200
    assert list(ret.json().keys()) == [
        "definition",
        "device",
        "dispatcher",
        "env",
        "env-dut",
    ]
    assert yaml_load(ret.json()["definition"]) == {
        "compatibility": 0,
        "connection": "ssh",
        "host_role": "hello",
    }
    assert "hostname: qemu05" in ret.json()["device"]
    assert "available_architectures:" not in ret.json()["device"]
Example #3
    def write(self, job, line, output=None, idx=None):
        line = yaml_load(line)[0]
        day = "%02d-%02d-%02d" % (
            job.submit_time.year,
            job.submit_time.month,
            job.submit_time.day,
        )
        doc_ref = (
            self.db.collection(self.root_collection)
            .document(day)
            .collection(str(job.id))
            .document(line["dt"])
        )
        doc_ref.set({"lvl": line["lvl"], "msg": line["msg"]})
Example #4
def create_metadata_store(results, job):
    """
    Persist the 'extra' block of a results dictionary as a YAML file under
    the job's metadata directory, merging with any existing store for the
    same level. Returns the filename, or None when there is nothing to
    store or the write fails.
    """
    if "extra" not in results:
        return None
    level = results.get("level")
    if level is None:
        return None

    logger = logging.getLogger("lava-master")
    stub = "%s-%s-%s.yaml" % (results["definition"], results["case"], level)
    meta_filename = os.path.join(job.output_dir, "metadata", stub)
    os.makedirs(os.path.dirname(meta_filename), mode=0o755, exist_ok=True)
    if os.path.exists(meta_filename):
        with open(meta_filename, "r") as existing_store:
            data = yaml_load(existing_store)
        data.update(results["extra"])
    else:
        data = results["extra"]
    try:
        with open(meta_filename, "w") as extra_store:
            yaml.dump(data, extra_store)
    except OSError as exc:  # LAVA-847
        msg = "[%d] Unable to create metadata store: %s" % (job.id, exc)
        logger.error(msg)
        append_failure_comment(job, msg)
        return None
    return meta_filename
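A minimal usage sketch for create_metadata_store; the payload keys mirror what the function reads, but the values and the stand-in job object are invented for illustration:

from types import SimpleNamespace

# Hypothetical stand-in for a TestJob; only the attributes the function
# touches (output_dir, id) are provided.
job = SimpleNamespace(output_dir="/tmp/job-1234", id=1234)
results = {
    "definition": "lava",
    "case": "unit-test",
    "level": "1.3.5.1",
    "extra": {"kernel-messages": ["example oops text"]},
}
meta_filename = create_metadata_store(results, job)
# -> "/tmp/job-1234/metadata/lava-unit-test-1.3.5.1.yaml" on success,
#    None when "extra"/"level" is absent or the write fails.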
Example #5
    def test_devicetype_get_health_check(self, monkeypatch, tmpdir):
        real_open = open
        (tmpdir / "qemu.yaml").write_text("hello", encoding="utf-8")

        def monkey_open(path, *args):
            if path == os.path.join(settings.HEALTH_CHECKS_PATH, "qemu.yaml"):
                return real_open(str(tmpdir / "qemu.yaml"), *args)
            if path == os.path.join(settings.HEALTH_CHECKS_PATH,
                                    "docker.yaml"):
                raise FileNotFoundError()
            return real_open(path, *args)

        monkeypatch.setitem(__builtins__, "open", monkey_open)

        # 1. normal case
        qemu_device_type1 = DeviceType.objects.create(name="qemu")
        data = self.hit(
            self.userclient,
            reverse("api-root", args=[self.version]) +
            "devicetypes/%s/health_check/" % qemu_device_type1.name,
        )
        data = yaml_load(data)
        assert data == str("hello")  # nosec

        # 2. Can't read the health-check
        docker_device_type1 = DeviceType.objects.create(name="docker")
        response = self.userclient.get(
            reverse("api-root", args=[self.version]) +
            "devicetypes/%s/health_check/" % docker_device_type1.name)
        assert response.status_code == 400  # nosec
Example #6
    def write(self, job, line, output=None, idx=None):
        line = yaml_load(line)[0]
        dt = datetime.datetime.strptime(line["dt"], "%Y-%m-%dT%H:%M:%S.%f")
        line.update({"job_id": job.id, "dt": int(dt.timestamp() * 1000)})
        data = simplejson.dumps(line)

        requests.post("%s_doc/" % self.api_url, data=data, headers=self.headers)
Example #7
    def test_devices_get_dictionary(self, monkeypatch, tmpdir):
        # invalid context
        response = self.userclient.get(
            reverse("api-root", args=[self.version]) +
            "devices/public01/dictionary/?context={{")
        assert response.status_code == 400  # nosec

        # no device dict
        monkeypatch.setattr(Device, "load_configuration",
                            (lambda self, job_ctx, output_format: None))
        response = self.userclient.get(
            reverse("api-root", args=[self.version]) +
            "devices/public01/dictionary/")
        assert response.status_code == 400  # nosec

        # success
        monkeypatch.setattr(
            Device,
            "load_configuration",
            (lambda self, job_ctx, output_format: "device: dict"),
        )
        data = self.hit(
            self.userclient,
            reverse("api-root", args=[self.version]) +
            "devices/public01/dictionary/",
        )
        data = yaml_load(data)
        assert data == {"device": "dict"}  # nosec
Example #8
def test_internal_v1_jobs_test_auth_token(client, setup, mocker):
    user = User.objects.get(username="******")
    job01 = TestJob.objects.get(description="test job 01")

    write_text = mocker.Mock()
    mocker.patch("pathlib.Path.write_text", write_text)

    ret = client.get(
        reverse("lava.scheduler.internal.v1.jobs", args=[job01.id]),
        HTTP_LAVA_TOKEN=job01.token,
    )
    assert ret.status_code == 200
    job_def = yaml_load(ret.json()["definition"])

    # Token not in db.
    assert job_def["actions"][0]["deploy"]["image"]["headers"]["PRIVATE"] == "token"

    # Token in db.
    RemoteArtifactsAuth.objects.create(name="token", token="tokenvalue", user=user)
    ret = client.get(
        reverse("lava.scheduler.internal.v1.jobs", args=[job01.id]),
        HTTP_LAVA_TOKEN=job01.token,
    )
    assert ret.status_code == 200
    job_def = yaml_load(ret.json()["definition"])

    assert (
        job_def["actions"][0]["deploy"]["image"]["headers"]["PRIVATE"] == "tokenvalue"
    )

    # No headers present.
    job02 = TestJob.objects.get(description="test job 02")
    ret = client.get(
        reverse("lava.scheduler.internal.v1.jobs", args=[job02.id]),
        HTTP_LAVA_TOKEN=job02.token,
    )
    assert ret.status_code == 200

    # Int value in deploy action dict
    job01.definition = INT_VALUE_JOB_DEFINITION
    job01.save()
    ret = client.get(
        reverse("lava.scheduler.internal.v1.jobs", args=[job01.id]),
        HTTP_LAVA_TOKEN=job01.token,
    )
    assert ret.status_code == 200
    job_def = yaml_load(ret.json()["definition"])
Example #9
def test_simple_mapping(tmpdir):
    device_info = {"foo": "bar"}
    add_device_container_mapping("1", device_info, "mycontainer")
    with open(tmpdir / "1" / "usbmap.yaml") as f:
        data = yaml_load(f)

    assert data["device_info"] == device_info
    assert data["container"] == "mycontainer"
    assert data["container_type"] == "lxc"
Example #10
    def test_testjob_yaml(self):
        data = self.hit(
            self.userclient,
            reverse("api-root", args=[self.version]) +
            "jobs/%s/yaml/" % self.public_testjob1.id,
        )
        data = yaml_load(data)
        assert data[0]["job"] == str(self.public_testjob1.id)  # nosec - unit test support
Example #11
    def write(self, job, line, output=None, idx=None):
        line = yaml_load(line)[0]

        self.db.logs.insert_one({
            "job_id": job.id,
            "dt": line["dt"],
            "lvl": line["lvl"],
            "msg": line["msg"]
        })
Example #12
def load_mapping_data(filename):
    try:
        with open(filename) as f:
            data = yaml_load(f) or []
        if isinstance(data, dict):
            data = [data]
        return data
    except FileNotFoundError:
        return []
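load_mapping_data normalizes its output: a missing file or empty document yields [], and a single mapping document is wrapped in a list. A quick illustration under those assumptions, using a throwaway file:

import os
import tempfile

with tempfile.TemporaryDirectory() as d:
    path = os.path.join(d, "usbmap.yaml")
    with open(path, "w") as f:
        f.write("device_info: {serial_number: '1234'}\ncontainer: c1\n")
    # A single mapping comes back wrapped in a list.
    assert load_mapping_data(path) == [
        {"device_info": {"serial_number": "1234"}, "container": "c1"}
    ]
    # A missing file is an empty list, not an exception.
    assert load_mapping_data(os.path.join(d, "missing.yaml")) == []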
Example #13
    def test_decimal_yaml_dump(self):
        job = TestJob.from_yaml_and_user(self.factory.make_job_yaml(), self.user)
        test_dict = {
            "definition": "unit-test",
            "case": "unit-test",
            "measurement": decimal.Decimal(1234.5),
            "result": "pass",
        }
        test_case = map_scanned_results(test_dict, job, {}, None)
        self.assertEqual(yaml_load(test_case.metadata)["measurement"], "1234.5")
Example #14
    def test_metastore(self):
        field = TestCase._meta.get_field("metadata")
        level = "1.3.5.1"
        # artificially inflate results to represent a set of kernel messages
        results = {
            "definition": "lava",
            "case": "unit-test",
            "level": level,
            # list of numbers, generates a much longer YAML string than just the count
            "extra": list(range(int(field.max_length / 2))),
            "result": "pass",
        }
        stub = "%s-%s-%s.yaml" % (results["definition"], results["case"],
                                  level)
        job = TestJob.from_yaml_and_user(self.factory.make_job_yaml(),
                                         self.user)
        meta_filename = os.path.join(job.output_dir, "metadata", stub)
        filename = "%s/job-%s/pipeline/%s/%s-%s.yaml" % (
            job.output_dir,
            job.id,
            level.split(".")[0],
            level,
            results["definition"],
        )

        mkdir(os.path.dirname(filename))
        if os.path.exists(meta_filename):
            # isolate from other unit tests
            os.unlink(meta_filename)
        self.assertEqual(meta_filename, create_metadata_store(results, job))
        ret = map_scanned_results(results, job, {}, meta_filename)
        self.assertIsNotNone(ret)
        ret.save()
        self.assertEqual(TestCase.objects.filter(name="unit-test").count(), 1)
        test_data = yaml_load(
            TestCase.objects.filter(name="unit-test")[0].metadata)
        self.assertEqual(test_data["extra"], meta_filename)
        self.assertTrue(os.path.exists(meta_filename))
        with open(test_data["extra"], "r") as extra_file:
            data = yaml_load(extra_file)
        self.assertIsNotNone(data)
        os.unlink(meta_filename)
        shutil.rmtree(job.output_dir)
Example #15
def export_testcase(testcase, with_buglinks=False):
    """
    Returns string versions of selected elements of a TestCase
    Unicode causes issues with CSV and can complicate YAML parsing
    with non-python parsers.
    :param testcase: a TestCase object
    :return: Dictionary containing relevant information formatted for export
    """
    metadata = dict(testcase.action_metadata) if testcase.action_metadata else {}
    extra_source = []
    extra_data = metadata.get("extra")
    if isinstance(extra_data, str) and os.path.exists(extra_data):
        with open(metadata["extra"], "r") as extra_file:
            # TODO: this can fail!
            items = yaml_load(extra_file)
        # hide the !!python OrderedDict prefix from the output.
        for key, value in items.items():
            extra_source.append({key: value})
        metadata["extra"] = extra_source
    casedict = {
        "name": str(testcase.name),
        "job": str(testcase.suite.job_id),
        "suite": str(testcase.suite.name),
        "result": str(testcase.result_code),
        "measurement": str(testcase.measurement),
        "unit": str(testcase.units),
        "level": metadata.get("level", ""),
        "url": str(testcase.get_absolute_url()),
        "id": str(testcase.id),
        "logged": str(testcase.logged),
        "log_start_line": str(testcase.start_log_line) if testcase.start_log_line else "",
        "log_end_line": str(testcase.end_log_line) if testcase.end_log_line else "",
        "metadata": metadata,
    }
    if with_buglinks:
        casedict["buglinks"] = [
            str(url) for url in testcase.buglinks.values_list("url", flat=True)
        ]

    return casedict
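The TODO above notes that reading the "extra" file can fail. A hedged sketch of a defensive version of just that read, assuming OSError and yaml.YAMLError are the failure modes worth catching (the helper name is invented):

import yaml


def load_extra_metadata(path):
    # Sketch: return None instead of raising, so the caller can keep the
    # raw path in metadata["extra"] when the file cannot be parsed.
    try:
        with open(path, "r") as extra_file:
            return yaml_load(extra_file)
    except (OSError, yaml.YAMLError):
        return None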
Example #16
    def test_testjob_suite_yaml(self):
        data = self.hit(
            self.userclient,
            reverse("api-root", args=[self.version]) +
            "jobs/%s/suites/%s/yaml/" %
            (self.public_testjob1.id,
             self.public_testjob1.testsuite_set.first().id),
        )
        data = yaml_load(data)
        assert (
            data[0]["suite"] == self.public_testjob1.testsuite_set.first().name
        )  # nosec - unit test support
Example #17
    def check(logger, lvl, lvlno, msg=None, mock_calls=1):
        assert len(logger._log.mock_calls) == mock_calls
        if mock_calls == 0:
            return
        assert logger._log.mock_calls[0][1][0] == lvlno
        data = yaml_load(logger._log.mock_calls[0][1][1])
        assert list(data.keys()) == ["dt", "lvl", "msg"]
        assert data["lvl"] == lvl
        if msg is None:
            assert data["msg"] == f"an {lvl}"
        else:
            assert data["msg"] == msg
Example #18
def test_mongo_logs(mocker):
    mocker.patch("pymongo.database.Database.command")
    mocker.patch("pymongo.collection.Collection.create_index")
    logs_mongo = LogsMongo()

    job = mocker.Mock()
    job.id = 1

    insert_one = mocker.MagicMock()
    find = mocker.MagicMock()
    find_ret_val = [
        {
            "dt": "2020-03-25T19:44:36.209548",
            "lvl": "info",
            "msg": "first message"
        },
        {
            "dt": "2020-03-26T19:44:36.209548",
            "lvl": "info",
            "msg": "second message"
        },
    ]
    find.return_value = find_ret_val

    mocker.patch("pymongo.collection.Collection.find", find)
    mocker.patch("pymongo.collection.Collection.insert_one", insert_one)

    logs_mongo.write(
        job,
        '- {"dt": "2020-03-25T19:44:36.209548", "lvl": "info", "msg": "lava-dispatcher, installed at version: 2020.02"}',
    )
    insert_one.assert_called_with(
        {
            "job_id": 1,
            "dt": "2020-03-25T19:44:36.209548",
            "lvl": "info",
            "msg": "lava-dispatcher, installed at version: 2020.02",
        }
    )  # nosec
    result = yaml_load(logs_mongo.read(job))

    assert len(result) == 2  # nosec
    assert result == find_ret_val  # nosec
    # size of find_ret_val in bytes
    assert logs_mongo.size(job) == 137  # nosec

    assert logs_mongo.open(job).read() == yaml_dump(find_ret_val).encode(
        "utf-8")
Example #19
    def _get_docs(self, job, start=0, end=None):
        if not end:
            end = self.MAX_RESULTS

        limit = end - start
        if limit < 0:
            return []

        params = {
            "query": {"match": {"job_id": job.id}},
            "from": start,
            "size": limit,
            "sort": [{"dt": {"order": "asc"}}],
        }

        response = requests.get(
            "%s_search/" % self.api_url,
            data=simplejson.dumps(params),
            headers=self.headers,
        )

        response = simplejson.loads(response.text)
        if not "hits" in response:
            return []
        result = []
        for res in response["hits"]["hits"]:
            doc = res["_source"]
            doc["dt"] = datetime.datetime.fromtimestamp(
                doc["dt"] / 1000.0
            ).isoformat()
            if doc["lvl"] == "results":
                doc.update({"msg": yaml_load(doc["msg"])})
            result.append(doc)
        return result
Example #20
def test_wait_for_board_id_is_optional(factory):
    action = DockerTestAction()
    action.job = Job("1234", {}, None)
    rendered, _ = factory.create_device("hi6220-hikey-r2-01.jinja2")
    action.job.device = NewDevice(yaml_load(rendered))
    action.job.timeout = Timeout("blah")
    action.level = 1
    action.populate(
        {
            "namespace": "common",
            "docker": {"image": "foobar", "wait": {"device": False}},
        }
    )
    assert not any(
        [a for a in action.pipeline.actions if a.name == "wait-device-boardid"]
    )

    docker_test_shell = action.pipeline.actions[-2]
    assert not docker_test_shell.wait_for_device
Example #21
    def test_workers_get_config(self, monkeypatch, tmpdir):
        (tmpdir / self.worker1.hostname).mkdir()
        (tmpdir / self.worker1.hostname / "dispatcher.yaml").write_text(
            "hello world", encoding="utf-8")

        class MyPath(pathlib.PosixPath):
            def __new__(cls, path, *args, **kwargs):
                if (path == "%s/worker1/dispatcher.yaml" %
                        settings.DISPATCHER_CONFIG_PATH):
                    return super().__new__(
                        cls, str(tmpdir / "worker1" / "dispatcher.yaml"),
                        *args, **kwargs)
                elif path == "%s/worker1.yaml" % settings.GLOBAL_SETTINGS_PATH:
                    return super().__new__(cls, str(tmpdir / "worker1.yaml"),
                                           *args, **kwargs)
                else:
                    assert 0  # nosec

        monkeypatch.setattr(pathlib, "Path", MyPath)
        data = self.hit(
            self.userclient,
            reverse("api-root", args=[self.version]) +
            "workers/%s/config/" % self.worker1.hostname,
        )
        data = yaml_load(data)
        assert data == str("hello world")  # nosec

        # worker does not exist
        response = self.userclient.get(
            reverse("api-root", args=[self.version]) +
            "workers/invalid_hostname/config/")
        assert response.status_code == 404  # nosec

        # no configuration file
        (tmpdir / self.worker1.hostname / "dispatcher.yaml").remove()
        (tmpdir / self.worker1.hostname).remove()
        response = self.userclient.get(
            reverse("api-root", args=[self.version]) +
            "workers/%s/config/" % self.worker1.hostname)
        assert response.status_code == 400  # nosec
Example #22
def parse_job_description(job):
    filename = os.path.join(job.output_dir, "description.yaml")
    logger = logging.getLogger("lava-master")
    try:
        with open(filename, "r") as f_describe:
            description = f_describe.read()
        pipeline = yaml_load(description)
    except (OSError, yaml.YAMLError):
        logger.error("'Unable to open and parse '%s'", filename)
        return

    if not map_metadata(description, job):
        logger.warning("[%d] unable to map metadata", job.id)

    # add the compatibility result from the master to the definition for comparison on the slave.
    try:
        compat = int(pipeline["compatibility"])
    except (TypeError, ValueError):
        compat = pipeline["compatibility"] if pipeline is not None else None
        logger.error("[%d] Unable to parse job compatibility: %s", job.id, compat)
        compat = 0
    job.pipeline_compatibility = compat
    job.save(update_fields=["pipeline_compatibility"])
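For reference, a minimal description payload that satisfies the compatibility parsing above; the field names are taken from the code, the values are invented:

# Illustrative only: "compatibility" is what parse_job_description reads;
# "job" and "pipeline" are what map_metadata (Example #27) additionally
# expects from the same document.
description = "compatibility: 0\njob:\n  actions: []\npipeline: []\n"
pipeline = yaml_load(description)
assert int(pipeline["compatibility"]) == 0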
Example #23
def test_mapping_for_new_container_overrides_previous_mapping(tmpdir):
    add_device_container_mapping(
        "1",
        {
            "serial_number": "1234567890",
            "vendor_id": None,
            "product_id": None,
            "fs_label": None,
        },
        "mycontainer1",
    )
    add_device_container_mapping(
        "1",
        {
            "serial_number": "1234567890",
            "vendor_id": None,
            "product_id": None,
            "fs_label": None,
        },
        "mycontainer2",
    )
    with open(tmpdir / "1" / "usbmap.yaml") as f:
        data = yaml_load(f)
    assert len(data) == 1
    assert data[0]["container"] == "mycontainer2"
Example #24
    def make_job_data(self, actions=None, **kw):
        sample_job_file = os.path.join(os.path.dirname(__file__), "qemu.yaml")
        with open(sample_job_file, "r") as test_support:
            data = yaml_load(test_support)
        data.update(kw)
        return data
Example #25
    def logging_socket(self):
        msg = self.log_socket.recv_multipart()
        try:
            (job_id, message) = (u(m) for m in msg)
        except UnicodeDecodeError:
            self.logger.error("[POLL] Invalid log message: can't be decoded")
            return
        except ValueError:
            # do not let a bad message stop the master.
            self.logger.error(
                "[POLL] failed to parse log message, skipping: %s", msg)
            return

        try:
            scanned = yaml_load(message)
        except yaml.YAMLError:
            self.logger.error("[%s] data are not valid YAML, dropping", job_id)
            return

        # Look for "results" level
        try:
            message_lvl = scanned["lvl"]
            message_msg = scanned["msg"]
        except TypeError:
            self.logger.error("[%s] not a dictionary, dropping", job_id)
            return
        except KeyError:
            self.logger.error(
                '[%s] invalid log line, missing "lvl" or "msg" keys: %s',
                job_id,
                message,
            )
            return

        # Find the handler (if available)
        if job_id not in self.jobs:
            # Query the database for the job
            try:
                job = TestJob.objects.get(id=job_id)
            except TestJob.DoesNotExist:
                self.logger.error("[%s] unknown job id", job_id)
                return

            self.logger.info("[%s] receiving logs from a new job", job_id)
            # Create the sub directories (if needed)
            mkdir(job.output_dir)
            self.jobs[job_id] = JobHandler(job)

        # For 'event', send an event and log as 'debug'
        if message_lvl == "event":
            self.logger.debug("[%s] event: %s", job_id, message_msg)
            send_event(".event", "lavaserver", {
                "message": message_msg,
                "job": job_id
            })
            message_lvl = "debug"
        # For 'marker', save in the database and log as 'debug'
        elif message_lvl == "marker":
            # TODO: save on the file system in case of lava-logs restart
            m_type = message_msg.get("type")
            case = message_msg.get("case")
            if m_type is None or case is None:
                self.logger.error("[%s] invalid marker: %s", job_id,
                                  message_msg)
                return
            # This is in fact the previous line
            line = self.jobs[job_id].line_count() - 1
            self.jobs[job_id].markers.setdefault(case, {})[m_type] = line
            return

        # Mark the file handler as used
        self.jobs[job_id].last_usage = time.time()
        # The format is a list of dictionaries
        self.jobs[job_id].write("- %s" % message)

        if message_lvl == "results":
            try:
                job = TestJob.objects.get(pk=job_id)
            except TestJob.DoesNotExist:
                self.logger.error("[%s] unknown job id", job_id)
                return
            meta_filename = create_metadata_store(message_msg, job)
            new_test_case = map_scanned_results(
                results=message_msg,
                job=job,
                markers=self.jobs[job_id].markers,
                meta_filename=meta_filename,
            )

            if new_test_case is None:
                self.logger.warning("[%s] unable to map scanned results: %s",
                                    job_id, message)
            else:
                self.test_cases.append(new_test_case)

            # Look for lava.job result
            if (message_msg.get("definition") == "lava"
                    and message_msg.get("case") == "job"):
                # Flush cached test cases
                self.flush_test_cases()

                if message_msg.get("result") == "pass":
                    health = TestJob.HEALTH_COMPLETE
                    health_msg = "Complete"
                else:
                    health = TestJob.HEALTH_INCOMPLETE
                    health_msg = "Incomplete"
                self.logger.info("[%s] job status: %s", job_id, health_msg)

                infrastructure_error = message_msg.get("error_type") in [
                    "Bug",
                    "Configuration",
                    "Infrastructure",
                ]
                if infrastructure_error:
                    self.logger.info("[%s] Infrastructure error", job_id)

                # Update status.
                with transaction.atomic():
                    # TODO: find a way to lock actual_device
                    job = TestJob.objects.select_for_update().get(id=job_id)
                    job.go_state_finished(health, infrastructure_error)
                    job.save()
Example #26
        job = get_restricted_job(request.user, pk=job, request=request)
    if not pk:
        test_suite = case.suite
    else:
        test_suite = get_object_or_404(TestSuite, name=pk, job=job)
    if test_sets:
        test_sets = test_sets.filter(suite=test_suite)
        # No test case was found.
        test_cases = TestCase.objects.none()
    else:
        test_cases = TestCase.objects.filter(name=case.name, suite=test_suite)
    extra_source = {}
    logger = logging.getLogger("lava-master")
    for extra_case in test_cases:
        try:
            f_metadata = yaml_load(extra_case.metadata)
            if not f_metadata:
                continue
        except TypeError:
            logger.info("Unable to load extra case metadata for %s", extra_case)
            continue
        extra_data = f_metadata.get("extra")
        try:
            if extra_data and os.path.exists(extra_data):
                with open(f_metadata["extra"], "r") as extra_file:
                    items = yaml_load(extra_file)
                # hide the !!python OrderedDict prefix from the output.
                for key, value in items.items():
                    extra_source.setdefault(extra_case.id, "")
                    extra_source[extra_case.id] += "%s: %s\n" % (key, value)
Example #27
def map_metadata(description, job):
    """
    Generate metadata from the combination of the pipeline definition
    file (after any parsing for protocols) and the pipeline description
    into static metadata (TestData) related to this specific job
    The description itself remains outside the database - it will need
    to be made available as a download link.
    :param description: the pipeline description output
    :param job: the TestJob to associate
    :return: True on success, False on error
    """
    logger = logging.getLogger("lava-master")
    try:
        submission_data = yaml_safe_load(job.definition)
        description_data = yaml_load(description)
    except yaml.YAMLError as exc:
        logger.exception("[%s] %s", job.id, exc)
        return False
    try:
        testdata, created = TestData.objects.get_or_create(testjob=job)
    except MultipleObjectsReturned:
        # only happens for a small number of jobs affected by the original bug.
        logger.info("[%s] skipping alteration of duplicated TestData", job.id)
        return False
    if not created:
        # prevent updates of existing TestData
        logger.debug("[%s] skipping alteration of existing TestData", job.id)
        return False

    # get job-action metadata
    if description is None:
        logger.warning("[%s] skipping empty description", job.id)
        return False
    if not description_data:
        logger.warning("[%s] skipping invalid description data", job.id)
        return False
    if "job" not in description_data:
        logger.warning("[%s] skipping description without a job.", job.id)
        return False
    action_values = _get_action_metadata(description_data["job"]["actions"])
    for key, value in action_values.items():
        if not key or not value:
            logger.warning("[%s] Missing element in job. %s: %s", job.id, key,
                           value)
            continue
        testdata.attributes.create(name=key, value=value)

    # get common job metadata
    job_metadata = _get_job_metadata(job)
    for key, value in job_metadata.items():
        testdata.attributes.create(name=key, value=value)

    # get metadata from device
    device_values = {}
    device_values["target.device_type"] = job.requested_device_type
    for key, value in device_values.items():
        if not key or not value:
            logger.warning("[%s] Missing element in device. %s: %s", job.id,
                           key, value)
            continue
        testdata.attributes.create(name=key, value=value)

    # Add metadata from job submission data.
    if "metadata" in submission_data:
        for key in submission_data["metadata"]:
            value = submission_data["metadata"][key]
            if not key or not value:
                logger.warning("[%s] Missing element in job. %s: %s", job.id,
                               key, value)
                continue
            testdata.attributes.create(name=key, value=value)

    walk_actions(description_data["pipeline"], testdata, submission_data)
    return True
Example #28
def test_elasticsearch_logs(mocker, logs_elasticsearch):
    job = mocker.Mock()
    job.id = 1

    post = mocker.MagicMock()
    get = mocker.MagicMock()
    get_ret_val = mocker.Mock()

    # Test with empty object first.
    get_ret_val.text = "{}"
    get.return_value = get_ret_val
    mocker.patch("requests.get", get)
    result = logs_elasticsearch.read(job)
    assert result == ""

    # Normal test.
    get_ret_val.text = '{"hits":{"hits":[{"_source":{"dt": 1585165476209, "lvl": "info", "msg": "first message"}}, {"_source":{"dt": 1585165476210, "lvl": "info", "msg": "second message"}}]}}'
    get.return_value = get_ret_val

    mocker.patch("requests.get", get)
    mocker.patch("requests.post", post)

    line = '- {"dt": "2020-03-25T19:44:36.209", "lvl": "info", "msg": "lava-dispatcher, installed at version: 2020.02"}'
    logs_elasticsearch.write(job, line)
    post.assert_called_with(
        "%s%s/_doc/" % (settings.ELASTICSEARCH_URI, settings.ELASTICSEARCH_INDEX),
        data='{"dt": 1585165476209, "lvl": "info", "msg": "lava-dispatcher, installed at version: 2020.02", "job_id": 1}',
        headers={"Content-type": "application/json"},
    )  # nosec
    result = yaml_load(logs_elasticsearch.read(job))

    assert len(result) == 2  # nosec
    assert result == [
        {
            "dt": "2020-03-25T19:44:36.209000",
            "lvl": "info",
            "msg": "first message"
        },
        {
            "dt": "2020-03-25T19:44:36.210000",
            "lvl": "info",
            "msg": "second message"
        },
    ]  # nosec
    # size of get_ret_val in bytes
    assert logs_elasticsearch.size(job) == 137  # nosec

    assert (logs_elasticsearch.open(job).read() == yaml_dump([
        {
            "dt": "2020-03-25T19:44:36.209000",
            "lvl": "info",
            "msg": "first message",
        },
        {
            "dt": "2020-03-25T19:44:36.210000",
            "lvl": "info",
            "msg": "second message",
        },
    ]).encode("utf-8"))
Example #29
def find_mapping(options):
    for mapping in glob.glob(get_mapping_path("*")):
        with open(mapping) as f:
            data = yaml_load(f)
        if match_mapping(data["device_info"], options):
            return data
    return None
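find_mapping relies on a match_mapping helper that is not shown here. A plausible sketch, assuming a mapping matches when every populated device_info field equals the corresponding option; this is purely hypothetical and the real matcher may differ:

def match_mapping(device_info, options):
    # Hypothetical: every populated field must agree with `options`;
    # empty/None fields in the mapping are treated as wildcards.
    for key, value in device_info.items():
        if value and getattr(options, key, None) != value:
            return False
    return True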