Example #1
0
    def _mkreport_junit(self, _tc, kws, header, output, tag_info,
                        reproduction):
        """
        Create a JUnit TestCase for a testcase's result and stash it
        as a pickle file for later aggregation into a testsuite.

        :param _tc: testcase object whose result is being reported
        :param dict kws: keywords used to expand the JUnit name,
          classname and domain templates
        :param str header: report header text (included in stdout)
        :param str output: testcase execution output
        :param str tag_info: tag information text (included in stdout)
        :param str reproduction: reproduction instructions (included
          in stdout)
        """
        for hook in self.junit_hooks:
            hook(self, _tc, kws, output)
        # FIXME: elapsed_sec is a hardcoded placeholder; should come
        # from the testcase's actual execution time
        jtc = junit_xml.TestCase(self.junit_name % kws,
                                 classname=self.junit_classname % kws,
                                 elapsed_sec=123.456,
                                 stdout=header + tag_info + reproduction,
                                 stderr=None)

        # FIXME: nail down the exception
        # <error/failure/blockage/skipped/or cause to put that only in
        # the message and let's put the whole output always in
        # stdout, with the rest of the info on stderr
        msg_tag = kws['msg_tag']
        if msg_tag == "FAIL":
            jtc.add_failure_info(message="Failed", output=output)
        elif msg_tag == "ERRR":
            jtc.add_error_info(message="Error", output=output)
        elif msg_tag == "BLCK":
            jtc.add_error_info(message="Infrastructure", output=output)
        elif msg_tag == "SKIP":
            if self.junit_report_skip:
                jtc.add_skipped_info(message="Skipped", output=output)
            else:
                jtc.add_skipped_info(message="Skipped")
                jtc.stdout = None
                jtc.stderr = None
        elif msg_tag == "PASS":
            if self.junit_report_pass:
                jtc.stderr = output
            elif self.junit_report_pass == None:
                # we don't want *anything*
                jtc.stderr = None
                jtc.stdout = None
            else:  # False
                jtc.stderr = "<inclusion of output disabled by " \
                             "configuration setting of " \
                             "tcfl.report.junit_report_pass>"
                jtc.stdout = "<inclusion of output disabled by " \
                             "configuration setting of " \
                             "tcfl.report.junit_report_pass>"

        # Write the JUNIT to a pickle file, as we'll join it later
        # with the rest in _finalize. We can't put it in a
        # global because this testcase might be running in a separate
        # thread or process.  later == when the global testcase
        # reporter (tcfl.tc.tc_global) emits a COMPLETION message,
        # then we call _finalize()
        domain = commonl.file_name_make_safe(self.junit_domain % kws)
        # use the core keywords, so it is not modified
        tc_hash = _tc.kws['tc_hash']
        # Note we store it in the common
        pathname = os.path.join(tcfl.tc.tc_c.tmpdir, "junit", domain)
        commonl.makedirs_p(pathname)
        # pickle protocol 2 is a binary format; the file must be
        # opened in binary mode or the data can be corrupted by
        # newline translation (e.g. on Windows)
        with open(os.path.join(pathname, tc_hash + ".pickle"),
                  "wb") as picklef:
            cPickle.dump(jtc, picklef, protocol=2)
def console_rx_poller(expecter, target, console=None):
    """
    Poll a console

    Reads any new console output from the server and appends it to a
    per-target/console log file used as the expecter's buffer.
    """
    # Figure out to which file we are writing
    console_id_name, console_code = console_mk_code(target, console)
    log_file_name = os.path.join(
        target.testcase.buffersdir,
        "console-%s:%s-%s.log" % (
            commonl.file_name_make_safe(target.fullid),
            target.kws['tc_hash'], console_id_name))
    # unbuffered append so the on-disk file is the single source of truth
    logf = expecter.buffers.setdefault(
        console_code, open(log_file_name, "a+", 0))
    logfd = logf.fileno()
    expecter.buffers.setdefault(console_code + "-ts0", time.time())

    # Cap how much we read per poll; otherwise a really broken test
    # case that spews a lot of stuff (say, a 1G console log) would
    # make us spend all our time reading it.
    # FIXME: move this to configuration
    max_size = 3000

    # Read anything new since the last time we read -- this relies
    # on us having an exclusive lock on the target
    try:
        offset = os.fstat(logfd).st_size
        t0 = time.time()
        target.report_info(
            "reading from console %s @%d at %.2fs [%s]" %
            (console_id_name, offset, t0 - expecter.ts0, logf.name),
            dlevel=3)
        # The file is our buffering and accounting system; because
        # read_to_fd() bypasses caching, flush first and sync after
        # the read.
        logf.flush()
        total_bytes = target.rtb.rest_tb_target_console_read_to_fd(
            logfd, target.rt, console, offset, max_size, target.ticket)
        logf.flush()
        os.fsync(logfd)
        t1 = time.time()
        target.report_info("read from console %s @%d %dB at %.2fs (%.2fs) "
                           "[%s]" %
                           (console_id_name, offset, total_bytes,
                            t1 - expecter.ts0, t1 - t0, logf.name),
                           dlevel=3)
        # FIXME: do we want to print some debug of what we read? how
        # do we do it for huge files anyway?
        expecter._consoles.add((target, console))

    except requests.exceptions.HTTPError as e:
        raise tc.blocked_e(
            "error reading console %s: %s\n" % (console_id_name, e),
            {"error trace": traceback.format_exc()})
    # Don't count this as something that we need to treat as succesful
    return None
Example #3
0
def console_rx_poller(expecter, target, console=None):
    """
    Poll a console

    Pulls any new output from the remote console into the on-disk
    buffer file the expecter uses for matching.
    """
    # Figure out to which file we are writing
    console_id_name, console_code = console_mk_code(target, console)
    log_name = "console-%s:%s-%s.log" % (
        commonl.file_name_make_safe(target.fullid),
        target.kws['tc_hash'], console_id_name)
    of = expecter.buffers.setdefault(
        console_code,
        open(os.path.join(target.testcase.buffersdir, log_name), "a+", 0))
    ofd = of.fileno()
    expecter.buffers.setdefault(console_code + "-ts0", time.time())

    # Don't read too much per poll, leave the rest for another run --
    # otherwise a really broken test case that spews a lot of stuff
    # could make us spend all our time reading a 1G console log file.
    # FIXME: move this to configuration
    max_size = 3000

    # Read anything new since the last time we read -- this relies
    # on us having an exclusive lock on the target
    try:
        offset = os.fstat(ofd).st_size
        ts_start = time.time()
        target.report_info("reading from console %s @%d at %.2fs [%s]"
                           % (console_id_name, offset,
                              ts_start - expecter.ts0, of.name),
                           dlevel=3)
        # The file doubles as our buffering and accounting system;
        # read_to_fd() bypasses caching, so flush before and sync
        # after the read.
        of.flush()
        total_bytes = target.rtb.rest_tb_target_console_read_to_fd(
            ofd, target.rt, console, offset, max_size, target.ticket)
        of.flush()
        os.fsync(ofd)
        ts_end = time.time()
        target.report_info("read from console %s @%d %dB at %.2fs (%.2fs) "
                           "[%s]"
                           % (console_id_name, offset, total_bytes,
                              ts_end - expecter.ts0, ts_end - ts_start,
                              of.name),
                           dlevel=3)
        # FIXME: do we want to print some debug of what we read? how
        # do we do it for huge files anyway?
        expecter._consoles.add((target, console))

    except requests.exceptions.HTTPError as e:
        raise tc.blocked_e("error reading console %s: %s\n"
                           % (console_id_name, e),
                           {"error trace": traceback.format_exc()})
    # Don't count this as something that we need to treat as succesful
    return None
Example #4
0
    def __init__(self,
                 state_path,
                 url,
                 ignore_ssl=False,
                 aka=None,
                 ca_path=None):
        """Create a proxy for a target broker, optionally loading state
        (like cookies) previously saved.

        :param str state_path: Path prefix where to load state from
        :param str url: URL for which we are loading state
        :param bool ignore_ssl: Ignore server's SSL certificate
           validation (use for self-signed certs).
        :param str aka: Short name for this server; defaults to the
           hostname (sans domain) of the URL.
        :param str ca_path: Path to SSL certificate or chain-of-trust bundle
        """
        self._url = url
        self._base_url = None
        self.cookies = {}
        self.valid_session = None
        if ignore_ssl:
            self.verify_ssl = False
        elif ca_path:
            # requests accepts a CA bundle path as the verify value
            self.verify_ssl = ca_path
        else:
            self.verify_ssl = True
        self.lock = threading.Lock()
        self.parsed_url = urlparse.urlparse(url)
        if aka is None:
            self.aka = self.parsed_url.hostname.split('.')[0]
        else:
            assert isinstance(aka, basestring)
            self.aka = aka
        # Load state
        url_safe = commonl.file_name_make_safe(url)
        file_name = state_path + "/cookies-%s.pickle" % url_safe
        try:
            # pickles are binary; open in binary mode so loading is
            # not corrupted by newline translation (e.g. on Windows)
            with open(file_name, "rb") as f:
                self.cookies = cPickle.load(f)
            logger.info("%s: loaded state", file_name)
        except cPickle.UnpicklingError:  # invalid state, clean file
            os.remove(file_name)
        except IOError as e:
            if e.errno != errno.ENOENT:
                # bare raise keeps the original traceback
                raise
            else:
                logger.debug("%s: no state-file, will not load", file_name)
Example #5
0
def rest_tb_target_images_upload(rtb, _images):  # COMPAT
    """
    Upload images from a list images

    :param dict rtb: Remote Target Broker
    :param _images: list of images, which can be specified as:

      - string with ``"IMAGE1:FILE1 IMAGE2:FILE2..."``
      - list or set of strings ``["IMAGE1:FILE1", "IMAGE2:FILE2", ...]``
      - list or set of tuples ``[("IMAGE1", "FILE1"), ("IMAGE2", "FILE2"), ...]``

    :returns: dict of remote image names keyed by image type (that can
      be fed straight to
      :meth:`tcfl.ttb_client.rest_target_broker.rest_tb_target_images_set`)

    :raises ValueError: on a malformed ``TYPE:FILE`` specification
    :raises TypeError: when *_images* or an entry in it has an
      unsupported type
    """
    images = []
    if isinstance(_images, basestring):
        # "IMAGE1:FILE1 IMAGE2:FILE2..." -- split on whitespace;
        # iterating the string directly would yield single characters,
        # not TYPE:FILE specifications
        for image_spec in _images.split():
            try:
                t, f = image_spec.split(":", 1)
                images.append((t, f))
            except ValueError as _e:
                raise ValueError("Bad image specification `%s` "
                                 "(expecting TYPE:FILE)" % image_spec)
    elif isinstance(_images, (set, list)):
        for image_spec in _images:
            if isinstance(image_spec, basestring):
                t, f = image_spec.split(":", 1)
                images.append((t, f))
            elif isinstance(image_spec, tuple) and len(image_spec) == 2:
                images.append(image_spec)
            else:
                raise TypeError("Invalid image specification %s" % image_spec)
    else:
        raise TypeError("_images is type %s" % type(_images).__name__)

    remote_images = {}
    for image_type, local_filename in images:
        logger.info("%s: uploading %s", rtb._url, local_filename)
        # remote name is NAME-DIGEST so same-named files with
        # different content don't collide in the server's file space
        digest = commonl.hash_file(hashlib.sha256(), local_filename)\
                        .hexdigest()[:10]
        remote_filename = commonl.file_name_make_safe(
            os.path.abspath(local_filename)) + "-" + digest
        rtb.rest_tb_file_upload(remote_filename, local_filename)
        remote_images[image_type] = remote_filename

    return remote_images
Example #6
0
    def tb_state_save(self, filepath):
        """Save cookies in *filepath* so they can be loaded when the
        object is created.

        :param str filepath: Directory where to save the state file to;
          created if it does not exist.
        """
        url_safe = commonl.file_name_make_safe(self._url)
        if not os.path.isdir(filepath):
            logger.warning("%s: created state storage directory", filepath)
            os.mkdir(filepath)
        fname = filepath + "/cookies-%s.pickle" % url_safe
        # 0o600: cookies are credentials, keep them readable only by
        # the user; binary mode is required for pickle protocol 2,
        # which is a binary format
        with os.fdopen(os.open(fname, os.O_CREAT | os.O_WRONLY, 0o600),
                       "wb") as f, \
                self.lock:
            cPickle.dump(self.cookies, f, protocol=2)
            logger.debug("%s: state saved %s", self._url,
                         pprint.pformat(self.cookies))
Example #7
0
    def _finalize_junit_domain(self, _tc, domain):
        # Find all the junit-*.pickle files dropped by _mkreport()
        # above and collect them into a testsuite, writing an XML in
        # the CWD called junit.xml (junit-DOMAIN.xml for non-default
        # domains).

        reports = []
        domain_path_glob = os.path.join(tcfl.tc.tc_c.tmpdir, "junit", domain,
                                        "*.pickle")
        for filename in glob.glob(domain_path_glob):
            # pickles are binary; open in binary mode so loading is
            # not corrupted by newline translation (e.g. on Windows)
            with open(filename, "rb") as f:
                jtc = cPickle.load(f)
                reports.append(jtc)

        ts = junit_xml.TestSuite(
            self.junit_suite_name % _tc.kws,
            reports,
            # I'd like:
            # hostname = _tc.kws['target_group_info'],
            # but it can't, because each TC in the suite is run in a
            # different target group. Maybe at some point TestCase
            # will support hostname?
            hostname=None,
            id=_tc.kws['runid'],
            package=self.junit_suite_package % _tc.kws,
            timestamp=time.time(),
            properties=self.junit_suite_properties,  # Dictionary
        )
        # the TestSuite holds its own reference now; drop ours early
        del reports

        if domain == "default":
            junit_filename = "junit.xml"
        else:
            junit_filename = commonl.file_name_make_safe("junit-%s.xml" %
                                                         domain,
                                                         extra_chars="")
        with codecs.open(junit_filename,
                         'w',
                         encoding='utf-8',
                         errors='ignore') as f:
            junit_xml.TestSuite.to_file(f, [ts], prettyprint=True)
Example #8
0
    def __init__(self,
                 state_path,
                 url,
                 ignore_ssl=False,
                 aka=None,
                 ca_path=None):
        """Create a proxy for a target broker, optionally loading state
        (like cookies) previously saved.

        :param str state_path: Path prefix where to load state from
        :param str url: URL for which we are loading state
        :param bool ignore_ssl: Ignore server's SSL certificate
           validation (use for self-signed certs).
        :param str aka: Short name for this server; defaults to the
           hostname (sans domain) of the URL.
        :param str ca_path: Path to SSL certificate or chain-of-trust bundle
        """
        self._url = url
        self._base_url = None
        self.cookies = {}
        self.valid_session = None
        if ignore_ssl:
            self.verify_ssl = False
        elif ca_path:
            # requests accepts a CA bundle path as the verify value
            self.verify_ssl = ca_path
        else:
            self.verify_ssl = True
        self.lock = threading.Lock()
        self.parsed_url = urlparse.urlparse(url)
        if aka is None:
            self.aka = self.parsed_url.hostname.split('.')[0]
        else:
            assert isinstance(aka, basestring)
            self.aka = aka
        # Load state
        url_safe = commonl.file_name_make_safe(url)
        file_name = state_path + "/cookies-%s.pickle" % url_safe
        try:
            # pickles are binary; open in binary mode so loading is
            # not corrupted by newline translation (e.g. on Windows)
            with open(file_name, "rb") as f:
                self.cookies = cPickle.load(f)
            logger.info("%s: loaded state", file_name)
        except cPickle.UnpicklingError:  # invalid state, clean file
            os.remove(file_name)
        except IOError as e:
            if e.errno != errno.ENOENT:
                # bare raise keeps the original traceback
                raise
            else:
                logger.debug("%s: no state-file, will not load", file_name)
        #: Version of the code ran by the server; filled up when we do
        #: the first target list, from the target local, metadata
        #: versions.server; done in _rts_get() _> _rt_list_to_dict()
        #:
        #: Used to adjust backwards compat for short periods of time
        self.server_version = None

        #: Major version of the server code
        self.server_version_major = None
        #: Minor version of the server code
        self.server_version_minor = None
        #: Patchlevel of the server code
        self.server_version_pl = 0
        #: Changes since the major/minor version tag
        self.server_version_changes = None
        #: Commit ID of the server code
        self.server_version_commit = None

        #: Server can take all arguments JSON encoded
        #:
        #: Previous servers would only take as JSON things that are
        #: not strings, numbers, bools. Newer can do both, since this
        #: allows the server to also be used from the curl command
        #: line without much ado.
        #:
        #: This allows us to transition the code without major changes
        #: to the code or hard dependencies.
        self.server_json_capable = True
Example #9
0
    def flash(self, images, upload=True):
        """Flash images onto target

        >>> target.images.flash({
        >>>         "kernel-86": "/tmp/file.bin",
        >>>         "kernel-arc": "/tmp/file2.bin"
        >>>     }, upload = True)

        or:

        >>> target.images.flash({
        >>>         "vmlinuz": "/tmp/vmlinuz",
        >>>         "initrd": "/tmp/initrd"
        >>>     }, upload = True)

        If *upload* is set to true, this function will first upload
        the images to the server and then flash them.

        :param dict images: dictionary keyed by (str) image type of
          things to flash in the target. e.g.:

          The types of images supported are determined by the target's
          configuration and can be reported with :meth:`list` (or
          command line *tcf images-list TARGETNAME*).

        :param bool upload: (optional) the image names are local files
          that need to be uploaded first to the server (this function
          will take care of that).

        :raises AssertionError: if *images* is not a dictionary of
          strings to strings
        :raises RuntimeError: if the target only supports the old
          images interface
        """
        if isinstance(images, dict):
            for k, v in images.items():
                assert isinstance(k, basestring) \
                    and isinstance(v, basestring), \
                    "images has to be a dictionary IMAGETYPE:IMAGEFILE;" \
                    " all strings; %s, %s (%s, %s)" \
                    % (k, v, type(k), type(v))
        else:
            raise AssertionError(
                "images has to be a dictionary IMAGETYPE:IMAGEFILE; got %s" \
                % type(images))
        if self.compat:
            raise RuntimeError("target does not support new images"
                               " interface, use set() or upload_set()")

        target = self.target
        images_str = " ".join("%s:%s" % (k, v) for k, v in images.items())

        # if we have to upload them, then we'll transform the names to
        # point to the names we got when uploading
        if upload:
            # Ok, we need to upload--the names in the dictionary point
            # to local filenames relative to the dir where we are
            # from, or absolute. Upload them to the server file space
            # for the user and give them a local name in there.
            _images = {}
            target.report_info("uploading: " + images_str, dlevel=2)
            for img_type, img_name in images.iteritems():
                # the remote name will be NAME-DIGEST, so if multiple
                # testcases for the same user are uploading files with
                # the same name but different context, they don't
                # collide
                digest = commonl.hash_file(hashlib.sha256(), img_name)
                img_name_remote = \
                    commonl.file_name_make_safe(os.path.abspath(img_name)) \
                    + "-" + digest.hexdigest()[:10]
                target.rtb.rest_tb_file_upload(img_name_remote, img_name)
                _images[img_type] = img_name_remote
            # report once after the whole set is up; reporting inside
            # the loop would claim all images were uploaded after each
            # single upload
            target.report_info("uploaded: " + images_str, dlevel=1)
        else:
            _images = images

        # We don't do retries here, we leave it to the server
        target.report_info("flashing: " + images_str, dlevel=2)
        target.ttbd_iface_call("images", "flash", images=_images)
        target.report_info("flashed: " + images_str, dlevel=1)
Example #10
0
    def _list(cls, dirname, native_bindir, java_home, java_version):
        # List testsuites' testcases with jtreg
        #
        # Runs jtreg's lister (-l) on the testsuite in *dirname* and
        # returns (testsuite_path, [testcase names]).

        if jtreg_dir is None:
            raise tcfl.tc.blocked_e(
                "please export env JTREG_DIR to point to the path "
                "of the built JTreg package; see documentation")

        # expand .. /  ... etc
        # FIXME: _dirname is computed but never used; the raw dirname
        # is what gets passed to jtreg below -- confirm intent
        _dirname = os.path.realpath(dirname)
        logging.error("WIP: scanning test suite %s, will take a few secs",
                      dirname)
        if native_bindir:
            # if we have bindirs with native code built, we have put
            # them in /opt/SOMETHING; do the string so we can run
            # those testcases
            native_cmdline = "-exclude:%s/exclude.list" % native_bindir
        else:
            native_cmdline = ""
        _env = dict(os.environ)
        _env["JAVA_HOME"] = java_home
        _env["PATH"] = java_home + "/bin:" + _env.get("PATH", "")
        # we have set JAVA_HOME and the PATH to *that* JAVA_HOME,
        # so `java`  shall run that version period the end.
        # We need to specify a JTWork directory that is specific to
        # the version of java we are running, so when we are executing
        # things from different versions at the same time they do not
        # conflict with each other
        jtwork_dir = os.path.join(
            cls.tmpdir, commonl.file_name_make_safe(dirname),
            "JTWork-%s" % commonl.file_name_make_safe(java_version))
        # SECURITY NOTE: the command line is built by string
        # interpolation and run with shell=True; dirname and
        # native_bindir must come from trusted configuration, never
        # from untrusted input
        output = subprocess.check_output(
            "java -jar %s/lib/jtreg.jar %s -l -w:'%s' '%s'" %
            (jtreg_dir, native_cmdline, jtwork_dir, dirname),
            shell=True,
            env=_env,
            stderr=subprocess.STDOUT)
        # So that thing prints
        #
        ## $ java -jar %s/lib/jtreg.jar -l PATH/DIRNAME/def/1.java
        ## output 0 Directory "JTwork" not found: creating
        ## Testsuite: PATH
        ## ....
        ## dirname/DEF/1.java#id0
        ## dirname/DEF/1.java#id1
        ## dirname/DEF/1.java#id2
        ## ...
        ## Tests found: 3
        #
        # All merged on stdout, yay
        #
        tcs = []
        ts = None
        for line in output.splitlines():
            if line.startswith("Testsuite: "):
                ts = line[len("Testsuite: "):]
            elif line.startswith("Tests found: "):
                continue  # bleh
            else:
                line = line.strip()
                if line:  # skip blank lines in the tool's chatter
                    tcs.append(line)
        return ts, tcs
Example #11
0
    def flash(self,
              images,
              upload=True,
              timeout=None,
              soft=False,
              hash_target_name=True):
        """Flash images onto target

        >>> target.images.flash({
        >>>         "kernel-86": "/tmp/file.bin",
        >>>         "kernel-arc": "/tmp/file2.bin"
        >>>     }, upload = True)

        or:

        >>> target.images.flash({
        >>>         "vmlinuz": "/tmp/vmlinuz",
        >>>         "initrd": "/tmp/initrd"
        >>>     }, upload = True)

        If *upload* is set to true, this function will first upload
        the images to the server and then flash them.

        :param dict images: dictionary keyed by (str) image type of
          things to flash in the target. e.g.:

          The types of images supported are determined by the target's
          configuration and can be reported with :meth:`list` (or
          command line *tcf images-ls TARGETNAME*).

        :param int timeout: (optional) seconds to wait for the
          operation to complete; defaults to whatever the interface
          declares in property
          *interfaces.images.IMAGETYPE.estimated_duration*.

          This is very tool and file specific; a bigger file with a
          slow tool is going to take way longer than a smaller file
          with a fast one.

        :param bool upload: (optional) the image names are local files
          that need to be uploaded first to the server (this function
          will take care of that).

        :param bool soft: (optional, default *False*) if *True*, it
          will only flash an image if the hash of the file is
          different to the hash of the last image recorded in that
          image type (or if there is no record of anything having been
          flashed).

        :param bool hash_target_name: (optional, default *True*)
          prefix the remote file name with the target's name, so
          uploads of the same file for different targets don't collide.
        """
        if isinstance(images, dict):
            for k, v in images.items():
                assert isinstance(k, basestring) \
                    and isinstance(v, basestring), \
                    "images has to be a dictionary IMAGETYPE:IMAGEFILE;" \
                    " all strings; %s, %s (%s, %s)" \
                    % (k, v, type(k), type(v))
        else:
            raise AssertionError(
                "images has to be a dictionary IMAGETYPE:IMAGEFILE; got %s" \
                % type(images))

        target = self.target
        images_str = " ".join("%s:%s" % (k, v) for k, v in images.items())

        if timeout is None:
            # no explicit timeout: add up the estimated duration the
            # server declares for each image type (default 60s each)
            timeout = 0
            for image_type, image in images.items():
                images_data = target.rt['interfaces']['images']
                image_data = images_data.get(image_type, None)
                if image_data is None:
                    raise tc.blocked_e(
                        "%s: image type '%s' not available" %
                        (target.id, image_type), dict(target=target))
                timeout += image_data.get("estimated_duration", 60)
        else:
            assert isinstance(timeout, int)

        # if we have to upload them, then we'll transform the names to
        # point to the names we got when uploading
        if upload:
            # Ok, we need to upload--the names in the dictionary point
            # to local filenames relative to the dir where we are
            # from, or absolute. Upload them to the server file space
            # for the user and give them a local name in there.
            _images = {}
            target.report_info("uploading: " + images_str, dlevel=2)
            for img_type, img_name in images.iteritems():
                # the remote name will be NAME-DIGEST, so if multiple
                # testcases for the same user are uploading files with
                # the same name but different content / target, they don't
                # collide
                hd = commonl.hash_file_maybe_compressed(
                    hashlib.sha512(), img_name)
                img_name_remote = \
                    hd[:10] \
                    + "-" + commonl.file_name_make_safe(os.path.abspath(img_name))
                if hash_target_name:
                    # put the target name first, otherwise we might
                    # alter the extension that the server relies on to
                    # autodecompress if need to
                    img_name_remote = target.id + "-" + img_name_remote
                last_sha512 = target.rt['interfaces']['images']\
                    [img_type].get('last_sha512', None)
                if soft and last_sha512 == hd:
                    # soft mode -- don't flash again if the last thing
                    # flashed has the same hash as what we want to flash
                    target.report_info(
                        "%s:%s: skipping (soft flash: SHA512 match %s)" %
                        (img_type, img_name, hd),
                        dlevel=1)
                    continue
                target.report_info("uploading: %s %s" % (img_type, img_name),
                                   dlevel=3)
                target.store.upload(img_name_remote, img_name)
                _images[img_type] = img_name_remote
                target.report_info("uploaded: %s %s" % (img_type, img_name),
                                   dlevel=2)
            target.report_info("uploaded: " + images_str, dlevel=1)
        else:
            _images = images

        if _images:
            # We don't do retries here, we leave it to the server
            target.report_info("flashing: " + images_str, dlevel=2)
            target.ttbd_iface_call("images",
                                   "flash",
                                   images=_images,
                                   timeout=timeout)
            target.report_info("flashed: " + images_str, dlevel=1)
        else:
            target.report_info("flash: all images soft flashed", dlevel=1)
def console_mk_code(target, console):
    # This mimics console_rx_eval
    #
    # Map an unset/empty console to the "default" name and build a
    # buffer key unique per target/console.
    if console in (None, ""):
        console_id_name = "default"
    else:
        console_id_name = console
    code = "console-rx-%s-%s" % (
        commonl.file_name_make_safe(target.fullid), console_id_name)
    return console_id_name, code
Example #13
0
def console_mk_code(target, console):
    # This mimics console_rx_eval
    #
    # None or "" selects the default console; the returned code keys
    # the expecter buffers per target/console.
    console_id_name = console if console not in (None, "") else "default"
    safe_id = commonl.file_name_make_safe(target.fullid)
    return console_id_name, "-".join(["console-rx", safe_id,
                                      console_id_name])
Example #14
0
def console_mk_code(target, console):
    # This mimics console_rx_eval
    #
    # Resolve the console name through the console extension, then
    # build a buffer key unique per target/console.
    _console = target.console._console_get(console)
    code = "-".join([
        "console-rx",
        commonl.file_name_make_safe(target.fullid),
        _console,
    ])
    return _console, code