Example #1
    def test_wrong_type_add(self):
        with open("metric.unknown", "w+") as fd:
            fd.write("unknown")
            fd.flush()

        ret = main(["add", "metric.unknown"])
        self.assertEqual(ret, 0)

        with MockLoggerHandlers(logger.logger):
            logger.logger.handlers[1].stream = StringIO()
            ret = main(["metrics", "add", "metric.unknown", "-t", "unknown"])
            self.assertEqual(ret, 1)
            self.assertIn(
                "failed to add metric file 'metric.unknown' - metric type "
                "'unknown' is not supported, must be one of "
                "[raw, json, csv, tsv, hcsv, htsv]",
                logger.logger.handlers[1].stream.getvalue(),
            )

            ret = main(["metrics", "add", "metric.unknown", "-t", "raw"])
            self.assertEqual(ret, 0)

            logger.logger.handlers[0].stream = StringIO()
            ret = main(["metrics", "show", "metric.unknown"])
            self.assertEqual(ret, 0)
            self.assertIn(
                "\tmetric.unknown: unknown",
                logger.logger.handlers[0].stream.getvalue(),
            )
Example #2
    def test_wrong_type_modify(self):
        with open("metric.unknown", "w+") as fd:
            fd.write("unknown")
            fd.flush()

        ret = main(["run", "-m", "metric.unknown"])
        self.assertEqual(ret, 0)

        with MockLoggerHandlers(logger.logger):
            logger.logger.handlers[1].stream = StringIO()
            ret = main(
                ["metrics", "modify", "metric.unknown", "-t", "unknown"])
            self.assertEqual(ret, 1)
            self.assertIn(
                "failed to modify metric file settings - metric type 'unknown'"
                " is not supported, must be one of [raw, json, csv, tsv, hcsv, htsv]",
                logger.logger.handlers[1].stream.getvalue(),
            )

            ret = main(["metrics", "modify", "metric.unknown", "-t", "CSV"])
            self.assertEqual(ret, 0)

            logger.logger.handlers[0].stream = StringIO()
            ret = main(["metrics", "show", "metric.unknown"])
            self.assertEqual(ret, 0)
            self.assertIn(
                "\tmetric.unknown: unknown",
                logger.logger.handlers[0].stream.getvalue(),
            )
Example #3
    def setUp(self):
        logger.logger.handlers = [
            logger.logging.StreamHandler(),
            logger.logging.StreamHandler(),
        ]
        logger.logger.handlers[0].stream = StringIO()
        logger.logger.handlers[1].stream = StringIO()
        logger.set_default_level()
        self.color_patch.start()
Example #4
    def setUp(self):
        logger.logger.handlers = [
            logger.logging.StreamHandler(),
            logger.logging.StreamHandler(),
        ]
        logger.logger.handlers[0].stream = StringIO()
        logger.logger.handlers[1].stream = StringIO()
        logger.set_default_level()

        self.consoleColorRemover = ConsoleFontColorsRemover()
        self.consoleColorRemover.__enter__()
Example #5
    def __write_dot(self, target, commands, outs):
        from dvc.utils.compat import StringIO
        import networkx
        from networkx.drawing.nx_pydot import write_dot

        _, edges, _ = self.__build_graph(target, commands, outs)
        edges = [edge[::-1] for edge in edges]

        simple_g = networkx.DiGraph()
        simple_g.add_edges_from(edges)

        dot_file = StringIO()
        write_dot(simple_g, dot_file)
        logger.info(dot_file.getvalue())
Example #6
    def _test(self):
        url = get_local_url()
        self.main(["remote", "add", "-d", TEST_REMOTE, url])

        self.dvc.add(self.FOO)
        stage = self.dvc.run(deps=["foo"], outs=["bar"], cmd="echo bar > bar")
        self.main(["push"])

        stage_file_path = stage.relpath
        with open(stage_file_path, "r") as stage_file:
            content = yaml.safe_load(stage_file)
        del content["outs"][0]["md5"]
        with open(stage_file_path, "w") as stage_file:
            yaml.dump(content, stage_file)

        with MockLoggerHandlers(logger.logger):
            logger.logger.handlers[0].stream = StringIO()
            self.main(["status", "-c"])
            self.assertIn(
                "Warning: Output 'bar'(Stage: 'bar.dvc') is "
                "missing version info. Cache for it will not be "
                "collected. Use dvc repro to get your pipeline up to "
                "date.",
                logger.logger.handlers[0].stream.getvalue(),
            )
Example #7
def _format_csv(content, delimiter):
    """Format delimited text to have same column width.

    Args:
        content (str): The content of a metric.
        delimiter (str): Value separator

    Returns:
        str: Formatted content.

    Example:

        >>> content = (
            "value_mse,deviation_mse,data_set\n"
            "0.421601,0.173461,train\n"
            "0.67528,0.289545,testing\n"
            "0.671502,0.297848,validation\n"
        )
        >>> _format_csv(content, ",")

        "value_mse  deviation_mse   data_set\n"
        "0.421601   0.173461        train\n"
        "0.67528    0.289545        testing\n"
        "0.671502   0.297848        validation\n"
    """
    reader = csv_reader(StringIO(content), delimiter=builtin_str(delimiter))
    rows = [row for row in reader]
    max_widths = [max(map(len, column)) for column in zip(*rows)]

    lines = [
        " ".join("{entry:{width}}".format(entry=entry, width=width + 2)
                 for entry, width in zip(row, max_widths)) for row in rows
    ]

    return "\n".join(lines)
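A quick usage sketch for `_format_csv`, here fed tab-separated content; the call mirrors the docstring example above, and only the sample data is made up:

tsv_content = (
    "value_mse\tdeviation_mse\tdata_set\n"
    "0.421601\t0.173461\ttrain\n"
    "0.67528\t0.289545\ttesting\n"
)
# Each column comes back padded to its widest entry plus two spaces,
# so the rows line up when printed.
print(_format_csv(tsv_content, "\t"))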
Example #8
    def test_wrong_type_show(self):
        with open("metric.unknown", "w+") as fd:
            fd.write("unknown")
            fd.flush()

        ret = main(["run", "-m", "metric.unknown"])
        self.assertEqual(ret, 0)

        with MockLoggerHandlers(logger.logger):
            logger.logger.handlers[0].stream = StringIO()
            ret = main(
                [
                    "metrics",
                    "show",
                    "metric.unknown",
                    "-t",
                    "unknown",
                    "-x",
                    "0,0",
                ]
            )
            self.assertEqual(ret, 0)
            self.assertIn(
                "\tmetric.unknown: unknown",
                logger.logger.handlers[0].stream.getvalue(),
            )
Example #9
def _load_user_ssh_config(hostname):
    user_config_file = RemoteSSH.ssh_config_filename()
    user_ssh_config = dict()
    if hostname and os.path.exists(user_config_file):
        ssh_config = paramiko.SSHConfig()
        with open(user_config_file) as f:
            # For whatever reason parsing directly from f is unreliable
            f_copy = StringIO(f.read())
            ssh_config.parse(f_copy)
        user_ssh_config = ssh_config.lookup(hostname)
    return user_ssh_config
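The same parse-from-StringIO trick can be exercised directly against paramiko; a minimal sketch, with a made-up host entry:

import paramiko
from io import StringIO

config_text = (
    "Host example\n"
    "    HostName example.com\n"
    "    User alice\n"
    "    Port 2222\n"
)
ssh_config = paramiko.SSHConfig()
ssh_config.parse(StringIO(config_text))

entry = ssh_config.lookup("example")
# lookup() returns a dict of lowercased keywords, e.g.
# entry["hostname"] == "example.com", entry["user"] == "alice", entry["port"] == "2222"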
Example #10
    def _walk_exc(self, exc_info):
        import traceback

        buffer = StringIO()

        traceback.print_exception(*exc_info, file=buffer)

        exc = exc_info[1]
        tb = buffer.getvalue()

        exc_list = [str(exc)]
        tb_list = [tb]

        # NOTE: parsing chained exceptions. See dvc/exceptions.py for more info
        while hasattr(exc, "cause") and exc.cause:
            exc_list.append(str(exc.cause))
            if hasattr(exc, "cause_tb") and exc.cause_tb:
                tb_list.insert(0, str(exc.cause_tb))
            exc = exc.cause

        return exc_list, tb_list
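For context, the while loop above walks exceptions that expose a `cause` attribute (see dvc/exceptions.py); the class below is a stand-in with the same shape, not the real DvcException:

class ChainedError(Exception):
    # Stand-in carrying the `cause`/`cause_tb` attributes the loop inspects.
    def __init__(self, msg, cause=None, cause_tb=None):
        super(ChainedError, self).__init__(msg)
        self.cause = cause
        self.cause_tb = cause_tb


inner = ChainedError("disk full")
outer = ChainedError("failed to push data", cause=inner)
# _walk_exc would report exc_list == ["failed to push data", "disk full"]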
Example #11
    def _test(self, imp_urlparse_patch):
        logger.logger.handlers[1].stream = StringIO()
        page_address = "http://somesite.com/file_name"

        def dvc_exception(*args, **kwargs):
            raise DvcException("message")

        imp_urlparse_patch.side_effect = dvc_exception
        main(["import", page_address])
        self.assertIn(
            "Error: failed to import "
            "http://somesite.com/file_name. You could also try "
            "downloading it manually and adding it with `dvc add` "
            "command.",
            logger.logger.handlers[1].stream.getvalue(),
        )
Example #12
File: tree.py Project: rpip/dvc
    def open(self, path):
        relpath = os.path.relpath(path, self.git.working_dir)

        obj = self.git_object_by_path(path)
        if obj is None:
            msg = "No such file in branch '{}'".format(self.rev)
            raise IOError(errno.ENOENT, msg, relpath)
        if obj.mode == GIT_MODE_DIR:
            raise IOError(errno.EISDIR, "Is a directory", relpath)

        # GitPython's obj.data_stream is a fragile thing; it is better to
        # read it immediately. It also needs to be decoded if we follow
        # the `open()` behavior (since data_stream.read() returns bytes,
        # and `open` with the default "r" mode returns str).
        return StringIO(obj.data_stream.read().decode("utf-8"))
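The StringIO returned here behaves like a normal text-mode file handle; a small sketch, assuming `tree` is an already-constructed instance of this class and the path is a placeholder:

fobj = tree.open("/path/to/repo/Dvcfile")    # placeholder path inside the repo
print(fobj.readline())                       # str, not bytes, thanks to the decode above
fobj.close()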
Example #13
    def test(self):
        with MockLoggerHandlers(logger), ConsoleFontColorsRemover():
            logger.handlers[1].stream = StringIO()

            ret = main(["add", self.FOO])
            self.assertEqual(0, ret)

            foo_stage = os.path.abspath(self.FOO + Stage.STAGE_FILE_SUFFIX)

            # corrupt stage file
            with open(foo_stage, "a+") as file:
                file.write("this will break yaml file structure")

            ret = main(["add", self.BAR])
            self.assertEqual(1, ret)

            self.assertIn(
                "unable to read stage file: {} "
                "YAML file structure is corrupted".format(foo_stage),
                logger.handlers[1].stream.getvalue(),
            )
Example #14
def to_yaml_string(data):
    stream = StringIO()
    yaml = YAML()
    yaml.default_flow_style = False
    yaml.dump(data, stream)
    return stream.getvalue()
Example #15
def from_yaml_string(s):
    return YAML().load(StringIO(s))
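A round-trip sketch for the two helpers above; it assumes the usual imports for these snippets, ruamel.yaml's YAML and a StringIO, which the listing does not show:

from io import StringIO
from ruamel.yaml import YAML

data = {"metrics": ["auc.json"], "epochs": 10}
text = to_yaml_string(data)        # block-style YAML, since default_flow_style is False
restored = from_yaml_string(text)
assert restored["epochs"] == 10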
Example #16
def reset_logger_standard_output():
    from dvc.logger import logger

    logger.handlers[0].stream = StringIO()
Example #17
def reset_logger_error_output():
    from dvc.logger import logger

    logger.handlers[1].stream = StringIO()
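These helpers pair with the assertion pattern used in the tests above; a hedged sketch of a test that uses one of them (the command and the expected message are placeholders, not real DVC output):

    def test_status_output(self):
        from dvc.logger import logger

        reset_logger_standard_output()      # swap in a fresh StringIO buffer
        ret = main(["status"])              # placeholder command
        self.assertEqual(ret, 0)

        out = logger.handlers[0].stream.getvalue()
        self.assertIn("Pipeline", out)      # placeholder expected substring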