Example #1
def new_demand_calculator(
    config: Union[str, dict],
    existing_nodes: Optional[List[SchedulerNode]] = None,
    node_mgr: Optional[NodeManager] = None,
    node_history: Optional[NodeHistory] = None,
    disable_default_resources: bool = False,
    node_queue: Optional[NodeQueue] = None,
    singleton_lock: Optional[SingletonLock] = NullSingletonLock(),
) -> DemandCalculator:
    config_dict = load_config(config)

    existing_nodes = existing_nodes or []

    if node_mgr is None:
        node_mgr = new_node_manager(
            config_dict,
            disable_default_resources=disable_default_resources,
        )
    else:
        logging.initialize_logging(config_dict)

        if not disable_default_resources:
            node_mgr.set_system_default_resources()

    node_history = node_history or SQLiteNodeHistory()

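    # Note the asymmetry: the default NullSingletonLock is a no-op, while an
    # explicit None requests a real lock built from the config below.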
    if singleton_lock is None:
        singleton_lock = new_singleton_lock(config_dict)

    dc = DemandCalculator(node_mgr, node_history, node_queue, singleton_lock)

    dc.update_scheduler_nodes(existing_nodes)

    return dc
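
# A hedged usage sketch (not part of the example above): the config path is an
# assumption for illustration. With only a config path, the factory supplies
# the node manager, SQLite-backed node history and singleton lock itself.
def _example_new_demand_calculator() -> DemandCalculator:
    return new_demand_calculator(
        "/opt/cycle/scalelib/autoscale.json",
        existing_nodes=[],  # no scheduler nodes known yet
    )
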
def test_include_json() -> None:
    # Issue: #5
    # Feature Request: support multiple autoscale.json files
    tempdir = tempfile.mkdtemp()
    try:
        a = os.path.join(tempdir, "a.json")
        b = os.path.join(tempdir, "b.json")
        c = os.path.join(tempdir, "c.json")

        try:
            load_config(a)
            assert False
        except ConfigurationException as e:
            assert a in str(e)

        with open(a, "w") as fw:
            json.dump({"include": ["b.json"]}, fw)

        assert os.path.exists(a)
        assert not os.path.exists(b)
        assert not os.path.exists(c)

        try:
            load_config(a)
            assert False
        except ConfigurationException as e:
            assert b in str(e)

        with open(b, "w") as fw:
            json.dump({"include": [c]}, fw)

        assert os.path.exists(a)
        assert os.path.exists(b)
        assert not os.path.exists(c)

        try:
            load_config(a)
            assert False
        except ConfigurationException as e:
            assert c in str(e)

        with open(a, "w") as fw:
            json.dump({"include": [b, c], "a": 1}, fw)
        with open(b, "w") as fw:
            json.dump({"b": 2}, fw)
        with open(c, "w") as fw:
            json.dump({"c": 3}, fw)

        actual = load_config(a)
        actual.pop("include")
        assert {"a": 1, "b": 2, "c": 3} == actual

        with open(a, "w") as fw:
            json.dump({"include": [b, c], "x": 1}, fw)
        with open(b, "w") as fw:
            json.dump({"x": 2}, fw)
        with open(c, "w") as fw:
            json.dump({"x": 3}, fw)

        actual = load_config(a)
        actual.pop("include")
        assert {"x": 3} == actual

        with open(a, "w") as fw:
            json.dump({"include": [b]}, fw)
        with open(b, "w") as fw:
            json.dump({"include": [a]}, fw)

        try:
            load_config(a)
            assert False
        except CircularIncludeError as e:
            expected = "Circular include found: {} ---> {} ---> {}".format(
                a, b, a)
            assert expected == str(e)

        def _rec_merge_test(ad: Dict, bd: Dict, expected: Dict) -> None:

            with open(a, "w") as fw:
                ad["include"] = [b]
                json.dump(ad, fw)

            with open(b, "w") as fw:
                json.dump(bd, fw)

            actual = load_config(a)
            actual.pop("include")
            assert actual == expected

        _rec_merge_test({"d0": {
            "k0": 0
        }}, {"d0": {
            "k1": 1
        }}, {"d0": {
            "k0": 0,
            "k1": 1
        }})

        _rec_merge_test(
            {"d0": {
                "d1": {
                    "k0": 0
                }
            }},
            {"d0": {
                "d1": {
                    "k1": 1
                }
            }},
            {"d0": {
                "d1": {
                    "k0": 0,
                    "k1": 1
                }
            }},
        )

        _rec_merge_test(
            {"d0": {
                "d1": [0]
            }},
            {"d0": {
                "d1": [1]
            }},
            {"d0": {
                "d1": [0, 1]
            }},
        )

        _rec_merge_test(
            {"d0": {
                "d1": [0]
            }},
            {"d0": {
                "d1": {
                    "a": 0
                }
            }},
            {"d0": {
                "d1": {
                    "a": 0
                }
            }},
        )

        _rec_merge_test(
            {"d0": {
                "d1": [{
                    "k0": 0
                }]
            }},
            {"d0": {
                "d1": [{
                    "k1": 1
                }]
            }},
            {"d0": {
                "d1": [{
                    "k0": 0
                }, {
                    "k1": 1
                }]
            }},
        )

        # check slurm converged
        # allow multiple configs (abspath them!)
        with open(a, "w") as fw:
            json.dump({"a": 1}, fw)
        with open(b, "w") as fw:
            json.dump({"b": 2}, fw)
        assert {"a": 1, "b": 2} == load_config(a, b)

    finally:
        shutil.rmtree(tempdir, ignore_errors=True)
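
# Merge semantics exercised by _rec_merge_test above, in brief: nested dicts
# merge key-by-key, lists concatenate, and on a type mismatch the included
# value replaces the including one. When several files set the same scalar,
# the last include wins:
#
#   {"d0": {"k0": 0}} + {"d0": {"k1": 1}}  ->  {"d0": {"k0": 0, "k1": 1}}
#   {"d0": [0]}       + {"d0": [1]}        ->  {"d0": [0, 1]}
#   a(x=1) includes b(x=2), c(x=3)         ->  {"x": 3}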
Example #4

def new_rest_client(config: Dict[str, Any]) -> HpcRestClient:

    hpcpack_config = config.get('hpcpack') or {}
    hpc_pem_file = hpcpack_config.get('pem')
    hn_hostname = hpcpack_config.get('hn_hostname')
    return HpcRestClient(config, pem=hpc_pem_file, hostname=hn_hostname)
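
# A hedged usage sketch: the 'hpcpack' section is optional and both keys
# default to None when absent; the values below are assumptions for
# illustration.
def _example_new_rest_client() -> HpcRestClient:
    config = {
        "hpcpack": {
            "pem": "/opt/cycle/hpcpack/client.pem",  # hypothetical pem path
            "hn_hostname": "headnode.example.com",  # hypothetical head node
        }
    }
    return new_rest_client(config)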


if __name__ == "__main__":

    config_file = ""
    if len(sys.argv) > 1:
        config_file = sys.argv[1]

    dry_run = False
    if len(sys.argv) > 2:
        dry_run = ci_in(sys.argv[2], ['true', 'dryrun'])

    ctx_handler = register_result_handler(
        DefaultContextHandler("[initialization]"))
    config = load_config(config_file)
    logging.initialize_logging(config)
    logging.info(
        "------------------------------------------------------------------------"
    )
    if config["autoscale"]["start_enabled"]:
        autoscale_hpcpack(config, ctx_handler=ctx_handler, dry_run=dry_run)
    else:
        logging.info("Autoscaler is not enabled")
def main(argv: Optional[Iterable[str]] = None) -> None:
    default_install_dir = os.path.join("/", "opt", "cycle", "gridengine")

    parser = ArgumentParser()
    sub_parsers = parser.add_subparsers()

    def csv_list(value: str) -> List[str]:
        return [x.strip() for x in value.split(",")]

    help_msg = io.StringIO()

    def add_parser(
        name: str, func: Callable, read_only: bool = True, skip_config: bool = False
    ) -> ArgumentParser:
        doc_str = (func.__doc__ or "").strip()
        doc_str = " ".join([x.strip() for x in doc_str.splitlines()])
        help_msg.write("\n    {:20} - {}".format(name, doc_str))

        default_config: Optional[str]
        default_config = os.path.join(default_install_dir, "autoscale.json")
        if not os.path.exists(default_config):
            default_config = None

        new_parser = sub_parsers.add_parser(name)
        new_parser.set_defaults(func=func, read_only=read_only)

        if skip_config:
            return new_parser

        new_parser.add_argument(
            "--config",
            "-c",
            # action="append" needs a list default so default and user-supplied
            # paths alike splat cleanly into load_config(*args.config) below
            default=[default_config] if default_config else None,
            required=not bool(default_config),
            action="append",
        )
        return new_parser

    def str_list(c: str) -> List[str]:
        return c.split(",")

    def add_parser_with_columns(
        name: str, func: Callable, read_only: bool = True
    ) -> ArgumentParser:
        parser = add_parser(name, func, read_only)

        def parse_format(c: str) -> str:
            c = c.lower()
            if c in ["json", "table", "table_headerless"]:
                return c
            print("Expected json, table or table_headerless - got", c, file=sys.stderr)
            sys.exit(1)

        parser.add_argument("--output-columns", "-o", type=str_list)
        parser.add_argument("--output-format", "-F", type=parse_format)
        return parser

    add_parser_with_columns("autoscale", autoscale, read_only=False)

    add_parser_with_columns("buckets", buckets).add_argument(
        "--constraint-expr", "-C", default="[]"
    )

    add_parser("complexes", complexes).add_argument(
        "-a", "--include-irrelevant", action="store_true", default=False
    )

    delete_parser = add_parser("delete_nodes", delete_nodes, read_only=False)
    delete_parser.add_argument("-H", "--hostnames", type=str_list, default=[])
    delete_parser.add_argument("-N", "--node-names", type=str_list, default=[])
    delete_parser.add_argument("--force", action="store_true", default=False)

    remove_parser = add_parser("remove_nodes", remove_nodes, read_only=False)
    remove_parser.add_argument("-H", "--hostnames", type=str_list, default=[])
    remove_parser.add_argument("-N", "--node-names", type=str_list, default=[])
    remove_parser.add_argument("--force", action="store_true", default=False)

    add_parser_with_columns("demand", demand).add_argument(
        "--jobs", "-j", default=None, required=False
    )

    add_parser("drain_node", drain_node, read_only=False).add_argument(
        "-H", "--hostname", required=True
    )

    initconfig_parser = add_parser(
        "initconfig", initconfig, read_only=False, skip_config=True
    )

    initconfig_parser.add_argument("--cluster-name", required=True)
    initconfig_parser.add_argument("--username", required=True)
    initconfig_parser.add_argument("--password")
    initconfig_parser.add_argument("--url", required=True)
    initconfig_parser.add_argument(
        "--log-config",
        default=os.path.join(default_install_dir, "logging.conf"),
        dest="logging__config_file",
    )
    initconfig_parser.add_argument(
        "--lock-file", default=os.path.join(default_install_dir, "scalelib.lock")
    )
    initconfig_parser.add_argument(
        "--default-resource",
        type=json.loads,
        action="append",
        default=[],
        dest="default_resources",
    )
    initconfig_parser.add_argument(
        "--relevant-complexes",
        default=["slots", "slot_type", "exclusive"],
        type=csv_list,
        dest="gridengine__relevant_complexes",
    )

    initconfig_parser.add_argument(
        "--idle-timeout", default=300, type=int, dest="idle_timeout"
    )
    initconfig_parser.add_argument(
        "--boot-timeout", default=1800, type=int, dest="boot_timeout"
    )
    initconfig_parser.add_argument(
        "--disable-pgs-for-pe",
        default=[],
        type=str,
        action="append",
        help="Disable creation of placement groups for a parallel environment. "
        + "This can be invoked more than once.",
        dest="disable_pgs_for_pe",
    )
    initconfig_parser.add_argument(
        "--hostgroup-constraint",
        default=[],
        action="append",
        dest="hostgroup_constraints",
    )

    add_parser("jobs", jobs)
    add_parser("jobs_and_nodes", jobs_and_nodes)

    add_parser("join_cluster", join_cluster).add_argument(
        "-H", "--hostname", type=str_list, required=True
    )

    add_parser_with_columns("nodes", nodes).add_argument(
        "--constraint-expr", "-C", default="[]"
    )

    add_parser("scheduler_nodes", scheduler_nodes)

    help_msg.write("\nadvanced usage:")
    add_parser("validate", validate_func, read_only=True)
    add_parser("queues", queues, read_only=True)
    add_parser("shell", shell)
    analyze_parser = add_parser("analyze", analyze)
    analyze_parser.add_argument("--job-id", "-j", required=True)
    analyze_parser.add_argument("--wide", "-w", action="store_true", default=False)

    parser.usage = help_msg.getvalue()
    args = parser.parse_args(list(argv) if argv is not None else None)
    if not hasattr(args, "func"):
        parser.print_help()
        sys.exit(1)

    # parse list of config paths to a single config
    if hasattr(args, "config"):
        args.config = load_config(*args.config)
        logging.initialize_logging(args.config)

    if args.read_only:
        args.config["read_only"] = True
        args.config["lock_file"] = None

    kwargs = {}
    for k in dir(args):
        if k[0].islower() and k not in ["read_only", "func"]:
            kwargs[k] = getattr(args, k)

    try:
        args.func(**kwargs)
    except Exception as e:
        print(str(e), file=sys.stderr)
        if hasattr(e, "message"):
            print(getattr(e, "message"), file=sys.stderr)
        logging.debug("Full stacktrace", exc_info=sys.exc_info())
        sys.exit(1)
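
# Dispatch sketch: every lowercase attribute of the parsed Namespace except
# "read_only" and "func" is forwarded as a keyword argument, so each handler
# accepts exactly the flags registered on its sub-parser. A hypothetical
# handler for the "analyze" subcommand would therefore look like:
#
#   def analyze(config: Dict, job_id: str, wide: bool = False) -> None:
#       ...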