Example #1
def create_slideshow(
    files: List[Union[Path, str]],
    output_file: Union[Path, str],
    duration: float = 30.0,
):
    if not files:
        LOGGER.error("No image files specified! Nothing to do.")
        return

    files = [make_path(file).resolve() for file in files]
    output_file = make_path(output_file).resolve()

    with tempfile.TemporaryDirectory() as tmp:
        with WorkingDir(tmp):
            max_number_len = len(str(len(files)))

            for i, file in enumerate(files):
                os.symlink(
                    file,
                    ("{:0" + str(max_number_len) + "d}{}").format(
                        i, file.suffix),
                )

            cmd = [
                "ffmpeg",
                "-r",
                str(len(files) / duration),
                "-i",
                "%0" + str(max_number_len) + "d" + files[0].suffix,
                "output.mp4",
            ]
            subprocess.run(cmd, check=True)

            shutil.copy2("output.mp4", output_file)
Example #2
def cmd_archive(self: SimulationSetBase, args: argparse.Namespace):
    self.create_working_dir()
    with WorkingDir(self.working_dir.resolve().parent):
        compress_folder(
            self.working_dir.name,
            compression=args.compression,
            jobs=args.jobs,
        )
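
compress_folder is another helper external to this snippet. A minimal stand-in using only the standard library could look like the sketch below; this is an assumption, and the real helper presumably uses the jobs argument to drive a parallel compressor, which shutil cannot do:

import shutil


def compress_folder(folder: str, compression: str = "gztar", jobs: int = 1):
    # pack `folder` into an archive next to it; `jobs` is accepted for
    # interface compatibility but ignored in this sketch
    del jobs
    shutil.make_archive(folder, compression, base_dir=folder)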
Example #3
def cmd_list(self: SimulationBase, args: argparse.Namespace):
    del args

    self.create_working_dir()
    with WorkingDir(self.working_dir):
        run_doit(
            self.tasks_run,
            ["list", "--backend=json", "--db-file=doit.json"],
        )
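
run_doit is not shown either. A plausible sketch, assuming it forwards a list of doit task-creator callables together with command-line arguments to doit's DoitMain (the real wrapper may configure doit differently):

from doit.cmd_base import ModuleTaskLoader
from doit.doit_cmd import DoitMain


def run_doit(tasks, args):
    # expose the task creators under "task_*" names, as doit expects,
    # then run doit with the given command-line arguments
    creators = {f"task_{i}": task for i, task in enumerate(tasks)}
    return DoitMain(ModuleTaskLoader(creators)).run(args)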
Example #4
def cmd_propagation_status(self: SimulationSetBase, args: argparse.Namespace):
    total = 0.0
    self.logger.info("check propagation status of propagation: %s", args.name)
    with WorkingDir(self.working_dir):
        for simulation in self.simulations:
            progress = simulation.check_propagation_status(args.name)
            total += progress
            self.logger.info("sim %s: %6.2f%%", simulation.name,
                             progress * 100.0)
    total = total / len(self.simulations)
    self.logger.info("total: %6.2f%%", total * 100.0)
Example #5
def cmd_task_info(self: SimulationBase, args: argparse.Namespace):
    with WorkingDir(self.working_dir):
        run_doit(
            self.tasks_run,
            [
                "info",
                "--backend=json",
                "--db-file=doit.json",
                args.name,
            ],
        )
Example #6
def cmd_qsub(self: SimulationBase, args: argparse.Namespace):
    self.create_working_dir()
    call_dir = Path.cwd().absolute()
    script_path = Path(sys.argv[0]).absolute()
    with WorkingDir(self.working_dir):
        sge.submit(
            " ".join([sys.executable, str(script_path), "run"]),
            args,
            sge_dir=call_dir,
            job_name=self.name,
        )
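
sge.submit is likewise external. A rough sketch under the assumption that it writes a small job script and submits it with qsub; the actual helper presumably translates more of the options carried in args into SGE directives:

import subprocess
from pathlib import Path


def submit(command: str, args, sge_dir: Path, job_name: str):
    # hypothetical sketch: wrap the command in a job script and call qsub
    script = "\n".join([
        "#!/bin/bash",
        f"#$ -N {job_name}",  # job name
        f"#$ -wd {sge_dir}",  # working directory for the job
        command,
    ])
    script_file = Path("job.sh")
    script_file.write_text(script + "\n")
    subprocess.check_call(["qsub", str(script_file)])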
Example #7
def cmd_qdel(self: SimulationSetBase, args: argparse.Namespace):
    del args

    if not self.working_dir.exists():
        self.logger.warning(
            "working dir %s does not exist, do nothing",
            self.working_dir,
        )
        return

    with WorkingDir(self.working_dir):
        for simulation in self.simulations:
            simulation.main(["qdel"])
Example #8
def cmd_clean(self: SimulationBase, args: argparse.Namespace):
    self.create_working_dir()
    with WorkingDir(self.working_dir):
        with LockFile(Path("run.lock")):
            run_doit(
                self.tasks_clean,
                [
                    "--process=" + str(args.jobs),
                    "--backend=json",
                    "--db-file=doit.json",
                ],
            )
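
LockFile guards the working directory against concurrent runs; its definition is also not part of these examples. A minimal sketch, assuming it creates the lock file atomically and removes it on exit:

import os
from pathlib import Path


class LockFile:
    """Minimal sketch of a lock-file context manager (assumption)."""

    def __init__(self, path: Path):
        self.path = path
        self.fd: int = -1

    def __enter__(self) -> "LockFile":
        # O_EXCL makes creation atomic; this raises FileExistsError
        # if the lock is already held by another process
        self.fd = os.open(self.path, os.O_CREAT | os.O_EXCL | os.O_WRONLY)
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        os.close(self.fd)
        self.path.unlink()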
Example #9
def cmd_dry_run(self: SimulationBase, args: argparse.Namespace):
    del args

    self.create_working_dir()
    with WorkingDir(self.working_dir):
        with LockFile(Path("run.lock")):
            run_doit(
                self.tasks_dry_run,
                [
                    "--backend=json",
                    "--db-file=doit.json",
                ],
            )
Example #10
def cmd_qdel(self: SimulationBase, args: argparse.Namespace):
    del args

    if not self.working_dir.exists():
        self.logger.warning(
            "working dir %s does not exist, do nothing",
            self.working_dir,
        )
        return
    with WorkingDir(self.working_dir):
        if Path("sge_stop").exists():
            self.logger.warning("stopping job")
            subprocess.check_output("sge_stop")
Example #11
    def plot_foreach(
        self,
        name: str,
        func: Callable[[int, str, Parameters], None],
    ) -> Optional[List[Any]]:
        if not self.path:
            raise RuntimeError("No path set for parameter selection")

        plot_dir = self.path / "plots" / name
        plot_dir.mkdir(parents=True, exist_ok=True)

        with WorkingDir(plot_dir):
            return self.foreach(func)
Example #12
def cmd_qsub_array(self: SimulationSetBase, args: argparse.Namespace):
    self.logger.info("submitting simulation set as an array to SGE scheduler")
    self.create_working_dir()

    script_path = Path(sys.argv[0]).resolve()
    command = " ".join([sys.executable, str(script_path), "run-index"])

    with WorkingDir(self.working_dir):
        sge.submit_array(
            command,
            len(self.simulations),
            args,
            sge_dir=script_path.parent,
            job_name=self.name,
        )
Example #13
        def action_compute(targets: List[str]):
            path_psi = Path(self.psi).resolve()
            path_operator = Path(self.operator).resolve()
            path_wfn = Path(self.wfn).resolve()
            path_output = Path(self.output_file).resolve()
            path_temp = path_psi.parent / ("." + self.name)

            if path_temp.exists():
                shutil.rmtree(path_temp)

            path_temp.mkdir(parents=True)
            with WorkingDir(path_temp):
                shutil.copy(path_psi, "psi")
                shutil.copy(path_operator, "oper")
                shutil.copy(path_wfn, "restart")

                trafo_to_momentum_rep([self.momentum_operator], [1])

                cmd = [
                    "qdtk_analysis.x",
                    "-mtrafo",
                    "trafo_mom_rep",
                    "-opr",
                    "oper",
                    "-psi",
                    "psi",
                    "-rst",
                    "restart",
                ]
                env = os.environ.copy()
                env["OMP_NUM_THREADS"] = env.get("OMP_NUM_THREADS", "1")

                result = subprocess.run(cmd, env=env)
                if result.returncode != 0:
                    raise RuntimeError("Failed to run qdtk_analysis.x")

                with h5py.File(path_output, "w") as fptr:
                    add_momentum_distribution_to_hdf5(
                        fptr, *read_momentum_distribution_ascii("mom_distr_1"))

            shutil.rmtree(path_temp)
Example #14
    def task_request_wave_function_dry_run(self) -> Dict[str, Any]:
        db = load_db(self.db_path, self.variable_name)
        db.working_dir = self.db_path.parent / db.name

        with WorkingDir(self.db_path.parent):
            try:
                db.request(self.parameters, False)
            except MissingWfnError:
                pass

        @DoitAction
        def action_noop(targets):
            del targets

        return {
            "name": f"wfn:{self.name}:request_dry_run",
            "actions": [action_noop],
            "verbosity": 2,
        }
Example #15
def cmd_qsub(self: SimulationSetBase, args: argparse.Namespace):
    self.logger.info("submitting simulation set to SGE scheduler")
    self.create_working_dir()

    set_dir = self.working_dir.resolve()
    script_path = Path(sys.argv[0]).resolve()

    for index, simulation in enumerate(self.simulations):
        simulation_dir = set_dir / simulation.working_dir
        simulation_dir.mkdir(parents=True, exist_ok=True)
        with WorkingDir(simulation_dir):
            sge.submit(
                " ".join([sys.executable, str(script_path), "run-index", str(index)]),
                args,
                sge_dir=script_path.parent,
                job_name=simulation.name,
            )
Example #16
def cmd_graph(self: SimulationBase, args: argparse.Namespace):
    del args

    regex = re.compile(r"^\s+\".+\"\s*->\s*\".+\";$")

    self.create_working_dir()
    with WorkingDir(self.working_dir):
        run_doit(
            self.tasks_run,
            ["graph", "--backend=json", "--db-file=doit.json"],
        )

        with open("tasks.dot") as fptr:
            code = fptr.readlines()

        for i, line in enumerate(code):
            if regex.match(line):
                code[i] = line.replace(":", "\\n")

        with open("tasks.dot", "w") as fptr:
            fptr.writelines(code)
Example #17
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--restart",
        type=Path,
        default=Path("restart"),
        help="path to the restart file",
    )
    parser.add_argument(
        "--operator",
        type=Path,
        default=Path("operator"),
        help="path to the hamiltonian operator",
    )
    parser.add_argument(
        "--psi",
        type=Path,
        default=Path("psi"),
        help="path to the psi file",
    )
    parser.add_argument("--dof1",
                        type=int,
                        default=1,
                        help="first degree of freedom")
    parser.add_argument("--dof2",
                        type=int,
                        default=1,
                        help="second degree of freedom")
    parser.add_argument("-o",
                        "--output",
                        type=Path,
                        help="path for the output file")
    args = parser.parse_args()

    restart_file = Path(args.restart).resolve()
    operator_file = Path(args.operator).resolve()
    psi_file = Path(args.psi).resolve()
    basename = f"dmat2_dof{args.dof1}_dof{args.dof2}"
    if not args.output:
        output_file = Path.cwd() / (basename + "_gridrep.h5")
    else:
        output_file = Path(args.output).resolve()

    with tempfile.TemporaryDirectory() as tmpdir:
        with WorkingDir(tmpdir):
            copy_file(restart_file, "restart")
            copy_file(operator_file, "operator")
            copy_file(psi_file, "psi")
            subprocess.run(
                [
                    "qdtk_analysis.x",
                    "-dmat2",
                    "-rst",
                    "restart",
                    "-opr",
                    "operator",
                    "-psi",
                    "psi",
                    "-dof",
                    str(args.dof1),
                    "-dofB",
                    str(args.dof2),
                ],
                check=True,
            )
            with h5py.File(output_file, "w") as fptr:
                add_dmat2_gridrep_to_hdf5(
                    fptr,
                    read_dmat2_gridrep_ascii(basename + "_grid"),
                )
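
Several examples call a copy_file helper instead of using shutil directly. If it behaves like the shutil-based copies elsewhere in this collection, a minimal stand-in could be as simple as the following (assumption; the real helper may add logging or error handling):

import shutil
from pathlib import Path
from typing import Union


def copy_file(src: Union[Path, str], dst: Union[Path, str]):
    # copy the file including metadata, mirroring the shutil.copy2 calls above
    shutil.copy2(src, dst)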
Example #18
def main():
    # pylint: disable=protected-access

    parser = argparse.ArgumentParser()
    parser.add_argument(
        "basis",
        type=Path,
        help="wave function file containing the basis to project onto",
    )
    parser.add_argument(
        "psi",
        type=Path,
        help="wave function file containing the wave function to analyse",
    )
    parser.add_argument(
        "-o",
        "--output",
        type=Path,
        help="name of the output file (optional)",
    )
    args = parser.parse_args()

    output = args.output
    if not output:
        output = Path(f"{args.psi.stem}_{args.basis.stem}.fixed_ns.h5")

    output = output.resolve()

    with tempfile.TemporaryDirectory(dir=Path.cwd()) as tmpdir:
        LOGGER.info("create a restart file from psi file")
        with open(Path(tmpdir) / "restart", "w") as fp:
            fp.write(read_first_frame(args.psi))

        LOGGER.info("copy psi file")
        shutil.copy2(args.psi, Path(tmpdir) / "psi")

        LOGGER.info("copy basis")
        shutil.copy2(args.basis, Path(tmpdir) / "basis")

        with WorkingDir(tmpdir):
            cmd = [
                "qdtk_analysis.x",
                "-fixed_ns",
                "-rst_bra",
                "basis",
                "-rst_ket",
                "restart",
                "-psi",
                "psi",
                "-save",
                "result",
            ]
            LOGGER.info("run qdtk_analysis.x: %s", " ".join(cmd))
            subprocess.check_output(cmd)

            times, real, imag = inout.read_fixed_ns_ascii("result")
            wfn = load_wave_function("basis")
            inout.write_fixed_ns_hdf5(
                "result.h5",
                times,
                real,
                imag,
                wfn._tape[1],
                wfn._tape[3],
            )
            LOGGER.info("copy result")
            shutil.copy2("result.h5", output)
Example #19
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("input", type=Path, help="path for the input file")
    parser.add_argument(
        "-o",
        "--output",
        type=Path,
        default=None,
        help="path for the video file",
    )
    parser.add_argument(
        "-f",
        "--fps",
        type=float,
        default=5,
        help="frames per second for the video",
    )
    parser.add_argument(
        "--dpi",
        type=int,
        default=600,
        help="resolution (dpi) of the individual frames",
    )
    parser.add_argument(
        "-j",
        "--jobs",
        type=int,
        default=os.cpu_count(),
        help="number of threads to use to create the individual frames",
    )
    args = parser.parse_args()

    times, x1, x2, dmat2 = read_dmat2_gridrep_hdf5(args.input)
    X2, X1 = numpy.meshgrid(x2, x1)

    num_digits = len(str(len(times)))

    if args.output:
        output = args.output.resolve()
    else:
        output = args.input.with_suffix(".mp4").resolve()

    with tempfile.TemporaryDirectory() as tmpdir:
        with WorkingDir(tmpdir):
            with multiprocessing.Pool(args.jobs) as pool:
                pool.map(
                    partial(
                        render_frame,
                        times=times,
                        dmat2=dmat2,
                        X1=X1,
                        X2=X2,
                        num_digits=num_digits,
                        dpi=args.dpi,
                    ),
                    list(range(len(times))),
                )

            cmd = [
                "ffmpeg",
                "-y",
                "-framerate",
                str(args.fps),
                "-i",
                "%0" + str(num_digits) + "d.png",
                "out.mp4",
            ]
            LOGGER.info("ffmpeg command: %s", " ".join(cmd))
            subprocess.run(cmd, check=True)
            LOGGER.info("copy file: out.mp4 -> %s", str(output))
            copy_file("out.mp4", output)
Example #20
def cmd_task_info(self: SimulationSetBase, args: argparse.Namespace):
    self.create_working_dir()

    with WorkingDir(self.working_dir):
        self.simulations[args.index].main(["task-info", args.name])
Example #21
    def cmd_task_info(self, args: argparse.Namespace):
        self.create_working_dir()

        with WorkingDir(self.working_dir):
            self.compute_simulation(self.combinations[args.index]).main(
                ["task-info", args.name])
Example #22
def cmd_run_index(self: SimulationSetBase, args: argparse.Namespace):
    script_dir = Path(sys.argv[0]).parent.resolve()
    with WorkingDir(script_dir):
        self.logger.info("run simulation with index %d", args.index)
        self.simulations[args.index].main(["run"])
Example #23
def compute_reduced_density_matrix(
    wave_function: PathLike,
    output_file: PathLike,
    dvrs: Sequence[DVRSpecification],
    dofs_A: Sequence[int],
    basis_states: Mapping[int, Sequence[ArrayLike]] | None = None,
    threads: int = 1,
):
    # convert paths
    wave_function = Path(wave_function).resolve()
    output_file = Path(output_file).resolve()

    # function to generate basis states for DoF
    def generate_basis_states(dof: int) -> list[ArrayLike]:
        states: list[ArrayLike] = []
        dim = dvrs[dof].get().npoints
        for i in range(dim):
            states.append(numpy.zeros((dim, )))
            states[-1][i] = 1
        return states

    # check/prepare basis states for each DoF
    if basis_states is None:
        # generate basis states for each DoF from its DVR
        basis_states = {dof: generate_basis_states(dof) for dof in dofs_A}

    else:
        # check that the provided basis_states are consistent with the DVRs
        for dof in dofs_A:
            # no basis states for DoF -> generate them
            if dof not in basis_states:
                basis_states[dof] = generate_basis_states(dof)

            dim = dvrs[dof].get().npoints

            if len(basis_states[dof]) != dim:
                raise ValueError(
                    f"basis for DoF {dof} is overcomplete or incomplete (got {len(basis_states[dof])} states, expected {dim})",
                )

            for i, state in enumerate(basis_states[dof]):
                if len(state) != dim:
                    raise ValueError(
                        f"basis state {i} of DoF {dof} has wrong size (got {len(state)}, expected {dim})",
                    )

    # compute the dimension of the reduced density matrix of subsystem A
    dim_A = reduce(mul, (len(basis_states[dof]) for dof in dofs_A))

    # create storage for the results (one time series per matrix element)
    results: dict[tuple[int, int], numpy.ndarray] = {}

    tmp_path = Path(output_file).parent / ("." + Path(output_file).name + ".tmp")
    with TemporaryDir(tmp_path) as tmpdir:
        with WorkingDir(tmpdir.path):
            # copy wave function file
            copy_file(Path(wave_function), Path.cwd() / "psi")

            # generate a restart file
            tape, times, psi = read_psi_frame_ascii("psi", 0)
            write_psi_ascii("restart", [tape, [times], psi[numpy.newaxis, :]])

            # generate the tasks for the multiprocessing pool; only the lower
            # triangle is computed, the rest follows from hermiticity
            tasks = (
                [dvrs, dofs_A, a, state_a, b, state_b]
                for a, state_a in enumerate(
                    product(*(basis_states[dof] for dof in dofs_A)))
                for b, state_b in enumerate(
                    product(*(basis_states[dof] for dof in dofs_A)))
                if b <= a
            )

            with multiprocessing.Pool(threads) as pool:
                for a, b, time, values in pool.starmap(
                        compute_reduced_density_matrix_element,
                        tasks,
                        1,
                ):
                    results[(a, b)] = values

        tmpdir.complete = True

    # convert results to an array
    steps = len(results[(0, 0)])
    results_arr = numpy.zeros((steps, dim_A, dim_A), dtype=numpy.complex128)
    for a in range(dim_A):
        for b in range(a + 1):
            results_arr[:, a, b] = results[(a, b)]

        for b in range(a + 1, dim_A):
            results_arr[:, a, b] = numpy.conjugate(results[(b, a)])

    # create parent directory of output file
    Path(output_file).parent.mkdir(exist_ok=True, parents=True)

    # make sure that results array is a 3rd order tensor
    # first index denotes time
    if len(results_arr.shape) == 2:
        results_arr = results_arr[numpy.newaxis, :, :]

    # store results in a HDF5 file
    with h5py.File(output_file, "w") as fptr:
        fptr.create_dataset("time", data=time)
        fptr.create_dataset("rho_A", data=results_arr)
        fptr.create_dataset("trace",
                            data=numpy.trace(results_arr, axis1=1, axis2=2))
        for dof in dofs_A:
            fptr.create_dataset(
                f"basis_states_{dof}",
                data=numpy.array(basis_states[dof]),
            )
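
TemporaryDir differs from tempfile.TemporaryDirectory in that the caller marks it as complete. A minimal sketch consistent with its use above, assuming the scratch directory is only removed when complete was set, so that failed runs leave their data behind for inspection:

import shutil
from pathlib import Path


class TemporaryDir:
    """Minimal sketch of a keep-on-failure temporary directory (assumption)."""

    def __init__(self, path: Path):
        self.path = Path(path)
        self.complete = False

    def __enter__(self) -> "TemporaryDir":
        self.path.mkdir(parents=True, exist_ok=True)
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        # only remove the scratch directory if the computation finished
        if self.complete:
            shutil.rmtree(self.path)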
Example #24
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("operator",
                        type=Path,
                        help="path of the operator file")
    parser.add_argument("restart", type=Path, help="path of the restart file")
    parser.add_argument("psi", type=Path, help="path of the psi file")
    parser.add_argument("--node", type=int, default=0, help="node to use")
    parser.add_argument("--dof",
                        type=int,
                        default=1,
                        help="degree of freedom to use")
    parser.add_argument(
        "-o",
        "--output",
        type=Path,
        default=Path.cwd() / "dmat.h5",
        help="output file",
    )
    parser.add_argument("--slice", type=str)

    group_diag = parser.add_mutually_exclusive_group()
    group_diag.add_argument(
        "--diagonalize",
        action="store_true",
        help="whether to diagonalize the one-body density matrix",
    )
    group_diag.add_argument(
        "--only-diagonalize",
        action="store_true",
        help="whether to only diagonalize the one-body density matrix",
    )

    parser.add_argument(
        "--only-eigenvalues",
        action="store_true",
        help="whether to only output the eigenvalues",
    )

    group_rep = parser.add_mutually_exclusive_group()
    group_rep.add_argument(
        "--spfrep",
        action="store_true",
        help="use spf representation",
    )
    group_rep.add_argument(
        "--gridrep",
        action="store_true",
        help="use grid representation",
    )

    args = parser.parse_args()
    if args.only_eigenvalues and not (args.diagonalize
                                      or args.only_diagonalize):
        parser.error(
            "--only-eigenvalues only applies when --diagonalize or --only-diagonalize is specified",
        )

    output = args.output.resolve()
    opr = args.operator.resolve()
    rst = args.restart.resolve()
    psi = args.psi.resolve()
    with tempfile.TemporaryDirectory() as tempdir:
        with WorkingDir(tempdir):
            copy_file(opr, "opr")
            copy_file(rst, "rst")

            if args.slice:
                tape, times, psi = read_psi_ascii(psi)

                m = RE_SLICE.match(args.slice)
                if not m:
                    raise RuntimeError(f'Invalid slice format "{args.slice}"')

                start = 0
                end = len(times)
                step = 1
                if m.group(1) != "":
                    start = int(m.group(1))
                if m.group(2) != "":
                    stop = int(m.group(2))
                try:
                    if m.group(3) != "":
                        step = int(m.group(3))
                except IndexError:
                    pass

                indices = list(range(start, end, step))
                write_psi_ascii("psi", (tape, times[indices], psi[indices]))
            else:
                copy_file(psi, "psi")

            # default to the grid representation unless --spfrep was given
            gridrep = args.gridrep or not args.spfrep

            cmd = [
                "qdtk_analysis.x",
                "-opr",
                "opr",
                "-rst",
                "rst",
                "-psi",
                "psi",
                "-dmat",
                "-nd",
                str(args.node),
                "-dof",
                str(args.dof),
            ]
            if gridrep:
                cmd.append("-gridrep")
            else:
                cmd.append("-spfrep")
            if args.diagonalize:
                cmd.append("-diagonalize")
            if args.only_diagonalize:
                cmd += ["-diagonalize", "-onlydiag"]
            if args.only_eigenvalues:
                cmd.append("-onlyeigval")
            LOGGER.info("cmd: %s", " ".join(cmd))

            subprocess.run(cmd, check=True)

            with h5py.File(output, "a") as fptr:
                if not args.only_diagonalize:
                    if gridrep:
                        dmat.add_dmat_gridrep_to_hdf5(
                            fptr,
                            dmat.read_dmat_gridrep_ascii(
                                f"dmat_dof{args.dof}_grid"),
                        )
                    else:
                        dmat.add_dmat_spfrep_to_hdf5(
                            fptr,
                            *dmat.read_dmat_spfrep_ascii(
                                f"dmat_dof{args.dof}_spf"),
                        )

                if args.diagonalize or args.only_diagonalize:
                    dmat.add_dmat_evals_to_hdf5(
                        fptr,
                        *dmat.read_dmat_evals_ascii(
                            f"eval_dmat_dof{args.dof}"),
                    )
                    if not args.only_eigenvalues:
                        if gridrep:
                            dmat.add_dmat_evecs_grid_to_hdf5(
                                fptr,
                                *dmat.read_dmat_evecs_grid_ascii(
                                    f"evec_dmat_dof{args.dof}_grid", ),
                            )
                        else:
                            dmat.add_dmat_evecs_spf_to_hdf5(
                                fptr,
                                *dmat.read_dmat_evecs_spf_ascii(
                                    f"evec_dmat_dof{args.dof}_spf", ),
                            )
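
RE_SLICE is referenced above but not defined in this excerpt. One plausible definition that fits the parsing code (three groups that match the empty string when a field is omitted, e.g. ":100:2"):

import re

# hypothetical pattern for "start:stop" or "start:stop:step";
# omitted fields make the corresponding group match ""
RE_SLICE = re.compile(r"^(-?\d*):(-?\d*):?(-?\d*)$")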
Example #25
def repickle_simulation(simulation_dir: str, bar_offset: int = 0):
    if not Path(simulation_dir).exists():
        LOGGER.info(
            "simulation directory does not exist, simulation was not yet executed -> skipping",
        )
        return

    with WorkingDir(Path(simulation_dir).resolve()):
        if not Path("doit.json").exists():
            LOGGER.info(
                "doit.json does not exist, simulation was not yet executed -> skipping",
            )
            return

        with open("doit.json") as fptr:
            state = json.load(fptr)

        pickle_extensions = {
            ".mb_opr_pickle",
            ".prop_pickle",
            ".opr_pickle",
            ".wfn_pickle",
        }

        for task_name in tqdm.tqdm(
                state,
                position=bar_offset,
                leave=False,
                desc="tasks",
        ):
            if "deps:" not in state[task_name]:
                continue

            for dep in tqdm.tqdm(
                    state[task_name]["deps:"],
                    position=bar_offset + 1,
                    leave=False,
                    desc="dependencies",
            ):
                if Path(dep).suffix not in pickle_extensions:
                    continue

                with open(dep, "rb") as fptr:
                    obj = pickle.load(fptr)

                with open(dep, "wb") as fptr:
                    pickle.dump(obj, fptr, protocol=3)

                new_hash = subprocess.check_output(["md5sum",
                                                    dep]).decode().split()[0]
                old_mtime, old_size, old_hash = state[task_name][dep]
                stat = os.stat(dep)
                new_mtime = stat.st_mtime
                new_size = stat.st_size

                tqdm.auto.tqdm.write("\t" + dep + ":")
                tqdm.auto.tqdm.write(f"\t\tsize:  {old_size} -> {new_size}")
                tqdm.auto.tqdm.write(f"\t\tmtime: {old_mtime} -> {new_mtime}")
                tqdm.auto.tqdm.write(f"\t\tmd5:   {old_hash} -> {new_hash}")

                state[task_name][dep] = [new_mtime, new_size, new_hash]

        with open("doit.json", "w") as fptr:
            json.dump(state, fptr)