Example #1
def generate_input_file(molecule,
                        hamiltonian,
                        basis,
                        charge=fixed_defaults['charge'],
                        forces=fixed_defaults['forces'],
                        title=fixed_defaults['title'],
                        multiplicity=fixed_defaults['multiplicity'],
                        wfn_symmetry=fixed_defaults['wfn_symmetry'],
                        mem_per_proc=None):
    """Generate a molpro input file.

    Args:
        molecule (chemcoord.Cartesian or chemcoord.Zmat or str):
            If it is a string, it has to be a valid xyz-file.
        hamiltonian (str): {hamiltonian}
        basis (str): {basis}
        charge (int): {charge}
        forces (bool): {forces}
        title (str): {title}
        multiplicity (int): {multiplicity}
        wfn_symmetry (int): {wfn_symmetry}
        mem_per_proc (str): {mem_per_proc}


    Returns:
        str : Molpro input.
    """
    if isinstance(molecule, str):
        molecule = cc.Cartesian.read_xyz(StringIO(molecule))
    elif isinstance(molecule, cc.Zmat):
        molecule = molecule.get_cartesian()
    if mem_per_proc is None:
        mem_per_proc = conf_defaults['mem_per_proc']

    get_output = """\
*** {title}
memory, {memory}

basis, {basis_str}

geometry = {{
{geometry}
}}

{hamiltonian_str}
{forces}
---
""".format

    hamiltonian_str = _get_hamiltonian_str(
        hamiltonian, molecule.get_electron_number(charge), wfn_symmetry,
        multiplicity)

    out = get_output(title=title,
                     basis_str=basis,
                     geometry=molecule.to_xyz(sort_index=False),
                     hamiltonian_str=hamiltonian_str,
                     forces='forces' if forces else '',
                     memory=_get_molpro_mem(DataSize(mem_per_proc)))
    return out
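
The mem_per_proc string above is parsed into a raw byte count before _get_molpro_mem turns it into the value used on the "memory, ..." line of the input. A minimal standalone sketch of that parsing step, assuming the datasize package (the value below is only illustrative):

from datasize import DataSize

# DataSize subclasses int, so parsing a human-readable memory string yields
# a plain byte count that helpers such as _get_molpro_mem can consume.
mem = DataSize('2GiB')
print(int(mem))                  # the raw byte count (2 * 1024**3)
print('{:.2GiB}'.format(mem))    # rendered back with an explicit binary unit
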
Example #2
def list_datasets(basedir, db, index):
    """List local store content."""
    # Read the given index file if provided; otherwise fall back to the URL loader.
    loader = DictLoader(
        util.read_index(index)) if index is not None else UrlLoader()
    store = RefStore(basedir=basedir, loader=loader, connect_url=db)
    datasets = store.list()
    headers = ['Name', 'Size', 'Downloaded', 'Package']
    data = list()
    # Track the maximum width for each column.
    widths = [len(h) + 1 for h in headers]
    # Sort datasets by name before output.
    for dataset in sorted(datasets, key=lambda d: d.name):
        row = [
            dataset.identifier, '{:.2a}'.format(DataSize(dataset.filesize)),
            ' '.join(dataset.created_at.isoformat()[:19].split('T')),
            '{} {}'.format(dataset.package_name, dataset.package_version)
        ]
        for i, value in enumerate(row):
            widths[i] = max(widths[i], len(value) + 1)
        data.append(row)
    tp.table(data,
             headers=headers,
             width=widths,
             style='grid',
             out=util.TPrinter())
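
The '{:.2a}' spec in the Size column relies on DataSize's autoformat code. A small standalone sketch of that formatting, with made-up file sizes:

from datasize import DataSize

# 'a' picks a suitable binary prefix automatically and '.2' caps the precision,
# which keeps the Size column compact whatever the magnitude of the file.
for filesize in (512, 2_048_000, 7_340_032_000):
    print('{:.2a}'.format(DataSize(filesize)))
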
Example #3
    def __init__(self, framework, sn=0):
        self.sn = sn
        self.framework = framework
        self.framework_id = framework.id
        self.name = framework.name
        self.url = framework.url
        self.memory = "{:.2GiB}".format(DataSize(framework.memory_str))
        self.cpus = framework.cpus

        self.tasks = framework.tasks
        self.tasks_len = framework.tasks_len
        self.uptime = framework.uptime
        self.uptime_descriptive = framework.uptime_descriptive

        if self.sn is None:
            sn_text = '-'
            sn_align = 'center'
            self.sn = '-'
        else:
            sn_align = 'left'
            sn_text = '%s:' % self.sn

        if self.cpus is None:
            self.cpus = "-"

        if self.memory is None:
            self.memory = "-"

        if self.cpus == "-" or int(self.tasks_len) < 1 or int(self.cpus) < 1:
            framework_color = 'bad_name'
        else:
            framework_color = 'name'

        self.item = [
            ('fixed', 4,
             urwid.Padding(
                 urwid.AttrMap(urwid.Text(sn_text, align=sn_align),
                               framework_color, 'focus'))),
            urwid.AttrMap(urwid.Text(self.name), framework_color, 'focus'),
            ('fixed', 19,
             urwid.Padding(
                 urwid.AttrMap(urwid.Text(self.uptime, align="right"),
                               framework_color, 'focus'))),
            ('fixed', 28,
             urwid.Padding(
                 urwid.AttrMap(
                     urwid.Text(self.uptime_descriptive, align="right"),
                     framework_color, 'focus'))),
            ('fixed', 10,
             urwid.Padding(
                 urwid.AttrMap(urwid.Text(self.memory, align="right"),
                               framework_color, 'focus'))),
            ('fixed', 5,
             urwid.Padding(
                 urwid.AttrMap(urwid.Text(str(self.cpus), align="right"),
                               framework_color, 'focus'))),
            ('fixed', 4,
             urwid.Padding(
                 urwid.AttrMap(urwid.Text(self.tasks_len, align="right"),
                               framework_color, 'focus'))),
            ('fixed', 20, self.get_tasks_str(self.tasks))
        ]

        w = urwid.Columns(self.item, focus_column=1, dividechars=1)
        self.__super.__init__(w)
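
The memory cell above re-expresses framework.memory_str, whatever unit it carries, as GiB with two decimal places. A brief sketch of just that conversion, assuming the datasize package (the input string is invented):

from datasize import DataSize

# Parse a memory string in whatever unit the framework reports and render it
# as GiB with two decimal places, matching the urwid column above.
memory_str = '12800MB'   # illustrative value only
print("{:.2GiB}".format(DataSize(memory_str)))
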
Example #4
def calculate(molecule,
              hamiltonian,
              basis,
              molcas_exe=None,
              el_calc_input=None,
              sym_group=None,
              charge=fixed_defaults['charge'],
              forces=fixed_defaults['forces'],
              title=fixed_defaults['title'],
              multiplicity=fixed_defaults['multiplicity'],
              start_orb=None,
              num_procs=None,
              mem_per_proc=None):
    """Calculate the energy of a molecule using Molcas.

    Args:
        el_calc_input (str): {el_calc_input}
        molecule (chemcoord.Cartesian or chemcoord.Zmat or str):
            If it is a string, it has to be a valid xyz-file.
        hamiltonian (str): {hamiltonian}
            But 'CCSD' and 'CCSD(T)' are not yet implemented.
        basis (str): {basis}
        molcas_exe (str): {molcas_exe}
        charge (int): {charge}
        forces (bool): {forces}
        title (str): {title}
        multiplicity (int): {multiplicity}
        start_orb (str): {start_orb}
        num_procs (int): {num_procs}
        mem_per_proc (str): {mem_per_proc}

    Returns:
        dict: A dictionary with at least the keys
        ``'structure'`` and ``'energy'`` which contains the energy in Hartree.
        If forces were calculated, the key ``'gradient'`` contains the
        gradient in Hartree / Angstrom.
    """
    if molcas_exe is None:
        molcas_exe = conf_defaults['molcas_exe']
    if num_procs is None:
        num_procs = conf_defaults['num_procs']
    if mem_per_proc is None:
        mem_per_proc = conf_defaults['mem_per_proc']
    if __name__ == '__main__' and el_calc_input is None:
        raise ValueError('el_calc_input has to be provided when executing '
                         'from an interactive session.')
    if el_calc_input is None:
        el_calc_input = '{}.inp'.format(splitext(inspect.stack()[-1][1])[0])

    input_str = generate_input_file(
        molecule=molecule,
        hamiltonian=hamiltonian,
        basis=basis,
        charge=charge,
        el_calc_input=el_calc_input,
        forces=forces,
        sym_group=sym_group,
        title=title,
        multiplicity=multiplicity,
        start_orb=start_orb,
    )

    output_path = '{}.log'.format(splitext(el_calc_input)[0])
    if dirname(el_calc_input):
        makedirs(dirname(el_calc_input), exist_ok=True)
    with open(el_calc_input, 'w') as f:
        f.write(input_str)

    my_env = os.environ.copy()
    my_env['MOLCAS_NPROCS'] = str(num_procs)
    my_env['MOLCAS_MEM'] = str(DataSize(mem_per_proc) / 1e6)
    with cd(dirname(el_calc_input) if dirname(el_calc_input) else '.'):
        run([molcas_exe, '-f', basename(el_calc_input)],
            env=my_env,
            stdout=subprocess.PIPE)
    return parse_output(output_path)
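
MOLCAS_MEM is derived from the same mem_per_proc string: the parsed byte count is divided by 1e6, i.e. expressed in decimal megabytes. A minimal sketch of that arithmetic, assuming the datasize package (the value is illustrative):

from datasize import DataSize

# DataSize is an int subclass holding bytes, so dividing by 1e6 yields the
# size in decimal MB as a float; its str() is what ends up in MOLCAS_MEM.
mem_per_proc = '4GB'     # illustrative value only
print(str(DataSize(mem_per_proc) / 1e6))
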
Example #5
    def size(self):
        return DataSize(str(self.size_in_bytes))
def getLatestRelease(github_config_file: str, **kwargs) -> dict:

    github_config_file_type: str = kwargs.get('github_config_file_type', None)
    query_only: bool = kwargs.get('query_only', False)
    write_release_info: bool = kwargs.get('write_release_info', True)

    gitConfig = Config(file_path=github_config_file,
                       file_type=github_config_file_type)

    ic_set(gitConfig.get_bool('APP/DEBUG'))

    ic("debug ON (\'APP/DEBUG\')")
    ic(f'Configuration File      : {Path(github_config_file).resolve()}')
    if github_config_file_type:
        ic(f'Configuration File Type : {github_config_file_type}')

    repo = gitRepo()
    repo.id = gitConfig.get(keys='GITHUB/REPO_ID', raiseNDF=False)
    repo.owner = gitConfig.get(keys='GITHUB/REPO_OWNER', raiseNDF=False)
    repo.name = gitConfig.get(keys='GITHUB/REPO_NAME', raiseNDF=False)
    assert (
        (repo.owner is None and repo.name is None and repo.id is not None
         and isinstance(repo.id, int)) or
        (repo.owner is not None and repo.name is not None and repo.id is None
         and isinstance(repo.owner, str) and isinstance(repo.name, str))
    ), f"((REPO_OWNER \"{repo.owner}\" AND REPO_NAME \"{repo.name}\") XOR (REPO_ID \"{repo.id}\")) must be supplied as parameters in \'{github_config_file}\'"

    git = gitConnect(key=gitConfig.get('GITHUB/KEY'))

    if repo.id:
        thisRepo = getRepo(g=git, id=repo.id)
        repo.owner = thisRepo.owner.login
        repo.name = thisRepo.name
    else:
        thisRepo = getRepo(g=git, repoOwner=repo.owner, repoName=repo.name)
        repo.id = thisRepo.id

    repo.fullName = thisRepo.full_name
    repo.url = thisRepo.html_url
    repo.private = thisRepo.private

    ic(repo)

    releases = [r for r in thisRepo.get_releases()]
    allowPrelease = gitConfig.get_bool(['APP', 'ALLOW_PRERELEASE'])
    keepArchive = gitConfig.get_bool(['APP', 'KEEP_ARCHIVE'])
    filteredReleases = releases if allowPrelease else [
        r for r in releases if not r.prerelease
    ]

    basePath = gitConfig.get(keys=['APP', 'OUTPUT_FOLDER'], raiseNDF=False)
    if not basePath:
        basePath = '.'
        gitConfig.set(keys=['APP', 'OUTPUT_FOLDER'], value=basePath)
    gitConfig.save()

    if ((not query_only) or write_release_info):
        basePath = Path(basePath).resolve()
        ic(f"Output Path : {basePath}")

        Path(basePath).mkdir(parents=True, exist_ok=True)

    try:
        ic(f"Release count          = {len(releases)}")
        if len(releases) != len(filteredReleases):
            ic(f"Filtered Release count = {len(filteredReleases)}")

        if len(filteredReleases) == 0:
            release = {
                "message":
                f"No releases found{'' if allowPrelease else ' (PreReleases are not permitted)'}"
            }
            raise Warning(release["message"])
        else:
            # ic (f"{len(releases)} release{', using this one' if len(releases) == 1 else 's, using newest'}")

            release = releaseToDict(filteredReleases[0])
            release[
                "message"] = f"Release with tag \'{filteredReleases[0].tag_name}\' retrieved"
            if filteredReleases[0].prerelease:
                release["message"] += " (this is a PreRelease)"

            if not query_only:
                release["downloaded_at"] = datetime.utcnow().strftime(DTMask)

                with urlopen(release['zip_url']) as zipresp:

                    theBytes = zipresp.read()

                    if keepArchive:
                        releaseFolder = basePath.joinpath(
                            'releases', repo.owner, repo.name)
                        Path(releaseFolder).mkdir(parents=True, exist_ok=True)
                        release["zip_file_name"] = str(
                            releaseFolder.joinpath(
                                f"{release['tag_name']}.zip"))
                        with open(release["zip_file_name"], 'wb') as f:
                            f.write(theBytes)

                        def getDT(dateStr: str) -> datetime:
                            try:
                                retval = datetime.strptime(dateStr, DTMask)
                            except ValueError:
                                retval = datetime.strptime(
                                    dateStr, DTMask[:-2])
                            try:
                                retval = retval.astimezone(tz=None)
                            except Exception:
                                pass
                            return retval

                        setFileDateTimes(
                            filePath=release["zip_file_name"],
                            datetimes=dateTruple(
                                created=getDT(release['created_at']),
                                modified=getDT(release['published_at']),
                                accessed=datetime.now()))

                    else:
                        release["zip_file_name"] = None

                release["zip_file_size"] = DataSize(
                    f"{len(theBytes)}B").__format__('m')
                release["zip_file_bytes"] = len(theBytes)

                with ZipFile(BytesIO(theBytes)) as zfile:
                    release['files'] = []
                    for f in zfile.filelist:
                        thisPath = basePath
                        origPath = Path(f.filename)

                        for p in list(origPath.parts[1:]):
                            thisPath = thisPath.joinpath(p)

                        if origPath.parts[-1] == '.gitignore':
                            # Ignore these files
                            ...
                        elif f.is_dir():
                            # Creating folder
                            Path(thisPath).mkdir(parents=True, exist_ok=True)
                        else:
                            fileTime = datetime(*f.date_time)
                            d = dict()
                            d["name"] = str(thisPath)
                            d["size"] = DataSize(f"{f.file_size}B").__format__(
                                'm')
                            d["bytes"] = f.file_size
                            d["date"] = fileTime.strftime(DTMask)

                            release['files'].append(d)

                            with open(thisPath, "wb") as thisFile:
                                thisFile.write(zfile.read(f))
                            setFileDateTimes(filePath=thisPath,
                                             datetimes=dateTruple(
                                                 created=fileTime,
                                                 modified=fileTime,
                                                 accessed=fileTime))
    except Warning as w:
        pass

    finally:
        ic(repo.fullName)
        if "message" in release and release["message"]:
            ic(release["message"])

        releaseDict = {
            "config": gitConfig.get('APP'),
            "repo": dict(repo),
            "release": release,
            "allReleases": releasesToDictList(releases)
        }

        if write_release_info:
            # write data out from what we've got here. use the Config object to write it
            yamlDump = Config(file_path=basePath.joinpath(
                f"{repo.owner}.{repo.name}.release.yaml"),
                              raiseFileNotFound=False,
                              config=releaseDict)
            yamlDump.set('config/OUTPUT_FOLDER', str(basePath))
            yamlDump.save()
            yamlDump = None

    return releaseDict
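
The zip_file_size fields above wrap a raw byte count in DataSize purely for display (format(x, spec) is the built-in equivalent of the x.__format__(spec) calls used there). A short hedged sketch with an invented payload, using the '.2a' autoformat already seen in Example #2:

from datasize import DataSize

# Illustrative stand-in for theBytes: the downloaded archive's contents.
payload = b'\x00' * 1_250_000
size = DataSize(f"{len(payload)}B")
print('{:.2a}'.format(size))    # human-readable size for the release dict
print(len(payload))             # the raw byte count kept in zip_file_bytes
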
Example #7
    def print_results(self, url, framework_id):

        self.print_cmd_params(self.args)

        metrics = self.get_json_by_rest(self.metrics_url)
        mesos_metrics = MesosMetrics()
        mesos_metrics.parse(metrics)
        mesos_metrics.print_metrics()

        data = self.get_json_by_rest(url, framework_id)
        frameworks = data["frameworks"]

        if self.app_name and len(frameworks) != 1:
            self.framework_id = self.get_framework_id(self.frameworks_url,
                                                      self.app_name,
                                                      self.app_name_regex)
        table = []
        if self.app_name:
            print("Watching framework: ", self.app_name)
        print("Fetching frameworks from: ", self.frameworks_url, "Total: ",
              len(frameworks))
        print("Fetching metrics from: ", self.metrics_url)
        self.print_legend()

        for framework in frameworks:
            resources = framework["resources"]
            name = framework["name"]
            if name == "marathon" or name == "chronos":
                continue
            memory = str(int(resources["mem"])) + "MB"
            cpus = int(resources["cpus"])
            url = framework["webui_url"]
            tasks = framework["tasks"]
            tasks_len = str(len(tasks))
            ts_epoch = int(framework["registered_time"])
            now = time.time()
            diff = (now - ts_epoch)
            uptime = datetime.datetime.fromtimestamp(ts_epoch).strftime(
                '%Y-%m-%d %H:%M:%S')
            uptime_descriptive = str(datetime.timedelta(seconds=diff))
            if cpus < 1:
                color = Fore.RED
            else:
                color = Fore.GREEN

            table.append([
                color + name,
                Fore.MAGENTA + "{:.2GiB}".format(DataSize(memory)),
                Fore.CYAN + str(cpus), Fore.YELLOW + tasks_len,
                Fore.GREEN + uptime, Fore.GREEN + uptime_descriptive,
                Fore.WHITE + url, Fore.GREEN + self.get_tasks_str(tasks)
            ])

        print(
            tabulate(sorted(table, key=lambda x: x[0]),
                     headers=[
                         's/n', 'framework', 'memory', '#cpu', '#tasks',
                         'up_since', 'uptime', 'url', 'tasks'
                     ],
                     tablefmt="rst",
                     showindex="always"))
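
The memory column above follows the same pattern as Example #3: resources["mem"] is suffixed with "MB", parsed by DataSize, and re-expressed in GiB. A brief sketch with a made-up value:

from datasize import DataSize

# resources["mem"] is treated as MB by the loop above; the table column
# re-expresses the same amount in GiB with two decimal places.
memory = str(int(2048.0)) + "MB"     # illustrative resources["mem"] value
print("{:.2GiB}".format(DataSize(memory)))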