Example #1
def test_concatMap():
    """
    concatMap f == concat (map f)
    """
    f = lambda x: [x * 2]
    xs = [1, 2]
    assert concatMap(f, xs) == list(concat(map(f, xs)))
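The docstring above states the law concatMap f == concat (map f). A minimal sketch of a concatMap consistent with that law, assuming an eager list result (the project's own definition may differ):

from itertools import chain

def concatMap(f, xs):
    # Map f over xs and flatten one level, i.e. concat(map(f, xs)).
    return list(chain.from_iterable(map(f, xs)))

# With the test's inputs: concatMap(lambda x: [x * 2], [1, 2]) == [2, 4]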
Example #2
def readTurbomoleBasis(path):
    """Read a Turbomole basis set file and return the basis keys and data."""
    bss = topParseB.parseFile(path)
    # Atom labels (lower case) and basis-set names (upper case)
    atoms = [xs.atomLabel.lower() for xs in bss]
    names = concat([xs.basisName.upper().split() for xs in bss])
    # Contraction format fields converted to integers
    formats = [xs.format[:] for xs in bss]
    formats_int = map(lambda fss: [[int(x) for x in xs]
                                   for xs in fss], formats)
    # Raw (exponent, coefficient) contraction pairs for every basis entry
    rss = [rs.coeffs[:] for rs in bss]
    rawData = [[x.contractions[:] for x in rss[i]] for i in range(len(rss))]
    fst = lambda xs: xs[0]
    snd = lambda xs: xs[1]
    # Split the pairs into exponents and coefficients, cast to float
    expos = list(map(mapFloat, [concatMap(fst, swapCoeff(2, rawData[i]))
                                for i in range(len(rawData))]))
    coeffs = list(map(mapFloat, [concatMap(snd, swapCoeff(2, rawData[i]))
                                 for i in range(len(rawData))]))
    # Pair everything up using the curried zipWith combinators
    basisData = zipWith(AtomBasisData)(expos)(coeffs)
    basiskey = zipWith3(AtomBasisKey)(atoms)(names)(formats_int)

    return basiskey, basisData
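The curried call pattern zipWith(AtomBasisData)(expos)(coeffs) suggests Haskell-style combinators. A minimal sketch of zipWith and zipWith3 under that assumption (the project's real definitions may differ):

def zipWith(f):
    # zipWith(f)(xs)(ys): apply f pairwise over two lists.
    return lambda xs: lambda ys: [f(x, y) for x, y in zip(xs, ys)]

def zipWith3(f):
    # Three-list variant, as used for AtomBasisKey above.
    return lambda xs: lambda ys: lambda zs: [f(x, y, z)
                                             for x, y, z in zip(xs, ys, zs)]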
Example #3
        def go(t1, t2):
            """
            A CP2K basis has exactly the same number of contractions
            for every angular quantum number (i.e. S, P, D, Z) and they
            all share the same exponents. Using this fact, the contraction
            representation is built by sharing the exponents between all
            the contractions.
            """
            index, acc = t1  # running offset into css and accumulated result
            n, l = t2        # number of contraction blocks and angular momentum
            # Take the next n coefficient blocks and expand them for angular
            # momentum l, reusing the shared exponents es
            xss = css[index:n + index]
            rss = concatMap(expandBasis_cp2k(l, es), xss)

            return (index + n, acc + rss)
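go has the shape of a fold step: t1 carries the running (index, accumulated contractions) state and t2 the next (count, angular momentum) pair. A hedged sketch of how it could be driven, assuming the enclosing code folds it with functools.reduce over those pairs (fmts and the initial state are illustrative names, not taken from the source):

from functools import reduce

# Hypothetical driver: fold go over (n, l) pairs, starting at offset 0
# with an empty list of expanded contractions.
_, contractions = reduce(go, fmts, (0, []))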
Example #4
    def get_property(self, prop):
        """
        Look up the optional arguments needed to parse a property, which are
        stored in the properties dictionary, then locate the matching output
        file and return the parsed value.
        """
        # Read the JSON dictionary that contains the parser names
        ds = self.prop_dict[prop]

        # Extension of the output file containing the property value
        file_ext = ds['file_ext']

        # If there is no work_dir, get returns None
        work_dir = self.archive.get('work_dir')

        # PLAMS directory
        plams_dir = self.archive['plams_dir'].path

        # Search for the specified output file in the folders
        file_pattern = ds.get('file_pattern')
        if file_pattern is None:
            file_pattern = '{}*.{}'.format(self.job_name, file_ext)

        output_files = concatMap(partial(find_file_pattern, file_pattern),
                                 [plams_dir, work_dir])
        if output_files:
            file_out = output_files[0]
            fun = getattr(import_parser(ds), ds['function'])
            # Read the keyword arguments from the properties dictionary
            kwargs = ds.get('kwargs') if ds.get('kwargs') is not None else {}
            kwargs['plams_dir'] = plams_dir
            return ignored_unused_kwargs(fun, [file_out], kwargs)
        else:
            msg = """
            Property {} not found. No output file called: {}. Folder used:
            plams_dir = {}\n
            work_dir {}\n
            """.format(prop, file_pattern, plams_dir, work_dir)
            raise FileNotFoundError(msg)
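get_property only reads a few keys from the per-property dictionary: file_ext, an optional file_pattern, the parser function name, and optional kwargs. A hedged sketch of one prop_dict entry, inferred from those reads (the property name, module path, and values are illustrative, and the key consumed by import_parser is an assumption):

# Hypothetical entry in self.prop_dict; keys mirror those read above.
prop_dict = {
    'energy': {
        'file_ext': 'out',              # builds '{job_name}*.out' when no pattern is given
        'file_pattern': None,           # or an explicit glob such as '*energy*.out'
        'parser': 'parsers.energy',     # hypothetical key resolved by import_parser(ds)
        'function': 'read_energy',      # attribute looked up with getattr(...)
        'kwargs': {'unit': 'hartree'},  # optional extra keyword arguments
    },
}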