def test_jsonio_atoms():
    """Round-trip various Atoms objects through the ASE JSON codec."""

    def assert_equal(a, b):
        # Object equality plus an array-by-array comparison.
        assert a == b
        assert set(a.arrays) == set(b.arrays)
        for name in a.arrays:
            assert np.array_equal(a.arrays[name], b.arrays[name]), name

    atoms = bulk('Ti')
    print('atoms', atoms)
    txt = encode(atoms)
    print('encoded', txt)
    atoms1 = decode(txt)
    print('decoded', atoms1)
    # Re-encoding the decoded object must reproduce the original text:
    assert encode(atoms1) == txt
    assert_equal(atoms, atoms1)

    # Magnetic moments must survive the round trip:
    BeH = molecule('BeH')
    assert BeH.has('initial_magmoms')
    new_BeH = decode(encode(BeH))
    assert_equal(BeH, new_BeH)
    assert new_BeH.has('initial_magmoms')

    # Constraints must survive the round trip:
    from ase.constraints import FixAtoms
    atoms = bulk('Ti')
    atoms.constraints = FixAtoms(indices=[0])
    newatoms = decode(encode(atoms))
    c1 = atoms.constraints
    c2 = newatoms.constraints
    assert len(c1) == len(c2) == 1
    # Can we check constraint equality somehow?
    # Would make sense for FixAtoms
    assert np.array_equal(c1[0].index, c2[0].index)
def test_jsonio():
    """Test serialization of ndarrays and other stuff."""
    from datetime import datetime
    import numpy as np
    import io
    from ase.io.jsonio import encode, decode, read_json, write_json

    # Numpy scalars round-trip to plain Python numbers.
    assert decode(encode(np.int64(42))) == 42

    # Complex arrays round-trip as well.
    c = np.array([0.1j])
    assert (decode(encode(c)) == c).all()

    # write_json/read_json work on file-like objects.
    buf = io.StringIO()
    obj1 = {'hello': 'world'}
    write_json(buf, obj1)
    buf.seek(0)
    obj2 = read_json(buf)
    print(obj1)
    print(obj2)

    # Complex numbers and datetimes compare equal after a round trip.
    for obj in [0.5 + 1.5j, datetime.now()]:
        s = encode(obj)
        o = decode(s)
        print(obj)
        print(s)
        print(obj)
        assert obj == o, (obj, o, s)
def _old2new(self, values):
    """Upgrade a raw row tuple from an older database schema to the
    current layout.

    Version 4 rows pass through untouched.  Older layouts are detected
    by their column count:

    * 26 columns: the trailing column holds one JSON blob combining
      ``key_value_pairs`` and ``data`` — split it into two columns.
    * 29 columns: separate ``keywords`` column — fold each keyword into
      ``key_value_pairs`` with the value 1.

    Raises
    ------
    ValueError
        If the column count matches no known schema.
    """
    if self.version == 4:
        return values  # should be ok for reading by convert.py script
    if len(values) == 26:
        extra = decode(values[25])
        return values[:-1] + (encode(extra['key_value_pairs']),
                              encode(extra['data']))
    if len(values) == 29:
        keywords = decode(values[-4])
        kvp = decode(values[-3])
        # Old bare keywords become key-value pairs with the value 1:
        kvp.update({keyword: 1 for keyword in keywords})
        return values[:-4] + (encode(kvp),) + values[-2:]
    # Previously "assert False" — raise explicitly so the failure is not
    # silently stripped under ``python -O`` and carries a useful message.
    raise ValueError('Unrecognized database row format with %d columns'
                     % len(values))
def _convert_tuple_to_row(self, values):
    """Convert a raw SQL result tuple into an AtomsRow.

    The tuple layout is strictly positional and must match the
    ``systems`` table schema; ``_old2new`` first upgrades rows written
    by older schema versions.  Optional columns are NULL (None) when
    absent.
    """
    values = self._old2new(values)
    # Mandatory columns.  Arrays are stored as binary blobs; pbc is a
    # 3-bit mask packed into a single integer.
    dct = {
        'id': values[0],
        'unique_id': values[1],
        'ctime': values[2],
        'mtime': values[3],
        'user': values[4],
        'numbers': deblob(values[5], np.int32),
        'positions': deblob(values[6], shape=(-1, 3)),
        'cell': deblob(values[7], shape=(3, 3)),
        'pbc': (values[8] & np.array([1, 2, 4])).astype(bool)
    }
    # Optional per-atom arrays:
    if values[9] is not None:
        dct['initial_magmoms'] = deblob(values[9])
    if values[10] is not None:
        dct['initial_charges'] = deblob(values[10])
    if values[11] is not None:
        dct['masses'] = deblob(values[11])
    if values[12] is not None:
        dct['tags'] = deblob(values[12], np.int32)
    if values[13] is not None:
        dct['momenta'] = deblob(values[13], shape=(-1, 3))
    if values[14] is not None:
        dct['constraints'] = values[14]
    # Calculator name and its JSON-encoded parameter dict:
    if values[15] is not None:
        dct['calculator'] = values[15]
        dct['calculator_parameters'] = decode(values[16])
    # Optional calculated properties:
    if values[17] is not None:
        dct['energy'] = values[17]
    if values[18] is not None:
        dct['free_energy'] = values[18]
    if values[19] is not None:
        dct['forces'] = deblob(values[19], shape=(-1, 3))
    if values[20] is not None:
        dct['stress'] = deblob(values[20])
    if values[21] is not None:
        dct['dipole'] = deblob(values[21])
    if values[22] is not None:
        dct['magmoms'] = deblob(values[22])
    if values[23] is not None:
        dct['magmom'] = values[23]
    if values[24] is not None:
        dct['charges'] = deblob(values[24])
    # key_value_pairs and data are stored as JSON text; skip the empty
    # sentinels '{}' and 'null'.
    if values[25] != '{}':
        dct['key_value_pairs'] = decode(values[25])
    if len(values) >= 27 and values[26] != 'null':
        dct['data'] = values[26]
    return AtomsRow(dct)
def is_bundle(filename, allowempty=False):
    """Check if a filename exists and is a BundleTrajectory.

    If allowempty=True, an empty folder is regarded as an
    empty BundleTrajectory."""
    if not os.path.isdir(filename):
        return False
    if allowempty and not os.listdir(filename):
        return True  # An empty BundleTrajectory
    metaname = os.path.join(filename, 'metadata.json')
    if os.path.isfile(metaname):
        # JSON metadata (newer bundles).  Use a context manager so the
        # file is closed even if decoding raises.
        with open(metaname, 'r') as fd:
            mdata = jsonio.decode(fd.read())
    else:
        metaname = os.path.join(filename, 'metadata')
        if os.path.isfile(metaname):
            # Legacy pickled metadata.
            with open(metaname, 'rb') as fd:
                mdata = pickle.load(fd)
        else:
            return False
    try:
        return mdata['format'] == 'BundleTrajectory'
    except KeyError:
        return False
def data(self):
    """Data dict.

    Raises AttributeError when no data is attached.  A non-dict payload
    (JSON text) is decoded on first access and cached on the instance.
    """
    if self._data is None:
        raise AttributeError
    if not isinstance(self._data, dict):
        self._data = decode(self._data)  # lazy decoding
    return FancyDict(self._data)
def _read_json(self):
    """Load the whole JSON database.

    Returns a tuple ``(bigdct, ids, nextid)``.  ``self.filename`` may
    be a path (Py2 ``basestring``) or an already-open file-like object.
    """
    if isinstance(self.filename, basestring):
        with open(self.filename) as fd:
            bigdct = decode(fd.read())
    else:
        bigdct = decode(self.filename.read())
        # Rewind so the stream can be read again later (stdin cannot
        # seek).
        if self.filename is not sys.stdin:
            self.filename.seek(0)
    ids = bigdct.get('ids')
    if ids is None:
        # Allow for missing "ids" and "nextid":
        assert 1 in bigdct
        return bigdct, [1], 2
    if not isinstance(ids, list):
        # Presumably an array-like from an older file — TODO confirm.
        ids = ids.tolist()
    return bigdct, ids, bigdct['nextid']
def get_atoms(self) -> Atoms:
    """Decode this object's text payload and return it as an Atoms.

    Raises ValueError if the payload decodes to anything else.
    """
    obj = decode(self.get_text())
    if not isinstance(obj, Atoms):
        typename = type(obj).__name__
        raise ValueError(f'Cannot convert {typename} to Atoms')
    return obj
def __getitem__(self, i=-1):
    """Return the Atoms object for frame *i* (default: last frame)."""
    b = self.backend[i]
    # Static header data (numbers, masses, pbc, celldisp, constraints)
    # comes from self; per-frame data comes from the backend record.
    atoms = Atoms(positions=b.positions,
                  numbers=self.numbers,
                  cell=b.cell,
                  masses=self.masses,
                  pbc=self.pbc,
                  celldisp=self.celldisp,
                  info=b.get('info'),
                  constraint=[dict2constraint(d)
                              for d in decode(self.constraints)],
                  momenta=b.get('momenta'),
                  magmoms=b.get('magmoms'),
                  charges=b.get('charges'),
                  tags=b.get('tags'))
    # Apply tag data stored separately — presumably a project-specific
    # extension; semantics of new_tags not visible here (TODO confirm).
    atoms._readTags(self.new_tags)
    if 'calculator' in b:
        # Rebuild stored results as a SinglePointCalculator.
        results = {}
        c = b.calculator
        for prop in all_properties:
            if prop in c:
                results[prop] = c.get(prop)
        calc = SinglePointCalculator(atoms, **results)
        calc.name = b.calculator.name
        atoms.set_calculator(calc)
    return atoms
def test_jsonio_atoms_info():
    """Integer dict keys in Atoms.info must survive a JSON round trip."""
    original = bulk('Ti')
    original.info['any_name_for_a_dictionary'] = {0: 'anything'}
    restored = decode(encode(original))
    # JSON would normally stringify the key 0; the codec must restore it.
    key = next(iter(restored.info['any_name_for_a_dictionary']))
    assert isinstance(key, int)
def check(obj):
    """Round-trip *obj* through the JSON codec; verify type, shape, value."""
    encoded = encode(obj)
    restored = decode(encoded, always_array=False)
    print(obj, '-->', restored)
    assert type(obj) is type(restored), '{} vs {}'.format(
        type(obj), type(restored))
    assert np.shape(obj) == np.shape(restored)
    assert np.array_equal(obj, restored)
def data(self):
    """Data dict (decoded lazily on first access and cached)."""
    raw = self._data
    if isinstance(raw, str):
        # JSON-encoded text payload.
        self._data = decode(raw)
    elif isinstance(raw, bytes):
        # Binary payload from the database backend.
        from ase.db.core import bytes_to_object
        self._data = bytes_to_object(raw)
    return FancyDict(self._data)
def read(cls, filename):
    """Read from json file."""
    with open(filename, 'r') as fd:
        payload = decode(fd.read())
    # Older BS files stored a plain kwargs dict instead of the object:
    if isinstance(payload, cls):
        return payload
    return cls(**payload)
def Json2Atoms(jsonstring):
    """Read a JSON string and return an Atoms object"""
    from ase.io.jsonio import decode
    from ase.db.row import AtomsRow

    # Decode into a database row, then materialize it as Atoms.
    row = AtomsRow(decode(jsonstring))
    return row.toatoms(attach_calculator=False,
                       add_additional_information=True)
def _read_metadata(self):
    """Read and return the bundle's metadata dict.

    Prefers the JSON metadata file ('metadata.json'); falls back to the
    legacy pickled 'metadata' file.
    """
    assert self.state == 'read'
    metafile = os.path.join(self.filename, 'metadata.json')
    if os.path.exists(metafile):
        # 'with' guarantees the descriptor is closed even if decoding
        # raises (the original code relied on a trailing close).
        with open(metafile, 'r') as f:
            metadata = jsonio.decode(f.read())
    else:
        metafile = os.path.join(self.filename, 'metadata')
        with open(metafile, 'rb') as f:
            metadata = pickle.load(f)
    return metadata
def make_atoms_from_doc(doc):
    """
    This is the inversion function for `make_doc_from_atoms`; it takes
    Mongo documents created by that function and turns them back into
    an ase.Atoms object.

    Args:
        doc     Dictionary/json/Mongo document created by the
                `make_doc_from_atoms` function.
    Returns:
        atoms   ase.Atoms object with an ase.SinglePointCalculator attached
    """
    # decode(json.dumps(...)) round-trips individual fields through the
    # ASE JSON codec so encoded objects (e.g. arrays) are reconstructed.
    atoms = Atoms(
        [
            Atom(
                atom["symbol"],
                decode(json.dumps(atom["position"])),
                tag=atom["tag"],
                momentum=decode(json.dumps(atom["momentum"])),
                magmom=atom["magmom"],
                charge=atom["charge"],
            )
            for atom in doc["atoms"]["atoms"]
        ],
        cell=decode(json.dumps(doc["atoms"]["cell"])),
        pbc=doc["atoms"]["pbc"],
        info=doc["atoms"]["info"],
        constraint=[
            dict2constraint(constraint_dict)
            for constraint_dict in doc["atoms"]["constraints"]
        ],
    )
    # Attach whatever results were stored; missing ones default to None.
    results = doc["results"]
    calc = SinglePointCalculator(
        energy=results.get("energy", None),
        forces=results.get("forces", None),
        stress=results.get("stress", None),
        atoms=atoms,
    )
    atoms.set_calculator(calc)
    return atoms
def _read_json(self):
    """Load the whole JSON database.

    Returns a tuple ``(bigdct, ids, nextid)``.  ``self.filename`` may
    be a path or an already-open file-like object.

    Raises UnknownFileTypeError if the decoded object does not look
    like an ASE JSON database.
    """
    if isinstance(self.filename, str):
        with open(self.filename) as fd:
            bigdct = decode(fd.read())
    else:
        bigdct = decode(self.filename.read())
        # Rewind so the stream can be read again later (stdin cannot
        # seek).
        if self.filename is not sys.stdin:
            self.filename.seek(0)
    # A valid database is a dict with either an 'ids' list or at least
    # row number 1 present.
    if not isinstance(bigdct, dict) or not ('ids' in bigdct
                                            or 1 in bigdct):
        from ase.io.formats import UnknownFileTypeError
        raise UnknownFileTypeError('Does not resemble ASE JSON database')
    ids = bigdct.get('ids')
    if ids is None:
        # Allow for missing "ids" and "nextid":
        assert 1 in bigdct
        return bigdct, [1], 2
    if not isinstance(ids, list):
        # Presumably an array-like from an older file — TODO confirm.
        ids = ids.tolist()
    return bigdct, ids, bigdct['nextid']
def constraints(self):
    """List of constraints.

    On first access, a JSON-encoded payload is decoded and each entry
    is migrated to the new ``{'name': ..., 'kwargs': ...}`` format; the
    migrated list is cached on the instance.
    """
    if not isinstance(self._constraints, list):
        # Lazy decoding:
        cs = decode(self._constraints)
        self._constraints = []
        for c in cs:
            # Convert to new format:
            name = c.pop('__name__', None)
            if name:
                c = {'name': name, 'kwargs': c}
            # Strip a fully-qualified module path down to the class name:
            if c['name'].startswith('ase'):
                c['name'] = c['name'].rsplit('.', 1)[1]
            self._constraints.append(c)
    return [dict2constraint(d) for d in self._constraints]
def _convert_tuple_to_row(self, values):
    """Convert a raw SQL result tuple into an AtomsRow.

    The tuple layout is strictly positional and must match the
    ``systems`` table schema; ``_old2new`` first upgrades rows written
    by older schema versions.  Optional columns are NULL (None) when
    absent.
    """
    values = self._old2new(values)
    # Mandatory columns.  Arrays are stored as binary blobs; pbc is a
    # 3-bit mask packed into a single integer.
    dct = {'id': values[0],
           'unique_id': values[1],
           'ctime': values[2],
           'mtime': values[3],
           'user': values[4],
           'numbers': deblob(values[5], np.int32),
           'positions': deblob(values[6], shape=(-1, 3)),
           'cell': deblob(values[7], shape=(3, 3)),
           'pbc': (values[8] & np.array([1, 2, 4])).astype(bool)}
    # Optional per-atom arrays:
    if values[9] is not None:
        dct['initial_magmoms'] = deblob(values[9])
    if values[10] is not None:
        dct['initial_charges'] = deblob(values[10])
    if values[11] is not None:
        dct['masses'] = deblob(values[11])
    if values[12] is not None:
        dct['tags'] = deblob(values[12], np.int32)
    if values[13] is not None:
        dct['momenta'] = deblob(values[13], shape=(-1, 3))
    if values[14] is not None:
        dct['constraints'] = values[14]
    # Calculator name and parameters (parameters kept as stored here):
    if values[15] is not None:
        dct['calculator'] = values[15]
        dct['calculator_parameters'] = values[16]
    # Optional calculated properties:
    if values[17] is not None:
        dct['energy'] = values[17]
    if values[18] is not None:
        dct['free_energy'] = values[18]
    if values[19] is not None:
        dct['forces'] = deblob(values[19], shape=(-1, 3))
    if values[20] is not None:
        dct['stress'] = deblob(values[20])
    if values[21] is not None:
        dct['dipole'] = deblob(values[21])
    if values[22] is not None:
        dct['magmoms'] = deblob(values[22])
    if values[23] is not None:
        dct['magmom'] = values[23]
    if values[24] is not None:
        dct['charges'] = deblob(values[24])
    # key_value_pairs and data are stored as JSON text; skip the empty
    # sentinels '{}' and 'null'.
    if values[25] != '{}':
        dct['key_value_pairs'] = decode(values[25])
    if values[26] != 'null':
        dct['data'] = values[26]
    return AtomsRow(dct)
def db_read_data(dbname, row_id):
    """Return the decoded 'data' column for one row of an ASE SQLite db.

    Parameters
    ----------
    dbname : str
        Path to the SQLite database file.
    row_id : int or sequence
        Row id to look up.  A bare id is wrapped in a 1-tuple, since
        sqlite3's ``execute`` requires a *sequence* of parameters
        (passing a plain int raises a ProgrammingError).
    """
    import sqlite3
    from ase.io.jsonio import encode, decode
    from ase import Atom, Atoms

    params = row_id if isinstance(row_id, (tuple, list)) else (row_id,)
    db = sqlite3.connect(dbname)
    try:
        cursor = db.cursor()
        # Fetch "data" from the database
        cursor.execute("SELECT data FROM systems WHERE id=?", params)
        row = cursor.fetchone()
        data = decode(row[0])
    finally:
        # No commit needed for a read-only SELECT; just close cleanly.
        db.close()
    return data
def is_bundle(filename, allowempty=False):
    """Check if a filename exists and is a BundleTrajectory.

    If allowempty=True, an empty folder is regarded as an
    empty BundleTrajectory."""
    path = Path(filename)
    if not path.is_dir():
        return False
    if allowempty and not os.listdir(path):
        # An empty directory counts as an empty bundle.
        return True
    metaname = path / 'metadata.json'
    if not metaname.is_file():
        return False
    mdata = jsonio.decode(metaname.read_text())
    try:
        return mdata['format'] == 'BundleTrajectory'
    except KeyError:
        return False
def __init__(self, dct):
    """Initialize the row from a plain dict or an Atoms-like object."""
    if isinstance(dct, dict):
        dct = dct.copy()
        if 'calculator_parameters' in dct:
            # Earlier version of ASE would encode the calculator
            # parameter dict again and again and again ...
            while isinstance(dct['calculator_parameters'], basestring):
                dct['calculator_parameters'] = decode(
                    dct['calculator_parameters'])
    else:
        dct = atoms2dict(dct)
    self._constraints = dct.pop('constraints', [])
    self._data = dct.pop('data', None)
    key_value_pairs = dct.pop('key_value_pairs', {})
    self._keys = list(key_value_pairs)
    self.__dict__.update(key_value_pairs)
    self.__dict__.update(dct)
    if 'cell' not in dct:
        # No cell stored: default to an empty, non-periodic cell.
        self.cell = np.zeros((3, 3))
        self.pbc = np.zeros(3, bool)
def read_atoms(backend,
               header: Tuple = None,
               traj: TrajectoryReader = None,
               _try_except: bool = True) -> Atoms:
    """Build an Atoms object from a trajectory backend record.

    On the first call (_try_except=True) the real work is delegated to
    a recursive call with _try_except=False; any failure there is
    re-raised as VersionTooOldError when the file was written by a
    newer ASE than the one reading it, otherwise re-raised unchanged.
    """
    if _try_except:
        try:
            return read_atoms(backend, header, traj, False)
        except Exception as ex:
            from distutils.version import LooseVersion
            if (traj is not None
                    and LooseVersion(__version__) < traj.ase_version):
                msg = ('You are trying to read a trajectory file written ' +
                       'with ASE-{v1} from ASE-{v2}. ' +
                       'It might help to update your ASE').format(
                           v1=traj.ase_version, v2=__version__)
                raise VersionTooOldError(msg) from ex
            else:
                raise
    b = backend
    # Static header data may be supplied once for the whole trajectory;
    # otherwise it is read from this backend record.
    if header:
        pbc, numbers, masses, constraints = header
    else:
        pbc = b.pbc
        numbers = b.numbers
        masses = b.get('masses')
        constraints = b.get('constraints', '[]')  # JSON-encoded list
    atoms = Atoms(positions=b.positions,
                  numbers=numbers,
                  cell=b.cell,
                  masses=masses,
                  pbc=pbc,
                  info=b.get('info'),
                  constraint=[dict2constraint(d)
                              for d in decode(constraints)],
                  momenta=b.get('momenta'),
                  magmoms=b.get('magmoms'),
                  charges=b.get('charges'),
                  tags=b.get('tags'))
    return atoms
def is_bundle(filename):
    """Check if a filename exists and is a BundleTrajectory."""
    if not os.path.isdir(filename):
        return False
    metaname = os.path.join(filename, 'metadata.json')
    if os.path.isfile(metaname):
        # JSON metadata (newer bundles).  'with' closes the file even
        # if decoding raises.
        with open(metaname, 'r') as fd:
            mdata = jsonio.decode(fd.read())
    else:
        metaname = os.path.join(filename, 'metadata')
        if os.path.isfile(metaname):
            # Legacy pickled metadata.
            with open(metaname, 'rb') as fd:
                mdata = pickle.load(fd)
        else:
            return False
    try:
        return mdata['format'] == 'BundleTrajectory'
    except KeyError:
        return False
def __init__(self, dct):
    """Initialize the row from a plain dict or an Atoms-like object."""
    if isinstance(dct, dict):
        dct = dct.copy()
        if 'calculator_parameters' in dct:
            # Earlier version of ASE would encode the calculator
            # parameter dict again and again and again ...
            while isinstance(dct['calculator_parameters'], basestring):
                dct['calculator_parameters'] = decode(
                    dct['calculator_parameters'])
    else:
        dct = atoms2dict(dct)
    assert 'numbers' in dct
    self._constraints = dct.pop('constraints', [])
    self._data = dct.pop('data', None)
    key_value_pairs = dct.pop('key_value_pairs', {})
    self._keys = list(key_value_pairs)
    self.__dict__.update(key_value_pairs)
    self.__dict__.update(dct)
    if 'cell' not in dct:
        # No cell stored: default to an empty, non-periodic cell.
        self.cell = np.zeros((3, 3))
        self.pbc = np.zeros(3, bool)
def read_atoms(backend, header=None):
    """Build an Atoms object from a trajectory backend record.

    Static header data (pbc, numbers, masses, constraints) may be
    supplied once via *header*; otherwise it is read from the record.
    """
    if header:
        pbc, numbers, masses, constraints = header
    else:
        pbc = backend.pbc
        numbers = backend.numbers
        masses = backend.get('masses')
        constraints = backend.get('constraints', '[]')
    constraint_objects = [dict2constraint(d) for d in decode(constraints)]
    return Atoms(positions=backend.positions,
                 numbers=numbers,
                 cell=backend.cell,
                 masses=masses,
                 pbc=pbc,
                 info=backend.get('info'),
                 constraint=constraint_objects,
                 momenta=backend.get('momenta'),
                 magmoms=backend.get('magmoms'),
                 charges=backend.get('charges'),
                 tags=backend.get('tags'))
def toatoms(self, attach_calculator=False,
            add_additional_information=False):
    """Create Atoms object.

    attach_calculator: recreate the real calculator from its stored
    name and parameters instead of wrapping the stored results in a
    SinglePointCalculator.
    add_additional_information: copy unique_id, key_value_pairs and
    data into ``atoms.info``.
    """
    atoms = Atoms(self.numbers,
                  self.positions,
                  cell=self.cell,
                  pbc=self.pbc,
                  magmoms=self.get('initial_magmoms'),
                  charges=self.get('initial_charges'),
                  tags=self.get('tags'),
                  masses=self.get('masses'),
                  momenta=self.get('momenta'),
                  constraint=self.constraints)
    if attach_calculator:
        # Parameters are stored as JSON text; '{}' when absent.
        params = decode(self.get('calculator_parameters', '{}'))
        atoms.calc = get_calculator(self.calculator)(**params)
    else:
        # Collect whichever standard properties this row carries.
        results = {}
        for prop in all_properties:
            if prop in self:
                results[prop] = self[prop]
        if results:
            atoms.calc = SinglePointCalculator(atoms, **results)
            atoms.calc.name = self.calculator
    if add_additional_information:
        atoms.info = {}
        atoms.info['unique_id'] = self.unique_id
        if self._keys:
            atoms.info['key_value_pairs'] = self.key_value_pairs
        data = self.get('data')
        if data:
            atoms.info['data'] = data
    return atoms
def __getitem__(self, i=-1):
    """Return the Atoms object for frame *i* (default: last frame)."""
    b = self.backend[i]
    # Static header data (numbers, masses, pbc, constraints) comes from
    # self; per-frame data comes from the backend record.
    atoms = Atoms(
        positions=b.positions,
        numbers=self.numbers,
        cell=b.cell,
        masses=self.masses,
        pbc=self.pbc,
        info=b.get('info'),
        constraint=[dict2constraint(d)
                    for d in decode(self.constraints)],
        momenta=b.get('momenta'),
        magmoms=b.get('magmoms'),
        charges=b.get('charges'),
        tags=b.get('tags'))
    if 'calculator' in b:
        # Rebuild stored results as a SinglePointCalculator.
        results = {}
        c = b.calculator
        for prop in all_properties:
            if prop in c:
                results[prop] = c.get(prop)
        calc = SinglePointCalculator(atoms, **results)
        calc.name = b.calculator.name
        atoms.set_calculator(calc)
    return atoms
def print_bundletrajectory_info(filename):
    """Prints information about a BundleTrajectory.

    Mainly intended to be called from a command line tool.
    """
    if not BundleTrajectory.is_bundle(filename):
        raise ValueError('Not a BundleTrajectory!')
    if BundleTrajectory.is_empty_bundle(filename):
        print(filename, 'is an empty BundleTrajectory.')
        return
    # Read the metadata
    fn = os.path.join(filename, 'metadata.json')
    if os.path.exists(fn):
        with open(fn, 'r') as f:
            metadata = jsonio.decode(f.read())
    else:
        fn = os.path.join(filename, 'metadata')
        with open(fn, 'rb') as f:
            metadata = pickle.load(f)
    print('Metadata information of BundleTrajectory "%s":' % (filename, ))
    for k, v in metadata.items():
        if k != 'datatypes':
            print(" %s: %s" % (k, v))
    with open(os.path.join(filename, 'frames'), 'rb') as f:
        nframes = int(f.read())
    print('Number of frames: %i' % (nframes, ))
    print('Data types:')
    for k, v in metadata['datatypes'].items():
        if v == 'once':
            print(' %s: First frame only.' % (k, ))
        elif v:
            print(' %s: All frames.' % (k, ))
    # Look at first frame
    if metadata['backend'] == 'pickle':
        backend = PickleBundleBackend(True)
    elif metadata['backend'] == 'ulm':
        backend = UlmBundleBackend(True, False)
    else:
        raise NotImplementedError('Backend %s not supported.'
                                  % (metadata['backend'], ))
    frame = os.path.join(filename, 'F0')
    small = backend.read_small(frame)
    print('Contents of first frame:')
    for k, v in small.items():
        if k == 'constraints':
            if v:
                # Bug fix: format string previously had no % argument,
                # printing the literal ' %i constraints are present'.
                print(' %i constraints are present' % len(v))
            else:
                print(' Constraints are absent.')
        elif k == 'pbc':
            print(' Periodic boundary conditions: %s' % (str(v), ))
        elif k == 'natoms':
            print(' Number of atoms: %i' % (v, ))
        elif hasattr(v, 'shape'):
            print(' %s: shape = %s, type = %s' %
                  (k, str(v.shape), str(v.dtype)))
            if k == 'cell':
                print(' [[%12.6f, %12.6f, %12.6f],' % tuple(v[0]))
                print(' [%12.6f, %12.6f, %12.6f],' % tuple(v[1]))
                print(' [%12.6f, %12.6f, %12.6f]]' % tuple(v[2]))
        else:
            print(' %s: %s' % (k, str(v)))
    # Read info from separate files.
    if metadata['subtype'] == 'split':
        nsplit = small['fragments']
    else:
        nsplit = False
    for k, v in metadata['datatypes'].items():
        if v and k not in small:
            info = backend.read_info(frame, k, nsplit)
            infoline = ' %s: ' % (k, )
            # Use distinct names so the outer loop's k, v are not
            # clobbered (they previously were).
            for name, value in info.items():
                infoline += '%s = %s, ' % (name, str(value))
            infoline = infoline[:-2] + '.'  # Fix punctuation.
            print(infoline)
def read(filename):
    """Read from json file."""
    with open(filename, 'r') as fd:
        contents = fd.read()
    return BandStructure(**decode(contents))
def _read_data(self, index):
    """Read and decode the JSON data record at frame *index*."""
    fd = self._fd
    fd.seek(self._offsets[index])
    # The record is prefixed with its byte length.
    (size,) = readints(fd, 1)
    return decode(fd.read(size).decode())
"""Round-trip a bulk Ti Atoms object through the ASE JSON codec."""
from ase.build import bulk
from ase.io.jsonio import encode, decode

atoms = bulk('Ti')
print(atoms)

# txt = encode({1:2, 3:4, 'hello': atoms})
encoded = encode(atoms)
print(encoded)
decoded = decode(encoded)
print(decoded)

# Re-encoding must reproduce the text, and the objects must compare equal.
assert encode(decoded) == encoded
assert atoms == decoded
def _read_data(self, index):
    """Read one data record; strips and stores the endianness flag."""
    fd = self._fd
    fd.seek(self._offsets[index])
    size = int(readints(fd, 1)[0])
    record = decode(fd.read(size).decode(), False)
    # '_little_endian' is metadata, not payload; default is little-endian.
    self._little_endian = record.pop('_little_endian', True)
    return record
def _read_metadata(self):
    """Read the metadata."""
    assert self.state == 'read'
    text = self.metadata_path.read_text()
    return jsonio.decode(text)
def print_bundletrajectory_info(filename):
    """Prints information about a BundleTrajectory.

    Mainly intended to be called from a command line tool.
    """
    if not BundleTrajectory.is_bundle(filename):
        raise ValueError('Not a BundleTrajectory!')
    if BundleTrajectory.is_empty_bundle(filename):
        print(filename, 'is an empty BundleTrajectory.')
        return
    # Read the metadata
    fn = os.path.join(filename, 'metadata.json')
    if os.path.exists(fn):
        with open(fn, 'r') as f:
            metadata = jsonio.decode(f.read())
    else:
        fn = os.path.join(filename, 'metadata')
        with open(fn, 'rb') as f:
            metadata = pickle.load(f)
    print('Metadata information of BundleTrajectory "%s":' % (filename,))
    for k, v in metadata.items():
        if k != 'datatypes':
            print(" %s: %s" % (k, v))
    with open(os.path.join(filename, 'frames'), 'rb') as f:
        nframes = int(f.read())
    print('Number of frames: %i' % (nframes,))
    print('Data types:')
    for k, v in metadata['datatypes'].items():
        if v == 'once':
            print(' %s: First frame only.' % (k,))
        elif v:
            print(' %s: All frames.' % (k,))
    # Look at first frame
    if metadata['backend'] == 'pickle':
        backend = PickleBundleBackend(True)
    elif metadata['backend'] == 'ulm':
        backend = UlmBundleBackend(True, False)
    else:
        raise NotImplementedError('Backend %s not supported.'
                                  % (metadata['backend'],))
    frame = os.path.join(filename, 'F0')
    small = backend.read_small(frame)
    print('Contents of first frame:')
    for k, v in small.items():
        if k == 'constraints':
            if v:
                # Bug fix: format string previously had no % argument,
                # printing the literal ' %i constraints are present'.
                print(' %i constraints are present' % len(v))
            else:
                print(' Constraints are absent.')
        elif k == 'pbc':
            print(' Periodic boundary conditions: %s' % (str(v),))
        elif k == 'natoms':
            print(' Number of atoms: %i' % (v,))
        elif hasattr(v, 'shape'):
            print(' %s: shape = %s, type = %s' %
                  (k, str(v.shape), str(v.dtype)))
            if k == 'cell':
                print(' [[%12.6f, %12.6f, %12.6f],' % tuple(v[0]))
                print(' [%12.6f, %12.6f, %12.6f],' % tuple(v[1]))
                print(' [%12.6f, %12.6f, %12.6f]]' % tuple(v[2]))
        else:
            print(' %s: %s' % (k, str(v)))
    # Read info from separate files.
    if metadata['subtype'] == 'split':
        nsplit = small['fragments']
    else:
        nsplit = False
    for k, v in metadata['datatypes'].items():
        if v and k not in small:
            info = backend.read_info(frame, k, nsplit)
            infoline = ' %s: ' % (k,)
            # Use distinct names so the outer loop's k, v are not
            # clobbered (they previously were).
            for name, value in info.items():
                infoline += '%s = %s, ' % (name, str(value))
            infoline = infoline[:-2] + '.'  # Fix punctuation.
            print(infoline)
def _read_data(self, index):
    """Decode the JSON record stored at frame *index*."""
    fd = self._fd
    fd.seek(self._offsets[index])
    # The record is prefixed with its byte length.
    size = int(readints(fd, 1)[0])
    payload = fd.read(size).decode()
    return decode(payload)
"""Test serialization of ndarrays and other stuff.""" import numpy as np from ase.io.jsonio import encode, decode assert decode(encode(np.int64(42))) == 42 c = np.array([0.1j]) assert (decode(encode(c)) == c).all()
def _read_data(self, index):
    """Read the JSON-encoded data record at *index* from the file."""
    fd = self._fd
    fd.seek(self._offsets[index])
    # The record is prefixed with its byte length (a single int64).
    size = np.fromfile(fd, np.int64, 1)[0]
    return decode(fd.read(size).decode())
def read(self, filename):
    """Read from json file."""
    with open(filename, 'r') as fd:
        state = decode(fd.read())
    # Restore the decoded attributes onto this instance.
    self.__dict__.update(state)