Code Example #1
File: options.py  Project: pymontecarlo/pymontecarlo
def _write_detectors(self, options, element):
    subelement = etree.SubElement(element, "detectors")
    for key, detectors in options.detectors.items():
        for detector in np.array(detectors, ndmin=1):
            handler = find_convert_handler("pymontecarlo.fileformat.options.detector", detector)
            subsubelement = handler.convert(detector)
            subsubelement.set("_key", key)
            subelement.append(subsubelement)
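In `_write_detectors`, the call `np.array(detectors, ndmin=1)` normalizes a value that may be either a single detector object or a sequence of detectors into a one-dimensional array, so the inner loop handles both cases uniformly (the same trick appears in `_write_geometries` and `_write_beams` below). A minimal sketch of that normalization pattern, using a placeholder class rather than pymontecarlo's detector types:

import numpy as np

class FakeDetector:
    # Placeholder standing in for any detector-like object.
    pass

def as_sequence(value):
    # ndmin=1 wraps a lone object in a length-1 array and leaves an
    # existing sequence untouched, so callers may pass either form.
    return np.array(value, ndmin=1)

print(len(as_sequence(FakeDetector())))                   # 1
print(len(as_sequence([FakeDetector(), FakeDetector()]))) # 2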
Code Example #2
File: results.py  Project: pymontecarlo/pymontecarlo
def append(results, filepath):
    with FileLock(filepath), h5py.File(filepath, 'r+') as hdf5file:
        # Check UUID of base options
        source = BytesIO(hdf5file.attrs['options'])
        reader = OptionsReader()
        reader.read(source)
        options = reader.get()

        if options.uuid != results.options.uuid:
            raise ValueError('UUID of base options do not match: %s != %s' % \
                             (options.uuid, results.options.uuid))

        # Save results
        identifiers = np.array(hdf5file.attrs['identifiers'], 'U').tolist()
        for container in results:
            identifier = container.options.uuid
            identifiers.append(identifier)

            group = hdf5file.create_group('result-' + identifier)

            # Save each result
            for key, result in container.items():
                subgroup = group.create_group(key)
                handler = find_convert_handler('pymontecarlo.fileformat.results.result',
                                               result, subgroup)
                handler.convert(result, subgroup)

            # Save options
            writer = OptionsWriter()
            writer.convert(container.options)
            element = writer.get()
            group.attrs['options'] = etree.tostring(element)

        # Update identifiers
        del hdf5file.attrs['identifiers']
        hdf5file.attrs.create('identifiers', identifiers,
                              dtype=h5py.special_dtype(vlen=str))
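The `append` function follows a lock, check, write pattern: acquire a file lock, open the HDF5 file in 'r+' mode, verify that the stored options UUID matches the incoming results, write one group per result container, and finally rewrite the `identifiers` attribute as a variable-length string array. Because an HDF5 attribute cannot be resized in place, it is deleted and recreated. A minimal, self-contained sketch of that last step with plain h5py (the file name and identifier value are illustrative, not from the project):

import h5py

with h5py.File('results.h5', 'a') as hdf5file:
    # Read back the existing identifier list; it may be absent on first write.
    existing = [x.decode() if isinstance(x, bytes) else str(x)
                for x in hdf5file.attrs.get('identifiers', [])]
    existing.append('new-identifier')

    # Delete and recreate the attribute with a variable-length string dtype,
    # mirroring the end of append().
    if 'identifiers' in hdf5file.attrs:
        del hdf5file.attrs['identifiers']
    hdf5file.attrs.create('identifiers', existing,
                          dtype=h5py.special_dtype(vlen=str))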
Code Example #3
File: options.py  Project: pymontecarlo/pymontecarlo
def _write_models(self, options, element):
    subelement = etree.SubElement(element, "models")
    for model in options.models:
        handler = find_convert_handler("pymontecarlo.fileformat.options.model", model)
        subelement.append(handler.convert(model))
Code Example #4
File: options.py  Project: pymontecarlo/pymontecarlo
def _write_limits(self, options, element):
    subelement = etree.SubElement(element, "limits")
    for limit in options.limits:
        handler = find_convert_handler("pymontecarlo.fileformat.options.limit", limit)
        subelement.append(handler.convert(limit))
Code Example #5
File: options.py  Project: pymontecarlo/pymontecarlo
def _write_geometries(self, options, element):
    subelement = etree.SubElement(element, "geometry")
    for geometry in np.array(options.geometry, ndmin=1):
        handler = find_convert_handler("pymontecarlo.fileformat.options.geometry", geometry)
        subelement.append(handler.convert(geometry))
Code Example #6
File: options.py  Project: pymontecarlo/pymontecarlo
def _write_beams(self, options, element):
    subelement = etree.SubElement(element, "beam")
    for beam in np.array(options.beam, ndmin=1):
        handler = find_convert_handler("pymontecarlo.fileformat.options.beam", beam)
        subelement.append(handler.convert(beam))
Code Example #7
File: results.py  Project: pymontecarlo/pymontecarlo
def _write_result(self, result, group):
    handler = find_convert_handler('pymontecarlo.fileformat.results.result', result)
    handler.convert(result, group)
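Examples #3 through #7 all share one shape: look up a conversion handler for the object's concrete type under a dotted group name, then let that handler serialize the object (into an XML element or an HDF5 group). The sketch below shows one plausible way such a type-keyed registry can work; it is not pymontecarlo's actual `find_convert_handler`, which resolves handlers through registered entry points, and the names here are illustrative only.

_HANDLERS = {}  # (group name, type) -> handler function

def register_handler(group, cls, handler):
    _HANDLERS[(group, cls)] = handler

def find_handler(group, obj):
    # Walk the MRO so a subclass falls back to its parent's handler.
    for cls in type(obj).__mro__:
        handler = _HANDLERS.get((group, cls))
        if handler is not None:
            return handler
    raise ValueError('No handler registered for %r in group %r' % (obj, group))

class Model(object):
    name = 'elastic cross section'

def convert_model(model):
    return '<model name="%s" />' % model.name

register_handler('options.model', Model, convert_model)
handler = find_handler('options.model', Model())
print(handler(Model()))  # <model name="elastic cross section" />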
Code Example #8
File: updater.py  Project: pymontecarlo/pymontecarlo
    def _update_version3(self, filepath):
        logging.debug('Updating from "version 3"')

        manager = {}

        def _load_photonintensity(zipfile, key):
            fp = zipfile.open(key + '.csv', 'r')
            reader = csv.reader(StringIO(fp.read().decode('ascii')))
            next(reader)

            intensities = {}
            for row in reader:
                transition = from_string(row[0])
                # skip row[1] (energy)

                intensities[PhotonKey(transition, False, PhotonKey.P)] = \
                    (float(row[6]), float(row[7]))
                intensities[PhotonKey(transition, False, PhotonKey.C)] = \
                    (float(row[2]), float(row[3]))
                intensities[PhotonKey(transition, False, PhotonKey.B)] = \
                    (float(row[4]), float(row[5]))
                intensities[PhotonKey(transition, False, PhotonKey.T)] = \
                    (float(row[8]), float(row[9]))
                intensities[PhotonKey(transition, True, PhotonKey.P)] = \
                    (float(row[14]), float(row[15]))
                intensities[PhotonKey(transition, True, PhotonKey.C)] = \
                    (float(row[10]), float(row[11]))
                intensities[PhotonKey(transition, True, PhotonKey.B)] = \
                    (float(row[12]), float(row[13]))
                intensities[PhotonKey(transition, True, PhotonKey.T)] = \
                    (float(row[16]), float(row[17]))

            return PhotonIntensityResult(intensities)

        manager['PhotonIntensityResult'] = _load_photonintensity

        def _load_photonspectrum(zipfile, key):
            fp = zipfile.open(key + '.csv', 'r')
            reader = csv.reader(StringIO(fp.read().decode('ascii')))
            next(reader)

            energies_eV = []
            total_val = []
            total_unc = []
            background_val = []
            background_unc = []
            for row in reader:
                energies_eV.append(float(row[0]))
                total_val.append(float(row[1]))
                total_unc.append(float(row[2]))
                background_val.append(float(row[3]))
                background_unc.append(float(row[4]))

            total = np.array([energies_eV, total_val, total_unc]).T
            background = np.array([energies_eV, background_val, background_unc]).T

            return PhotonSpectrumResult(total, background)

        manager['PhotonSpectrumResult'] = _load_photonspectrum

        def _load_phirhoz(zipfile, key):
            # Find all phi-rho-z files
            arcnames = [name for name in zipfile.namelist() if name.startswith(key)]

            # Read files
            distributions = {}
            for arcname in arcnames:
                parts = os.path.splitext(arcname)[0].split('+')
                transition = from_string(parts[-2].replace('_', ' '))
                suffix = parts[-1]

                absorption = suffix.startswith('e')
                if suffix[1:] == 'nf':
                    flag = PhotonKey.PRIMARY
                elif suffix[1:] == 'cf':
                    flag = PhotonKey.CHARACTERISTIC_FLUORESCENCE
                elif suffix[1:] == 'bf':
                    flag = PhotonKey.BREMSSTRAHLUNG_FLUORESCENCE
                elif suffix[1:] == 't':
                    flag = PhotonKey.TOTAL
                elif suffix[1:] == 'f':
                    flag = PhotonKey.FLUORESCENCE
                key = PhotonKey(transition, absorption, flag)

                fp = zipfile.open(arcname, 'r')
                reader = csv.reader(StringIO(fp.read().decode('ascii')))
                next(reader)

                zs = []
                values = []
                uncs = []
                for row in reader:
                    zs.append(float(row[0]))
                    values.append(float(row[1]))
                    uncs.append(float(row[2]))

                datum = np.array([zs, values, uncs]).T
                distributions[key] = datum

            return PhotonDepthResult(distributions)

        manager['PhiRhoZResult'] = _load_phirhoz

        def _load_time(zipfile, key):
            element = xmlutil.parse(zipfile.open(key + '.xml', 'r'))

            child = element.find('time')
            if child is not None:
                simulation_time = float(child.get('val', 0.0))
            else:
                simulation_time = 0.0

            child = element.find('speed')
            if child is not None:
                simulation_speed = \
                    float(child.get('val', 0.0)), float(child.get('unc', 0.0))
            else:
                simulation_speed = (0.0, 0.0)

            return TimeResult(simulation_time, simulation_speed)

        manager['TimeResult'] = _load_time

        def _load_showersstatistics(zipfile, key):
            element = xmlutil.parse(zipfile.open(key + '.xml', 'r').read())

            child = element.find('showers')
            if child is not None:
                showers = float(child.get('val', 0))
            else:
                showers = 0

            return ShowersStatisticsResult(showers)

        manager['ShowersStatisticsResult'] = _load_showersstatistics

        def _load_electronfraction(zipfile, key):
            element = xmlutil.parse(zipfile.open(key + '.xml', 'r'))

            child = element.find('absorbed')
            if child is not None:
                absorbed = \
                    float(child.get('val', 0.0)), float(child.get('unc', 0.0))
            else:
                absorbed = (0.0, 0.0)

            child = element.find('backscattered')
            if child is not None:
                backscattered = \
                    float(child.get('val', 0.0)), float(child.get('unc', 0.0))
            else:
                backscattered = (0.0, 0.0)

            child = element.find('transmitted')
            if child is not None:
                transmitted = \
                    float(child.get('val', 0.0)), float(child.get('unc', 0.0))
            else:
                transmitted = (0.0, 0.0)

            return ElectronFractionResult(absorbed, backscattered, transmitted)

        manager['ElectronFractionResult'] = _load_electronfraction

        def _load_trajectory(zipfile, key):
            tmpdir = tempfile.mkdtemp()
            filename = key + '.h5'
            zipfile.extract(filename, tmpdir)

            hdf5file = h5py.File(os.path.join(tmpdir, filename))

            particles_ref = list(PARTICLES)
            particles_ref = dict(zip(map(str, particles_ref), particles_ref))

            collisions_ref = list(COLLISIONS)
            collisions_ref = dict(zip(map(str, collisions_ref), collisions_ref))

            trajectories = []

            for dataset in hdf5file['trajectories'].values():
                primary = bool(dataset.attrs['primary'])
                particle = particles_ref.get(dataset.attrs['particle'].decode('ascii'))
                collision = collisions_ref.get(dataset.attrs['collision'].decode('ascii'))
                exit_state = int(dataset.attrs['exit_state'])
                interactions = dataset[:]

                trajectory = Trajectory(primary, particle, collision,
                                        exit_state, interactions)
                trajectories.append(trajectory)

            hdf5file.close()
            shutil.rmtree(tmpdir, ignore_errors=True)

            return TrajectoryResult(trajectories)

        manager['TrajectoryResult'] = _load_trajectory

        def _load_backscatteredelectronenergy(zipfile, key):
            data = np.loadtxt(zipfile.open(key + '.csv', 'r'), delimiter=',')
            return BackscatteredElectronEnergyResult(data)

        manager['BackscatteredElectronEnergyResult'] = _load_backscatteredelectronenergy

        def _load_transmittedelectronenergy(zipfile, key):
            data = np.loadtxt(zipfile.open(key + '.csv', 'r'), delimiter=',')
            return TransmittedElectronEnergyResult(data)

        manager['TransmittedElectronEnergyResult'] = _load_transmittedelectronenergy

        # Create HDF5
        newfilepath = os.path.splitext(filepath)[0] + '.h5'
        hdf5file = h5py.File(newfilepath, 'w')
        hdf5file.attrs['version'] = b'4'

        zipfile = ZipFile(filepath, 'r', allowZip64=True)

        ## Read options
        try:
            zipinfo = zipfile.getinfo(OPTIONS_FILENAME)
        except KeyError:
            raise IOError("Zip file (%s) does not contain a %s" % \
                          (filepath, OPTIONS_FILENAME))
        with zipfile.open(zipinfo, 'r') as fp:
            source = fp.read()
        source = _update_options(source)
        hdf5file.attrs['options'] = source

        ## Parse keys.ini
        try:
            zipinfo = zipfile.getinfo(KEYS_INI_FILENAME)
        except KeyError:
            raise IOError("Zip file (%s) does not contain a %s" % \
                          (filepath, KEYS_INI_FILENAME))

        config = ConfigParser()
        config.read(StringIO(zipfile.open(zipinfo, 'r').read().decode('ascii')))

        ## Load each results
        items = list(getattr(config, SECTION_KEYS))

        for key, tag in items:
            loader = manager[tag]
            result = loader(zipfile, key)

            handler = find_convert_handler('pymontecarlo.fileformat.results.result', result)

            group = hdf5file.create_group(key)
            handler.convert(result, group)

        zipfile.close()
        hdf5file.close()

        # Create raw ZIP
        oldzip = ZipFile(filepath, 'r')

        if any([filename.startswith('raw/') for filename in oldzip.namelist()]):
            zipfilepath = os.path.splitext(filepath)[0] + '_raw.zip'
            newzip = ZipFile(zipfilepath, 'w', compression=ZIP_DEFLATED)

            for filename in oldzip.namelist():
                if not filename.startswith('raw/'): continue
                data = oldzip.read(filename)
                newzip.writestr(filename[4:], data)

            newzip.close()

        oldzip.close()

        return self._update_version4(newfilepath)
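The last part of `_update_version3` re-packs any `raw/` entries from the old ZIP archive into a separate `*_raw.zip` with the prefix stripped. Below is a stand-alone version of just that step, using only the standard-library `zipfile` module; the helper name `repack_raw` is mine, not the project's.

import os
from zipfile import ZipFile, ZIP_DEFLATED

def repack_raw(filepath):
    # Copy every raw/ entry from the old archive into <name>_raw.zip,
    # dropping the raw/ prefix; return the new path, or None if there
    # were no raw entries.
    with ZipFile(filepath, 'r') as oldzip:
        names = [n for n in oldzip.namelist() if n.startswith('raw/')]
        if not names:
            return None
        zipfilepath = os.path.splitext(filepath)[0] + '_raw.zip'
        with ZipFile(zipfilepath, 'w', compression=ZIP_DEFLATED) as newzip:
            for name in names:
                newzip.writestr(name[len('raw/'):], oldzip.read(name))
    return zipfilepath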