Example #1
def RePack(output_file, input_files):
    """Write a new data pack to |output_file| based on a list of filenames
  (|input_files|)"""
    resources = {}
    encoding = None
    for filename in input_files:
        new_content = ReadDataPack(filename)

        # Make sure we have no dups.
        duplicate_keys = set(new_content.resources.keys()) & set(
            resources.keys())
        if duplicate_keys:
            raise exceptions.KeyError("Duplicate keys: " +
                                      str(list(duplicate_keys)))

        # Make sure encoding is consistent.
        if encoding in (None, BINARY):
            encoding = new_content.encoding
        elif new_content.encoding not in (BINARY, encoding):
            raise exceptions.KeyError("Inconsistent encodings: " +
                                      str(encoding) + " vs " +
                                      str(new_content.encoding))

        resources.update(new_content.resources)

    # Encoding is 0 for BINARY, 1 for UTF8 and 2 for UTF16
    if encoding is None:
        encoding = BINARY
    WriteDataPack(resources, output_file, encoding)
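
A minimal usage sketch for Example #1. ReadDataPack, WriteDataPack and the encoding constants come from the surrounding module (Chromium grit's data_pack, by the look of it); the .pak filenames here are hypothetical:

# Hypothetical: merge two .pak files into one.
# Raises exceptions.KeyError on duplicate resource IDs or mixed encodings.
RePack('combined.pak', ['en-US.pak', 'extra_resources.pak'])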
Example #2
def RePackFromDataPackStrings(inputs,
                              whitelist,
                              suppress_removed_key_output=False):
    """Returns a data pack string that combines the resources from inputs.

  Args:
      inputs: a list of data pack strings that need to be combined.
      whitelist: a list of resource IDs that should be kept in the output string
                 or None to include all resources.
      suppress_removed_key_output: Do not print removed keys.

  Returns:
      DataPackContents: a tuple containing the new combined data pack and its
                        encoding.

  Raises:
      KeyError: if there are duplicate keys or resource encoding is
      inconsistent.
  """
    resources = {}
    encoding = None
    for content in inputs:
        # Make sure we have no dups.
        duplicate_keys = set(content.resources.keys()) & set(resources.keys())
        if duplicate_keys:
            raise exceptions.KeyError('Duplicate keys: ' +
                                      str(list(duplicate_keys)))

        # Make sure encoding is consistent.
        if encoding in (None, BINARY):
            encoding = content.encoding
        elif content.encoding not in (BINARY, encoding):
            raise exceptions.KeyError('Inconsistent encodings: ' +
                                      str(encoding) + ' vs ' +
                                      str(content.encoding))

        if whitelist:
            whitelisted_resources = {
                key: content.resources[key]
                for key in content.resources if key in whitelist
            }
            resources.update(whitelisted_resources)
            removed_keys = [
                key for key in content.resources.keys() if key not in whitelist
            ]
            if not suppress_removed_key_output:
                for key in removed_keys:
                    print 'RePackFromDataPackStrings Removed Key:', key
        else:
            resources.update(content.resources)

    # Encoding is 0 for BINARY, 1 for UTF8 and 2 for UTF16
    if encoding is None:
        encoding = BINARY
    return DataPackContents(resources, encoding)
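
A hedged driver for Example #2, assuming each input is a DataPackContents-style object with .resources (a dict keyed by integer resource ID) and .encoding, and that ReadDataPack (see Example #1) produces such objects:

# Keep only resource IDs 100 and 101; everything else is dropped and printed.
packs = [ReadDataPack('a.pak'), ReadDataPack('b.pak')]
combined = RePackFromDataPackStrings(packs, whitelist=[100, 101])
print 'kept ids:', sorted(combined.resources.keys())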
Example #3
    def readNameList(self, params, fail=False, verbose=False):
        """
        Read parameter value from registered namelist
        :param fail: If True fail if param not found
        :param verbose (Optional -- default False). Provide verbose information.
        :param params -- a list of parameters.
        :example self.readNameList(['RHCRIT', 'VF1'])
        :return:An OrderedDict with the values indexed by the param names
        """

        result = collections.OrderedDict()
        for param in params:
            if param in self._metaFn:  # have a meta function -- takes priority.
                result[param] = self.readMetaNameList(param, verbose=verbose)
            elif param in self._convNameList:  # in the conversion index
                nlValue = self.readNameListVar(self._convNameList[param],
                                               verbose=verbose)
                if len(nlValue) != 1:
                    raise exceptions.ValueError("Should only have one key")
                for k in nlValue.keys():
                    # only one key here; we want its value, not a one-element dict
                    result[param] = nlValue[k]
            elif fail:  # not found and asked to fail
                raise exceptions.KeyError("Param %s not found" % param)
        return result
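
Per the docstring's own example, a call might look like the sketch below; the model object and its registered namelists are assumed to exist already:

# Read two registered parameters; fail=True turns a missing name
# into an exceptions.KeyError instead of a silent skip.
values = model.readNameList(['RHCRIT', 'VF1'], fail=True, verbose=True)
print values['RHCRIT']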
Example #4
def _setKernelProperty(self, name, value):
  import exceptions
  #logging.info('_setKernelProperty: %s %s',name,str(value))
  if Interface.setPropertyKernel(self.get(),name,str(value)):
    return
  msg = 'Geant4Kernel::SetProperty [Unhandled]: Cannot set Kernel.'+name+' = '+str(value)
  raise exceptions.KeyError(msg)
Example #5
 def __getitem__(self, var):
     """Fake the remote server 'isRunning' option"""
     if var == 'isRunning':
         return self.isRunning()
     elif var == 'frame_number':
         return self.idx
     raise exceptions.KeyError(var)
Example #6
def _set(self, name, value):
  import exceptions
  #logging.info('_set: %s %s',name,str(value))
  a = Interface.toAction(self)
  if Interface.setProperty(a,name,str(value)):
    return
  msg = 'Geant4Action::SetProperty [Unhandled]: Cannot set '+a.name()+'.'+name+' = '+str(value)
  raise exceptions.KeyError(msg)
Example #7
    def question(self, question_id):
        """A getter for a question with a known `_id`."""

        oid = bson.ObjectId(question_id)
        for question in self.questions:
            if question._id == oid:
                return question

        raise exceptions.KeyError("Question not found.")
Example #8
def _getKernelProperty(self, name):
    import exceptions
    #print '_getKernelProperty:',str(type(self)),name
    ret = Interface.getPropertyKernel(self.get(), name)
    if ret.status > 0:
        return ret.data
    elif hasattr(self.get(), name):
        return getattr(self.get(), name)
    elif hasattr(self, name):
        return getattr(self, name)
    msg = 'Geant4Kernel::GetProperty [Unhandled]: Cannot access Kernel.' + name
    raise exceptions.KeyError(msg)
Example #9
def _get(self, name):
  import exceptions, traceback
  #logging.info('_get: %s  %s',str(type(self)),name)
  a = Interface.toAction(self)
  ret = Interface.getProperty(a,name)
  if ret.status > 0:
    return ret.data
  elif hasattr(self.action,name):
    return getattr(self.action,name)
  elif hasattr(a,name):
    return getattr(a,name)
  #elif hasattr(self,name):
  #  return getattr(self,name)
  #traceback.print_stack()
  msg = 'Geant4Action::GetProperty [Unhandled]: Cannot access property '+a.name()+'.'+name
  raise exceptions.KeyError(msg)
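
Examples #4, #6, #8 and #9 all follow the same delegation pattern: ask the C++ Interface layer first, fall back to plain Python attributes, and raise KeyError only when every lookup fails. A self-contained Python 2 sketch of that pattern, with a stand-in FakeInterface instead of the real Interface module:

import exceptions  # Python 2 only; in Python 3 KeyError is a builtin

class FakeInterface(object):
    """Stand-in for the real Interface module in this sketch."""
    _props = {'NumEvents': 10}

    @staticmethod
    def getProperty(name):
        return FakeInterface._props.get(name)

def _get(obj, name):
    # 1) ask the (fake) native layer first
    ret = FakeInterface.getProperty(name)
    if ret is not None:
        return ret
    # 2) fall back to plain Python attributes
    if hasattr(obj, name):
        return getattr(obj, name)
    # 3) nothing handled it: mirror the snippets' unhandled-property error
    raise exceptions.KeyError('Cannot access property ' + name)

print _get(object(), 'NumEvents')  # -> 10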
Example #10
def RePack(output_file, input_files):
    """Write a new data pack to |output_file| based on a list of filenames
  (|input_files|)"""
    resources = {}
    for filename in input_files:
        new_resources = data_pack.ReadDataPack(filename)

        # Make sure we have no dups.
        duplicate_keys = set(new_resources.keys()) & set(resources.keys())
        if duplicate_keys:
            raise exceptions.KeyError("Duplicate keys: " +
                                      str(list(duplicate_keys)))

        resources.update(new_resources)

    data_pack.WriteDataPack(resources, output_file)
Example #11
    def fopen(self, path, lock=LOCK_EX):
        """Open a file in path, handling the locking"""
        self._lock.acquire()

        fd = self.cache.get(path, -1)
        if self.remap(fd, path, lock):
            return self.mm, self.fd

        # Initialize the indexed file, if exclusive lock was required
        # (writing)
        ex = os.path.exists(path) and os.path.exists(path + '.lk')
        if not ex:
            if lock in [LOCK_EX, LOCK_NBEX]:
                self.init(path)
            else:
                self._lock.release()
                raise exceptions.KeyError(
                    'Non-existent FileBuffer for reading: ' + path)

        # Lock must precede mmap
        try:
            locker.lock(path, lock)
        except:
            self._lock.release()
            raise

        self.path = path
        # Open the file descriptor. NOTE: 'flags' is not defined in this
        # snippet; it is assumed to be a module-level constant (e.g. os.O_RDWR).
        self.fd = os.open(path, flags)
        # Create the memory map
        self.mm = mmap.mmap(self.fd, length=0)

        # Manage caching
        if self.cache_len > 0:
            clean_cache(self)
            self.cache[path] = self.fd

        self.info = self._get_info()
        return self.mm, self.fd
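
The "Lock must precede mmap" ordering in Example #11 is the load-bearing detail. A minimal standalone illustration of that ordering using only the standard library (fcntl/mmap, POSIX only; not the snippet's own locker module), assuming data.bin already exists and holds at least 4 bytes:

import fcntl, mmap, os

fd = os.open('data.bin', os.O_RDWR)
fcntl.flock(fd, fcntl.LOCK_EX)   # take the lock first...
mm = mmap.mmap(fd, 0)            # ...then map, so writers cannot race the mapping
try:
    mm[0:4] = 'test'             # Python 2: mmap accepts str slices
finally:
    mm.close()
    fcntl.flock(fd, fcntl.LOCK_UN)
    os.close(fd)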
Example #12
def write_lonlatdata_in_COARDS_format(ncfile, data, metadata, **writeopt):
    # dump data(nlon,nlat) corresponding to data on a positively oriented,
    # uniformly spaced lon-lat grid to open netcdf file ncfile
    # data is transposed on writing, corresponding to COARDS axis requirements (order TZYX)
    # metadata is a dictionary that must contain at least these entries:
    #   lon:          longitude grid  ([degrees_E])
    #   lat:          latitude  grid  ([degrees_N])
    #   [depth:       depth grid ]    ([m], optional)
    #   [time:        time  grid ]    ([sec since offset], optional)
    #       [time_offset: time_offset ]   (datetime like, optional)
    #       [time_unit:   unit ]          (as string, optional)
    #   variable_name: variablename to be used for data
    #   long_name:    long title for variable_name (attribute associated with variable)
    #   units:        units for data (standard conformance not assessed; attribute associated with variable)
    # Global attributes must be in entry global_attributes - all other attributes are assumed to be variable attributes
    # writeopt must contain "data_layout" (permutation of 'tzyx', corresponding to axis in data)
    # and possibly "time_frame_number" to write specific time frames
    # variable_name is declared according to appearance of (lon,lat,depth,time) in metadata in COARDS order
    # (lon,lat,depth,time) also becomes netcdf dimensions with same name
    #
    # COARDS notes: dimensions should appear in the relative order T, then Z, then Y, then X in the CDL
    # -----------------------------------------------------------------------------------------
    # --- mandatory dimension lon ---
    if ncfile.dimensions.has_key('lon'):  # lon dimension exists, check size
        assert ncfile.dimensions['lon'].size == size(metadata["lon"])
    else:
        ncfile.createDimension('lon', size(metadata["lon"]))
    # ---  mandatory dimension lat --
    if ncfile.dimensions.has_key('lat'):  # lat dimension exists, check size
        assert ncfile.dimensions['lat'].size == size(metadata["lat"])
    else:
        ncfile.createDimension('lat', size(metadata["lat"]))
    # ---- optional dimensions time, depth --
    #      for time, signal unlimited dimension by None, if requested

    if ncfile.dimensions.has_key('depth'):  # depth dimension exists, check size
        assert ncfile.dimensions['depth'].size == size(metadata["depth"])
    elif metadata.has_key("depth"):
        ncfile.createDimension('depth', size(metadata["depth"]))

    if metadata.has_key("time") and not ncfile.dimensions.has_key('time'):
        if metadata["time"] is None:
            ncfile.createDimension('time', None)  # unlimited
        else:
            ncfile.createDimension('time',
                                   size(metadata["time"]))  # specific size

    # -------- create axis variables as needed  --------

    if not ncfile.variables.has_key('lon'):  # if it exists, assume OK and do not redefine
        lon = ncfile.createVariable('lon', 'd', ('lon', ))
        lon.long_name = "Uniformly spaced longitudes"
        lon.cartesian_axis = "X"
        lon.units = "degrees_E"
        lon.ipositive = 1
        lon[:] = metadata['lon']
    # ----
    if not ncfile.variables.has_key('lat'):  # if it exists, assume OK and do not redefine
        lat = ncfile.createVariable('lat', 'd', ('lat', ))
        lat.long_name = "Uniformly spaced latitudes"
        lat.cartesian_axis = "Y"
        lat.units = "degrees_N"
        lat.ipositive = 1
        lat[:] = metadata['lat']
    # ----
    if metadata.has_key("depth") and not ncfile.variables.has_key('depth'):
        depth = ncfile.createVariable('depth', 'd', ('depth', ))
        depth.long_name = "depth below sea surface"
        depth.units = "m"
        depth[:] = metadata['depth']
    # ----
    if metadata.has_key("time") and not ncfile.variables.has_key('time'):
        time = ncfile.createVariable('time', 'i', ('time', ))
        if metadata.has_key("time_offset"):
            time_offset = metadata["time_offset"].strftime(
                '%Y-%m-%d %H:%M:%S')  # datetime has strftime method
        else:
            time_offset = "undeclared offset"
        if metadata.has_key("time_unit"):
            tunit = metadata["time_unit"]
        else:
            tunit = "undeclared unit"  # default
        time.long_name = "time (since %s) corresponding to this time frame" % time_offset
        time.units = "%s since %s" % (tunit, time_offset)
        # do not set, if time dimension is unlimited
        if metadata["time"] is not None:
            time[:] = metadata['time']

    # -------- create data variable, if needed  --------

    vardims = ()  # observe COARDS order
    for dim in ('time', 'depth', 'lat', 'lon'):
        if ncfile.dimensions.has_key(dim):
            vardims = vardims + (dim, )

    if not ncfile.variables.has_key(metadata['variable_name']):
        # if it exists, assume OK and do not redefine
        var = ncfile.createVariable(metadata['variable_name'], data.dtype,
                                    vardims)
    else:
        var = ncfile.variables[metadata['variable_name']]  # variable already exists

    # ---- dump data as full array or time frame ----
    #      do not check data rank, to allow a declaration cycle and e.g. setting data to zero
    reorder = reorder_axes_to_COARDS(writeopt["data_layout"])
    if writeopt.has_key("time_frame_number"):
        it = writeopt["time_frame_number"]
        var[it, :] = transpose(data, axes=reorder)
        # store time for this time frame, if it is provided as a single number
        if metadata.has_key("time") and isinstance(metadata['time'],
                                                   numbers.Number):
            ncfile.variables['time'][it] = metadata['time']
    else:
        var[:] = transpose(data, axes=reorder)

    # dump meta data
    # check mandatory metadata - remaining mandatory keys generate other exceptions, if not present
    if not metadata.has_key('units'):
        raise exceptions.KeyError("metadata has no unit entry")
    if not metadata.has_key('long_name'):
        raise exceptions.KeyError("metadata has no long_name entry")
    # ------ global attributes ------
    ncfile.Conventions = "COARDS"
    if metadata.has_key('global_attributes'):
        gattr = metadata['global_attributes']
        for key in gattr.keys():
            setattr(ncfile, key, gattr[key])
    # ------ variable attributes ------
    for key in metadata.keys():
        if key in ('lon', 'lat', 'depth', 'time', 'time_unit', 'time_offset',
                   'variable_name',
                   'global_attributes'):  # these are handled above
            continue
        if key == 'units':
            var.units = metadata['units']
            continue
        if key == 'long_name':
            var.long_name = metadata['long_name']
            continue
        # all other metadata stored as global attributes
        setattr(var, key, metadata[key])
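
A hedged driver for Example #12. The snippet's has_key/.size usage suggests an older Python 2 netCDF binding; netCDF4.Dataset is assumed here for the file object, and reorder_axes_to_COARDS is taken from the same module as the function itself:

import numpy
from netCDF4 import Dataset

nc = Dataset('field.nc', 'w')
data = numpy.zeros((360, 180))  # (nlon, nlat), so data_layout is 'xy'
meta = {'lon': numpy.arange(0.5, 360.5),    # 360 uniformly spaced longitudes
        'lat': numpy.arange(-89.5, 90.5),   # 180 uniformly spaced latitudes
        'variable_name': 'sst',
        'long_name': 'sea surface temperature',
        'units': 'degC'}
write_lonlatdata_in_COARDS_format(nc, data, meta, data_layout='xy')
nc.close()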
Example #13
    def writeNameList(self, verbose=False, fail=False, **params):
        """
        Modify existing namelist files using information generated via genConversion
        Existing files will be copied to .bak
        :param verbose (optional -- default is False). If True provide more information on what is going on.
        :param fail (optional default is False). If True fail if a parameter not found.
        :keyword arguments are parameters and values.
        :return:  ordered dict of parameters and values used.
        """
        if self._readOnly:
            raise exceptions.IOError("Model is read only")

        params_used = collections.OrderedDict()
        files = collections.OrderedDict()  # list of files to be modified.
        for param, value in params.iteritems():  # extract data from the conversion tables, indexed by file
            # Could this code move into genVarToNameList, as it is really a different view of the same data?
            # No: we would need to do this only once we've finished generating namelist translate tables.
            # A potential optimisation: cache this and raise an error if writeNameList is called after genNameList.
            # Search meta functions first.
            if param in self._metaFn:  # got a meta function.
                if verbose:
                    print "Running function %s" % self._metaFn[param].func_name
                metaFnValues = self._metaFn[param](value)  # call the meta param function, which returns a dict
                params_used[param] = metaFnValues  # and update return var
                for conv, v in metaFnValues.iteritems():  # iterate over result of fn
                    if conv.file not in files:
                        files[conv.file] = []  # first time we see this file: start an empty list
                    files[conv.file].append((v, conv))  # append the value & conversion info
            elif param in self._convNameList:  # got it in convNameList?
                for conv in self._convNameList[param]:
                    if conv.file not in files:
                        files[conv.file] = []  # first time we see this file: start an empty list
                    files[conv.file].append((value, conv))  # append the value & conversion
                    params_used[param] = value  # and update return var
            elif fail:
                raise exceptions.KeyError(
                    "Failed to find %s in metaFn or convNameList " % param)
            else:
                pass

        # now have conversion tuples ordered by file so let's process the files
        for file in files.keys():  # iterate over files
            # need to create backup? Only do if no back up exists. This allows generateNameList to be run multiple times
            # doing updates. First time it runs we assume we have a directory ready to be modified.
            filePath = os.path.join(self.dirPath, file)  # full path to namelist file
            # check file exists if not raise exception
            if not os.path.isfile(filePath):
                #raise exceptions.IOError("file %s does not exist"%(filePath))
                continue  # skip this file.
            backup_file = filePath + "_nl.bak"  # and full path to backup fie.
            if not os.path.isfile(backup_file):
                shutil.copyfile(filePath, backup_file)
            # now create the namelist file. Need a temp file
            with tempfile.NamedTemporaryFile(dir=self.dirPath,
                                             delete=False) as tmpNL:
                # Now construct the patch for the namelist file from all conversion tuples.
                nlPatch = collections.OrderedDict()  # the patch itself, keyed by namelist name
                for (value, conv) in files[file]:
                    if conv.namelist not in nlPatch:
                        nlPatch[conv.namelist] = collections.OrderedDict()  # no entry for this namelist yet, so make one
                    if type(value) is np.ndarray:  # convert numpy array to list for writing
                        value = value.tolist()
                    elif isinstance(value, unicode):
                        value = str(value)  # f90nml can't cope with unicode, so convert it to str
                    nlPatch[conv.namelist][conv.var] = copy.copy(value)  # store a copy of the value, not a reference
                    if verbose:
                        print "Setting %s,%s to %s in %s" % (
                            conv.namelist, conv.var, value, filePath)
                try:
                    p = f90nml.patch(filePath, nlPatch, tmpNL.name)  # patch the namelist file
                    tmpNL.close()  # close the temp file once done
                except StopIteration:
                    print "Problem in f90nml for %s writing to %s" % (filePath, tmpNL.name), nlPatch
                    raise

                if verbose: print "Patched %s to %s" % (filePath, tmpNL.name)
                shutil.move(tmpNL.name, filePath)  # and move the patched file back into place

        return params_used
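
A usage sketch for Example #13, reusing the parameter names from Example #3's docstring; the model object, its conversion tables, and the parameter values are all assumed:

# Patch RHCRIT and VF1 in the registered namelist files;
# fail=True raises exceptions.KeyError for unknown parameters.
used = model.writeNameList(verbose=True, fail=True, RHCRIT=0.7, VF1=1.0)
print used  # OrderedDict of what was actually written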
Example #14
 def __getitem__(self, key):
     if key not in self.__dict__:
         raise exceptions.KeyError()
     return self.__dict__[key]
Example #15
 def __normalize_ix(self, ix):
     if ix < 0:
         ix = len(self) + ix
         if ix < 0:
             raise exceptions.KeyError('Not enough items in list')
     return ix
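
What Example #15's normalization does, as a standalone Python 2 sketch (normalize_ix here is a hypothetical free-function version of the method, for a container of length n):

import exceptions

def normalize_ix(n, ix):
    if ix < 0:
        ix = n + ix
        if ix < 0:
            raise exceptions.KeyError('Not enough items in list')
    return ix

print normalize_ix(5, -1)  # -> 4
print normalize_ix(5, 2)   # -> 2 (non-negative indices pass through)
# normalize_ix(5, -6)      # would raise: 5 + (-6) is still negative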
Example #16
 def get(self, key, defval=UNDEFINED):
     if self.has_key(key):
         return self[key]
     if defval is UNDEFINED:
         raise exceptions.KeyError("Key %r is not present." % key)
     return defval
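
Example #16's UNDEFINED sentinel distinguishes "no default supplied" from "the default is None". A self-contained Python 2 sketch of the same trick on a plain dict (names here are illustrative, not the snippet's own class):

import exceptions

UNDEFINED = object()  # unique sentinel no caller can pass by accident

def get(d, key, defval=UNDEFINED):
    if key in d:
        return d[key]
    if defval is UNDEFINED:
        raise exceptions.KeyError("Key %r is not present." % key)
    return defval

print get({'a': 1}, 'a')        # -> 1
print get({'a': 1}, 'b', None)  # -> None: an explicit None default is honoured
# get({'a': 1}, 'b')            # would raise exceptions.KeyError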