Esempio n. 1
0
 def connect(self, filename=settings['filename']):
     """
     Open or initialise an HDF5 file and attach it to this session.

     Relies on PyTables for all the session management.

     :param filename: path of the HDF5 file; created if it does not
         exist.  NOTE(review): the default is evaluated once at import
         time from the module-level ``settings`` dict.
     """
     if not self.connected:
         try:
             # Append mode: keep any existing content.
             if tb.isHDF5File(filename):
                 self._data = tb.openFile(filename,
                                          mode="a",
                                          title=filename)
                 self.connected = True
             else:
                 raise TypeError('"%s" is not an HDF5 file format.' %
                                 filename)
         except IOError:
             # create a new file if specified file not found
             self._data = tb.openFile(filename, mode="w", title=filename)
             self.connected = True
         except:
             # NOTE(review): bare except also catches the TypeError
             # raised above and re-raises it as NameError.
             raise NameError(
                 "Incorrect file path, couldn't find or create a file.")
         # Per-session lookup caches (reference -> object, name index).
         self.objects_by_ref = {}
         self.name_indices = {}
     else:
         logger.info("Already connected.")
Esempio n. 2
0
    def OpenDatabase(self, DatabaseName, PRINT=0):
        """

        Open a specific database.

        :param DatabaseName: name of the database to open
        :type DatabaseName: str
        :param PRINT: select whether to print message or not. \n
                        0 = Yes \n
                        1 = No \n
        :type PRINT: int

        """

        # Reset the handle; stays [] if the file is not a valid HDF5 file.
        self.file = []
        try:
            test = tables.isHDF5File(DatabaseName)

            if test:
                # Append mode: existing content is preserved.
                self.file = tables.openFile(DatabaseName, mode="a")
                if PRINT == 0:
                    print '{0} database opened'.format(DatabaseName)

        except IOError:

            # isHDF5File raises IOError when the file cannot be read.
            print '{0} does not exist'.format(DatabaseName)
            raise DatabaseError('{0} database does not exist'.format(
                            DatabaseName))
Esempio n. 3
0
def read_dsg_vars_hdf5( filename ):
    """Read design-variable data from an HDF5 file (legacy PyTables API)."""
    if not pt.isHDF5File( filename ):
        raise ValueError, 'not a HDF5 file! (%s)' % filename

    fd = pt.openFile( filename, mode = 'r' )
    # Raw datasets: index array, control point coordinates, null space.
    aux1 = fd.getNode( '/dsgvar/inx' ).read()
    aux2 = fd.getNode( '/dsgvar/val' ).read()
    aux3 = fd.getNode( '/dsgvar/nsbdsg' ).read()
    dsg_vars = DesignVariables( indx = nm.asarray( aux1, dtype = nm.int32 ),
                               cxyz =  nm.asarray( aux2 ),
                               null_space_b = nm.asarray( aux3 ) )
    dsg_vars.indx = dsg_vars.indx.transpose()
    # Convert second index column from 1-based to 0-based.
    dsg_vars.indx[:,1] -= 1

    # No. of design variables.
    dsg_vars.n_dsg = dsg_vars.null_space_b.shape[1]
    # No. of control points.
    dsg_vars.n_cp = dsg_vars.indx.shape[0]
    # Design vector and initial design vector. 
    dsg_vars.val0 = nm.zeros( (dsg_vars.n_dsg,), dtype = nm.float64 )
    dsg_vars.val = dsg_vars.val0.copy()
    
    fd.close()

    return dsg_vars
Esempio n. 4
0
 def connect(self, filename):
     """
     Open or initialise an HDF5 file for this session.

     PyTables handles all the session management.

     :param filename: path of the HDF5 file; created if missing.
     """
     if not self.connected:
         try:
             # Append mode: keep any existing content.
             if tb.isHDF5File(filename):
                 self._data = tb.openFile(filename, mode='a',
                                          title=filename)
                 self.connected = True
             else:
                 raise TypeError('"%s" is not an HDF5 file format.' %
                                 filename)
         except IOError:
             # create a new file if specified file not found
             self._data = tb.openFile(filename, mode='w', title=filename)
             self.connected = True
         except:
             # NOTE(review): bare except also converts the TypeError
             # above into NameError.
             raise NameError("Incorrect file path, couldn't find or "
                             "create a file.")
         # Per-session lookup caches.
         self.objects_by_ref = {}
         self.name_indices = {}
     else:
         logger.info('Already connected.')
Esempio n. 5
0
def read_dsg_vars_hdf5(filename):
    """Read design-variable data from an HDF5 file (legacy PyTables API)."""
    if not pt.isHDF5File(filename):
        raise ValueError, 'not a HDF5 file! (%s)' % filename

    fd = pt.openFile(filename, mode='r')
    # Raw datasets: index array, control point coordinates, null space.
    aux1 = fd.getNode('/dsgvar/inx').read()
    aux2 = fd.getNode('/dsgvar/val').read()
    aux3 = fd.getNode('/dsgvar/nsbdsg').read()
    dsg_vars = DesignVariables(indx=nm.asarray(aux1, dtype=nm.int32),
                               cxyz=nm.asarray(aux2),
                               null_space_b=nm.asarray(aux3))
    dsg_vars.indx = dsg_vars.indx.transpose()
    # Convert second index column from 1-based to 0-based.
    dsg_vars.indx[:, 1] -= 1

    # No. of design variables.
    dsg_vars.n_dsg = dsg_vars.null_space_b.shape[1]
    # No. of control points.
    dsg_vars.n_cp = dsg_vars.indx.shape[0]
    # Design vector and initial design vector.
    dsg_vars.val0 = nm.zeros((dsg_vars.n_dsg, ), dtype=nm.float64)
    dsg_vars.val = dsg_vars.val0.copy()

    fd.close()

    return dsg_vars
Esempio n. 6
0
    def openWorld(self):
        '''Open existing world project: let the user pick an HDF5 file,
        restore settings and numpy datasets from it, then refresh views.'''
        fileLocation, _ = QtGui.QFileDialog.getOpenFileName(self, 'Open file')
        if not fileLocation:
            self.statusBar().showMessage('Canceled open world.')
            return

        # isHDF5File: negative -> unreadable/missing, 0 -> not HDF5.
        if tables.isHDF5File(fileLocation) < 0 :
            self.statusBar().showMessage(fileLocation + ' does not exist')
            return
        elif tables.isHDF5File(fileLocation) == 0 :
            self.statusBar().showMessage(fileLocation + ' is not valid')
            return

        h5file = tables.openFile(fileLocation, mode='r')

        # restore our world settings (keys are bytes in the stored table)
        settings = dict(h5file.root.settings.read()) 
        self.mapSize = (int(settings[b'width']), int(settings[b'height']))
        self.algorithm=settings[b'algorithm']
        self.roughness=settings[b'roughness']
        self.hemisphere=settings[b'hemisphere']
        self.avgLandmass=settings[b'avgLandmass']
        self.avgElevation=settings[b'avgElevation']
        self.hasMountains=settings[b'hasMountains']
        self.isIsland=settings[b'isIsland']
        self.seaLevel=settings[b'seaLevel']

        #TODO: apply to edit screen

        # restore our numpy datasets
        self.resetDatasets()
        for array in h5file.walkNodes("/", "Array"):
            # NOTE(review): exec with the node name as attribute name --
            # trusts the file contents; setattr would be safer.
            exec('self.' + array.name + '= array.read()')

        # print h5file
        # print dict(h5file.root.settings.read())

        h5file.close()
        self.fileLocation = fileLocation
        del h5file

        #print self.elevation[0]

        self.updateWorld()
        self.statusBar().showMessage('Imported world.')
        self.viewHeightMap()
Esempio n. 7
0
    def openWorld(self):
        '''Open existing world project: pick an HDF5 file, restore
        settings and numpy datasets, then refresh the views.'''
        fileLocation, _ = QtGui.QFileDialog.getOpenFileName(self, 'Open file')
        if not fileLocation:
            self.statusBar().showMessage('Canceled open world.')
            return

        # isHDF5File: negative -> unreadable/missing, 0 -> not HDF5.
        if tables.isHDF5File(fileLocation) < 0:
            self.statusBar().showMessage(fileLocation + ' does not exist')
            return
        elif tables.isHDF5File(fileLocation) == 0:
            self.statusBar().showMessage(fileLocation + ' is not valid')
            return

        h5file = tables.openFile(fileLocation, mode='r')

        # restore our world settings (keys are bytes in the stored table)
        settings = dict(h5file.root.settings.read())
        self.mapSize = (int(settings[b'width']), int(settings[b'height']))
        self.algorithm = settings[b'algorithm']
        self.roughness = settings[b'roughness']
        self.hemisphere = settings[b'hemisphere']
        self.avgLandmass = settings[b'avgLandmass']
        self.avgElevation = settings[b'avgElevation']
        self.hasMountains = settings[b'hasMountains']
        self.isIsland = settings[b'isIsland']
        self.seaLevel = settings[b'seaLevel']

        #TODO: apply to edit screen

        # restore our numpy datasets
        self.resetDatasets()
        for array in h5file.walkNodes("/", "Array"):
            # NOTE(review): exec with the node name as attribute name --
            # trusts the file contents; setattr would be safer.
            exec('self.' + array.name + '= array.read()')

        # print h5file
        # print dict(h5file.root.settings.read())

        h5file.close()
        self.fileLocation = fileLocation
        del h5file

        #print self.elevation[0]

        self.updateWorld()
        self.statusBar().showMessage('Imported world.')
        self.viewHeightMap()
Esempio n. 8
0
def get_homog_coefs_linear(ts, coor, mode,
                           micro_filename=None, regenerate=False,
                           coefs_filename=None):
    """
    Compute or load homogenized coefficients of a linear micro-problem.

    Parameters
    ----------
    ts : object
        Time stepper (unused here; kept for the caller's signature).
    coor : array
        Quadrature point coordinates; only ``coor.shape[0]`` is used,
        to tile coefficient values in 'qp' mode.
    mode : None or 'qp' or other
        None returns the coefficients as-is, 'qp' tiles array/scalar
        coefficients per quadrature point, anything else yields None.
    micro_filename : str, optional
        Problem description file of the micro-problem.
    regenerate : bool, optional
        Force recomputation even if a valid coefficient file exists.
    coefs_filename : str, optional
        Override for the coefficient HDF5 file name.

    Returns
    -------
    out : dict or None
        Coefficient name -> value mapping, or None for unknown modes.
    """
    oprefix = output.prefix
    output.prefix = 'micro:'

    required, other = get_standard_keywords()
    required.remove('equations')

    conf = ProblemConf.from_file(micro_filename, required, other, verbose=False)
    if coefs_filename is None:
        coefs_filename = conf.options.get('coefs_filename', 'coefs')
        coefs_filename = op.join(conf.options.get('output_dir', '.'),
                                 coefs_filename) + '.h5'

    # Regenerate when the cached file is missing or not a valid HDF5 file.
    if not regenerate:
        regenerate = not (op.exists(coefs_filename)
                          and pt.isHDF5File(coefs_filename))

    if regenerate:
        options = Struct(output_filename_trunk=None)

        app = HomogenizationApp(conf, options, 'micro:')
        coefs = app()
        # The application may return (coefficients, dependencies).
        if isinstance(coefs, tuple):
            coefs = coefs[0]

        coefs.to_file_hdf5(coefs_filename)
    else:
        coefs = Coefficients.from_file_hdf5(coefs_filename)

    out = {}
    if mode is None:
        out.update(coefs.__dict__)

    elif mode == 'qp':
        n_qp = coor.shape[0]
        for key, val in six.iteritems(coefs.__dict__):
            if isinstance(val, (nm.ndarray, nm.float64)):
                out[key] = nm.tile(val, (n_qp, 1, 1))
            elif isinstance(val, dict):
                # One level of nesting: flatten as '<key>_<subkey>'.
                for key2, val2 in six.iteritems(val):
                    if isinstance(val2, (nm.ndarray, nm.float64)):
                        out[key + '_' + key2] = nm.tile(val2, (n_qp, 1, 1))

    else:
        out = None

    output.prefix = oprefix

    return out
Esempio n. 9
0
def get_homog_coefs_linear(ts, coor, mode,
                           micro_filename=None, regenerate=False,
                           coefs_filename=None):
    """Compute or load homogenized coefficients of a linear micro-problem
    (Python 2 variant using dict.iteritems)."""

    oprefix = output.prefix
    output.prefix = 'micro:'

    required, other = get_standard_keywords()
    required.remove( 'equations' )

    conf = ProblemConf.from_file(micro_filename, required, other, verbose=False)
    if coefs_filename is None:
        coefs_filename = conf.options.get('coefs_filename', 'coefs')
        coefs_filename = op.join(conf.options.get('output_dir', '.'),
                                 coefs_filename) + '.h5'

    # Regenerate when the cached file is missing or not valid HDF5.
    if not regenerate:
        if op.exists( coefs_filename ):
            if not pt.isHDF5File( coefs_filename ):
                regenerate = True
        else:
            regenerate = True

    if regenerate:
        options = Struct( output_filename_trunk = None )

        app = HomogenizationApp( conf, options, 'micro:' )
        coefs = app()
        # The application may return (coefficients, dependencies).
        if type(coefs) is tuple:
            coefs = coefs[0]

        coefs.to_file_hdf5( coefs_filename )
    else:
        coefs = Coefficients.from_file_hdf5( coefs_filename )

    out = {}
    if mode == None:
        # Return raw coefficient values.
        for key, val in coefs.__dict__.iteritems():
            out[key] = val

    elif mode == 'qp':
        # Tile values per quadrature point; nested dicts flattened
        # as '<key>_<subkey>'.
        for key, val in coefs.__dict__.iteritems():
            if type( val ) == nm.ndarray or type(val) == nm.float64:
                out[key] = nm.tile( val, (coor.shape[0], 1, 1) )
            elif type(val) == dict:
                for key2, val2 in val.iteritems():
                    if type(val2) == nm.ndarray or type(val2) == nm.float64:
                        out[key+'_'+key2] = \
                                          nm.tile(val2, (coor.shape[0], 1, 1))

    else:
        out = None

    output.prefix = oprefix

    return out
Esempio n. 10
0
 def __getattr__(self, name):
     # Lazily expose the HDF5 files under self.path as attributes: the
     # first access opens '<name>.hdf5' and caches its root node.
     if self.__dict__.has_key(name):
         return self.__dict__[name]
     else:
         # All files in the directory PyTables recognises as HDF5.
         hf_names = filter(lambda n: tb.isHDF5File(os.path.join(self.path, n)), os.listdir(self.path))
         for fname in hf_names:
             if name == os.path.splitext(fname)[0]:
                 # NOTE(review): always opens '<name>.hdf5' even though
                 # the matched file may have another extension -- confirm.
                 hf = tb.openFile(os.path.join(self.path, name+'.hdf5')).root
                 setattr(self, name, hf)
                 return hf
         raise IOError, 'File with base %s not found in directory %s'%(name, self.path)
Esempio n. 11
0
    def checkOpening(self, filepath):
        """
        Check if a database can be open.

        Runs three pre-open sanity checks (not a folder, file exists,
        not already open) and then verifies the HDF5 format.

        :Parameter filepath: the full path of the file
        :returns: True when the file can be opened, False otherwise
        """

        try:
            # Check if file doesn't exist
            if os.path.isdir(filepath):
                error = translate('DBsTreeModel', 
                    'Openning cancelled: {0} is a folder.',
                    'A logger error message').format(filepath)
                raise ValueError

            elif not os.path.isfile(filepath):
                error = translate('DBsTreeModel', 
                    'Opening failed: file {0} cannot be found.',
                    'A logger error message').format(filepath)
                raise ValueError

            # Check if file is already open.
            elif self.getDBDoc(filepath) is not None:
                error = translate('DBsTreeModel', 
                    'Opening cancelled: file {0} already open.',
                    'A logger error message').format(filepath)

                raise ValueError

        except ValueError:
            # ValueError is used only as local control flow; the message
            # was prepared just before each raise.
            print(error)
            return False

        # Check the file format
        try:
            if not tables.isHDF5File(filepath):
                error = translate('DBsTreeModel', \
                    'Opening cancelled: file {0} has not HDF5 format.', 
                    'A logger error message').format(filepath)
                print(error)
                return False
        except Exception:
            # isHDF5File itself failed (unreadable file etc.).
            error = translate('DBsTreeModel', 
                """Opening failed: I cannot find out if file {0} has HDF5 """
                """format.""", 
                'A logger error message').format(filepath)
            print(error)
            return False
        else:
            return True
Esempio n. 12
0
    def checkOpening(self, filepath):
        """
        Check if a database can be open.

        Runs three pre-open sanity checks (not a folder, file exists,
        not already open) and then verifies the HDF5 format.

        :Parameter filepath: the full path of the file
        :returns: True when the file can be opened, False otherwise
        """

        try:
            # Check if file doesn't exist
            if os.path.isdir(filepath):
                error = translate('DBsTreeModel',
                                  'Openning cancelled: {0} is a folder.',
                                  'A logger error message').format(filepath)
                raise ValueError

            elif not os.path.isfile(filepath):
                error = translate('DBsTreeModel',
                                  'Opening failed: file {0} cannot be found.',
                                  'A logger error message').format(filepath)
                raise ValueError

            # Check if file is already open.
            elif self.getDBDoc(filepath) is not None:
                error = translate('DBsTreeModel',
                                  'Opening cancelled: file {0} already open.',
                                  'A logger error message').format(filepath)

                raise ValueError

        except ValueError:
            # ValueError is used only as local control flow; the message
            # was prepared just before each raise.
            print(error)
            return False

        # Check the file format
        try:
            if not tables.isHDF5File(filepath):
                error = translate('DBsTreeModel', \
                    'Opening cancelled: file {0} has not HDF5 format.',
                    'A logger error message').format(filepath)
                print(error)
                return False
        except (tables.NodeError, OSError):
            # isHDF5File itself failed (unreadable file etc.).
            error = translate(
                'DBsTreeModel',
                """Opening failed: I cannot find out if file {0} has HDF5 """
                """format.""", 'A logger error message').format(filepath)
            print(error)
            return False
        else:
            return True
Esempio n. 13
0
    def __init__(self, filename, subtype='', extension=None, cornerDir=None,
                 cornerFileList=None):
        """Open a NASA OMI L2 file and locate its matching corner file.

        NOTE(review): the default cornerDir=None would make
        os.path.isdir(cornerDir) fail -- callers presumably always pass
        a directory; confirm.
        """
        HDFFile.__init__(self, filename, subtype, extension)

        # make sure filename is actually an input file
        if getLongName(filename) != HDFnasaomil2_File.OMIAURANO2_FILE_NAME:
            raise IOError('Attempt to read non-NASA OMI L2 file as such.')

        # start by assuming we aren't going to find anything
        self.pixCorners = None

        # see if the corner directory even exists.  If it doesn't, we obviously can't 
        # find a corner file
        if os.path.isdir(cornerDir):

            # convert the corner files into full pathnames
            # unless we were given null string (signal to search directory)
            if cornerFileList != ['']:
                cornerFileList = [os.path.join(cornerDir, f) for f in cornerFileList]

            # get orbit number of file for matching
            forbitnumber = getOrbitNumber(filename)

            # try using the list

            for f in cornerFileList:
                if f != '' and getLongName(f) == HDFnasaomil2_File.OMIAURANO2_CORNER_FILE_NAME:
                    try:
                        if getOrbitNumber(f) == forbitnumber:
                            self.pixCorners = f
                            break
                    except:
                        # best effort: skip files whose orbit number
                        # cannot be determined
                        pass

            # if necessary, search entire corner file directory
            if self.pixCorners == None:        
                allPossible = [os.path.join(cornerDir, f) for f in os.listdir(cornerDir)]
                for f in allPossible:
                    try:
                        if tables.isHDF5File(f) \
                                and getLongName(f) == HDFnasaomil2_File.OMIAURANO2_CORNER_FILE_NAME \
                                and getOrbitNumber(f) == forbitnumber:
                            self.pixCorners = f
                            break
                    except:
                        # best effort: skip unreadable/non-HDF5 files
                        pass

        if self.pixCorners == None:
            print "No valid corner file found for {0}.".format(filename)
Esempio n. 14
0
def load_file(servername, arrayserverprefix, filepath, config):
    """Dispatch config generation on file type: HDF5 (pandas or plain),
    numpy .npy/.npz, otherwise CSV.  Failures are logged, never raised."""
    try:
        if tables.isHDF5File(filepath):
            if is_pandas_hdf5(filepath):
                generate_config_pandashdf5(
                    servername, arrayserverprefix, filepath, config)
            else:
                generate_config_hdf5(
                    servername, arrayserverprefix, filepath, config)
        elif os.path.splitext(filepath)[-1] in ['.npy', '.npz']:
            generate_config_numpy(servername, arrayserverprefix, filepath, config)
        else:
            generate_config_csv(servername, arrayserverprefix, filepath, config)
        return
    except Exception as e:
        # Deliberate best-effort: log and continue with other files.
        log.exception(e)
Esempio n. 15
0
def generate_config_hdf5(servername, arrayserverprefix, datapath, config):
    """Register every Array/Table node of an HDF5 file as a dataset under
    arrayserverprefix + node path.  Returns None if datapath is not HDF5.

    NOTE(review): the opened file handle is never closed -- confirm the
    caller's lifetime expectations.
    """
    assert arrayserverprefix.startswith('/') and not arrayserverprefix.endswith('/')
    if tables.isHDF5File(datapath):
        f = tables.openFile(datapath)
    else:
        return None
    for node in f.walkNodes("/"):
        if isinstance(node, (tables.array.Array, tables.table.Table)):
            arrayserverurl = arrayserverprefix
            # The root path maps to the bare prefix.
            if node._v_pathname != "/": arrayserverurl += node._v_pathname
            obj = config.array_obj([config.source_obj(
                servername,
                'hdf5',
                serverpath=datapath,
                localpath=node._v_pathname)])
            config.create_dataset(arrayserverurl, obj)
Esempio n. 16
0
def read_spline_box_hdf5(filename):
    """Read spline-box definitions from an HDF5 file (legacy PyTables API)."""
    if not pt.isHDF5File(filename):
        raise ValueError, 'not a HDF5 file! (%s)' % filename

    fd = pt.openFile(filename, mode='r')
    boxes = fd.listNodes('/box')
    n_box = len(boxes)
    # Spatial dimension = number of axis nodes of the first box.
    dim = len(fd.listNodes(boxes[0].ax))

    sp_boxes = SplineBoxes(dim=dim,
                           n_box=n_box,
                           n_vertex=0,
                           spbs=OneTypeList(SplineBox))
    for box in boxes:
        spb = SplineBox()
        sp_boxes.spbs.append(spb)

        spb.ib = int(box._v_name)
        # Stored indices are 1-based; convert to 0-based.
        spb.cpi = nm.asarray(box.cpi.read()) - 1
        spb.gpi = nm.asarray(box.gpi.read()) - 1
        spb.cxyz = nm.asarray(box.cxyz.read()).transpose()
        spb.cxyz0 = spb.cxyz.copy()
        spb.ax = []
        for axi in fd.listNodes(box.ax):
            spb.ax.append(nm.asarray(axi.bsc.read()))

        sp_boxes.n_vertex = max(sp_boxes.n_vertex, nm.amax(spb.gpi) + 1)
        print nm.amin(spb.gpi), nm.amax(spb.gpi)

        ##
        # Fix cpi by rebuilding :).
        off = 0
        n0, n1, n2 = spb.cpi.shape
        aux = nm.arange(n0 * n1).reshape(n1, n0).transpose()
        for ii in xrange(n2):
            spb.cpi[:, :, ii] = aux + off
            off += n0 * n1

    fd.close()

    # Sanity check: boxes must not share global point indices.
    # NOTE(review): `cycle` is presumably a project utility yielding
    # index tuples (itertools.cycle would make `perm` an int) -- confirm.
    for perm in cycle([n_box] * 2):
        if perm[0] == perm[1]: continue
        gpi1 = sp_boxes.spbs[perm[0]].gpi
        gpi2 = sp_boxes.spbs[perm[1]].gpi
        assert_(len(nm.intersect1d(gpi1, gpi2)) == 0)

    return sp_boxes
Esempio n. 17
0
def read_spline_box_hdf5( filename ):
    """Read spline-box definitions from an HDF5 file (legacy PyTables API)."""
    if not pt.isHDF5File( filename ):
        raise ValueError, 'not a HDF5 file! (%s)' % filename

    fd = pt.openFile( filename, mode = 'r' )
    boxes = fd.listNodes( '/box' )
    n_box = len( boxes )
    # Spatial dimension = number of axis nodes of the first box.
    dim = len( fd.listNodes( boxes[0].ax ) )

    sp_boxes = SplineBoxes( dim = dim, n_box = n_box, n_vertex = 0,
                           spbs = OneTypeList( SplineBox ) )
    for box in boxes:
        spb = SplineBox()
        sp_boxes.spbs.append( spb )

        spb.ib = int( box._v_name )
        # Stored indices are 1-based; convert to 0-based.
        spb.cpi = nm.asarray( box.cpi.read() ) - 1
        spb.gpi = nm.asarray( box.gpi.read() ) - 1
        spb.cxyz = nm.asarray( box.cxyz.read() ).transpose()
        spb.cxyz0 = spb.cxyz.copy()
        spb.ax = []
        for axi in fd.listNodes( box.ax ):
            spb.ax.append( nm.asarray( axi.bsc.read() ) )

        sp_boxes.n_vertex = max( sp_boxes.n_vertex, nm.amax( spb.gpi ) + 1 )
        print nm.amin( spb.gpi ), nm.amax( spb.gpi )

        ##
        # Fix cpi by rebuilding :).
        off = 0
        n0, n1, n2 = spb.cpi.shape
        aux = nm.arange( n0 * n1 ).reshape( n1, n0 ).transpose()
        for ii in xrange( n2 ):
            spb.cpi[:,:,ii] = aux + off
            off += n0 * n1

    fd.close()

    # Sanity check: boxes must not share global point indices.
    # NOTE(review): `cycle` is presumably a project utility yielding
    # index tuples (itertools.cycle would make `perm` an int) -- confirm.
    for perm in cycle( [n_box] * 2 ):
        if perm[0] == perm[1]: continue
        gpi1 = sp_boxes.spbs[perm[0]].gpi
        gpi2 = sp_boxes.spbs[perm[1]].gpi
        assert_( len( nm.intersect1d( gpi1, gpi2 ) ) == 0 )

    return sp_boxes
Esempio n. 18
0
def load_file(servername, arrayserverprefix, filepath, config):
    """Dispatch config generation on file type: HDF5 (pandas or plain),
    numpy .npy/.npz, otherwise CSV.  Failures are logged, never raised."""
    try:
        if tables.isHDF5File(filepath):
            if is_pandas_hdf5(filepath):
                generate_config_pandashdf5(servername, arrayserverprefix,
                                           filepath, config)
            else:
                generate_config_hdf5(servername, arrayserverprefix, filepath,
                                     config)
        elif os.path.splitext(filepath)[-1] in ['.npy', '.npz']:
            generate_config_numpy(servername, arrayserverprefix, filepath,
                                  config)
        else:
            generate_config_csv(servername, arrayserverprefix, filepath,
                                config)
        return
    except Exception as e:
        # Deliberate best-effort: log and continue with other files.
        log.exception(e)
Esempio n. 19
0
def _open_hdf_file(filename, mode='r'):
    '''Open an HDF file after validating its format.

    mode has several options
    ------------------------
    'r'  Read-only; no data can be modified.
    'w'  Write; a new file is created (an existing file with the same name would be deleted).
    'a'  Append; an existing file is opened for reading and writing, and if the file does not exist it is created.
    'r+' It is similar to 'a', but the file must already exist.

    :returns: :class:`tables.File` object
    '''
    # Validation only makes sense when an existing file will be read;
    # 'w' truncates, so its target need not be checked.
    must_validate = mode != 'w' and os.path.exists(filename)
    if must_validate:
        checks = ((tables.isHDF5File, 'file %s is not an HDF5 file'),
                  (tables.isPyTablesFile, 'file %s is not a PyTables file'))
        for predicate, message in checks:
            if not predicate(filename):
                raise IOError(message % filename)
    return tables.openFile(filename, mode)
Esempio n. 20
0
def generate_config_hdf5(servername, arrayserverprefix, datapath, config):
    """Register every Array/Table node of an HDF5 file as a dataset under
    arrayserverprefix + node path.  Returns None if datapath is not HDF5.

    NOTE(review): the opened file handle is never closed -- confirm the
    caller's lifetime expectations.
    """
    assert arrayserverprefix.startswith(
        '/') and not arrayserverprefix.endswith('/')
    if tables.isHDF5File(datapath):
        f = tables.openFile(datapath)
    else:
        return None
    for node in f.walkNodes("/"):
        if isinstance(node, (tables.array.Array, tables.table.Table)):
            arrayserverurl = arrayserverprefix
            # The root path maps to the bare prefix.
            if node._v_pathname != "/": arrayserverurl += node._v_pathname
            obj = config.array_obj([
                config.source_obj(servername,
                                  'hdf5',
                                  serverpath=datapath,
                                  localpath=node._v_pathname)
            ])
            config.create_dataset(arrayserverurl, obj)
Esempio n. 21
0
def get_homog_coefs_linear( ts, coor, mode, region, ig,
                            micro_filename = None, regenerate = False ):
    """Compute or load homogenized coefficients of a linear micro-problem
    (legacy Python 2 variant; region and ig are unused here)."""

    oprefix = output.prefix
    output.prefix = 'micro:'

    required, other = get_standard_keywords()
    required.remove( 'equations' )

    conf = ProblemConf.from_file(micro_filename, required, other, verbose=False)

    coefs_filename = conf.options.get_default_attr('coefs_filename', 'coefs.h5')

    # Regenerate when the cached file is missing or not valid HDF5.
    if not regenerate:
        if op.exists( coefs_filename ):
            if not pt.isHDF5File( coefs_filename ):
                regenerate = True
        else:
            regenerate = True

    if regenerate:
        options = Struct( output_filename_trunk = None )

        app = HomogenizationApp( conf, options, 'micro:' )
        coefs = app()

        coefs.to_file_hdf5( coefs_filename )
    else:
        coefs = Coefficients.from_file_hdf5( coefs_filename )

    out = {}
    if mode == None:
        # Return raw coefficient values.
        for key, val in coefs.__dict__.iteritems():
            out[key] = val 
    elif mode == 'qp':
        # Tile array coefficients per quadrature point.
        for key, val in coefs.__dict__.iteritems():
            if type( val ) == nm.ndarray:
                out[key] = nm.tile( val, (coor.shape[0], 1, 1) )
    else:
        out = None

    output.prefix = oprefix

    return out
Esempio n. 22
0
def valid_file(fn):
    """
    Tests whether ``fn`` is a valid HDF file.

    Parameters
    -----------
    fn : string
        Pathname to snapshot file.

    Returns
    -------
    is_valid : boolean
        ``True`` if ``fn`` is a valid HDF file that can be read by the
        Py-Tables package.

    """
    # Delegate to PyTables; chain further conditions here with `and`
    # if other validity requirements ever appear.
    return pyT.isHDF5File(fn)
Esempio n. 23
0
File: hdf5io.py Project: tkf/neo
 def connect(self, filename=settings['filename']):
     """
     Open or initialise an HDF5 file and attach it to this session.

     Relies on PyTables for all the session management.

     NOTE(review): the default filename is read once, at import time,
     from the module-level ``settings`` dict.
     """
     if not self.connected:
         try:
             # Append mode: keep any existing content.
             if tb.isHDF5File(filename):
                 self._data = tb.openFile(filename, mode = "a", title = filename)
                 self.connected = True
             else:
                 raise TypeError("The file specified is not an HDF5 file format.")
         except IOError:
             # create a new file if specified file not found
             self._data = tb.openFile(filename, mode = "w", title = filename)
             self.connected = True
         except:
             # NOTE(review): bare except also converts the TypeError
             # above into NameError.
             raise NameError("Incorrect file path, couldn't find or create a file.")
     else:
         logging.info("Already connected.")
Esempio n. 24
0
def test_hdf_type(filename):
        """ This is a simple function to return the type of HDF file that is passed to it"""
	# NOTE(review): this block mixes spaces (docstring line) and tabs
	# (body) -- a TabError under Python 3; kept byte-identical here.
	filetype = None

	"""check to see if file is an hdf4 file
	returns 1 if HDF4 file
	returns 0 if not an HDF4 file"""
	hdf4flag = HDF4.ishdf(filename)

	if hdf4flag == 1:
		filetype = 'HDF4'

	
	#check to see if file is hdf5 (also support hdf5-eos)
	# returns >0 if True
	# returns 0 if False
	hdf5flag = HDF5.isHDF5File(filename)
	if hdf5flag > 0:
		filetype = 'HDF5'

	# None when the file is neither HDF4 nor HDF5.
	return filetype
Esempio n. 25
0
def getData(url):
    """
    Get tabular data from either a text file or HDF5 table, and return it as a
    numpy array.
    """
    path = getFromURL(url)
    if use_hdf5 and tables.isHDF5File(path):
        h5file = tables.openFile(path, mode="r")
        # Exactly one Table node is expected under the root.
        assert len(h5file.listNodes(h5file.root, classname="Table")) == 1, "File contains more than one table."
        table = h5file.listNodes(h5file.root, classname="Table")[0]
        arr = numpy.zeros((table.nrows, len(table.colnames)), "float")
        for i, row in enumerate(table):
            for j, colname in enumerate(table.colnames):
                arr[i, j] = row[colname]
    elif os.path.isfile(path):
        # Plain text: whitespace-separated columns, '#' comment lines.
        f = srblib.urlopen(url)
        arr = numpy.array([l.split() for l in f.readlines() if l[0] != "#"]).astype("float")
        f.close()
    else:
        raise Exception("Not a valid file.")
    # NOTE(review): the HDF5 branch never closes h5file -- confirm.
    return arr
def check_hdf5file(hdf5name):   
    """Check the validity of the current hdf5 file."""
    loghdf.info("Checking file %s..." % hdf5name)
    # Check that the file exists and is valid ...
    try:
        if not tables.isHDF5File(hdf5name):
            raise HDF5ExtError("The file '%s' is not a valid pytables!" % hdf5name) 
    except IOError:
        raise IOError("The file '%s' does not exist!" % hdf5name)
    hdf5file = tables.openFile(hdf5name, 'a')
    # Check the groups in order..................
    for gtype in hdf5file.listNodes("/"):
        gname = gtype._v_hdf5name
        if gname == 'values':
            for sgtype in hdf5file.listNodes("/%s" % gname):
                if sgtype._v_hdf5name not in ['modified', 'standard', 'noenso']:
                    raise NameError, "Unrecognized branch:%s" % sgtype._v_hdf5name
                if sgtype._v_hdf5name != 'noenso':
                    for phase in ('neutral', 'cold', 'warm', 'noenso'):
                        sgtype._g_checkHasChild(phase)
            loghdf.info("Group %s OK" % gname)
        elif gname in ('differences', 'apxpvalues'):
            for sgtype in hdf5file.listNodes("/%s" % gname):
                if sgtype._v_hdf5name not in ['modified', 'standard', 'noenso']:
                    raise NameError, "Unrecognized branch:%s" % sgtype._v_hdf5name
                # NOTE(review): the two comparisons below compare the
                # *node object* to strings (unlike the _v_hdf5name test
                # above), so they look always-False -- confirm intent.
                if sgtype in ['modified', 'standard']:
                    for phase in ('CN', 'WN', 'CW', 'CG', 'NG', 'WG'):
                        sgtype._g_checkHasChild(phase)
                elif sgtype == 'noenso':
                    for phase in ('cold', 'neutral', 'warm'):
                        sgtype._g_checkHasChild(phase)                    
            loghdf.info("Group %s OK" % gname)
        elif gname == 'columns':
            loghdf.info("Group %s OK" % gname)
        elif gname == 'stations':
            loghdf.info("Group %s OK" % gname)
        else:
            raise HDF5ExtError("Unrecognized node '%s'" % gname)
    # Return the open (append-mode) file handle to the caller.
    return hdf5file
Esempio n. 27
0
def getData(url):
    """
    Get tabular data from either a text file or HDF5 table, and return it as a
    numpy array.

    The file is fetched via getFromURL; if PyTables is enabled and the file
    is HDF5, it must contain exactly one Table node. Otherwise the file is
    parsed as whitespace-separated text, skipping blank lines and '#'
    comment lines.
    """
    path = getFromURL(url)
    if use_hdf5 and tables.isHDF5File(path):
        h5file = tables.openFile(path, mode='r')
        try:
            # Hoist the listNodes call (the original performed it twice).
            node_list = h5file.listNodes(h5file.root, classname="Table")
            assert len(node_list) == 1, "File contains more than one table."
            table = node_list[0]
            arr = numpy.zeros((table.nrows, len(table.colnames)), 'float')
            for i, row in enumerate(table):
                for j, colname in enumerate(table.colnames):
                    arr[i, j] = row[colname]
        finally:
            # BUG FIX: the original leaked the open HDF5 file handle.
            h5file.close()
    elif os.path.isfile(path):
        f = srblib.urlopen(url)
        try:
            # BUG FIX: skip blank lines too -- they would produce empty rows
            # (ragged array) and crash astype, and l[0] would raise on ''.
            arr = numpy.array([l.split() for l in f.readlines()
                               if l.strip() and l[0] != '#']).astype('float')
        finally:
            f.close()
    else:
        raise Exception("Not a valid file.")
    return arr
Esempio n. 28
0
 def __init__(self, filename, subtype='', extension=None):
     """Initialise the base GeoFile and verify the file really is HDF5."""
     GeoFile.__init__(self, filename, subtype=subtype, extension=extension)
     # Guard clause instead of the original if/pass/else: reject anything
     # that is not a genuine HDF5 file.
     if not tables.isHDF5File(self.name):
         raise IOError('Attempt to read non-HDF 5 file as HDF 5.')
Esempio n. 29
0
def isopacplot(filename, format=None):
    """Determines if a file is in of opacplot format."""
    # Guard clauses preserve the original expression's short-circuit order:
    # the format is checked first, then the filename type.
    if format not in (None, "opp"):
        return False
    if not isinstance(filename, basestring):
        return False
    # An existing path must be a regular HDF5 file; a non-existent path is
    # accepted (it may be created later).
    if os.path.exists(filename):
        return os.path.isfile(filename) and tb.isHDF5File(filename)
    return True
Esempio n. 30
0
#       print cmd_opt_dict.keys()
#       print opts_from_file_dict.keys()
        if not an_opt in exclude_from_override:
            print "(INFO) Overriding %s with %s"%(an_opt, opts_from_file_dict[an_opt])
            options.__dict__[an_opt] = opts_from_file_dict[an_opt]
            
            
    
    

print "\n(INFO) Reading in file %s ...\n"%(options.infilename)
attributes = {}
tau = 0
num_max = 0
if HAS_TABLES:
    if tables.isHDF5File(options.infilename):
        NOT_HDF=False
        h = tables.openFile(options.infilename)
        table_list = [f for f in h.walkGroups(h.root.data_pool) if f._v_children.has_key('accu_data')]
        print "Found following accu_data objects:\n\n"
        for i,tb in enumerate(table_list):
            print "\tNumber:",i, tb
            for key in tb._v_attrs._v_attrnamesuser:
                val = tb._f_getAttr(key)
                print "\t\t",key, '\t',val
                if key.endswith('tau'):
                    if float(tb._f_getAttr(key)) > float(tau):
                        #print "*** Was",tau
                        tau = val
                        #print "*** Now",tau
Esempio n. 31
0
 def list(self):
     """Return the extension-less base names of all HDF5 files in self.path."""
     basenames = []
     for entry in os.listdir(self.path):
         # Keep only entries that PyTables recognizes as HDF5 files.
         if tb.isHDF5File(os.path.join(self.path, entry)):
             basenames.append(os.path.splitext(entry)[0])
     return basenames
Esempio n. 32
0
def transform_tensor(input_view, target_view, transformed_view, R, t, suffix='.tif', convert_to_gray=True):
    """
    Method that applies a previously estimated rigid transformation to an input tensor

    Parameters
    ----------

    input_view : str
        directory or hdf5 file of the input view
    target_view : str
        directory or hdf5 file of the target view
    transformed_view : str
        directory or hdf5 file of the transformed input view
    R : numpy array of shape (dim_points, 3)
        rotational component of the estimated rigid transformation
    t : numpy array of shape (3)
        translational component of the estimated rigid transformation
    suffix : str
        suffix of the slice images (Default: '.tif')
    convert_to_gray: boolean
        if True the slices are read and saved in greyscale (Default: True)

    """

    # --- Determine the output (target) volume dimensions ---
    if (os.path.isdir(target_view)):
        filesTarget = sorted([target_view + '/' + f for f in os.listdir(target_view) if f[0] != '.' and f.endswith(suffix)])
        img_z = np.asarray(Image.open(filesTarget[0]))
        height_target, width_target = img_z.shape
        depth_target = len(filesTarget)
    elif (tables.isHDF5File(target_view)):
        hf5_target_view = tables.openFile(target_view, 'r')
        depth_target, height_target, width_target = hf5_target_view.root.full_image.shape
        hf5_target_view.close()
    else:
        # BUG FIX: the original interpolated input_view here although this
        # branch reports a failure for target_view.
        raise Exception('%s is neither a hdf5 file nor a valid directory'%target_view)

    # --- Load the full input volume into memory ---
    if (os.path.isdir(input_view)):
        filesInput = sorted([input_view + '/' + f for f in os.listdir(input_view) if f[0] != '.' and f.endswith(suffix)])
        img_z = np.asarray(Image.open(filesInput[0]))
        height_input, width_input = img_z.shape
        depth_input = len(filesInput)
        pixels_input = np.empty(shape=(depth_input, height_input, width_input), dtype=np.uint8)
        for z, image_file in enumerate(filesInput):
            img_z = Image.open(image_file)
            if convert_to_gray:
                img_z = img_z.convert('L')
            pixels_input[z, :, :] = np.asarray(img_z)
    elif (tables.isHDF5File(input_view)):
        hf5_input_view = tables.openFile(input_view, 'r')
        depth_input, height_input, width_input = hf5_input_view.root.full_image.shape
        pixels_input = hf5_input_view.root.full_image[0:depth_input,0:height_input,0:width_input]
        hf5_input_view.close()
    else:
        raise Exception('%s is neither a hdf5 file nor a valid directory'%input_view)

    # BUG FIX: use zeros rather than empty so voxels that map outside the
    # input volume come out black instead of uninitialized memory.
    pixels_transformed_input = np.zeros((depth_target, height_target, width_target), dtype=np.uint8)

    # --- Prepare the output sink: HDF5 CArray or a (cleared) slice directory ---
    h5_output=False
    if (transformed_view.endswith('.h5')):
        target_shape = (depth_target, height_target, width_target)
        atom = tables.UInt8Atom()
        h5f = tables.openFile(transformed_view, 'w')
        ca = h5f.createCArray(h5f.root, 'full_image', atom, target_shape)
        h5_output=True
    else:
        if not os.path.exists(transformed_view):
            os.makedirs(transformed_view)
        else:
            files = glob.glob(transformed_view + '/*')
            for f in files:
                os.remove(f)

    # Inverse mapping: for every target voxel, find the source voxel in the
    # input volume via the inverse of the rigid transform (R, t).
    coords_2d_target = np.vstack(np.indices((width_target,height_target)).swapaxes(0,2).swapaxes(0,1))
    invR = np.linalg.inv(R)
    # The x/y part of invR is applied once here; the z-dependent part is
    # added per slice inside the loop.
    invR_2d_transpose = np.transpose(np.dot(invR[:, 0:2], np.transpose(coords_2d_target - t[0:2])))
    pbar = ProgressBar(maxval=depth_target,widgets=['Rotating %d slices: ' % (depth_target),
                                Percentage(), ' ', ETA()])
    rangez=range(0,depth_target,1)
    for z in pbar(rangez):
        R_t_3d = np.transpose(invR_2d_transpose + invR[:, 2] * (z - t[2]))
        # Keep only coordinates that land strictly inside the input volume.
        good_indices = np.arange(R_t_3d.shape[1])
        good_indices = good_indices[(R_t_3d[0, :] > 0) * (R_t_3d[1, :] > 0) * (R_t_3d[2, :] > 0) * (R_t_3d[0, :] < (width_input - 1)) * (R_t_3d[1, :] < (height_input - 1)) * (R_t_3d[2, :] < (depth_input - 1))]
        R_t_3d = R_t_3d.take(good_indices,axis=1)
        R_t_3d = np.round(R_t_3d).astype(int)  # nearest-neighbour sampling
        coords_2d_target_tmp = coords_2d_target.take(good_indices, axis=0)
        coords_3d_target_tmp = np.hstack((coords_2d_target_tmp, np.ones((coords_2d_target_tmp.shape[0], 1)).astype(int) * z))
        pixels_transformed_input[coords_3d_target_tmp[:, 2], coords_3d_target_tmp[:, 1], coords_3d_target_tmp[:, 0]] = pixels_input[R_t_3d[2, :], R_t_3d[1, :], R_t_3d[0, :]]
        if h5_output:
            ca[z, :, :] = pixels_transformed_input[z,:,:]
        else:
            im = Image.fromarray(np.uint8(pixels_transformed_input[z]))
            im.save(transformed_view + '/slice_' + str(z).zfill(4) + ".tif", 'TIFF')
    if h5_output:
        h5f.close()
Esempio n. 33
0
def isopacplot(filename, format=None):
    """Determines if a file is in of opacplot format."""
    # Evaluate the cheap checks first, keeping the original's lazy
    # evaluation order (isinstance is only reached for a valid format).
    acceptable = (format in [None, "opp"]) and isinstance(filename, basestring)
    if not acceptable:
        return False
    # Either an existing regular HDF5 file, or a path that does not exist yet.
    is_hdf5_file = os.path.isfile(filename) and tb.isHDF5File(filename)
    return is_hdf5_file or not os.path.exists(filename)
Esempio n. 34
0
def transform_tensor(input_view, target_view, transformed_view, R, t, suffix='.tif', convert_to_gray=True):
    """
    Method that applies a previously estimated rigid transformation to an input tensor

    Parameters
    ----------

    input_view : str
        directory or hdf5 file of the input view
    target_view : str
        directory or hdf5 file of the target view
    transformed_view : str
        directory or hdf5 file of the transformed input view
    R : numpy array of shape (dim_points, 3)
        rotational component of the estimated rigid transformation
    t : numpy array of shape (3)
        translational component of the estimated rigid transformation
    suffix : str
        suffix of the slice images (Default: '.tif')
    convert_to_gray: boolean
        if True the slices are read and saved in greyscale (Default: True)

    """

    # --- Determine the output (target) volume dimensions ---
    if (os.path.isdir(target_view)):
        filesTarget = sorted(
            [target_view + '/' + f for f in os.listdir(target_view) if f[0] != '.' and f.endswith(suffix)])
        img_z = np.asarray(Image.open(filesTarget[0]))
        height_target, width_target = img_z.shape
        depth_target = len(filesTarget)
    elif (tables.isHDF5File(target_view)):
        hf5_target_view = tables.openFile(target_view, 'r')
        depth_target, height_target, width_target = hf5_target_view.root.full_image.shape
        hf5_target_view.close()
    else:
        # BUG FIX: the original interpolated input_view here although this
        # branch reports a failure for target_view.
        raise Exception('%s is neither a hdf5 file nor a valid directory' % target_view)

    # --- Load the full input volume into memory ---
    if (os.path.isdir(input_view)):
        filesInput = sorted(
            [input_view + '/' + f for f in os.listdir(input_view) if f[0] != '.' and f.endswith(suffix)])
        img_z = np.asarray(Image.open(filesInput[0]))
        height_input, width_input = img_z.shape
        depth_input = len(filesInput)
        pixels_input = np.empty(shape=(depth_input, height_input, width_input), dtype=np.uint8)
        for z, image_file in enumerate(filesInput):
            img_z = Image.open(image_file)
            if convert_to_gray:
                img_z = img_z.convert('L')
            pixels_input[z, :, :] = np.asarray(img_z)
    elif (tables.isHDF5File(input_view)):
        hf5_input_view = tables.openFile(input_view, 'r')
        depth_input, height_input, width_input = hf5_input_view.root.full_image.shape
        pixels_input = hf5_input_view.root.full_image[0:depth_input, 0:height_input, 0:width_input]
        hf5_input_view.close()
    else:
        raise Exception('%s is neither a hdf5 file nor a valid directory' % input_view)

    # BUG FIX: use zeros rather than empty so voxels that map outside the
    # input volume come out black instead of uninitialized memory.
    pixels_transformed_input = np.zeros((depth_target, height_target, width_target), dtype=np.uint8)

    # --- Prepare the output sink: HDF5 CArray or a (cleared) slice directory ---
    h5_output = False
    if (transformed_view.endswith('.h5')):
        target_shape = (depth_target, height_target, width_target)
        atom = tables.UInt8Atom()
        h5f = tables.openFile(transformed_view, 'w')
        ca = h5f.createCArray(h5f.root, 'full_image', atom, target_shape)
        h5_output = True
    else:
        if not os.path.exists(transformed_view):
            os.makedirs(transformed_view)
        else:
            files = glob.glob(transformed_view + '/*')
            for f in files:
                os.remove(f)

    # Inverse mapping: for every target voxel, find the source voxel in the
    # input volume via the inverse of the rigid transform (R, t).
    coords_2d_target = np.vstack(np.indices((width_target, height_target)).swapaxes(0, 2).swapaxes(0, 1))
    invR = np.linalg.inv(R)
    # The x/y part of invR is applied once here; the z-dependent part is
    # added per slice inside the loop.
    invR_2d_transpose = np.transpose(np.dot(invR[:, 0:2], np.transpose(coords_2d_target - t[0:2])))
    pbar = ProgressBar(maxval=depth_target, widgets=['Rotating %d slices: ' % (depth_target),
                                                     Percentage(), ' ', ETA()])
    rangez = range(0, depth_target, 1)
    for z in pbar(rangez):
        R_t_3d = np.transpose(invR_2d_transpose + invR[:, 2] * (z - t[2]))
        # Keep only coordinates that land strictly inside the input volume.
        good_indices = np.arange(R_t_3d.shape[1])
        good_indices = good_indices[
            (R_t_3d[0, :] > 0) * (R_t_3d[1, :] > 0) * (R_t_3d[2, :] > 0) * (R_t_3d[0, :] < (width_input - 1)) * (
                    R_t_3d[1, :] < (height_input - 1)) * (R_t_3d[2, :] < (depth_input - 1))]
        R_t_3d = R_t_3d.take(good_indices, axis=1)
        R_t_3d = np.round(R_t_3d).astype(int)  # nearest-neighbour sampling
        coords_2d_target_tmp = coords_2d_target.take(good_indices, axis=0)
        coords_3d_target_tmp = np.hstack(
            (coords_2d_target_tmp, np.ones((coords_2d_target_tmp.shape[0], 1)).astype(int) * z))
        pixels_transformed_input[coords_3d_target_tmp[:, 2], coords_3d_target_tmp[:, 1], coords_3d_target_tmp[:, 0]] = \
        pixels_input[R_t_3d[2, :], R_t_3d[1, :], R_t_3d[0, :]]
        if h5_output:
            ca[z, :, :] = pixels_transformed_input[z, :, :]
        else:
            im = Image.fromarray(np.uint8(pixels_transformed_input[z]))
            im.save(transformed_view + '/slice_' + str(z).zfill(4) + ".tif", 'TIFF')
    if h5_output:
        h5f.close()