Example #1
    def generate_file_name(self):
        dtg = DateTimeGenerator()
        self.__dict__.update(dtg.new_filename(self._name))
        '''
        this sets:
            _unix_timestamp
            _localtime    
            _timestamp
            _timemark
            _datemark
            _uuid
            _filename
            _folder
            _relpath
            _filepath        
        '''

        if self._folder and not os.path.isdir(self._folder):
            os.makedirs(self._folder)
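
The comment block above lists the attributes that end up on the instance: DateTimeGenerator.new_filename() returns a dict keyed by those names, and self.__dict__.update() turns every key into an instance attribute. A minimal, self-contained sketch of that pattern is shown below; MiniTimeGenerator is a hypothetical stand-in (not qkit's real DateTimeGenerator) and the date-based folder layout it produces is an assumption for illustration only.

import os
import time
import uuid


class MiniTimeGenerator(object):
    # hypothetical stand-in for qkit's DateTimeGenerator, only meant to
    # illustrate the __dict__.update() pattern used above
    def new_filename(self, name, basedir='.'):
        localtime = time.localtime()
        datemark = time.strftime('%Y%m%d', localtime)
        timemark = time.strftime('%H%M%S', localtime)
        folder = os.path.join(basedir, datemark)
        filename = '%s_%s.h5' % (timemark, name)
        # every key in this dict becomes an attribute of the caller once it
        # runs self.__dict__.update(...) on the returned mapping
        return {'_unix_timestamp': int(time.mktime(localtime)),
                '_localtime': localtime,
                '_timestamp': time.asctime(localtime),
                '_timemark': timemark,
                '_datemark': datemark,
                '_uuid': uuid.uuid4().hex,
                '_filename': filename,
                '_folder': folder,
                '_relpath': os.path.join(datemark, filename),
                '_filepath': os.path.join(folder, filename)}


class Record(object):
    def __init__(self, name):
        self._name = name
        self.__dict__.update(MiniTimeGenerator().new_filename(self._name))
        if self._folder and not os.path.isdir(self._folder):
            os.makedirs(self._folder)

Record('data') then creates the dated folder on disk and exposes _filepath, _uuid, etc. as plain attributes.
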
Example #2
File: hdf_lib.py  Project: mpfirrmann/qkit
    def __init__(self, *args, **kwargs):
        """
        Creates an empty data set including the file, for which the currently
        set file name generator is used.

        kwargs:
            name (string) : default is 'data'
        """
        
        name = kwargs.pop('name', 'data')
        
        "if path was omitted, a new filepath will be created"        
        path = kwargs.pop('path',None)        
        self._filename_generator = DateTimeGenerator()
        self.generate_file_name(name, filepath = path, **kwargs)
        
        "setup the  file"
        self.hf = H5_file(self._filepath)
        
        self.hf.flush()
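
Going by the kwargs documented in the constructor, creating a data file could look like the sketch below; 'run1' and 'existing.h5' are made-up illustrative values, and Data is the full class shown in Example #3.

d = Data(name='run1')                              # filepath built by DateTimeGenerator
d_fixed = Data(name='run1', path='existing.h5')    # explicit path is reused as-is
d.flush()
d.close()
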
Example #3
File: hdf_lib.py  Project: mpfirrmann/qkit
import os
import time

# DateTimeGenerator, H5_file, hdf_dataset and dataset_view are provided by
# qkit and imported at the top of hdf_lib.py (not shown in this excerpt).


class Data(object):
    """This is a basic HDF5 class adapted to our needs."""

    def __init__(self, *args, **kwargs):
        """
        Creates an empty data set including the file, for which the currently
        set file name generator is used.

        kwargs:
            name (string) : default is 'data'
        """
        
        name = kwargs.pop('name', 'data')
        
        "if path was omitted, a new filepath will be created"        
        path = kwargs.pop('path',None)        
        self._filename_generator = DateTimeGenerator()
        self.generate_file_name(name, filepath = path, **kwargs)
        
        "setup the  file"
        self.hf = H5_file(self._filepath)
        
        self.hf.flush()
        
        
    def generate_file_name(self, name, **kwargs):
        # for now just a copy from the original file
        

        self._name = name

        filepath = kwargs.get('filepath', None)
        if filepath:
            self._filepath = filepath
        else:
            self._localtime = time.localtime()
            self._timestamp = time.asctime(self._localtime)
            self._timemark = time.strftime('%H%M%S', self._localtime)
            self._datemark = time.strftime('%Y%m%d', self._localtime)
            self._filepath = self._filename_generator.new_filename(self)

        self._folder, self._filename = os.path.split(self._filepath)
        if self._folder and not os.path.isdir(self._folder):
            os.makedirs(self._folder)
        

    def __getitem__(self, name):
        return self.hf[name]

    def __setitem__(self, name, val):
        self.hf[name] = val

    def __repr__(self):
        ret = "HDF5Data '%s', filename '%s'" % (self._name, self._filename)
        return ret

    def get_filepath(self):
        return self._filepath

    def get_folder(self):
        return self._folder
    
    def add_comment(self, comment, folder="data"):
        if folder == "data":
            #existing_comment = self.hf.dgrp.attrs.get('comment', None)
            #if existing_comment:
            #    comment = existing_comment + '\n' + comment
            self.hf.dgrp.attrs.create('comment', comment)
        if folder == "analysis":
            #existing_comment = self.hf.agrp.attrs.get('comment', None)
            #if existing_comment:
            #    comment = existing_comment + '\n' + comment
            self.hf.agrp.attrs.create("comment", comment)
            
    def add_coordinate(self, name, unit="", comment="", folder="data", **meta):
        ds = hdf_dataset(self.hf, name, unit=unit, comment=comment, folder=folder)
        return ds

    def add_value_vector(self, name, x=None, unit="", comment="", folder="data", **meta):
        ds = hdf_dataset(self.hf, name, x=x, unit=unit, comment=comment, folder=folder)
        return ds

    def add_value_matrix(self, name, x=None, y=None, unit="", comment="", folder="data", **meta):
        ds = hdf_dataset(self.hf, name, x=x, y=y, unit=unit, comment=comment, folder=folder)
        return ds
 
    def add_value_box(self, name, x=None, y=None, z=None, unit="", comment="", folder="data", **meta):
        # not yet implemented
        #ds = hdf_dataset(self.hf, name, x=x, y=y, z=z, unit=unit, comment=comment, folder=folder)
        pass
    
    def add_view(self, name, x=None, y=None, x_axis=0, y_axis=0, filter=None, comment=""):
        """A view is a way to display x-y plot data.
            x, y are the datasets to display, e.g.
            x = "data0/temperature"
            y = "analysis0/frequency_fit"
            (if "folder/" is omitted, "data0" is assumed)
            x_axis is the slice dimension on multi-dimensional arrays
            y_axis is the slice dimension on multi-dimensional arrays
            filter is a string of regular Python code which accesses
            the x, y datasets and returns arrays of (x, y)
            (Fixme: not yet implemented)
        """
        ds = dataset_view(self.hf, name, x=x, y=y, x_axis=x_axis, y_axis=y_axis, comment=comment)
        return ds
    
    def get_dataset(self, ds_url):
        return hdf_dataset(self.hf, ds_url=ds_url)
        
    def save_finished(self):
        pass
    
    def flush(self):
        self.hf.flush()
    
    def close_file(self):
        self.hf.close_file()

    def close(self):
        # alias for close_file()
        self.hf.close_file()
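
A hedged end-to-end sketch of how the Data class above might be used; the dataset names, units and comments are illustrative assumptions, and the 'data0/...' / 'analysis0/...' URLs follow the convention described in the add_view docstring.

d = Data(name='sweep')
temp = d.add_coordinate('temperature', unit='K', comment='bath temperature')
freq = d.add_value_vector('frequency_fit', x=temp, unit='Hz', folder='analysis')
d.add_comment('first cooldown', folder='data')
d.add_view('f_vs_T', x='data0/temperature', y='analysis0/frequency_fit')
print(d)                    # -> HDF5Data 'sweep', filename '...'
print(d.get_filepath())
d.flush()
d.close()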